@ngommans/codefocus 0.1.0

This diff shows the contents of publicly available package versions released to a supported public registry. It is provided for informational purposes only and reflects changes between package versions as they appear in their respective registries.

Potentially problematic release.


This version of @ngommans/codefocus might be problematic; see the linked report for more details.

Files changed (40)
  1. package/README.md +124 -0
  2. package/dist/benchmark-43DOYNYR.js +465 -0
  3. package/dist/benchmark-43DOYNYR.js.map +1 -0
  4. package/dist/chunk-6XH2ZLP6.js +127 -0
  5. package/dist/chunk-6XH2ZLP6.js.map +1 -0
  6. package/dist/chunk-7RYHZOYF.js +27 -0
  7. package/dist/chunk-7RYHZOYF.js.map +1 -0
  8. package/dist/chunk-ITVAEU6K.js +250 -0
  9. package/dist/chunk-ITVAEU6K.js.map +1 -0
  10. package/dist/chunk-Q6DOBQ4F.js +231 -0
  11. package/dist/chunk-Q6DOBQ4F.js.map +1 -0
  12. package/dist/chunk-X7DRJUEX.js +543 -0
  13. package/dist/chunk-X7DRJUEX.js.map +1 -0
  14. package/dist/cli.js +111 -0
  15. package/dist/cli.js.map +1 -0
  16. package/dist/commands-ICBN54MT.js +64 -0
  17. package/dist/commands-ICBN54MT.js.map +1 -0
  18. package/dist/config-OCBWYENF.js +12 -0
  19. package/dist/config-OCBWYENF.js.map +1 -0
  20. package/dist/extended-benchmark-5RUXDG3D.js +323 -0
  21. package/dist/extended-benchmark-5RUXDG3D.js.map +1 -0
  22. package/dist/find-W5UDE4US.js +63 -0
  23. package/dist/find-W5UDE4US.js.map +1 -0
  24. package/dist/graph-DZNBEATA.js +189 -0
  25. package/dist/graph-DZNBEATA.js.map +1 -0
  26. package/dist/map-6WOMDLCP.js +131 -0
  27. package/dist/map-6WOMDLCP.js.map +1 -0
  28. package/dist/mcp-7WYTXIQS.js +354 -0
  29. package/dist/mcp-7WYTXIQS.js.map +1 -0
  30. package/dist/mcp-server.js +369 -0
  31. package/dist/mcp-server.js.map +1 -0
  32. package/dist/query-DJNWYYJD.js +427 -0
  33. package/dist/query-DJNWYYJD.js.map +1 -0
  34. package/dist/query-PS6QVPXP.js +538 -0
  35. package/dist/query-PS6QVPXP.js.map +1 -0
  36. package/dist/root-ODTOXM2J.js +10 -0
  37. package/dist/root-ODTOXM2J.js.map +1 -0
  38. package/dist/watcher-LFBZAM5E.js +73 -0
  39. package/dist/watcher-LFBZAM5E.js.map +1 -0
  40. package/package.json +61 -0
package/dist/chunk-6XH2ZLP6.js
@@ -0,0 +1,127 @@
+ #!/usr/bin/env node
+
+ // src/config.ts
+ import { existsSync, readFileSync } from "fs";
+ import { resolve } from "path";
+ var DEFAULT_SCORING_CONFIG = {
+ scoreFloorRatio: 0.2,
+ elbowDropRatio: 0.6,
+ minMarginalValue: 3e-5,
+ tfBoostFormula: "log2",
+ symbolProximityBoost: 1.5,
+ importEdgeWeight: 0.4,
+ typeRefWeight: 0.2,
+ defaultBudget: 8e3,
+ defaultDepth: 2
+ };
+ var CONFIG_FILENAMES = [
+ ".codefocus/config.json",
+ "codefocus.config.json"
+ ];
+ function loadScoringConfig(rootDir) {
+ for (const filename of CONFIG_FILENAMES) {
+ const configPath = resolve(rootDir, filename);
+ if (!existsSync(configPath)) continue;
+ let raw;
+ try {
+ raw = readFileSync(configPath, "utf-8");
+ } catch (err) {
+ throw new Error(
+ `Cannot read config file ${configPath}: ${err instanceof Error ? err.message : String(err)}`
+ );
+ }
+ let parsed;
+ try {
+ parsed = JSON.parse(raw);
+ } catch {
+ throw new Error(
+ `Invalid JSON in ${configPath}`
+ );
+ }
+ if (parsed && typeof parsed === "object" && "scoring" in parsed) {
+ const scoring = parsed.scoring;
+ if (typeof scoring !== "object" || scoring === null) {
+ throw new Error(
+ `"scoring" in ${configPath} must be an object`
+ );
+ }
+ return mergeConfig(scoring, configPath);
+ }
+ return { ...DEFAULT_SCORING_CONFIG };
+ }
+ return { ...DEFAULT_SCORING_CONFIG };
+ }
+ var PARAM_BOUNDS = {
+ scoreFloorRatio: { min: 0, max: 1, description: "B1 score floor ratio (0\u20131)" },
+ elbowDropRatio: { min: 0, max: 1, description: "B3 elbow drop ratio (0\u20131)" },
+ minMarginalValue: { min: 0, max: 1, description: "B2 min marginal value (0\u20131)" },
+ symbolProximityBoost: { min: 1, max: 10, description: "A2 symbol proximity multiplier (1\u201310)" },
+ importEdgeWeight: { min: 0, max: 1, description: "import edge weight (0\u20131)" },
+ typeRefWeight: { min: 0, max: 1, description: "type_ref edge weight (0\u20131)" },
+ defaultBudget: { min: 1, max: 1e5, integer: true, description: "default token budget (1\u2013100000)" },
+ defaultDepth: { min: 0, max: 10, integer: true, description: "default graph depth (0\u201310)" }
+ };
+ function validateParam(key, value, configPath) {
+ const bound = PARAM_BOUNDS[key];
+ if (!bound) {
+ throw new Error(
+ `Unknown scoring parameter "${key}" in ${configPath}`
+ );
+ }
+ if (typeof value !== "number" || !isFinite(value)) {
+ throw new Error(
+ `Invalid value for "${key}" in ${configPath}: expected a number, got ${typeof value}`
+ );
+ }
+ if (bound.integer && !Number.isInteger(value)) {
+ throw new Error(
+ `Invalid value for "${key}" in ${configPath}: expected an integer, got ${value}
+ ${bound.description}`
+ );
+ }
+ if (value < bound.min || value > bound.max) {
+ throw new Error(
+ `Out-of-bounds value for "${key}" in ${configPath}: ${value}
+ ${bound.description}`
+ );
+ }
+ return value;
+ }
+ function mergeConfig(overrides, configPath) {
+ const config = { ...DEFAULT_SCORING_CONFIG };
+ const knownKeys = /* @__PURE__ */ new Set([...Object.keys(PARAM_BOUNDS), "tfBoostFormula"]);
+ for (const key of Object.keys(overrides)) {
+ if (!knownKeys.has(key)) {
+ throw new Error(
+ `Unknown scoring parameter "${key}" in ${configPath}
+ Valid parameters: ${[...knownKeys].join(", ")}`
+ );
+ }
+ }
+ for (const key of Object.keys(PARAM_BOUNDS)) {
+ if (key in overrides) {
+ const validated = validateParam(key, overrides[key], configPath);
+ config[key] = validated;
+ }
+ }
+ if ("tfBoostFormula" in overrides) {
+ if (overrides.tfBoostFormula !== "log2") {
+ throw new Error(
+ `Invalid value for "tfBoostFormula" in ${configPath}: "${String(overrides.tfBoostFormula)}"
+ Currently only "log2" is supported`
+ );
+ }
+ config.tfBoostFormula = "log2";
+ }
+ return config;
+ }
+ function serializeConfig(config) {
+ return JSON.stringify({ scoring: config }, null, 2);
+ }
+
+ export {
+ DEFAULT_SCORING_CONFIG,
+ loadScoringConfig,
+ serializeConfig
+ };
+ //# sourceMappingURL=chunk-6XH2ZLP6.js.map
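A minimal usage sketch of the config loader above, assuming its exports can be reached (the relative import specifier and file contents below are illustrative, not documented entry points of the package):

// Illustrative sketch only — the import path is an assumption.
import { mkdirSync, writeFileSync } from "node:fs";
import { loadScoringConfig, DEFAULT_SCORING_CONFIG } from "./chunk-6XH2ZLP6.js";

// A project overrides two bounded parameters; unknown keys, non-numeric values,
// or out-of-range values make loadScoringConfig throw with the messages above.
mkdirSync(".codefocus", { recursive: true });
writeFileSync(
  ".codefocus/config.json",
  JSON.stringify({ scoring: { scoreFloorRatio: 0.3, defaultBudget: 12000 } }, null, 2)
);

const config = loadScoringConfig(process.cwd());
console.log(config.scoreFloorRatio);               // 0.3 (override)
console.log(config.elbowDropRatio);                // 0.6 (default carried over)
console.log(DEFAULT_SCORING_CONFIG.defaultBudget); // 8000 (defaults are never mutated)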
package/dist/chunk-6XH2ZLP6.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/config.ts"],"sourcesContent":["import { existsSync, readFileSync } from \"node:fs\";\nimport { resolve } from \"node:path\";\n\n// ── scoring config types ──────────────────────────────────────────────\n\nexport interface ScoringConfig {\n /** B1: Drop sections scoring below this fraction of the top score (0-1) */\n scoreFloorRatio: number;\n /** B3: Relative drop between consecutive scores that signals an elbow (0-1) */\n elbowDropRatio: number;\n /** B2: Minimum marginal value (score / tokens) to include a section */\n minMarginalValue: number;\n /** A1: TF boost formula — \"log2\" uses log2(1 + count) */\n tfBoostFormula: \"log2\";\n /** A2: Multiplier for files containing a symbol whose name matches the term */\n symbolProximityBoost: number;\n /** Graph edge weight for import references */\n importEdgeWeight: number;\n /** Graph edge weight for type_ref references */\n typeRefWeight: number;\n /** Default token budget for query output */\n defaultBudget: number;\n /** Default graph traversal depth */\n defaultDepth: number;\n}\n\n// ── defaults ──────────────────────────────────────────────────────────\n\nexport const DEFAULT_SCORING_CONFIG: Readonly<ScoringConfig> = {\n scoreFloorRatio: 0.20,\n elbowDropRatio: 0.60,\n minMarginalValue: 0.00003,\n tfBoostFormula: \"log2\",\n symbolProximityBoost: 1.5,\n importEdgeWeight: 0.4,\n typeRefWeight: 0.2,\n defaultBudget: 8000,\n defaultDepth: 2,\n};\n\n// ── config file resolution ────────────────────────────────────────────\n\nconst CONFIG_FILENAMES = [\n \".codefocus/config.json\",\n \"codefocus.config.json\",\n] as const;\n\n/**\n * Load scoring config for a project. Looks for config files in:\n * 1. `.codefocus/config.json`\n * 2. `codefocus.config.json`\n *\n * Any keys found in the `scoring` object override the defaults.\n * Throws on invalid JSON, unknown keys, wrong types, or out-of-bounds values.\n */\nexport function loadScoringConfig(rootDir: string): ScoringConfig {\n for (const filename of CONFIG_FILENAMES) {\n const configPath = resolve(rootDir, filename);\n if (!existsSync(configPath)) continue;\n\n let raw: string;\n try {\n raw = readFileSync(configPath, \"utf-8\");\n } catch (err) {\n throw new Error(\n `Cannot read config file ${configPath}: ${err instanceof Error ? 
err.message : String(err)}`,\n );\n }\n\n let parsed: unknown;\n try {\n parsed = JSON.parse(raw);\n } catch {\n throw new Error(\n `Invalid JSON in ${configPath}`,\n );\n }\n\n if (parsed && typeof parsed === \"object\" && \"scoring\" in parsed) {\n const scoring = (parsed as Record<string, unknown>).scoring;\n if (typeof scoring !== \"object\" || scoring === null) {\n throw new Error(\n `\"scoring\" in ${configPath} must be an object`,\n );\n }\n return mergeConfig(scoring as Record<string, unknown>, configPath);\n }\n\n // Config file exists but has no \"scoring\" key — use defaults\n return { ...DEFAULT_SCORING_CONFIG };\n }\n\n return { ...DEFAULT_SCORING_CONFIG };\n}\n\n// ── parameter bounds ──────────────────────────────────────────────────\n\ninterface ParamBound {\n min: number;\n max: number;\n integer?: boolean;\n description: string;\n}\n\nconst PARAM_BOUNDS: Record<string, ParamBound> = {\n scoreFloorRatio: { min: 0, max: 1, description: \"B1 score floor ratio (0–1)\" },\n elbowDropRatio: { min: 0, max: 1, description: \"B3 elbow drop ratio (0–1)\" },\n minMarginalValue: { min: 0, max: 1, description: \"B2 min marginal value (0–1)\" },\n symbolProximityBoost: { min: 1, max: 10, description: \"A2 symbol proximity multiplier (1–10)\" },\n importEdgeWeight: { min: 0, max: 1, description: \"import edge weight (0–1)\" },\n typeRefWeight: { min: 0, max: 1, description: \"type_ref edge weight (0–1)\" },\n defaultBudget: { min: 1, max: 100_000, integer: true, description: \"default token budget (1–100000)\" },\n defaultDepth: { min: 0, max: 10, integer: true, description: \"default graph depth (0–10)\" },\n};\n\n/**\n * Validate a single config value against its bounds.\n * Throws a descriptive error if the value is out of range.\n */\nfunction validateParam(\n key: string,\n value: unknown,\n configPath: string,\n): number {\n const bound = PARAM_BOUNDS[key];\n if (!bound) {\n throw new Error(\n `Unknown scoring parameter \"${key}\" in ${configPath}`,\n );\n }\n\n if (typeof value !== \"number\" || !isFinite(value)) {\n throw new Error(\n `Invalid value for \"${key}\" in ${configPath}: expected a number, got ${typeof value}`,\n );\n }\n\n if (bound.integer && !Number.isInteger(value)) {\n throw new Error(\n `Invalid value for \"${key}\" in ${configPath}: expected an integer, got ${value}` +\n `\\n ${bound.description}`,\n );\n }\n\n if (value < bound.min || value > bound.max) {\n throw new Error(\n `Out-of-bounds value for \"${key}\" in ${configPath}: ${value}` +\n `\\n ${bound.description}`,\n );\n }\n\n return value;\n}\n\n/**\n * Merge user-provided scoring overrides with defaults.\n * Throws on unknown keys, wrong types, or out-of-bounds values.\n */\nfunction mergeConfig(\n overrides: Record<string, unknown>,\n configPath: string,\n): ScoringConfig {\n const config = { ...DEFAULT_SCORING_CONFIG };\n const knownKeys = new Set([...Object.keys(PARAM_BOUNDS), \"tfBoostFormula\"]);\n\n for (const key of Object.keys(overrides)) {\n if (!knownKeys.has(key)) {\n throw new Error(\n `Unknown scoring parameter \"${key}\" in ${configPath}` +\n `\\n Valid parameters: ${[...knownKeys].join(\", \")}`,\n );\n }\n }\n\n for (const key of Object.keys(PARAM_BOUNDS)) {\n if (key in overrides) {\n const validated = validateParam(key, overrides[key], configPath);\n (config as Record<string, unknown>)[key] = validated;\n }\n }\n\n if (\"tfBoostFormula\" in overrides) {\n if (overrides.tfBoostFormula !== \"log2\") {\n throw new Error(\n `Invalid value for \"tfBoostFormula\" in ${configPath}: 
\"${String(overrides.tfBoostFormula)}\"` +\n `\\n Currently only \"log2\" is supported`,\n );\n }\n config.tfBoostFormula = \"log2\";\n }\n\n return config;\n}\n\n/**\n * Serialize current config to JSON (for --emit-config).\n */\nexport function serializeConfig(config: ScoringConfig): string {\n return JSON.stringify({ scoring: config }, null, 2);\n}\n"],"mappings":";;;AAAA,SAAS,YAAY,oBAAoB;AACzC,SAAS,eAAe;AA2BjB,IAAM,yBAAkD;AAAA,EAC7D,iBAAiB;AAAA,EACjB,gBAAgB;AAAA,EAChB,kBAAkB;AAAA,EAClB,gBAAgB;AAAA,EAChB,sBAAsB;AAAA,EACtB,kBAAkB;AAAA,EAClB,eAAe;AAAA,EACf,eAAe;AAAA,EACf,cAAc;AAChB;AAIA,IAAM,mBAAmB;AAAA,EACvB;AAAA,EACA;AACF;AAUO,SAAS,kBAAkB,SAAgC;AAChE,aAAW,YAAY,kBAAkB;AACvC,UAAM,aAAa,QAAQ,SAAS,QAAQ;AAC5C,QAAI,CAAC,WAAW,UAAU,EAAG;AAE7B,QAAI;AACJ,QAAI;AACF,YAAM,aAAa,YAAY,OAAO;AAAA,IACxC,SAAS,KAAK;AACZ,YAAM,IAAI;AAAA,QACR,2BAA2B,UAAU,KAAK,eAAe,QAAQ,IAAI,UAAU,OAAO,GAAG,CAAC;AAAA,MAC5F;AAAA,IACF;AAEA,QAAI;AACJ,QAAI;AACF,eAAS,KAAK,MAAM,GAAG;AAAA,IACzB,QAAQ;AACN,YAAM,IAAI;AAAA,QACR,mBAAmB,UAAU;AAAA,MAC/B;AAAA,IACF;AAEA,QAAI,UAAU,OAAO,WAAW,YAAY,aAAa,QAAQ;AAC/D,YAAM,UAAW,OAAmC;AACpD,UAAI,OAAO,YAAY,YAAY,YAAY,MAAM;AACnD,cAAM,IAAI;AAAA,UACR,gBAAgB,UAAU;AAAA,QAC5B;AAAA,MACF;AACA,aAAO,YAAY,SAAoC,UAAU;AAAA,IACnE;AAGA,WAAO,EAAE,GAAG,uBAAuB;AAAA,EACrC;AAEA,SAAO,EAAE,GAAG,uBAAuB;AACrC;AAWA,IAAM,eAA2C;AAAA,EAC/C,iBAAsB,EAAE,KAAK,GAAG,KAAK,GAAG,aAAa,kCAA6B;AAAA,EAClF,gBAAsB,EAAE,KAAK,GAAG,KAAK,GAAG,aAAa,iCAA4B;AAAA,EACjF,kBAAsB,EAAE,KAAK,GAAG,KAAK,GAAG,aAAa,mCAA8B;AAAA,EACnF,sBAAsB,EAAE,KAAK,GAAG,KAAK,IAAI,aAAa,6CAAwC;AAAA,EAC9F,kBAAsB,EAAE,KAAK,GAAG,KAAK,GAAG,aAAa,gCAA2B;AAAA,EAChF,eAAsB,EAAE,KAAK,GAAG,KAAK,GAAG,aAAa,kCAA6B;AAAA,EAClF,eAAsB,EAAE,KAAK,GAAG,KAAK,KAAS,SAAS,MAAM,aAAa,uCAAkC;AAAA,EAC5G,cAAsB,EAAE,KAAK,GAAG,KAAK,IAAI,SAAS,MAAM,aAAa,kCAA6B;AACpG;AAMA,SAAS,cACP,KACA,OACA,YACQ;AACR,QAAM,QAAQ,aAAa,GAAG;AAC9B,MAAI,CAAC,OAAO;AACV,UAAM,IAAI;AAAA,MACR,8BAA8B,GAAG,QAAQ,UAAU;AAAA,IACrD;AAAA,EACF;AAEA,MAAI,OAAO,UAAU,YAAY,CAAC,SAAS,KAAK,GAAG;AACjD,UAAM,IAAI;AAAA,MACR,sBAAsB,GAAG,QAAQ,UAAU,4BAA4B,OAAO,KAAK;AAAA,IACrF;AAAA,EACF;AAEA,MAAI,MAAM,WAAW,CAAC,OAAO,UAAU,KAAK,GAAG;AAC7C,UAAM,IAAI;AAAA,MACR,sBAAsB,GAAG,QAAQ,UAAU,8BAA8B,KAAK;AAAA,IACrE,MAAM,WAAW;AAAA,IAC5B;AAAA,EACF;AAEA,MAAI,QAAQ,MAAM,OAAO,QAAQ,MAAM,KAAK;AAC1C,UAAM,IAAI;AAAA,MACR,4BAA4B,GAAG,QAAQ,UAAU,KAAK,KAAK;AAAA,IAClD,MAAM,WAAW;AAAA,IAC5B;AAAA,EACF;AAEA,SAAO;AACT;AAMA,SAAS,YACP,WACA,YACe;AACf,QAAM,SAAS,EAAE,GAAG,uBAAuB;AAC3C,QAAM,YAAY,oBAAI,IAAI,CAAC,GAAG,OAAO,KAAK,YAAY,GAAG,gBAAgB,CAAC;AAE1E,aAAW,OAAO,OAAO,KAAK,SAAS,GAAG;AACxC,QAAI,CAAC,UAAU,IAAI,GAAG,GAAG;AACvB,YAAM,IAAI;AAAA,QACR,8BAA8B,GAAG,QAAQ,UAAU;AAAA,sBACxB,CAAC,GAAG,SAAS,EAAE,KAAK,IAAI,CAAC;AAAA,MACtD;AAAA,IACF;AAAA,EACF;AAEA,aAAW,OAAO,OAAO,KAAK,YAAY,GAAG;AAC3C,QAAI,OAAO,WAAW;AACpB,YAAM,YAAY,cAAc,KAAK,UAAU,GAAG,GAAG,UAAU;AAC/D,MAAC,OAAmC,GAAG,IAAI;AAAA,IAC7C;AAAA,EACF;AAEA,MAAI,oBAAoB,WAAW;AACjC,QAAI,UAAU,mBAAmB,QAAQ;AACvC,YAAM,IAAI;AAAA,QACR,yCAAyC,UAAU,MAAM,OAAO,UAAU,cAAc,CAAC;AAAA;AAAA,MAE3F;AAAA,IACF;AACA,WAAO,iBAAiB;AAAA,EAC1B;AAEA,SAAO;AACT;AAKO,SAAS,gBAAgB,QAA+B;AAC7D,SAAO,KAAK,UAAU,EAAE,SAAS,OAAO,GAAG,MAAM,CAAC;AACpD;","names":[]}
package/dist/chunk-7RYHZOYF.js
@@ -0,0 +1,27 @@
+ #!/usr/bin/env node
+
+ // src/root.ts
+ import { existsSync } from "fs";
+ import { resolve, dirname } from "path";
+ var ROOT_MARKERS = [".git", "package.json"];
+ function findProjectRoot(startDir) {
+ let dir = resolve(startDir);
+ while (true) {
+ for (const marker of ROOT_MARKERS) {
+ if (existsSync(resolve(dir, marker))) return dir;
+ }
+ const parent = dirname(dir);
+ if (parent === dir) return null;
+ dir = parent;
+ }
+ }
+ function resolveRoot(flagRoot) {
+ if (typeof flagRoot === "string" && flagRoot) return resolve(flagRoot);
+ return findProjectRoot(process.cwd()) ?? resolve(".");
+ }
+
+ export {
+ findProjectRoot,
+ resolveRoot
+ };
+ //# sourceMappingURL=chunk-7RYHZOYF.js.map
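The root-resolution order implemented above, sketched as a usage example (the relative import specifier and the paths in the comments are illustrative assumptions):

// Illustrative sketch only — the chunk is an internal module, not a documented entry point.
import { findProjectRoot, resolveRoot } from "./chunk-7RYHZOYF.js";

// 1. A non-empty string --root flag wins and is resolved as given.
resolveRoot("/tmp/my-project");              // -> "/tmp/my-project"

// 2. Otherwise walk upward from the start directory and return the first
//    ancestor that contains a .git directory or a package.json file.
findProjectRoot("/repo/packages/app/src");   // e.g. "/repo/packages/app" if it holds a package.json

// 3. If no marker is found all the way up to the filesystem root,
//    fall back to the current working directory.
resolveRoot(undefined);                      // -> findProjectRoot(process.cwd()) ?? resolve(".")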
package/dist/chunk-7RYHZOYF.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/root.ts"],"sourcesContent":["import { existsSync } from \"node:fs\";\nimport { resolve, dirname } from \"node:path\";\n\nconst ROOT_MARKERS = [\".git\", \"package.json\"] as const;\n\n/**\n * Walk up from `startDir` looking for a directory that contains a root\n * marker (`.git` directory or `package.json`). Returns the first match,\n * or `null` if the filesystem root is reached without finding one.\n *\n * `.git` is checked first so that in a monorepo the repository root wins\n * over a nested package.json.\n */\nexport function findProjectRoot(startDir: string): string | null {\n let dir = resolve(startDir);\n // eslint-disable-next-line no-constant-condition\n while (true) {\n for (const marker of ROOT_MARKERS) {\n if (existsSync(resolve(dir, marker))) return dir;\n }\n const parent = dirname(dir);\n if (parent === dir) return null; // filesystem root\n dir = parent;\n }\n}\n\n/**\n * Resolve the effective project root:\n * 1. Explicit `--root` flag (highest priority)\n * 2. Auto-detected via `.git` / `package.json` walk-up\n * 3. Current working directory (fallback)\n */\nexport function resolveRoot(flagRoot: string | boolean | undefined): string {\n if (typeof flagRoot === \"string\" && flagRoot) return resolve(flagRoot);\n return findProjectRoot(process.cwd()) ?? resolve(\".\");\n}\n"],"mappings":";;;AAAA,SAAS,kBAAkB;AAC3B,SAAS,SAAS,eAAe;AAEjC,IAAM,eAAe,CAAC,QAAQ,cAAc;AAUrC,SAAS,gBAAgB,UAAiC;AAC/D,MAAI,MAAM,QAAQ,QAAQ;AAE1B,SAAO,MAAM;AACX,eAAW,UAAU,cAAc;AACjC,UAAI,WAAW,QAAQ,KAAK,MAAM,CAAC,EAAG,QAAO;AAAA,IAC/C;AACA,UAAM,SAAS,QAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAQO,SAAS,YAAY,UAAgD;AAC1E,MAAI,OAAO,aAAa,YAAY,SAAU,QAAO,QAAQ,QAAQ;AACrE,SAAO,gBAAgB,QAAQ,IAAI,CAAC,KAAK,QAAQ,GAAG;AACtD;","names":[]}
package/dist/chunk-ITVAEU6K.js
@@ -0,0 +1,250 @@
+ // src/db.ts
+ import { createRequire } from "module";
+ import { mkdirSync, existsSync } from "fs";
+ import { dirname } from "path";
+ var require2 = createRequire(import.meta.url);
+ var Database = require2("better-sqlite3");
+ var IndexDatabase = class {
+ db;
+ constructor(dbPath) {
+ const dir = dirname(dbPath);
+ if (!existsSync(dir)) {
+ mkdirSync(dir, { recursive: true });
+ }
+ this.db = new Database(dbPath);
+ this.db.pragma("journal_mode = WAL");
+ this.db.pragma("foreign_keys = ON");
+ this.init();
+ }
+ init() {
+ this.db.exec(`
+ CREATE TABLE IF NOT EXISTS files (
+ path TEXT PRIMARY KEY,
+ content_hash TEXT NOT NULL,
+ language TEXT,
+ last_indexed INTEGER
+ );
+
+ CREATE TABLE IF NOT EXISTS symbols (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ file_path TEXT NOT NULL,
+ name TEXT NOT NULL,
+ kind TEXT NOT NULL,
+ start_byte INTEGER,
+ end_byte INTEGER,
+ start_line INTEGER,
+ end_line INTEGER,
+ start_column INTEGER,
+ end_column INTEGER,
+ signature TEXT,
+ FOREIGN KEY (file_path) REFERENCES files(path) ON DELETE CASCADE
+ );
+
+ CREATE TABLE IF NOT EXISTS imports (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ file_path TEXT NOT NULL,
+ specifier TEXT NOT NULL,
+ source_path TEXT,
+ raw_module TEXT NOT NULL,
+ is_type_only INTEGER DEFAULT 0,
+ FOREIGN KEY (file_path) REFERENCES files(path) ON DELETE CASCADE
+ );
+
+ CREATE TABLE IF NOT EXISTS "references" (
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
+ source_symbol_id INTEGER NOT NULL,
+ target_symbol_id INTEGER NOT NULL,
+ ref_type TEXT NOT NULL DEFAULT 'import',
+ FOREIGN KEY (source_symbol_id) REFERENCES symbols(id) ON DELETE CASCADE,
+ FOREIGN KEY (target_symbol_id) REFERENCES symbols(id) ON DELETE CASCADE
+ );
+
+ CREATE INDEX IF NOT EXISTS idx_symbols_file ON symbols(file_path);
+ CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);
+ CREATE INDEX IF NOT EXISTS idx_symbols_kind ON symbols(kind);
+ CREATE INDEX IF NOT EXISTS idx_imports_file ON imports(file_path);
+ CREATE INDEX IF NOT EXISTS idx_imports_source ON imports(source_path);
+ CREATE INDEX IF NOT EXISTS idx_refs_source ON "references"(source_symbol_id);
+ CREATE INDEX IF NOT EXISTS idx_refs_target ON "references"(target_symbol_id);
+
+ CREATE VIRTUAL TABLE IF NOT EXISTS content_fts USING fts5(
+ file_path,
+ content,
+ tokenize='unicode61'
+ );
+ `);
+ }
+ clearFile(filePath) {
+ this.db.prepare("DELETE FROM symbols WHERE file_path = ?").run(filePath);
+ this.db.prepare("DELETE FROM imports WHERE file_path = ?").run(filePath);
+ this.db.prepare("DELETE FROM files WHERE path = ?").run(filePath);
+ }
+ clearAllImports() {
+ this.db.prepare("DELETE FROM imports").run();
+ }
+ clearAllReferences() {
+ this.db.prepare('DELETE FROM "references"').run();
+ }
+ upsertFile(file) {
+ this.db.prepare(
+ `INSERT OR REPLACE INTO files (path, content_hash, language, last_indexed)
+ VALUES (@path, @content_hash, @language, @last_indexed)`
+ ).run(file);
+ }
+ insertSymbol(symbol) {
+ const result = this.db.prepare(
+ `INSERT INTO symbols (file_path, name, kind, start_byte, end_byte, start_line, end_line, start_column, end_column, signature)
+ VALUES (@file_path, @name, @kind, @start_byte, @end_byte, @start_line, @end_line, @start_column, @end_column, @signature)`
+ ).run(symbol);
+ return Number(result.lastInsertRowid);
+ }
+ insertImport(imp) {
+ const result = this.db.prepare(
+ `INSERT INTO imports (file_path, specifier, source_path, raw_module, is_type_only)
+ VALUES (@file_path, @specifier, @source_path, @raw_module, @is_type_only)`
+ ).run(imp);
+ return Number(result.lastInsertRowid);
+ }
+ insertReference(ref) {
+ this.db.prepare(
+ `INSERT INTO "references" (source_symbol_id, target_symbol_id, ref_type)
+ VALUES (@source_symbol_id, @target_symbol_id, @ref_type)`
+ ).run(ref);
+ }
+ upsertFileContent(filePath, content) {
+ this.db.prepare("DELETE FROM content_fts WHERE file_path = ?").run(filePath);
+ this.db.prepare("INSERT INTO content_fts (file_path, content) VALUES (?, ?)").run(filePath, content);
+ }
+ /**
+ * Search file content using FTS5 full-text search.
+ * Returns files with BM25 relevance scores and matching line ranges.
+ */
+ searchContent(term) {
+ return this.db.prepare(
+ `SELECT file_path, -rank AS rank
+ FROM content_fts
+ WHERE content_fts MATCH ?
+ ORDER BY rank DESC`
+ ).all(term);
+ }
+ getFileHash(filePath) {
+ const row = this.db.prepare("SELECT content_hash FROM files WHERE path = ?").get(filePath);
+ return row?.content_hash;
+ }
+ getSymbolsByFile(filePath) {
+ return this.db.prepare("SELECT * FROM symbols WHERE file_path = ?").all(filePath);
+ }
+ getSymbolById(id) {
+ return this.db.prepare("SELECT * FROM symbols WHERE id = ?").get(id);
+ }
+ getSymbolByName(name) {
+ return this.db.prepare("SELECT * FROM symbols WHERE name = ?").get(name);
+ }
+ findSymbolsByName(name) {
+ return this.db.prepare("SELECT * FROM symbols WHERE name LIKE ?").all(`%${name}%`);
+ }
+ findSymbols(name, kind) {
+ if (kind && kind !== "all") {
+ return this.db.prepare(
+ "SELECT * FROM symbols WHERE name LIKE ? AND kind = ? ORDER BY file_path, start_line"
+ ).all(`%${name}%`, kind);
+ }
+ return this.db.prepare(
+ "SELECT * FROM symbols WHERE name LIKE ? ORDER BY file_path, start_line"
+ ).all(`%${name}%`);
+ }
+ /** Get all file-to-file import edges (for file-level graph). */
+ getFileImportEdges() {
+ return this.db.prepare(
+ `SELECT file_path AS source_file,
+ source_path AS target_file,
+ GROUP_CONCAT(specifier, ', ') AS specifiers,
+ MAX(is_type_only) AS has_type_only
+ FROM imports
+ WHERE source_path IS NOT NULL
+ GROUP BY file_path, source_path
+ ORDER BY file_path, source_path`
+ ).all();
+ }
+ /** Get outgoing symbol references (symbols this symbol depends on). */
+ getOutgoingReferences(symbolId) {
+ return this.db.prepare(
+ `SELECT s.id AS target_id, s.name AS target_name, s.kind AS target_kind,
+ s.file_path AS target_file, s.start_line AS target_line, r.ref_type
+ FROM "references" r
+ JOIN symbols s ON r.target_symbol_id = s.id
+ WHERE r.source_symbol_id = ?
+ ORDER BY s.file_path, s.start_line`
+ ).all(symbolId);
+ }
+ /** Get incoming symbol references (symbols that depend on this symbol). */
+ getIncomingReferences(symbolId) {
+ return this.db.prepare(
+ `SELECT s.id AS source_id, s.name AS source_name, s.kind AS source_kind,
+ s.file_path AS source_file, s.start_line AS source_line, r.ref_type
+ FROM "references" r
+ JOIN symbols s ON r.source_symbol_id = s.id
+ WHERE r.target_symbol_id = ?
+ ORDER BY s.file_path, s.start_line`
+ ).all(symbolId);
+ }
+ /** Get in-degree (number of incoming references) for a symbol. */
+ getSymbolInDegree(symbolId) {
+ const row = this.db.prepare(
+ 'SELECT COUNT(*) as count FROM "references" WHERE target_symbol_id = ?'
+ ).get(symbolId);
+ return row.count;
+ }
+ /** Get all files in the index. */
+ getAllFiles() {
+ return this.db.prepare("SELECT * FROM files ORDER BY path").all();
+ }
+ countFiles() {
+ const row = this.db.prepare("SELECT COUNT(*) as count FROM files").get();
+ return row.count;
+ }
+ countSymbols() {
+ const row = this.db.prepare("SELECT COUNT(*) as count FROM symbols").get();
+ return row.count;
+ }
+ countImports() {
+ const row = this.db.prepare("SELECT COUNT(*) as count FROM imports").get();
+ return row.count;
+ }
+ countReferences() {
+ const row = this.db.prepare('SELECT COUNT(*) as count FROM "references"').get();
+ return row.count;
+ }
+ transaction(fn) {
+ return this.db.transaction(fn)();
+ }
+ close() {
+ this.db.close();
+ }
+ };
+
+ // src/root.ts
+ import { existsSync as existsSync2 } from "fs";
+ import { resolve, dirname as dirname2 } from "path";
+ var ROOT_MARKERS = [".git", "package.json"];
+ function findProjectRoot(startDir) {
+ let dir = resolve(startDir);
+ while (true) {
+ for (const marker of ROOT_MARKERS) {
+ if (existsSync2(resolve(dir, marker))) return dir;
+ }
+ const parent = dirname2(dir);
+ if (parent === dir) return null;
+ dir = parent;
+ }
+ }
+ function resolveRoot(flagRoot) {
+ if (typeof flagRoot === "string" && flagRoot) return resolve(flagRoot);
+ return findProjectRoot(process.cwd()) ?? resolve(".");
+ }
+
+ export {
+ IndexDatabase,
+ resolveRoot
+ };
+ //# sourceMappingURL=chunk-ITVAEU6K.js.map
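A hedged sketch of how an indexer might drive the IndexDatabase class above, using only methods defined in this chunk (the database path, row values, and import specifier are illustrative assumptions):

// Illustrative sketch only — the chunk is an internal module, not a documented entry point.
import { IndexDatabase, resolveRoot } from "./chunk-ITVAEU6K.js";

const root = resolveRoot(undefined);
const db = new IndexDatabase(`${root}/.codefocus/index.db`); // creates the directory and schema if missing

db.transaction(() => {
  db.clearFile("src/config.ts"); // drop any stale rows for this file before re-indexing
  db.upsertFile({
    path: "src/config.ts",
    content_hash: "abc123",
    language: "typescript",
    last_indexed: Date.now()
  });
  db.insertSymbol({
    file_path: "src/config.ts", name: "loadScoringConfig", kind: "function",
    start_byte: 0, end_byte: 420, start_line: 1, end_line: 20,
    start_column: 0, end_column: 1, signature: null
  });
  db.upsertFileContent("src/config.ts", "export function loadScoringConfig(rootDir) { /* ... */ }");
});

console.log(db.countFiles(), db.countSymbols());    // 1 1
console.log(db.searchContent("loadScoringConfig")); // FTS5 matches ordered by negated BM25 rank
db.close();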
package/dist/chunk-ITVAEU6K.js.map
@@ -0,0 +1 @@
+ {"version":3,"sources":["../src/db.ts","../src/root.ts"],"sourcesContent":["import { createRequire } from \"node:module\";\nimport { mkdirSync, existsSync } from \"node:fs\";\nimport { dirname } from \"node:path\";\n\nconst require = createRequire(import.meta.url);\nconst Database = require(\"better-sqlite3\");\n\nexport interface FileRow {\n path: string;\n content_hash: string;\n language: string;\n last_indexed: number;\n}\n\nexport interface SymbolRow {\n id?: number;\n file_path: string;\n name: string;\n kind: string;\n start_byte: number;\n end_byte: number;\n start_line: number;\n end_line: number;\n start_column: number;\n end_column: number;\n signature: string | null;\n}\n\nexport interface ImportRow {\n id?: number;\n file_path: string;\n specifier: string;\n source_path: string | null;\n raw_module: string;\n is_type_only: number;\n}\n\nexport interface ReferenceRow {\n id?: number;\n source_symbol_id: number;\n target_symbol_id: number;\n ref_type: string;\n}\n\nexport class IndexDatabase {\n private db: InstanceType<typeof Database>;\n\n constructor(dbPath: string) {\n const dir = dirname(dbPath);\n if (!existsSync(dir)) {\n mkdirSync(dir, { recursive: true });\n }\n this.db = new Database(dbPath);\n this.db.pragma(\"journal_mode = WAL\");\n this.db.pragma(\"foreign_keys = ON\");\n this.init();\n }\n\n private init(): void {\n this.db.exec(`\n CREATE TABLE IF NOT EXISTS files (\n path TEXT PRIMARY KEY,\n content_hash TEXT NOT NULL,\n language TEXT,\n last_indexed INTEGER\n );\n\n CREATE TABLE IF NOT EXISTS symbols (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n file_path TEXT NOT NULL,\n name TEXT NOT NULL,\n kind TEXT NOT NULL,\n start_byte INTEGER,\n end_byte INTEGER,\n start_line INTEGER,\n end_line INTEGER,\n start_column INTEGER,\n end_column INTEGER,\n signature TEXT,\n FOREIGN KEY (file_path) REFERENCES files(path) ON DELETE CASCADE\n );\n\n CREATE TABLE IF NOT EXISTS imports (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n file_path TEXT NOT NULL,\n specifier TEXT NOT NULL,\n source_path TEXT,\n raw_module TEXT NOT NULL,\n is_type_only INTEGER DEFAULT 0,\n FOREIGN KEY (file_path) REFERENCES files(path) ON DELETE CASCADE\n );\n\n CREATE TABLE IF NOT EXISTS \"references\" (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n source_symbol_id INTEGER NOT NULL,\n target_symbol_id INTEGER NOT NULL,\n ref_type TEXT NOT NULL DEFAULT 'import',\n FOREIGN KEY (source_symbol_id) REFERENCES symbols(id) ON DELETE CASCADE,\n FOREIGN KEY (target_symbol_id) REFERENCES symbols(id) ON DELETE CASCADE\n );\n\n CREATE INDEX IF NOT EXISTS idx_symbols_file ON symbols(file_path);\n CREATE INDEX IF NOT EXISTS idx_symbols_name ON symbols(name);\n CREATE INDEX IF NOT EXISTS idx_symbols_kind ON symbols(kind);\n CREATE INDEX IF NOT EXISTS idx_imports_file ON imports(file_path);\n CREATE INDEX IF NOT EXISTS idx_imports_source ON imports(source_path);\n CREATE INDEX IF NOT EXISTS idx_refs_source ON \"references\"(source_symbol_id);\n CREATE INDEX IF NOT EXISTS idx_refs_target ON \"references\"(target_symbol_id);\n\n CREATE VIRTUAL TABLE IF NOT EXISTS content_fts USING fts5(\n file_path,\n content,\n tokenize='unicode61'\n );\n `);\n }\n\n clearFile(filePath: string): void {\n this.db.prepare(\"DELETE FROM symbols WHERE file_path = ?\").run(filePath);\n this.db.prepare(\"DELETE FROM imports WHERE file_path = ?\").run(filePath);\n this.db.prepare(\"DELETE FROM files WHERE path = ?\").run(filePath);\n }\n\n clearAllImports(): void {\n this.db.prepare(\"DELETE FROM imports\").run();\n }\n\n clearAllReferences(): void 
{\n this.db.prepare('DELETE FROM \"references\"').run();\n }\n\n upsertFile(file: FileRow): void {\n this.db\n .prepare(\n `INSERT OR REPLACE INTO files (path, content_hash, language, last_indexed)\n VALUES (@path, @content_hash, @language, @last_indexed)`,\n )\n .run(file);\n }\n\n insertSymbol(symbol: SymbolRow): number {\n const result = this.db\n .prepare(\n `INSERT INTO symbols (file_path, name, kind, start_byte, end_byte, start_line, end_line, start_column, end_column, signature)\n VALUES (@file_path, @name, @kind, @start_byte, @end_byte, @start_line, @end_line, @start_column, @end_column, @signature)`,\n )\n .run(symbol);\n return Number(result.lastInsertRowid);\n }\n\n insertImport(imp: ImportRow): number {\n const result = this.db\n .prepare(\n `INSERT INTO imports (file_path, specifier, source_path, raw_module, is_type_only)\n VALUES (@file_path, @specifier, @source_path, @raw_module, @is_type_only)`,\n )\n .run(imp);\n return Number(result.lastInsertRowid);\n }\n\n insertReference(ref: ReferenceRow): void {\n this.db\n .prepare(\n `INSERT INTO \"references\" (source_symbol_id, target_symbol_id, ref_type)\n VALUES (@source_symbol_id, @target_symbol_id, @ref_type)`,\n )\n .run(ref);\n }\n\n upsertFileContent(filePath: string, content: string): void {\n // Delete old entry first (FTS5 doesn't support REPLACE)\n this.db.prepare(\"DELETE FROM content_fts WHERE file_path = ?\").run(filePath);\n this.db\n .prepare(\"INSERT INTO content_fts (file_path, content) VALUES (?, ?)\")\n .run(filePath, content);\n }\n\n /**\n * Search file content using FTS5 full-text search.\n * Returns files with BM25 relevance scores and matching line ranges.\n */\n searchContent(\n term: string,\n ): Array<{ file_path: string; rank: number }> {\n // FTS5 rank is negative (more negative = more relevant), so we negate it\n return this.db\n .prepare(\n `SELECT file_path, -rank AS rank\n FROM content_fts\n WHERE content_fts MATCH ?\n ORDER BY rank DESC`,\n )\n .all(term) as Array<{ file_path: string; rank: number }>;\n }\n\n getFileHash(filePath: string): string | undefined {\n const row = this.db\n .prepare(\"SELECT content_hash FROM files WHERE path = ?\")\n .get(filePath) as { content_hash: string } | undefined;\n return row?.content_hash;\n }\n\n getSymbolsByFile(filePath: string): SymbolRow[] {\n return this.db\n .prepare(\"SELECT * FROM symbols WHERE file_path = ?\")\n .all(filePath) as SymbolRow[];\n }\n\n getSymbolById(id: number): SymbolRow | undefined {\n return this.db\n .prepare(\"SELECT * FROM symbols WHERE id = ?\")\n .get(id) as SymbolRow | undefined;\n }\n\n getSymbolByName(name: string): SymbolRow | undefined {\n return this.db\n .prepare(\"SELECT * FROM symbols WHERE name = ?\")\n .get(name) as SymbolRow | undefined;\n }\n\n findSymbolsByName(name: string): SymbolRow[] {\n return this.db\n .prepare(\"SELECT * FROM symbols WHERE name LIKE ?\")\n .all(`%${name}%`) as SymbolRow[];\n }\n\n findSymbols(name: string, kind?: string): SymbolRow[] {\n if (kind && kind !== \"all\") {\n return this.db\n .prepare(\n \"SELECT * FROM symbols WHERE name LIKE ? AND kind = ? ORDER BY file_path, start_line\",\n )\n .all(`%${name}%`, kind) as SymbolRow[];\n }\n return this.db\n .prepare(\n \"SELECT * FROM symbols WHERE name LIKE ? ORDER BY file_path, start_line\",\n )\n .all(`%${name}%`) as SymbolRow[];\n }\n\n /** Get all file-to-file import edges (for file-level graph). 
*/\n getFileImportEdges(): Array<{\n source_file: string;\n target_file: string;\n specifiers: string;\n has_type_only: number;\n }> {\n return this.db\n .prepare(\n `SELECT file_path AS source_file,\n source_path AS target_file,\n GROUP_CONCAT(specifier, ', ') AS specifiers,\n MAX(is_type_only) AS has_type_only\n FROM imports\n WHERE source_path IS NOT NULL\n GROUP BY file_path, source_path\n ORDER BY file_path, source_path`,\n )\n .all() as Array<{\n source_file: string;\n target_file: string;\n specifiers: string;\n has_type_only: number;\n }>;\n }\n\n /** Get outgoing symbol references (symbols this symbol depends on). */\n getOutgoingReferences(\n symbolId: number,\n ): Array<{\n target_id: number;\n target_name: string;\n target_kind: string;\n target_file: string;\n target_line: number;\n ref_type: string;\n }> {\n return this.db\n .prepare(\n `SELECT s.id AS target_id, s.name AS target_name, s.kind AS target_kind,\n s.file_path AS target_file, s.start_line AS target_line, r.ref_type\n FROM \"references\" r\n JOIN symbols s ON r.target_symbol_id = s.id\n WHERE r.source_symbol_id = ?\n ORDER BY s.file_path, s.start_line`,\n )\n .all(symbolId) as Array<{\n target_id: number;\n target_name: string;\n target_kind: string;\n target_file: string;\n target_line: number;\n ref_type: string;\n }>;\n }\n\n /** Get incoming symbol references (symbols that depend on this symbol). */\n getIncomingReferences(\n symbolId: number,\n ): Array<{\n source_id: number;\n source_name: string;\n source_kind: string;\n source_file: string;\n source_line: number;\n ref_type: string;\n }> {\n return this.db\n .prepare(\n `SELECT s.id AS source_id, s.name AS source_name, s.kind AS source_kind,\n s.file_path AS source_file, s.start_line AS source_line, r.ref_type\n FROM \"references\" r\n JOIN symbols s ON r.source_symbol_id = s.id\n WHERE r.target_symbol_id = ?\n ORDER BY s.file_path, s.start_line`,\n )\n .all(symbolId) as Array<{\n source_id: number;\n source_name: string;\n source_kind: string;\n source_file: string;\n source_line: number;\n ref_type: string;\n }>;\n }\n\n /** Get in-degree (number of incoming references) for a symbol. */\n getSymbolInDegree(symbolId: number): number {\n const row = this.db\n .prepare(\n 'SELECT COUNT(*) as count FROM \"references\" WHERE target_symbol_id = ?',\n )\n .get(symbolId) as { count: number };\n return row.count;\n }\n\n /** Get all files in the index. 
*/\n getAllFiles(): FileRow[] {\n return this.db\n .prepare(\"SELECT * FROM files ORDER BY path\")\n .all() as FileRow[];\n }\n\n countFiles(): number {\n const row = this.db\n .prepare(\"SELECT COUNT(*) as count FROM files\")\n .get() as { count: number };\n return row.count;\n }\n\n countSymbols(): number {\n const row = this.db\n .prepare(\"SELECT COUNT(*) as count FROM symbols\")\n .get() as { count: number };\n return row.count;\n }\n\n countImports(): number {\n const row = this.db\n .prepare(\"SELECT COUNT(*) as count FROM imports\")\n .get() as { count: number };\n return row.count;\n }\n\n countReferences(): number {\n const row = this.db\n .prepare('SELECT COUNT(*) as count FROM \"references\"')\n .get() as { count: number };\n return row.count;\n }\n\n transaction<T>(fn: () => T): T {\n return this.db.transaction(fn)();\n }\n\n close(): void {\n this.db.close();\n }\n}\n","import { existsSync } from \"node:fs\";\nimport { resolve, dirname } from \"node:path\";\n\nconst ROOT_MARKERS = [\".git\", \"package.json\"] as const;\n\n/**\n * Walk up from `startDir` looking for a directory that contains a root\n * marker (`.git` directory or `package.json`). Returns the first match,\n * or `null` if the filesystem root is reached without finding one.\n *\n * `.git` is checked first so that in a monorepo the repository root wins\n * over a nested package.json.\n */\nexport function findProjectRoot(startDir: string): string | null {\n let dir = resolve(startDir);\n // eslint-disable-next-line no-constant-condition\n while (true) {\n for (const marker of ROOT_MARKERS) {\n if (existsSync(resolve(dir, marker))) return dir;\n }\n const parent = dirname(dir);\n if (parent === dir) return null; // filesystem root\n dir = parent;\n }\n}\n\n/**\n * Resolve the effective project root:\n * 1. Explicit `--root` flag (highest priority)\n * 2. Auto-detected via `.git` / `package.json` walk-up\n * 3. Current working directory (fallback)\n */\nexport function resolveRoot(flagRoot: string | boolean | undefined): string {\n if (typeof flagRoot === \"string\" && flagRoot) return resolve(flagRoot);\n return findProjectRoot(process.cwd()) ?? 
resolve(\".\");\n}\n"],"mappings":";AAAA,SAAS,qBAAqB;AAC9B,SAAS,WAAW,kBAAkB;AACtC,SAAS,eAAe;AAExB,IAAMA,WAAU,cAAc,YAAY,GAAG;AAC7C,IAAM,WAAWA,SAAQ,gBAAgB;AAuClC,IAAM,gBAAN,MAAoB;AAAA,EACjB;AAAA,EAER,YAAY,QAAgB;AAC1B,UAAM,MAAM,QAAQ,MAAM;AAC1B,QAAI,CAAC,WAAW,GAAG,GAAG;AACpB,gBAAU,KAAK,EAAE,WAAW,KAAK,CAAC;AAAA,IACpC;AACA,SAAK,KAAK,IAAI,SAAS,MAAM;AAC7B,SAAK,GAAG,OAAO,oBAAoB;AACnC,SAAK,GAAG,OAAO,mBAAmB;AAClC,SAAK,KAAK;AAAA,EACZ;AAAA,EAEQ,OAAa;AACnB,SAAK,GAAG,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,KAuDZ;AAAA,EACH;AAAA,EAEA,UAAU,UAAwB;AAChC,SAAK,GAAG,QAAQ,yCAAyC,EAAE,IAAI,QAAQ;AACvE,SAAK,GAAG,QAAQ,yCAAyC,EAAE,IAAI,QAAQ;AACvE,SAAK,GAAG,QAAQ,kCAAkC,EAAE,IAAI,QAAQ;AAAA,EAClE;AAAA,EAEA,kBAAwB;AACtB,SAAK,GAAG,QAAQ,qBAAqB,EAAE,IAAI;AAAA,EAC7C;AAAA,EAEA,qBAA2B;AACzB,SAAK,GAAG,QAAQ,0BAA0B,EAAE,IAAI;AAAA,EAClD;AAAA,EAEA,WAAW,MAAqB;AAC9B,SAAK,GACF;AAAA,MACC;AAAA;AAAA,IAEF,EACC,IAAI,IAAI;AAAA,EACb;AAAA,EAEA,aAAa,QAA2B;AACtC,UAAM,SAAS,KAAK,GACjB;AAAA,MACC;AAAA;AAAA,IAEF,EACC,IAAI,MAAM;AACb,WAAO,OAAO,OAAO,eAAe;AAAA,EACtC;AAAA,EAEA,aAAa,KAAwB;AACnC,UAAM,SAAS,KAAK,GACjB;AAAA,MACC;AAAA;AAAA,IAEF,EACC,IAAI,GAAG;AACV,WAAO,OAAO,OAAO,eAAe;AAAA,EACtC;AAAA,EAEA,gBAAgB,KAAyB;AACvC,SAAK,GACF;AAAA,MACC;AAAA;AAAA,IAEF,EACC,IAAI,GAAG;AAAA,EACZ;AAAA,EAEA,kBAAkB,UAAkB,SAAuB;AAEzD,SAAK,GAAG,QAAQ,6CAA6C,EAAE,IAAI,QAAQ;AAC3E,SAAK,GACF,QAAQ,4DAA4D,EACpE,IAAI,UAAU,OAAO;AAAA,EAC1B;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,cACE,MAC4C;AAE5C,WAAO,KAAK,GACT;AAAA,MACC;AAAA;AAAA;AAAA;AAAA,IAIF,EACC,IAAI,IAAI;AAAA,EACb;AAAA,EAEA,YAAY,UAAsC;AAChD,UAAM,MAAM,KAAK,GACd,QAAQ,+CAA+C,EACvD,IAAI,QAAQ;AACf,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,iBAAiB,UAA+B;AAC9C,WAAO,KAAK,GACT,QAAQ,2CAA2C,EACnD,IAAI,QAAQ;AAAA,EACjB;AAAA,EAEA,cAAc,IAAmC;AAC/C,WAAO,KAAK,GACT,QAAQ,oCAAoC,EAC5C,IAAI,EAAE;AAAA,EACX;AAAA,EAEA,gBAAgB,MAAqC;AACnD,WAAO,KAAK,GACT,QAAQ,sCAAsC,EAC9C,IAAI,IAAI;AAAA,EACb;AAAA,EAEA,kBAAkB,MAA2B;AAC3C,WAAO,KAAK,GACT,QAAQ,yCAAyC,EACjD,IAAI,IAAI,IAAI,GAAG;AAAA,EACpB;AAAA,EAEA,YAAY,MAAc,MAA4B;AACpD,QAAI,QAAQ,SAAS,OAAO;AAC1B,aAAO,KAAK,GACT;AAAA,QACC;AAAA,MACF,EACC,IAAI,IAAI,IAAI,KAAK,IAAI;AAAA,IAC1B;AACA,WAAO,KAAK,GACT;AAAA,MACC;AAAA,IACF,EACC,IAAI,IAAI,IAAI,GAAG;AAAA,EACpB;AAAA;AAAA,EAGA,qBAKG;AACD,WAAO,KAAK,GACT;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAQF,EACC,IAAI;AAAA,EAMT;AAAA;AAAA,EAGA,sBACE,UAQC;AACD,WAAO,KAAK,GACT;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMF,EACC,IAAI,QAAQ;AAAA,EAQjB;AAAA;AAAA,EAGA,sBACE,UAQC;AACD,WAAO,KAAK,GACT;AAAA,MACC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAMF,EACC,IAAI,QAAQ;AAAA,EAQjB;AAAA;AAAA,EAGA,kBAAkB,UAA0B;AAC1C,UAAM,MAAM,KAAK,GACd;AAAA,MACC;AAAA,IACF,EACC,IAAI,QAAQ;AACf,WAAO,IAAI;AAAA,EACb;AAAA;AAAA,EAGA,cAAyB;AACvB,WAAO,KAAK,GACT,QAAQ,mCAAmC,EAC3C,IAAI;AAAA,EACT;AAAA,EAEA,aAAqB;AACnB,UAAM,MAAM,KAAK,GACd,QAAQ,qCAAqC,EAC7C,IAAI;AACP,WAAO,IAAI;AAAA,EACb;AAAA,EAEA,eAAuB;AACrB,UAAM,MAAM,KAAK,GACd,QAAQ,uCAAuC,EAC/C,IAAI;AACP,WAAO,IAAI;AAAA,EACb;AAAA,EAEA,eAAuB;AACrB,UAAM,MAAM,KAAK,GACd,QAAQ,uCAAuC,EAC/C,IAAI;AACP,WAAO,IAAI;AAAA,EACb;AAAA,EAEA,kBAA0B;AACxB,UAAM,MAAM,KAAK,GACd,QAAQ,4CAA4C,EACpD,IAAI;AACP,WAAO,IAAI;AAAA,EACb;AAAA,EAEA,YAAe,IAAgB;AAC7B,WAAO,KAAK,GAAG,YAAY,EAAE,EAAE;AAAA,EACjC;AAAA,EAEA,QAAc;AACZ,SAAK,GAAG,MAAM;AAAA,EAChB;AACF;;;AC3XA,SAAS,cAAAC,mBAAkB;AAC3B,SAAS,SAAS,WAAAC,gBAAe;AAEjC,IAAM,eAAe,CAAC,QAAQ,cAAc;AAUrC,SAAS,gBAAgB,UAAiC;AAC/D,MAAI,MAAM,QAAQ,QAAQ;AAE1B,SAAO,MAAM;AACX,eAAW,UAA
U,cAAc;AACjC,UAAID,YAAW,QAAQ,KAAK,MAAM,CAAC,EAAG,QAAO;AAAA,IAC/C;AACA,UAAM,SAASC,SAAQ,GAAG;AAC1B,QAAI,WAAW,IAAK,QAAO;AAC3B,UAAM;AAAA,EACR;AACF;AAQO,SAAS,YAAY,UAAgD;AAC1E,MAAI,OAAO,aAAa,YAAY,SAAU,QAAO,QAAQ,QAAQ;AACrE,SAAO,gBAAgB,QAAQ,IAAI,CAAC,KAAK,QAAQ,GAAG;AACtD;","names":["require","existsSync","dirname"]}