@snevins/repo-mapper 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2025 Steven Nevins
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,103 @@
+ # repo-mapper
+
+ [![npm version](https://img.shields.io/npm/v/repo-mapper.svg)](https://www.npmjs.com/package/repo-mapper)
+ [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
+
+ Generate token-budgeted maps of code repositories for LLM context. Uses tree-sitter for accurate parsing and PageRank for importance ranking.
+
+ ## Features
+
+ - **Token budgeting**: Fit the most important code into your context window
+ - **Smart ranking**: PageRank algorithm prioritizes frequently-referenced code
+ - **Focus mode**: Bias output toward files you're working on
+ - **Fast caching**: Only re-parses changed files
+
+ ## Supported Languages
+
+ | Language | Extensions |
+ |----------|------------|
+ | TypeScript | .ts, .tsx, .mts, .cts |
+ | JavaScript | .js, .jsx, .mjs, .cjs |
+ | Python | .py, .pyw |
+ | Go | .go |
+ | Rust | .rs |
+ | Solidity | .sol |
+
+ ## Installation
+
+ ```bash
+ # Run directly with npx
+ npx repo-mapper@latest . --tokens 4000
+
+ # Or install globally
+ npm install -g repo-mapper
+ repo-mapper . --tokens 4000
+ ```
+
+ ## Usage
+
+ ```bash
+ repo-mapper [paths...] [options]
+ ```
+
+ ### Options
+
+ | Option | Default | Description |
+ |--------|---------|-------------|
+ | `-t, --tokens <n>` | 1024 | Maximum tokens for output |
+ | `-f, --focus <file>` | | Files to prioritize (repeatable) |
+ | `-o, --output <file>` | stdout | Write output to file |
+ | `-r, --refresh` | false | Ignore cache, re-parse all |
+ | `-v, --verbose` | false | Print progress info |
+ | `--ignore <pattern>` | | Additional ignore patterns (repeatable) |
+ | `--no-ignore` | false | Disable default ignores |
+ | `--max-files <n>` | 10000 | Maximum files to process |
+
+ ### Examples
+
+ ```bash
+ # Map current directory with 2000 token budget
+ repo-mapper . -t 2000
+
+ # Focus on specific files you're editing
+ repo-mapper . -f src/api.ts -f src/types.ts -t 4000
+
+ # Save to file for LLM prompt
+ repo-mapper . -t 8000 -o context.md
+
+ # Full refresh, verbose output
+ repo-mapper . -r -v
+ ```
+
+ ## Output Format
+
+ ```
+ src/ranking.ts:
+ (Rank: 0.1234)
+
+ 15: export function rankDefinitions(tags: Tag[], graph: FileGraph): RankedDefinition[] {
+ 42: export function buildPersonalization(focusFiles: string[]): Map<string, number> {
+
+ src/pagerank.ts:
+ (Rank: 0.0891)
+
+ 8: export function computePageRank(graph: FileGraph, options?: PageRankOptions): Map<string, number> {
+ ```
+
+ ## Caching
+
+ Cache is stored in `.repomap.cache.v2/` in the repository root. Files are re-parsed only when modified (mtime-based invalidation).
+
+ Clear the cache with: `rm -rf .repomap.cache.v2`
+
+ ## How It Works
+
+ 1. **Discover** - Find all supported source files
+ 2. **Parse** - Extract definitions and references using tree-sitter
+ 3. **Graph** - Build reference graph (file A calls function in file B)
+ 4. **Rank** - Run PageRank to score files by importance
+ 5. **Budget** - Binary search to fit top definitions within token limit
+
+ ## License
+
+ MIT
package/dist/cache.d.ts ADDED
@@ -0,0 +1,21 @@
+ import type { Cache, Tag } from "./types.js";
+ /**
+  * Get the shard key (first 2 hex chars of SHA-256 hash).
+  */
+ export declare function getShardKey(absPath: string): string;
+ /**
+  * Load cache from disk. Returns empty cache if missing, invalid, or refresh=true.
+  */
+ export declare function loadCache(rootDir: string, refresh: boolean): Promise<Cache>;
+ /**
+  * Get cached tags for a file if mtime matches.
+  */
+ export declare function getCachedTags(cache: Cache, absPath: string, mtimeMs: number): readonly Tag[] | undefined;
+ /**
+  * Update cache entry for a file.
+  */
+ export declare function setCacheEntry(cache: Cache, absPath: string, mtimeMs: number, tags: readonly Tag[]): void;
+ /**
+  * Save cache to disk. Uses sharded files with atomic writes.
+  */
+ export declare function saveCache(cache: Cache, rootDir: string): Promise<void>;
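The declarations above describe a load → check → update → save lifecycle: `loadCache` once per run, `getCachedTags` per file (keyed on mtime), `setCacheEntry` after a fresh parse, `saveCache` at the end. A minimal sketch of that wiring, where `parseFile` is a hypothetical stand-in for the package's tree-sitter parsing step:

```js
import { stat } from "node:fs/promises";
import { loadCache, getCachedTags, setCacheEntry, saveCache } from "./cache.js";

// parseFile is hypothetical; it stands in for the package's tree-sitter parser.
async function tagsForFile(absPath, cache, parseFile) {
    const { mtimeMs } = await stat(absPath);
    // Reuse cached tags when the file's mtime is unchanged.
    const cached = getCachedTags(cache, absPath, mtimeMs);
    if (cached !== undefined) {
        return cached;
    }
    const tags = await parseFile(absPath);
    setCacheEntry(cache, absPath, mtimeMs, tags);
    return tags;
}

// Usage sketch:
//   const cache = await loadCache(rootDir, false);
//   const tags = await tagsForFile(absPath, cache, parseFile);
//   await saveCache(cache, rootDir);
```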
package/dist/cache.js ADDED
@@ -0,0 +1,123 @@
+ import { readFile, writeFile, mkdir, rename, readdir } from "node:fs/promises";
+ import { join } from "node:path";
+ import { createHash } from "node:crypto";
+ const CACHE_VERSION = 2;
+ const CACHE_DIR_NAME = `.repomap.cache.v${String(CACHE_VERSION)}`;
+ const TAGS_DIR_NAME = "tags";
+ /**
+  * Get the shard key (first 2 hex chars of SHA-256 hash).
+  */
+ export function getShardKey(absPath) {
+     const hash = createHash("sha256").update(absPath).digest("hex");
+     return hash.slice(0, 2);
+ }
+ function getTagsDir(rootDir) {
+     return join(rootDir, CACHE_DIR_NAME, TAGS_DIR_NAME);
+ }
+ function getShardPath(rootDir, shardKey) {
+     return join(getTagsDir(rootDir), `${shardKey}.json`);
+ }
+ /**
+  * Load a single shard file.
+  */
+ async function loadShard(shardPath) {
+     try {
+         const content = await readFile(shardPath, "utf-8");
+         const parsed = JSON.parse(content);
+         if (!isValidShardFile(parsed)) {
+             return new Map();
+         }
+         const entries = new Map();
+         for (const [key, value] of Object.entries(parsed.entries)) {
+             entries.set(key, value);
+         }
+         return entries;
+     }
+     catch {
+         return new Map();
+     }
+ }
+ function isValidShardFile(data) {
+     if (typeof data !== "object" || data === null) {
+         return false;
+     }
+     const obj = data;
+     return typeof obj.entries === "object" && obj.entries !== null;
+ }
+ /**
+  * Load cache from disk. Returns empty cache if missing, invalid, or refresh=true.
+  */
+ export async function loadCache(rootDir, refresh) {
+     if (refresh) {
+         return { version: CACHE_VERSION, entries: new Map() };
+     }
+     const tagsDir = getTagsDir(rootDir);
+     const entries = new Map();
+     try {
+         const files = await readdir(tagsDir);
+         const shardFiles = files.filter((f) => /^[0-9a-f]{2}\.json$/.test(f));
+         await Promise.all(shardFiles.map(async (file) => {
+             const shardPath = join(tagsDir, file);
+             const shardEntries = await loadShard(shardPath);
+             for (const [key, value] of shardEntries) {
+                 entries.set(key, value);
+             }
+         }));
+     }
+     catch {
+         // Directory doesn't exist or can't be read
+     }
+     return { version: CACHE_VERSION, entries };
+ }
+ /**
+  * Get cached tags for a file if mtime matches.
+  */
+ export function getCachedTags(cache, absPath, mtimeMs) {
+     const entry = cache.entries.get(absPath);
+     if (entry === undefined) {
+         return undefined;
+     }
+     if (entry.mtime !== mtimeMs) {
+         return undefined;
+     }
+     return entry.tags;
+ }
+ /**
+  * Update cache entry for a file.
+  */
+ export function setCacheEntry(cache, absPath, mtimeMs, tags) {
+     cache.entries.set(absPath, {
+         mtime: mtimeMs,
+         tags,
+     });
+ }
+ /**
+  * Save cache to disk. Uses sharded files with atomic writes.
+  */
+ export async function saveCache(cache, rootDir) {
+     const tagsDir = getTagsDir(rootDir);
+     await mkdir(tagsDir, { recursive: true });
+     // Group entries by shard
+     const shards = new Map();
+     for (const [absPath, entry] of cache.entries) {
+         const shardKey = getShardKey(absPath);
+         let shard = shards.get(shardKey);
+         if (!shard) {
+             shard = new Map();
+             shards.set(shardKey, shard);
+         }
+         shard.set(absPath, entry);
+     }
+     // Write each shard atomically
+     await Promise.all([...shards.entries()].map(async ([shardKey, entries]) => {
+         const shardPath = getShardPath(rootDir, shardKey);
+         const tmpPath = `${shardPath}.tmp`;
+         const entriesObj = {};
+         for (const [key, value] of entries) {
+             entriesObj[key] = value;
+         }
+         const shardFile = { entries: entriesObj };
+         await writeFile(tmpPath, JSON.stringify(shardFile, null, 2));
+         await rename(tmpPath, shardPath);
+     }));
+ }
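Two details of the implementation above are worth noting. The shard key is the first two hex characters of a SHA-256 over the absolute path, giving up to 256 shard files, so updating one entry rewrites a small JSON file rather than one monolithic cache. And each shard is written to a `.tmp` path and then `rename`d into place; rename within a directory is atomic on POSIX filesystems, so a crash mid-write leaves the previous shard intact instead of truncated JSON. A quick illustration of the shard key (the printed values are examples, not guaranteed):

```js
import { getShardKey } from "./cache.js";

// The same path always maps to the same shard; different paths spread across up to 256 shards.
console.log(getShardKey("/repo/src/api.ts"));   // e.g. "3f"
console.log(getShardKey("/repo/src/types.ts")); // likely a different two-hex-char key
```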
package/dist/cli.d.ts ADDED
@@ -0,0 +1,6 @@
+ import type { ParsedArgs } from "./types.js";
+ /**
+  * Parse CLI arguments into structured format.
+  * Throws on invalid input.
+  */
+ export declare function parseCliArgs(argv: readonly string[]): ParsedArgs;
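A minimal sketch of calling `parseCliArgs` from an entry point, per the declaration above. Node places the interpreter and script paths in `process.argv[0..1]`, so only the remainder is passed:

```js
import { parseCliArgs } from "./cli.js";

const { paths, options } = parseCliArgs(process.argv.slice(2));
console.log(paths);          // positional arguments, e.g. ["."]
console.log(options.tokens); // 1024 unless -t/--tokens was given
```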
package/dist/cli.js ADDED
@@ -0,0 +1,65 @@
+ import { parseArgs } from "node:util";
+ const DEFAULT_TOKENS = 1024;
+ const DEFAULT_MAX_FILES = 10000;
+ /**
+  * Parse CLI arguments into structured format.
+  * Throws on invalid input.
+  */
+ export function parseCliArgs(argv) {
+     const { values, positionals } = parseArgs({
+         args: argv,
+         options: {
+             tokens: { type: "string", short: "t" },
+             focus: { type: "string", short: "f", multiple: true },
+             output: { type: "string", short: "o" },
+             refresh: { type: "boolean", short: "r" },
+             verbose: { type: "boolean", short: "v" },
+             ignore: { type: "string", multiple: true },
+             "no-ignore": { type: "boolean" },
+             "max-files": { type: "string" },
+         },
+         allowPositionals: true,
+         strict: true,
+     });
+     const tokens = parseTokens(values.tokens);
+     const maxFiles = parseMaxFiles(values["max-files"]);
+     return {
+         paths: positionals,
+         options: {
+             tokens,
+             focus: values.focus ?? [],
+             output: values.output,
+             refresh: values.refresh ?? false,
+             verbose: values.verbose ?? false,
+             ignore: values.ignore ?? [],
+             noIgnore: values["no-ignore"] ?? false,
+             maxFiles,
+         },
+     };
+ }
+ function parseTokens(value) {
+     if (value === undefined) {
+         return DEFAULT_TOKENS;
+     }
+     const parsed = Number(value);
+     if (!Number.isFinite(parsed) || !Number.isInteger(parsed)) {
+         throw new Error(`Invalid tokens value: "${value}" must be a positive integer`);
+     }
+     if (parsed <= 0) {
+         throw new Error(`Invalid tokens value: "${value}" must be a positive integer`);
+     }
+     return parsed;
+ }
+ function parseMaxFiles(value) {
+     if (value === undefined) {
+         return DEFAULT_MAX_FILES;
+     }
+     const parsed = Number(value);
+     if (!Number.isFinite(parsed) || !Number.isInteger(parsed)) {
+         throw new Error(`Invalid max-files value: "${value}" must be a non-negative integer`);
+     }
+     if (parsed < 0) {
+         throw new Error(`Invalid max-files value: "${value}" must be a non-negative integer`);
+     }
+     return parsed;
+ }
package/dist/files.d.ts ADDED
@@ -0,0 +1,20 @@
+ import type { FileDiscoveryOptions, FileDiscoveryResult } from "./types.js";
+ /**
+  * Directories always skipped during discovery.
+  */
+ export declare const DEFAULT_IGNORED_DIRS: ReadonlySet<string>;
+ /**
+  * Default patterns for files to ignore (generated, tests, mocks).
+  * Uses gitignore-style patterns compatible with `ignore` package.
+  */
+ export declare const DEFAULT_IGNORED_PATTERNS: readonly string[];
+ /**
+  * File extensions supported for parsing.
+  * Derived from LANGUAGE_REGISTRY to ensure single source of truth.
+  */
+ export declare const SUPPORTED_EXTENSIONS: ReadonlySet<string>;
+ /**
+  * Discover files in a directory tree.
+  * Returns absolute paths sorted alphabetically, limited by maxFiles.
+  */
+ export declare function discoverFiles(options: FileDiscoveryOptions): Promise<FileDiscoveryResult>;
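A usage sketch of `discoverFiles`; the option names follow the fields read in `dist/files.js` below, and everything except `rootDir` falls back to a default when omitted:

```js
import { discoverFiles, DEFAULT_IGNORED_PATTERNS } from "./files.js";

const result = await discoverFiles({
    rootDir: ".",
    ignoredPatterns: DEFAULT_IGNORED_PATTERNS, // gitignore-style patterns
    respectGitignore: true,                    // the default
    maxFiles: 10000,
});
// wasLimited is true when more than maxFiles files were discovered.
console.log(result.files.length, result.totalDiscovered, result.wasLimited);
```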
package/dist/files.js ADDED
@@ -0,0 +1,206 @@
+ import { readdir, readFile } from "node:fs/promises";
+ import { join, extname, resolve, relative } from "node:path";
+ import ignore from "ignore";
+ import { LANGUAGE_REGISTRY } from "./languages.js";
+ /**
+  * Directories always skipped during discovery.
+  */
+ export const DEFAULT_IGNORED_DIRS = new Set([
+     "node_modules",
+     ".git",
+     ".svn",
+     ".hg",
+     "__pycache__",
+     ".pytest_cache",
+     ".mypy_cache",
+     ".tox",
+     ".nox",
+     "venv",
+     ".venv",
+     "env",
+     ".env",
+     "dist",
+     "build",
+     "target",
+     ".next",
+     ".nuxt",
+     "coverage",
+     ".nyc_output",
+     ".cache",
+     "vendor",
+     "third_party",
+ ]);
+ /**
+  * Default patterns for files to ignore (generated, tests, mocks).
+  * Uses gitignore-style patterns compatible with `ignore` package.
+  */
+ export const DEFAULT_IGNORED_PATTERNS = [
+     // Generated files
+     "**/*.pb.go",
+     "**/*.pb.gw.go",
+     "**/*_pb.go",
+     "**/*_generated.go",
+     "**/*_gen.go",
+     "**/*.generated.ts",
+     "**/*.generated.js",
+     "**/*.gen.ts",
+     "**/*.gen.js",
+     // Test files
+     "**/*_test.go",
+     "**/*_test.py",
+     "**/test_*.py",
+     "**/conftest.py",
+     "**/*.test.ts",
+     "**/*.test.tsx",
+     "**/*.test.js",
+     "**/*.test.jsx",
+     "**/*.spec.ts",
+     "**/*.spec.tsx",
+     "**/*.spec.js",
+     "**/*.spec.jsx",
+     "**/__tests__/**",
+     // Mocks/fixtures
+     "**/__mocks__/**",
+     "**/mocks/**",
+     "**/mock_*/**",
+     "**/*_mock.go",
+     "**/fixtures/**",
+     "**/testdata/**",
+     // Bundled/minified files (Phase 16)
+     "**/*.min.js",
+     "**/*.min.mjs",
+     "**/*.min.cjs",
+     "**/*.bundle.js",
+     "**/*.bundle.mjs",
+     "**/*.bundle.cjs",
+     "**/*-bundle.js",
+     "**/*.chunk.js",
+     "**/bundle.js",
+     "**/vendor*.js",
+     "**/runtime*.js",
+     "**/edge-runtime/**",
+ ];
+ /**
+  * File extensions supported for parsing.
+  * Derived from LANGUAGE_REGISTRY to ensure single source of truth.
+  */
+ export const SUPPORTED_EXTENSIONS = new Set(Object.values(LANGUAGE_REGISTRY).flatMap((config) => [...config.extensions]));
+ const DEFAULT_MAX_DEPTH = 20;
+ /**
+  * Discover files in a directory tree.
+  * Returns absolute paths sorted alphabetically, limited by maxFiles.
+  */
+ export async function discoverFiles(options) {
+     const rootDir = resolve(options.rootDir);
+     const maxDepth = options.maxDepth ?? DEFAULT_MAX_DEPTH;
+     const extensions = options.extensions ?? SUPPORTED_EXTENSIONS;
+     const ignoredDirs = options.ignoredDirs ?? DEFAULT_IGNORED_DIRS;
+     const ignoredPatterns = options.ignoredPatterns;
+     const respectGitignore = options.respectGitignore ?? true;
+     const includeHidden = options.includeHidden ?? false;
+     const maxFiles = options.maxFiles;
+     const ignoreMatchers = new Map();
+     const files = [];
+     // Compile pattern matcher once (if patterns provided)
+     let patternMatcher;
+     if (ignoredPatterns && ignoredPatterns.length > 0) {
+         try {
+             patternMatcher = ignore().add([...ignoredPatterns]);
+         }
+         catch (err) {
+             const msg = err instanceof Error ? err.message : String(err);
+             throw new Error(`Invalid ignore pattern: ${msg}`);
+         }
+     }
+     function isIgnoredByPatterns(relPath) {
+         if (!patternMatcher)
+             return false;
+         return patternMatcher.ignores(relPath);
+     }
+     async function loadGitignore(dir) {
+         if (!respectGitignore)
+             return undefined;
+         const cached = ignoreMatchers.get(dir);
+         if (cached)
+             return cached;
+         const gitignorePath = join(dir, ".gitignore");
+         try {
+             const content = await readFile(gitignorePath, "utf-8");
+             const ig = ignore().add(content);
+             ignoreMatchers.set(dir, ig);
+             return ig;
+         }
+         catch {
+             return undefined;
+         }
+     }
+     function isIgnoredByGitignore(absPath, isDirectory) {
+         if (!respectGitignore)
+             return false;
+         for (const [dir, ig] of ignoreMatchers) {
+             const relToIgnoreDir = relative(dir, absPath);
+             if (relToIgnoreDir.startsWith(".."))
+                 continue;
+             const checkPath = isDirectory ? `${relToIgnoreDir}/` : relToIgnoreDir;
+             if (ig.ignores(checkPath))
+                 return true;
+         }
+         return false;
+     }
+     function isHidden(name) {
+         return name.startsWith(".");
+     }
+     async function walk(dir, depth) {
+         if (depth > maxDepth)
+             return;
+         await loadGitignore(dir);
+         let entries;
+         try {
+             entries = await readdir(dir, { withFileTypes: true });
+         }
+         catch {
+             return;
+         }
+         for (const entry of entries) {
+             const name = entry.name;
+             const absPath = join(dir, name);
+             // Skip symlinks to prevent cycles and path traversal
+             if (entry.isSymbolicLink())
+                 continue;
+             if (!includeHidden && isHidden(name))
+                 continue;
+             if (entry.isDirectory()) {
+                 if (ignoredDirs.has(name))
+                     continue;
+                 if (isIgnoredByGitignore(absPath, true))
+                     continue;
+                 await walk(absPath, depth + 1);
+             }
+             else if (entry.isFile()) {
+                 const ext = extname(name);
+                 if (!extensions.has(ext))
+                     continue;
+                 if (isIgnoredByGitignore(absPath, false))
+                     continue;
+                 const relPath = relative(rootDir, absPath);
+                 if (isIgnoredByPatterns(relPath))
+                     continue;
+                 files.push(absPath);
+             }
+         }
+     }
+     await walk(rootDir, 0);
+     const sorted = files.sort();
+     const totalDiscovered = sorted.length;
+     // Apply maxFiles limit after discovery (not during walk).
+     // File paths are small (~100 bytes each), so 20k paths = ~2MB.
+     // OOM risk is from Tag objects during parsing, not path strings.
+     // Applying limit post-discovery allows reporting totalDiscovered count.
+     const limited = maxFiles !== undefined && maxFiles > 0 && totalDiscovered > maxFiles;
+     const resultFiles = limited ? sorted.slice(0, maxFiles) : sorted;
+     return {
+         files: resultFiles,
+         totalDiscovered,
+         wasLimited: limited,
+     };
+ }
package/dist/graph.d.ts ADDED
@@ -0,0 +1,6 @@
+ import type { Tag, FileGraph } from "./types.js";
+ /**
+  * Build file reference graph from parsed tags.
+  * Nodes are files, edges are symbol references from one file to another.
+  */
+ export declare function buildFileGraph(tags: readonly Tag[]): FileGraph;
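A small worked example of `buildFileGraph`, using only the `Tag` fields that `dist/graph.js` below actually reads (`kind`, `name`, `relPath`); real tags from the parser carry more data. A `ref` in one file to a `def` in another becomes a weighted edge:

```js
import { buildFileGraph } from "./graph.js";

const tags = [
    { kind: "def", name: "computePageRank", relPath: "src/pagerank.ts" },
    { kind: "ref", name: "computePageRank", relPath: "src/ranking.ts" },
];
const graph = buildFileGraph(tags);
// ranking.ts references a symbol defined in pagerank.ts, so it gains an out-edge:
console.log(graph.edges.get("src/ranking.ts"));      // Map(1) { 'src/pagerank.ts' => 1 }
console.log(graph.outWeights.get("src/ranking.ts")); // 1
```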
package/dist/graph.js ADDED
@@ -0,0 +1,77 @@
+ /**
+  * Build file reference graph from parsed tags.
+  * Nodes are files, edges are symbol references from one file to another.
+  */
+ export function buildFileGraph(tags) {
+     // First pass: collect nodes and index defs
+     const nodeSet = new Set();
+     const defsByName = new Map();
+     for (const tag of tags) {
+         nodeSet.add(tag.relPath);
+         if (tag.kind === "def") {
+             const existing = defsByName.get(tag.name);
+             if (existing) {
+                 existing.push(tag);
+             }
+             else {
+                 defsByName.set(tag.name, [tag]);
+             }
+         }
+     }
+     // Sort nodes for determinism
+     const nodes = [...nodeSet].sort();
+     // Second pass: build edges (need defsByName complete first)
+     // Track both file-level and symbol-level edges
+     const edgesBuilder = new Map();
+     const symbolEdgesBuilder = new Map();
+     for (const tag of tags) {
+         if (tag.kind !== "ref")
+             continue;
+         const defs = defsByName.get(tag.name);
+         if (!defs)
+             continue;
+         for (const def of defs) {
+             // Skip self-edges
+             if (def.relPath === tag.relPath)
+                 continue;
+             const from = tag.relPath;
+             const to = def.relPath;
+             const symbol = tag.name;
+             // File-level edge
+             let fromEdges = edgesBuilder.get(from);
+             if (!fromEdges) {
+                 fromEdges = new Map();
+                 edgesBuilder.set(from, fromEdges);
+             }
+             fromEdges.set(to, (fromEdges.get(to) ?? 0) + 1);
+             // Symbol-level edge
+             let fromSymbolEdges = symbolEdgesBuilder.get(from);
+             if (!fromSymbolEdges) {
+                 fromSymbolEdges = new Map();
+                 symbolEdgesBuilder.set(from, fromSymbolEdges);
+             }
+             let toSymbolEdges = fromSymbolEdges.get(to);
+             if (!toSymbolEdges) {
+                 toSymbolEdges = new Map();
+                 fromSymbolEdges.set(to, toSymbolEdges);
+             }
+             toSymbolEdges.set(symbol, (toSymbolEdges.get(symbol) ?? 0) + 1);
+         }
+     }
+     // Compute outWeights
+     const outWeights = new Map();
+     for (const [from, toMap] of edgesBuilder) {
+         let total = 0;
+         for (const weight of toMap.values()) {
+             total += weight;
+         }
+         outWeights.set(from, total);
+     }
+     return {
+         nodes,
+         edges: edgesBuilder,
+         symbolEdges: symbolEdgesBuilder,
+         outWeights,
+         defsByName,
+     };
+ }
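The `edges` and `outWeights` built here are exactly the inputs a weighted PageRank needs. The actual ranking lives in `src/pagerank.ts` (visible only as a signature in the README's output example), so the following is an illustrative sketch of standard power iteration over a `FileGraph`, not the package's implementation:

```js
// Sketch only: simplified power iteration with damping 0.85; dangling files
// (no out-edges) leak their rank here rather than being redistributed.
function pageRankSketch(graph, iterations = 20, damping = 0.85) {
    const n = graph.nodes.length;
    let ranks = new Map(graph.nodes.map((node) => [node, 1 / n]));
    for (let i = 0; i < iterations; i++) {
        const next = new Map(graph.nodes.map((node) => [node, (1 - damping) / n]));
        for (const [from, toMap] of graph.edges) {
            const out = graph.outWeights.get(from) ?? 0;
            if (out === 0) continue;
            const share = damping * (ranks.get(from) ?? 0);
            // Distribute this file's rank across its out-edges, proportional to weight.
            for (const [to, weight] of toMap) {
                next.set(to, (next.get(to) ?? 0) + share * (weight / out));
            }
        }
        ranks = next;
    }
    return ranks;
}
```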
@@ -0,0 +1,2 @@
+ #!/usr/bin/env node
+ export {};
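The README's step 5 describes budgeting as a binary search for the largest prefix of ranked definitions that fits the token limit. A sketch of that idea, where `rankedDefs`, `render`, and `countTokens` are hypothetical stand-ins for the package's internals:

```js
// Find the largest k such that rendering the top-k definitions fits the budget.
function fitToBudget(rankedDefs, budget, render, countTokens) {
    let lo = 0;                 // top-lo is known to fit
    let hi = rankedDefs.length; // upper bound on how many could fit
    while (lo < hi) {
        const mid = Math.ceil((lo + hi) / 2);
        if (countTokens(render(rankedDefs.slice(0, mid))) <= budget) {
            lo = mid;     // top-mid fits; try to include more
        } else {
            hi = mid - 1; // too big; shrink the window
        }
    }
    return rankedDefs.slice(0, lo);
}
```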