@winci/local-rag 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude-plugin/plugin.json +24 -0
- package/.mcp.json +11 -0
- package/LICENSE +21 -0
- package/README.md +567 -0
- package/hooks/hooks.json +25 -0
- package/hooks/scripts/reindex-file.sh +19 -0
- package/hooks/scripts/session-start.sh +11 -0
- package/package.json +52 -0
- package/skills/local-rag/SKILL.md +42 -0
- package/src/cli/commands/analytics.ts +58 -0
- package/src/cli/commands/benchmark.ts +30 -0
- package/src/cli/commands/checkpoint.ts +85 -0
- package/src/cli/commands/conversation.ts +102 -0
- package/src/cli/commands/demo.ts +119 -0
- package/src/cli/commands/eval.ts +31 -0
- package/src/cli/commands/index-cmd.ts +26 -0
- package/src/cli/commands/init.ts +35 -0
- package/src/cli/commands/map.ts +21 -0
- package/src/cli/commands/remove.ts +15 -0
- package/src/cli/commands/search-cmd.ts +59 -0
- package/src/cli/commands/serve.ts +5 -0
- package/src/cli/commands/status.ts +13 -0
- package/src/cli/index.ts +117 -0
- package/src/cli/progress.ts +21 -0
- package/src/cli/setup.ts +192 -0
- package/src/config/index.ts +101 -0
- package/src/conversation/indexer.ts +147 -0
- package/src/conversation/parser.ts +323 -0
- package/src/db/analytics.ts +116 -0
- package/src/db/annotations.ts +161 -0
- package/src/db/checkpoints.ts +166 -0
- package/src/db/conversation.ts +241 -0
- package/src/db/files.ts +146 -0
- package/src/db/graph.ts +250 -0
- package/src/db/index.ts +468 -0
- package/src/db/search.ts +244 -0
- package/src/db/types.ts +85 -0
- package/src/embeddings/embed.ts +73 -0
- package/src/graph/resolver.ts +305 -0
- package/src/indexing/chunker.ts +523 -0
- package/src/indexing/indexer.ts +263 -0
- package/src/indexing/parse.ts +99 -0
- package/src/indexing/watcher.ts +84 -0
- package/src/main.ts +8 -0
- package/src/search/benchmark.ts +139 -0
- package/src/search/eval.ts +171 -0
- package/src/search/hybrid.ts +194 -0
- package/src/search/reranker.ts +99 -0
- package/src/search/usages.ts +27 -0
- package/src/server/index.ts +126 -0
- package/src/tools/analytics-tools.ts +58 -0
- package/src/tools/annotation-tools.ts +89 -0
- package/src/tools/checkpoint-tools.ts +147 -0
- package/src/tools/conversation-tools.ts +86 -0
- package/src/tools/git-tools.ts +103 -0
- package/src/tools/graph-tools.ts +163 -0
- package/src/tools/index-tools.ts +91 -0
- package/src/tools/index.ts +33 -0
- package/src/tools/search.ts +238 -0
- package/src/types.ts +9 -0
- package/src/utils/log.ts +39 -0
package/src/db/types.ts
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
/** A chunk row as persisted in the chunks table. */
export interface StoredChunk {
  id: number;
  fileId: number; // presumably references StoredFile.id — confirm against schema
  chunkIndex: number; // position of this chunk within its file
  snippet: string;
  entityName: string | null; // e.g. function/class name when the chunker found one
  chunkType: string | null;
  startLine: number | null;
  endLine: number | null;
}

/** An indexed source file and its content hash. */
export interface StoredFile {
  id: number;
  path: string;
  hash: string; // content hash used to skip unchanged files on re-index
  indexedAt: string;
}

/** One scored hit returned by file-level search. */
export interface SearchResult {
  path: string;
  score: number;
  snippet: string;
  chunkIndex: number;
  entityName: string | null;
  chunkType: string | null;
}

/** One scored hit at chunk granularity, including full content and line span. */
export interface ChunkSearchResult {
  path: string;
  score: number;
  content: string;
  chunkIndex: number;
  entityName: string | null;
  chunkType: string | null;
  startLine: number | null;
  endLine: number | null;
}

/** A location where a symbol is used. */
export interface UsageResult {
  path: string;
  line: number | null; // null when the line could not be determined
  snippet: string;
}

/** A free-form note attached to a file or symbol. */
export interface AnnotationRow {
  id: number;
  path: string;
  symbolName: string | null; // null when the note applies to the whole file
  note: string;
  author: string | null;
  createdAt: string;
  updatedAt: string;
}

/** A symbol definition found in the index. */
export interface SymbolResult {
  path: string;
  symbolName: string;
  symbolType: string;
  snippet: string | null;
  chunkIndex: number | null;
}

/** A saved conversation checkpoint. */
export interface CheckpointRow {
  id: number;
  sessionId: string;
  turnIndex: number;
  timestamp: string;
  type: string;
  title: string;
  summary: string;
  filesInvolved: string[];
  tags: string[];
}

/** One scored hit from searching indexed conversation turns. */
export interface ConversationSearchResult {
  turnId: number;
  turnIndex: number;
  sessionId: string;
  timestamp: string;
  summary: string;
  snippet: string;
  toolsUsed: string[];
  filesReferenced: string[];
  score: number;
}
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import {
  env,
  pipeline,
  type FeatureExtractionPipeline,
} from "@huggingface/transformers";
import { join } from "node:path";
import { homedir, cpus } from "node:os";
import { rmSync } from "node:fs";

// Use a stable cache directory so models survive bunx temp dir cleanup
const CACHE_DIR = join(homedir(), ".cache", "local-rag", "models");
// Module-level side effect: redirects the transformers model cache on import.
env.cacheDir = CACHE_DIR;

// Embedding model and its output dimensionality — these must stay in sync.
const MODEL_ID = "Xenova/all-MiniLM-L6-v2";
const EMBEDDING_DIM = 384;

// Lazily-initialized singleton pipeline (built on first use).
let extractor: FeatureExtractionPipeline | null = null;
|
|
18
|
+
|
|
19
|
+
function defaultThreadCount(): number {
|
|
20
|
+
return Math.max(2, Math.floor(cpus().length / 3));
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
export async function getEmbedder(threads?: number): Promise<FeatureExtractionPipeline> {
|
|
24
|
+
if (!extractor) {
|
|
25
|
+
const numThreads = threads ?? defaultThreadCount();
|
|
26
|
+
const pipelineOptions = {
|
|
27
|
+
dtype: "fp32" as const,
|
|
28
|
+
session_options: {
|
|
29
|
+
intraOpNumThreads: numThreads,
|
|
30
|
+
interOpNumThreads: numThreads,
|
|
31
|
+
},
|
|
32
|
+
};
|
|
33
|
+
try {
|
|
34
|
+
extractor = await pipeline("feature-extraction", MODEL_ID, pipelineOptions);
|
|
35
|
+
} catch (err) {
|
|
36
|
+
// If the cached model is corrupted, delete it and retry once
|
|
37
|
+
const msg = (err as Error).message || "";
|
|
38
|
+
if (msg.includes("Protobuf parsing failed") || msg.includes("Load model")) {
|
|
39
|
+
const modelDir = join(CACHE_DIR, ...MODEL_ID.split("/"));
|
|
40
|
+
rmSync(modelDir, { recursive: true, force: true });
|
|
41
|
+
extractor = await pipeline("feature-extraction", MODEL_ID, pipelineOptions);
|
|
42
|
+
} else {
|
|
43
|
+
throw err;
|
|
44
|
+
}
|
|
45
|
+
}
|
|
46
|
+
}
|
|
47
|
+
return extractor;
|
|
48
|
+
}
|
|
49
|
+
|
|
50
|
+
export async function embed(text: string, threads?: number): Promise<Float32Array> {
|
|
51
|
+
const model = await getEmbedder(threads);
|
|
52
|
+
const output = await model(text, { pooling: "mean", normalize: true });
|
|
53
|
+
return new Float32Array(output.data as Float64Array);
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
export async function embedBatch(texts: string[], threads?: number): Promise<Float32Array[]> {
|
|
57
|
+
if (texts.length === 0) return [];
|
|
58
|
+
const model = await getEmbedder(threads);
|
|
59
|
+
const output = await model(texts, { pooling: "mean", normalize: true });
|
|
60
|
+
const flat = new Float32Array(output.data as Float64Array);
|
|
61
|
+
const result: Float32Array[] = [];
|
|
62
|
+
for (let i = 0; i < texts.length; i++) {
|
|
63
|
+
result.push(flat.slice(i * EMBEDDING_DIM, (i + 1) * EMBEDDING_DIM));
|
|
64
|
+
}
|
|
65
|
+
return result;
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
/**
 * Reset the cached pipeline singleton — only for testing.
 * The next getEmbedder() call will rebuild the pipeline from scratch.
 */
export function resetEmbedder(): void {
  extractor = null;
}

// Expose the embedding dimensionality so callers can size vector storage.
export { EMBEDDING_DIM };
|
|
@@ -0,0 +1,305 @@
|
|
|
1
|
+
import { dirname, resolve, relative, basename } from "path";
import { RagDB } from "../db";

// Extensions to try when resolving relative imports
const RESOLVE_EXTENSIONS = [".ts", ".tsx", ".js", ".jsx"];
// Index-file fallbacks, e.g. "./utils" -> "./utils/index.ts"
const INDEX_FILES = RESOLVE_EXTENSIONS.map((ext) => `/index${ext}`);
|
|
7
|
+
|
|
8
|
+
/**
|
|
9
|
+
* Resolve all unresolved imports in the database by matching import specifiers
|
|
10
|
+
* to indexed file paths.
|
|
11
|
+
*/
|
|
12
|
+
export function resolveImports(db: RagDB, projectDir: string): number {
|
|
13
|
+
const unresolved = db.getUnresolvedImports();
|
|
14
|
+
const pathToId = buildPathToIdMap(db);
|
|
15
|
+
|
|
16
|
+
let resolvedCount = 0;
|
|
17
|
+
|
|
18
|
+
for (const imp of unresolved) {
|
|
19
|
+
// Skip bare/external specifiers (no ./ or ../ prefix)
|
|
20
|
+
if (!imp.source.startsWith(".") && !imp.source.startsWith("/")) {
|
|
21
|
+
continue;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
const importerDir = dirname(imp.filePath);
|
|
25
|
+
const basePath = resolve(importerDir, imp.source);
|
|
26
|
+
|
|
27
|
+
const resolved = tryResolvePath(basePath, pathToId);
|
|
28
|
+
if (resolved !== null) {
|
|
29
|
+
db.resolveImport(imp.id, resolved);
|
|
30
|
+
resolvedCount++;
|
|
31
|
+
}
|
|
32
|
+
}
|
|
33
|
+
|
|
34
|
+
return resolvedCount;
|
|
35
|
+
}
|
|
36
|
+
|
|
37
|
+
/**
|
|
38
|
+
* Resolve imports for a single file (used by watcher after re-indexing).
|
|
39
|
+
* Accepts optional prebuilt maps to avoid repeated full-table scans
|
|
40
|
+
* when resolving multiple files in sequence.
|
|
41
|
+
*/
|
|
42
|
+
export function resolveImportsForFile(
|
|
43
|
+
db: RagDB,
|
|
44
|
+
fileId: number,
|
|
45
|
+
projectDir: string,
|
|
46
|
+
pathToId?: Map<string, number>,
|
|
47
|
+
idToPath?: Map<number, string>
|
|
48
|
+
): void {
|
|
49
|
+
if (!pathToId) {
|
|
50
|
+
pathToId = buildPathToIdMap(db);
|
|
51
|
+
}
|
|
52
|
+
if (!idToPath) {
|
|
53
|
+
idToPath = buildIdToPathMap(pathToId);
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
const imports = db.getImportsForFile(fileId);
|
|
57
|
+
const filePath = idToPath.get(fileId);
|
|
58
|
+
if (!filePath) return;
|
|
59
|
+
|
|
60
|
+
const importerDir = dirname(filePath);
|
|
61
|
+
|
|
62
|
+
for (const imp of imports) {
|
|
63
|
+
if (imp.resolvedFileId !== null) continue;
|
|
64
|
+
if (!imp.source.startsWith(".") && !imp.source.startsWith("/")) continue;
|
|
65
|
+
|
|
66
|
+
const basePath = resolve(importerDir, imp.source);
|
|
67
|
+
const resolved = tryResolvePath(basePath, pathToId);
|
|
68
|
+
if (resolved !== null) {
|
|
69
|
+
db.resolveImport(imp.id, resolved);
|
|
70
|
+
}
|
|
71
|
+
}
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
/** Build path → fileId and fileId → path lookups from all indexed files. */
|
|
75
|
+
export function buildPathToIdMap(db: RagDB): Map<string, number> {
|
|
76
|
+
const allFiles = db.getAllFilePaths();
|
|
77
|
+
const map = new Map<string, number>();
|
|
78
|
+
for (const f of allFiles) {
|
|
79
|
+
map.set(f.path, f.id);
|
|
80
|
+
}
|
|
81
|
+
return map;
|
|
82
|
+
}
|
|
83
|
+
|
|
84
|
+
export function buildIdToPathMap(pathToId: Map<string, number>): Map<number, string> {
|
|
85
|
+
const map = new Map<number, string>();
|
|
86
|
+
for (const [path, id] of pathToId) {
|
|
87
|
+
map.set(id, path);
|
|
88
|
+
}
|
|
89
|
+
return map;
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
function tryResolvePath(basePath: string, pathToId: Map<string, number>): number | null {
|
|
93
|
+
// Exact match
|
|
94
|
+
if (pathToId.has(basePath)) return pathToId.get(basePath)!;
|
|
95
|
+
|
|
96
|
+
// Try adding extensions
|
|
97
|
+
for (const ext of RESOLVE_EXTENSIONS) {
|
|
98
|
+
const withExt = basePath + ext;
|
|
99
|
+
if (pathToId.has(withExt)) return pathToId.get(withExt)!;
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
// Try index files (e.g. ./utils → ./utils/index.ts)
|
|
103
|
+
for (const idx of INDEX_FILES) {
|
|
104
|
+
const withIndex = basePath + idx;
|
|
105
|
+
if (pathToId.has(withIndex)) return pathToId.get(withIndex)!;
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
return null;
|
|
109
|
+
}
|
|
110
|
+
|
|
111
|
+
/** Options controlling project-map generation. */
export interface GraphOptions {
  zoom?: "file" | "directory"; // detail level; generateProjectMap defaults to "file"
  focus?: string; // path (relative to projectDir) to center a subgraph on
  maxNodes?: number; // above this count, the map auto-switches to directory view
  maxHops?: number; // subgraph radius used when focus is set
  showExternals?: boolean; // NOTE(review): not read by generateProjectMap — confirm whether still used
  projectDir: string; // project root used to relativize displayed paths
}
|
|
119
|
+
|
|
120
|
+
/** A file node in the dependency graph. */
interface GraphNode {
  id: number; // file id in the database
  path: string; // file path as stored in the index
  exports: { name: string; type: string }[];
}

/** A directed import edge between two files. */
interface GraphEdge {
  fromId: number;
  fromPath: string;
  toId: number;
  toPath: string;
  source: string; // the original import specifier
}
|
|
133
|
+
|
|
134
|
+
/**
|
|
135
|
+
* Generate a structured text dependency map optimized for AI agent consumption.
|
|
136
|
+
* Replaces the old Mermaid format with a more parseable, information-dense output.
|
|
137
|
+
*/
|
|
138
|
+
export function generateProjectMap(
|
|
139
|
+
db: RagDB,
|
|
140
|
+
options: GraphOptions
|
|
141
|
+
): string {
|
|
142
|
+
const {
|
|
143
|
+
zoom = "file",
|
|
144
|
+
focus,
|
|
145
|
+
maxNodes = 50,
|
|
146
|
+
maxHops = 2,
|
|
147
|
+
projectDir,
|
|
148
|
+
} = options;
|
|
149
|
+
|
|
150
|
+
let graph: { nodes: GraphNode[]; edges: GraphEdge[] };
|
|
151
|
+
|
|
152
|
+
if (focus) {
|
|
153
|
+
const file = db.getFileByPath(resolve(projectDir, focus));
|
|
154
|
+
if (file) {
|
|
155
|
+
graph = db.getSubgraph([file.id], maxHops);
|
|
156
|
+
} else {
|
|
157
|
+
graph = { nodes: [], edges: [] };
|
|
158
|
+
}
|
|
159
|
+
} else {
|
|
160
|
+
graph = db.getGraph();
|
|
161
|
+
}
|
|
162
|
+
|
|
163
|
+
if (graph.nodes.length === 0) {
|
|
164
|
+
return "No files indexed or no dependencies found.";
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
// Auto-switch to directory view if too many nodes
|
|
168
|
+
const effectiveZoom = graph.nodes.length > maxNodes ? "directory" : zoom;
|
|
169
|
+
|
|
170
|
+
if (effectiveZoom === "directory") {
|
|
171
|
+
return generateDirectoryMap(graph, projectDir);
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
return generateFileMap(graph, projectDir);
|
|
175
|
+
}
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
function generateFileMap(
|
|
179
|
+
graph: { nodes: GraphNode[]; edges: GraphEdge[] },
|
|
180
|
+
projectDir: string
|
|
181
|
+
): string {
|
|
182
|
+
// Build adjacency maps
|
|
183
|
+
const dependsOn = new Map<number, string[]>();
|
|
184
|
+
const dependedOnBy = new Map<number, string[]>();
|
|
185
|
+
const idToRel = new Map<number, string>();
|
|
186
|
+
|
|
187
|
+
for (const node of graph.nodes) {
|
|
188
|
+
const relPath = relative(projectDir, node.path);
|
|
189
|
+
idToRel.set(node.id, relPath);
|
|
190
|
+
dependsOn.set(node.id, []);
|
|
191
|
+
dependedOnBy.set(node.id, []);
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
for (const edge of graph.edges) {
|
|
195
|
+
const fromRel = idToRel.get(edge.fromId);
|
|
196
|
+
const toRel = idToRel.get(edge.toId);
|
|
197
|
+
if (fromRel && toRel) {
|
|
198
|
+
dependsOn.get(edge.fromId)!.push(toRel);
|
|
199
|
+
dependedOnBy.get(edge.toId)!.push(fromRel);
|
|
200
|
+
}
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
// Identify entry points (no importers)
|
|
204
|
+
const entryPoints: GraphNode[] = [];
|
|
205
|
+
const otherNodes: GraphNode[] = [];
|
|
206
|
+
|
|
207
|
+
for (const node of graph.nodes) {
|
|
208
|
+
if (dependedOnBy.get(node.id)!.length === 0) {
|
|
209
|
+
entryPoints.push(node);
|
|
210
|
+
} else {
|
|
211
|
+
otherNodes.push(node);
|
|
212
|
+
}
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
const lines: string[] = [];
|
|
216
|
+
lines.push(`## Project Map (file-level, ${graph.nodes.length} files)\n`);
|
|
217
|
+
|
|
218
|
+
function formatNode(node: GraphNode) {
|
|
219
|
+
const relPath = idToRel.get(node.id)!;
|
|
220
|
+
lines.push(` ${relPath}`);
|
|
221
|
+
|
|
222
|
+
if (node.exports.length > 0) {
|
|
223
|
+
const exps = node.exports
|
|
224
|
+
.slice(0, 8)
|
|
225
|
+
.map((e) => `${e.name} (${e.type})`)
|
|
226
|
+
.join(", ");
|
|
227
|
+
const suffix = node.exports.length > 8 ? `, +${node.exports.length - 8} more` : "";
|
|
228
|
+
lines.push(` exports: ${exps}${suffix}`);
|
|
229
|
+
}
|
|
230
|
+
|
|
231
|
+
const deps = dependsOn.get(node.id)!;
|
|
232
|
+
if (deps.length > 0) {
|
|
233
|
+
lines.push(` depends_on: ${deps.join(", ")}`);
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
const importers = dependedOnBy.get(node.id)!;
|
|
237
|
+
if (importers.length > 0) {
|
|
238
|
+
lines.push(` depended_on_by: ${importers.join(", ")}`);
|
|
239
|
+
}
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
if (entryPoints.length > 0) {
|
|
243
|
+
lines.push(`### Entry Points (no importers)`);
|
|
244
|
+
for (const node of entryPoints) {
|
|
245
|
+
formatNode(node);
|
|
246
|
+
}
|
|
247
|
+
lines.push("");
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
if (otherNodes.length > 0) {
|
|
251
|
+
lines.push(`### Files`);
|
|
252
|
+
for (const node of otherNodes) {
|
|
253
|
+
formatNode(node);
|
|
254
|
+
}
|
|
255
|
+
}
|
|
256
|
+
|
|
257
|
+
return lines.join("\n");
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
function generateDirectoryMap(
|
|
261
|
+
graph: { nodes: GraphNode[]; edges: GraphEdge[] },
|
|
262
|
+
projectDir: string
|
|
263
|
+
): string {
|
|
264
|
+
// Group nodes by directory
|
|
265
|
+
const dirFiles = new Map<string, string[]>();
|
|
266
|
+
const nodeToDir = new Map<number, string>();
|
|
267
|
+
|
|
268
|
+
for (const node of graph.nodes) {
|
|
269
|
+
const relPath = relative(projectDir, node.path);
|
|
270
|
+
const dir = dirname(relPath) || ".";
|
|
271
|
+
nodeToDir.set(node.id, dir);
|
|
272
|
+
if (!dirFiles.has(dir)) dirFiles.set(dir, []);
|
|
273
|
+
dirFiles.get(dir)!.push(basename(relPath));
|
|
274
|
+
}
|
|
275
|
+
|
|
276
|
+
// Directory-level edges (deduplicated with count)
|
|
277
|
+
const dirEdgeCounts = new Map<string, number>();
|
|
278
|
+
for (const edge of graph.edges) {
|
|
279
|
+
const fromDir = nodeToDir.get(edge.fromId)!;
|
|
280
|
+
const toDir = nodeToDir.get(edge.toId)!;
|
|
281
|
+
if (fromDir !== toDir) {
|
|
282
|
+
const key = `${fromDir} -> ${toDir}`;
|
|
283
|
+
dirEdgeCounts.set(key, (dirEdgeCounts.get(key) || 0) + 1);
|
|
284
|
+
}
|
|
285
|
+
}
|
|
286
|
+
|
|
287
|
+
const lines: string[] = [];
|
|
288
|
+
lines.push(`## Project Map (directory-level, ${dirFiles.size} directories)\n`);
|
|
289
|
+
|
|
290
|
+
lines.push("### Directories");
|
|
291
|
+
for (const [dir, files] of dirFiles) {
|
|
292
|
+
lines.push(` ${dir}/ (${files.length} files)`);
|
|
293
|
+
lines.push(` files: ${files.join(", ")}`);
|
|
294
|
+
}
|
|
295
|
+
|
|
296
|
+
if (dirEdgeCounts.size > 0) {
|
|
297
|
+
lines.push("");
|
|
298
|
+
lines.push("### Dependencies");
|
|
299
|
+
for (const [edge, count] of dirEdgeCounts) {
|
|
300
|
+
lines.push(` ${edge} (${count} import${count !== 1 ? "s" : ""})`);
|
|
301
|
+
}
|
|
302
|
+
}
|
|
303
|
+
|
|
304
|
+
return lines.join("\n");
|
|
305
|
+
}
|