@harness-engineering/cli 1.6.0 → 1.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/agents/personas/code-reviewer.yaml +2 -0
- package/dist/agents/personas/codebase-health-analyst.yaml +5 -0
- package/dist/agents/personas/performance-guardian.yaml +26 -0
- package/dist/agents/personas/security-reviewer.yaml +35 -0
- package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +494 -0
- package/dist/agents/skills/claude-code/harness-autopilot/skill.yaml +52 -0
- package/dist/agents/skills/claude-code/harness-code-review/SKILL.md +15 -0
- package/dist/agents/skills/claude-code/harness-integrity/SKILL.md +20 -6
- package/dist/agents/skills/claude-code/harness-perf/SKILL.md +231 -0
- package/dist/agents/skills/claude-code/harness-perf/skill.yaml +47 -0
- package/dist/agents/skills/claude-code/harness-perf-tdd/SKILL.md +236 -0
- package/dist/agents/skills/claude-code/harness-perf-tdd/skill.yaml +47 -0
- package/dist/agents/skills/claude-code/harness-pre-commit-review/SKILL.md +27 -2
- package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +657 -0
- package/dist/agents/skills/claude-code/harness-release-readiness/skill.yaml +57 -0
- package/dist/agents/skills/claude-code/harness-security-review/SKILL.md +206 -0
- package/dist/agents/skills/claude-code/harness-security-review/skill.yaml +50 -0
- package/dist/agents/skills/claude-code/harness-security-scan/SKILL.md +102 -0
- package/dist/agents/skills/claude-code/harness-security-scan/skill.yaml +41 -0
- package/dist/agents/skills/claude-code/harness-state-management/SKILL.md +22 -8
- package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +494 -0
- package/dist/agents/skills/gemini-cli/harness-autopilot/skill.yaml +52 -0
- package/dist/agents/skills/gemini-cli/harness-perf/SKILL.md +231 -0
- package/dist/agents/skills/gemini-cli/harness-perf/skill.yaml +47 -0
- package/dist/agents/skills/gemini-cli/harness-perf-tdd/SKILL.md +236 -0
- package/dist/agents/skills/gemini-cli/harness-perf-tdd/skill.yaml +47 -0
- package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +657 -0
- package/dist/agents/skills/gemini-cli/harness-release-readiness/skill.yaml +57 -0
- package/dist/agents/skills/gemini-cli/harness-security-review/skill.yaml +50 -0
- package/dist/agents/skills/gemini-cli/harness-security-scan/SKILL.md +102 -0
- package/dist/agents/skills/gemini-cli/harness-security-scan/skill.yaml +41 -0
- package/dist/bin/harness.js +3 -2
- package/dist/{chunk-VS4OTOKZ.js → chunk-IUFFBBYV.js} +1271 -461
- package/dist/{chunk-3U5VZYR7.js → chunk-UDWGSL3T.js} +4 -1
- package/dist/chunk-USEYPS7F.js +6150 -0
- package/dist/dist-4MYPT3OE.js +2528 -0
- package/dist/dist-RBZXXJHG.js +242 -0
- package/dist/index.js +3 -2
- package/dist/validate-cross-check-CPEPNLOD.js +7 -0
- package/package.json +12 -8
- package/dist/validate-cross-check-LNIZ7KGZ.js +0 -6
|
@@ -0,0 +1,2528 @@
|
|
|
1
|
+
// ../graph/dist/index.mjs
|
|
2
|
+
import { z } from "zod";
|
|
3
|
+
import loki from "lokijs";
|
|
4
|
+
import { readFile, writeFile, mkdir, access } from "fs/promises";
|
|
5
|
+
import { join } from "path";
|
|
6
|
+
import * as fs from "fs/promises";
|
|
7
|
+
import * as path from "path";
|
|
8
|
+
import { execFile } from "child_process";
|
|
9
|
+
import { promisify } from "util";
|
|
10
|
+
import * as path2 from "path";
|
|
11
|
+
import * as fs2 from "fs/promises";
|
|
12
|
+
import * as path3 from "path";
|
|
13
|
+
import * as crypto from "crypto";
|
|
14
|
+
import * as fs3 from "fs/promises";
|
|
15
|
+
import * as path4 from "path";
|
|
16
|
+
import { minimatch } from "minimatch";
|
|
17
|
+
import { relative as relative2 } from "path";
|
|
18
|
+
// Type vocabularies for the context graph, plus the zod schemas used to
// validate nodes and edges when graph data is (de)serialized.
var NODE_TYPES = [
  // Code
  "repository",
  "module",
  "file",
  "class",
  "interface",
  "function",
  "method",
  "variable",
  // Knowledge
  "adr",
  "decision",
  "learning",
  "failure",
  "issue",
  "document",
  "skill",
  "conversation",
  // VCS
  "commit",
  "build",
  "test_result",
  // Observability (future)
  "span",
  "metric",
  "log",
  // Structural
  "layer",
  "pattern",
  "constraint",
  "violation"
];
var EDGE_TYPES = [
  // Code relationships
  "contains",
  "imports",
  "calls",
  "implements",
  "inherits",
  "references",
  // Knowledge relationships
  "applies_to",
  "caused_by",
  "resolved_by",
  "documents",
  "violates",
  "specifies",
  "decided",
  // VCS relationships
  "co_changes_with",
  "triggered_by",
  "failed_in",
  // Execution relationships (future)
  "executed_by",
  "measured_by"
];
// Node types pruned by default during ContextQL traversal.
var OBSERVABILITY_TYPES = /* @__PURE__ */ new Set(["span", "metric", "log"]);
// Persisted snapshot schema version; loadGraph returns null on a mismatch.
var CURRENT_SCHEMA_VERSION = 1;
// Schema for one graph node; `location` pins a symbol to a line range
// within the file identified by `fileId`.
var GraphNodeSchema = z.object({
  id: z.string(),
  type: z.enum(NODE_TYPES),
  name: z.string(),
  path: z.string().optional(),
  location: z.object({
    fileId: z.string(),
    startLine: z.number(),
    endLine: z.number(),
    startColumn: z.number().optional(),
    endColumn: z.number().optional()
  }).optional(),
  content: z.string().optional(),
  hash: z.string().optional(),
  metadata: z.record(z.unknown()),
  embedding: z.array(z.number()).optional(),
  lastModified: z.string().optional()
});
// Schema for one directed edge between two node ids.
var GraphEdgeSchema = z.object({
  from: z.string(),
  to: z.string(),
  type: z.enum(EDGE_TYPES),
  confidence: z.number().min(0).max(1).optional(),
  metadata: z.record(z.unknown()).optional()
});
|
|
102
|
+
/**
 * Persist a graph snapshot to `dirPath` as two pretty-printed JSON files:
 * graph.json (the nodes and edges) and metadata.json (schema version,
 * scan timestamp, and counts). Creates the directory if needed and writes
 * both files concurrently.
 */
async function saveGraph(dirPath, nodes, edges) {
  await mkdir(dirPath, { recursive: true });
  // Small local helper so both writes share the same formatting rules.
  const writeJson = (fileName, value) =>
    writeFile(join(dirPath, fileName), JSON.stringify(value, null, 2), "utf-8");
  const metadata = {
    schemaVersion: CURRENT_SCHEMA_VERSION,
    lastScanTimestamp: new Date().toISOString(),
    nodeCount: nodes.length,
    edgeCount: edges.length
  };
  await Promise.all([
    writeJson("graph.json", { nodes, edges }),
    writeJson("metadata.json", metadata)
  ]);
}
|
|
116
|
+
/**
 * Load a graph snapshot previously written by saveGraph.
 * Returns the parsed { nodes, edges } object, or null when either file is
 * missing or the persisted schema version differs from the current one.
 * Read/parse errors after the existence check propagate to the caller.
 */
async function loadGraph(dirPath) {
  const graphPath = join(dirPath, "graph.json");
  const metaPath = join(dirPath, "metadata.json");
  try {
    await access(metaPath);
    await access(graphPath);
  } catch {
    // One of the snapshot files is absent — nothing to load.
    return null;
  }
  const metadata = JSON.parse(await readFile(metaPath, "utf-8"));
  return metadata.schemaVersion === CURRENT_SCHEMA_VERSION
    ? JSON.parse(await readFile(graphPath, "utf-8"))
    : null;
}
|
|
133
|
+
// In-memory property graph backed by LokiJS: one collection for nodes
// (unique on `id`, indexed on type/name) and one for edges (indexed on
// from/to/type). JSON persistence is delegated to saveGraph/loadGraph.
var GraphStore = class {
  db;
  nodes;
  edges;
  constructor() {
    this.db = new loki("graph.db");
    this.nodes = this.db.addCollection("nodes", {
      unique: ["id"],
      indices: ["type", "name"]
    });
    this.edges = this.db.addCollection("edges", {
      indices: ["from", "to", "type"]
    });
  }
  // --- Node operations ---
  // Upsert by id. NOTE(review): updates merge via Object.assign onto the
  // live document, so fields absent from the new node are retained from
  // the previous version rather than removed — confirm this is intended.
  addNode(node) {
    const existing = this.nodes.by("id", node.id);
    if (existing) {
      Object.assign(existing, node);
      this.nodes.update(existing);
    } else {
      this.nodes.insert({ ...node });
    }
  }
  batchAddNodes(nodes) {
    for (const node of nodes) {
      this.addNode(node);
    }
  }
  // Fetch a node by id with Loki bookkeeping stripped; null when absent.
  getNode(id) {
    const doc = this.nodes.by("id", id);
    if (!doc) return null;
    return this.stripLokiMeta(doc);
  }
  // Exact-match query on any combination of type / name / path.
  findNodes(query) {
    const lokiQuery = {};
    if (query.type !== void 0) lokiQuery["type"] = query.type;
    if (query.name !== void 0) lokiQuery["name"] = query.name;
    if (query.path !== void 0) lokiQuery["path"] = query.path;
    return this.nodes.find(lokiQuery).map((doc) => this.stripLokiMeta(doc));
  }
  // Remove a node and every edge incident to it (either direction).
  removeNode(id) {
    const doc = this.nodes.by("id", id);
    if (doc) {
      this.nodes.remove(doc);
    }
    const edgesToRemove = this.edges.find({
      $or: [{ from: id }, { to: id }]
    });
    for (const edge of edgesToRemove) {
      this.edges.remove(edge);
    }
  }
  // --- Edge operations ---
  // Insert an edge keyed on (from, to, type). If one already exists it is
  // only updated (merged) when the incoming edge carries metadata;
  // otherwise the duplicate is a no-op.
  addEdge(edge) {
    const existing = this.edges.findOne({
      from: edge.from,
      to: edge.to,
      type: edge.type
    });
    if (existing) {
      if (edge.metadata) {
        Object.assign(existing, edge);
        this.edges.update(existing);
      }
      return;
    }
    this.edges.insert({ ...edge });
  }
  batchAddEdges(edges) {
    for (const edge of edges) {
      this.addEdge(edge);
    }
  }
  // Exact-match query on any combination of from / to / type.
  getEdges(query) {
    const lokiQuery = {};
    if (query.from !== void 0) lokiQuery["from"] = query.from;
    if (query.to !== void 0) lokiQuery["to"] = query.to;
    if (query.type !== void 0) lokiQuery["type"] = query.type;
    return this.edges.find(lokiQuery).map((doc) => this.stripLokiMeta(doc));
  }
  // Distinct adjacent nodes of nodeId; direction is "outbound", "inbound",
  // or "both" (default). Dangling edge endpoints are silently skipped.
  getNeighbors(nodeId, direction = "both") {
    const neighborIds = /* @__PURE__ */ new Set();
    if (direction === "outbound" || direction === "both") {
      const outEdges = this.edges.find({ from: nodeId });
      for (const edge of outEdges) {
        neighborIds.add(edge.to);
      }
    }
    if (direction === "inbound" || direction === "both") {
      const inEdges = this.edges.find({ to: nodeId });
      for (const edge of inEdges) {
        neighborIds.add(edge.from);
      }
    }
    const results = [];
    for (const nid of neighborIds) {
      const node = this.getNode(nid);
      if (node) results.push(node);
    }
    return results;
  }
  // --- Counts ---
  get nodeCount() {
    return this.nodes.count();
  }
  get edgeCount() {
    return this.edges.count();
  }
  // --- Clear ---
  clear() {
    this.nodes.clear();
    this.edges.clear();
  }
  // --- Persistence ---
  // Write the full graph to <dirPath>/graph.json + metadata.json.
  async save(dirPath) {
    const allNodes = this.nodes.find().map((doc) => this.stripLokiMeta(doc));
    const allEdges = this.edges.find().map((doc) => this.stripLokiMeta(doc));
    await saveGraph(dirPath, allNodes, allEdges);
  }
  // Replace current contents with a persisted snapshot. Returns false and
  // leaves the store untouched when no compatible snapshot exists.
  async load(dirPath) {
    const data = await loadGraph(dirPath);
    if (!data) return false;
    this.clear();
    for (const node of data.nodes) {
      this.nodes.insert({ ...node });
    }
    for (const edge of data.edges) {
      this.edges.insert({ ...edge });
    }
    return true;
  }
  // --- Internal ---
  // Drop LokiJS bookkeeping fields ($loki, meta) before returning documents.
  stripLokiMeta(doc) {
    const { $loki: _, meta: _meta, ...rest } = doc;
    return rest;
  }
};
|
|
271
|
+
/**
 * Cosine similarity of two equal-length numeric vectors:
 * dot(a, b) / (|a| * |b|). Returns 0 when either vector has zero
 * magnitude (including the empty-vector case) to avoid division by zero.
 */
function cosineSimilarity(a, b) {
  let dot = 0;
  let magSqA = 0;
  let magSqB = 0;
  const len = a.length;
  for (let idx = 0; idx < len; idx++) {
    const x = a[idx];
    const y = b[idx];
    dot += x * y;
    magSqA += x * x;
    magSqB += y * y;
  }
  const norm = Math.sqrt(magSqA) * Math.sqrt(magSqB);
  if (norm === 0) return 0;
  return dot / norm;
}
|
|
283
|
+
/**
 * Minimal in-memory vector index with brute-force cosine-similarity
 * search. Every stored vector must match the dimensionality fixed at
 * construction time.
 */
var VectorStore = class _VectorStore {
  dimensions;
  vectors = new Map();
  constructor(dimensions) {
    this.dimensions = dimensions;
  }
  /** How many vectors are currently held. */
  get size() {
    return this.vectors.size;
  }
  /** Store (or overwrite) a vector under `id`; rejects wrong-sized vectors. */
  add(id, vector) {
    const got = vector.length;
    if (got !== this.dimensions) {
      throw new Error(`Dimension mismatch: expected ${this.dimensions}, got ${got}`);
    }
    this.vectors.set(id, vector);
  }
  /** Delete the vector stored under `id`; true when something was deleted. */
  remove(id) {
    return this.vectors.delete(id);
  }
  /** True when a vector is stored under `id`. */
  has(id) {
    return this.vectors.has(id);
  }
  /** Drop every stored vector. */
  clear() {
    this.vectors.clear();
  }
  /**
   * Score every stored vector against `query` by cosine similarity and
   * return the best `topK` entries, highest score first. When fewer than
   * `topK` vectors are stored, all of them are returned.
   */
  search(query, topK) {
    const got = query.length;
    if (got !== this.dimensions) {
      throw new Error(`Dimension mismatch: expected ${this.dimensions}, got ${got}`);
    }
    const scored = [...this.vectors.entries()].map(([id, vector]) => ({
      id,
      score: cosineSimilarity(query, vector)
    }));
    scored.sort((lhs, rhs) => rhs.score - lhs.score);
    return scored.slice(0, topK);
  }
  /** Plain-object snapshot (copied vectors) suitable for JSON persistence. */
  serialize() {
    const vectors = [...this.vectors].map(([id, vector]) => ({
      id,
      vector: Array.from(vector)
    }));
    return { dimensions: this.dimensions, vectors };
  }
  /** Rebuild a store from a serialize() snapshot. */
  static deserialize(data) {
    const store = new _VectorStore(data.dimensions);
    for (const entry of data.vectors) {
      store.add(entry.id, entry.vector);
    }
    return store;
  }
};
|
|
345
|
+
// Graph query engine: breadth-first expansion from a set of root node
// ids, with a depth limit, edge-type and node-type filters, and optional
// pruning of observability nodes (spans / metrics / logs).
var ContextQL = class {
  store;
  constructor(store) {
    this.store = store;
  }
  // Traverse from params.rootNodeIds and return the reachable subgraph
  // { nodes, edges, stats }. Optional params:
  //   maxDepth (default 3), bidirectional (default false — outbound only),
  //   pruneObservability (default true), includeEdges / includeTypes /
  //   excludeTypes filter lists.
  execute(params) {
    const maxDepth = params.maxDepth ?? 3;
    const bidirectional = params.bidirectional ?? false;
    const pruneObservability = params.pruneObservability ?? true;
    const visited = /* @__PURE__ */ new Set();
    const resultNodeMap = /* @__PURE__ */ new Map();
    const resultEdges = [];
    const edgeSet = /* @__PURE__ */ new Set();
    let pruned = 0;
    let depthReached = 0;
    // Edges are deduplicated on the (from, to, type) triple.
    const edgeKey = (e) => `${e.from}|${e.to}|${e.type}`;
    const addEdge = (edge) => {
      const key = edgeKey(edge);
      if (!edgeSet.has(key)) {
        edgeSet.add(key);
        resultEdges.push(edge);
      }
    };
    // Seed the BFS queue with the roots that actually exist in the store.
    const queue = [];
    for (const rootId of params.rootNodeIds) {
      const node = this.store.getNode(rootId);
      if (node) {
        visited.add(rootId);
        resultNodeMap.set(rootId, node);
        queue.push({ id: rootId, depth: 0 });
      }
    }
    // BFS via a moving head index instead of shift() (avoids O(n^2) dequeues).
    let head = 0;
    while (head < queue.length) {
      const entry = queue[head++];
      const { id: currentId, depth } = entry;
      if (depth >= maxDepth) continue;
      const nextDepth = depth + 1;
      if (nextDepth > depthReached) depthReached = nextDepth;
      const outEdges = this.store.getEdges({ from: currentId });
      const inEdges = bidirectional ? this.store.getEdges({ to: currentId }) : [];
      const allEdges = [
        ...outEdges.map((e) => ({ edge: e, neighborId: e.to })),
        ...inEdges.map((e) => ({ edge: e, neighborId: e.from }))
      ];
      for (const { edge, neighborId } of allEdges) {
        if (params.includeEdges && !params.includeEdges.includes(edge.type)) {
          continue;
        }
        // Already-visited neighbor: record the connecting edge but do not
        // re-enqueue the node.
        if (visited.has(neighborId)) {
          addEdge(edge);
          continue;
        }
        const neighbor = this.store.getNode(neighborId);
        if (!neighbor) continue;
        visited.add(neighborId);
        // Pruned/filtered neighbors are still marked visited so they are
        // not fetched again, but never enter the result set.
        if (pruneObservability && OBSERVABILITY_TYPES.has(neighbor.type)) {
          pruned++;
          continue;
        }
        if (params.includeTypes && !params.includeTypes.includes(neighbor.type)) {
          pruned++;
          continue;
        }
        if (params.excludeTypes && params.excludeTypes.includes(neighbor.type)) {
          pruned++;
          continue;
        }
        resultNodeMap.set(neighborId, neighbor);
        addEdge(edge);
        queue.push({ id: neighborId, depth: nextDepth });
      }
    }
    // Closure pass: add every edge whose endpoints both made the result,
    // even if BFS skipped it. NOTE(review): this pass does not re-apply
    // the includeEdges filter — confirm that is intended.
    const resultNodeIds = new Set(resultNodeMap.keys());
    for (const nodeId of resultNodeIds) {
      const outEdges = this.store.getEdges({ from: nodeId });
      for (const edge of outEdges) {
        if (resultNodeIds.has(edge.to)) {
          addEdge(edge);
        }
      }
    }
    return {
      nodes: Array.from(resultNodeMap.values()),
      edges: resultEdges,
      stats: {
        totalTraversed: visited.size,
        totalReturned: resultNodeMap.size,
        pruned,
        depthReached
      }
    };
  }
};
|
|
439
|
+
/**
 * Project each node down to the fields named in `spec.fields`, preserving
 * field order and skipping fields the node does not have. Without a spec,
 * returns shallow copies of the nodes unchanged.
 */
function project(nodes, spec) {
  if (!spec) {
    return nodes.map((node) => ({ ...node }));
  }
  const { fields } = spec;
  return nodes.map((node) =>
    Object.fromEntries(
      fields.filter((field) => field in node).map((field) => [field, node[field]])
    )
  );
}
|
|
451
|
+
// Heuristic TS/JS source ingestor. Walks a directory tree, creates
// file / class / interface / function / method / variable nodes, and
// links them with contains / imports / calls edges. Symbol extraction is
// line-based regex matching, not a real parser, so results are
// best-effort approximations.
var CodeIngestor = class {
  constructor(store) {
    this.store = store;
  }
  // Scan rootDir and populate the store. Returns node/edge counts, the
  // per-file error strings collected along the way, and elapsed ms.
  async ingest(rootDir) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const files = await this.findSourceFiles(rootDir);
    // function/method name -> set of relative file paths defining it;
    // consumed by the second "calls" pass below.
    const nameToFiles = /* @__PURE__ */ new Map();
    const fileContents = /* @__PURE__ */ new Map();
    for (const filePath of files) {
      try {
        const relativePath = path.relative(rootDir, filePath);
        const content = await fs.readFile(filePath, "utf-8");
        const stat2 = await fs.stat(filePath);
        const fileId = `file:${relativePath}`;
        fileContents.set(relativePath, content);
        const fileNode = {
          id: fileId,
          type: "file",
          name: path.basename(filePath),
          path: relativePath,
          metadata: { language: this.detectLanguage(filePath) },
          lastModified: stat2.mtime.toISOString()
        };
        this.store.addNode(fileNode);
        nodesAdded++;
        const symbols = this.extractSymbols(content, fileId, relativePath);
        for (const { node, edge } of symbols) {
          this.store.addNode(node);
          this.store.addEdge(edge);
          nodesAdded++;
          edgesAdded++;
          if (node.type === "function" || node.type === "method") {
            let files2 = nameToFiles.get(node.name);
            if (!files2) {
              files2 = /* @__PURE__ */ new Set();
              nameToFiles.set(node.name, files2);
            }
            files2.add(relativePath);
          }
        }
        const imports = await this.extractImports(content, fileId, relativePath, rootDir);
        for (const edge of imports) {
          this.store.addEdge(edge);
          edgesAdded++;
        }
      } catch (err) {
        // A broken file is recorded and skipped; ingestion continues.
        errors.push(`${filePath}: ${err instanceof Error ? err.message : String(err)}`);
      }
    }
    const callsEdges = this.extractCallsEdges(nameToFiles, fileContents);
    for (const edge of callsEdges) {
      this.store.addEdge(edge);
      edgesAdded++;
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  // Recursively collect .ts/.tsx/.js/.jsx files, skipping node_modules,
  // dist, and .d.ts declaration files.
  async findSourceFiles(dir) {
    const results = [];
    const entries = await fs.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory() && entry.name !== "node_modules" && entry.name !== "dist") {
        results.push(...await this.findSourceFiles(fullPath));
      } else if (entry.isFile() && /\.(ts|tsx|js|jsx)$/.test(entry.name) && !entry.name.endsWith(".d.ts")) {
        results.push(fullPath);
      }
    }
    return results;
  }
  // Line-by-line regex scan producing { node, edge } pairs. Tracks the
  // enclosing class with a simple brace counter so methods can be
  // attributed to it. Emitted locations are 1-indexed.
  extractSymbols(content, fileId, relativePath) {
    const results = [];
    const lines = content.split("\n");
    let currentClassName = null;
    let currentClassId = null;
    let braceDepth = 0;
    let insideClass = false;
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      const fnMatch = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)/);
      if (fnMatch) {
        const name = fnMatch[1];
        const id = `function:${relativePath}:${name}`;
        const endLine = this.findClosingBrace(lines, i);
        results.push({
          node: {
            id,
            type: "function",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine },
            metadata: {
              exported: line.includes("export"),
              cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
              nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
              lineCount: endLine - i,
              parameterCount: this.countParameters(line)
            }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
        if (!insideClass) {
          currentClassName = null;
          currentClassId = null;
        }
        continue;
      }
      const classMatch = line.match(/(?:export\s+)?class\s+(\w+)/);
      if (classMatch) {
        const name = classMatch[1];
        const id = `class:${relativePath}:${name}`;
        const endLine = this.findClosingBrace(lines, i);
        results.push({
          node: {
            id,
            type: "class",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine },
            metadata: { exported: line.includes("export") }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
        currentClassName = name;
        currentClassId = id;
        insideClass = true;
        // Re-seed the class brace counter from the declaration line itself.
        braceDepth = 0;
        for (const ch of line) {
          if (ch === "{") braceDepth++;
          if (ch === "}") braceDepth--;
        }
        continue;
      }
      const ifaceMatch = line.match(/(?:export\s+)?interface\s+(\w+)/);
      if (ifaceMatch) {
        const name = ifaceMatch[1];
        const id = `interface:${relativePath}:${name}`;
        const endLine = this.findClosingBrace(lines, i);
        results.push({
          node: {
            id,
            type: "interface",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine },
            metadata: { exported: line.includes("export") }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
        currentClassName = null;
        currentClassId = null;
        insideClass = false;
        continue;
      }
      // Keep the brace counter current while inside a class body; depth
      // reaching zero means we left the class, ending method attribution.
      if (insideClass) {
        for (const ch of line) {
          if (ch === "{") braceDepth++;
          if (ch === "}") braceDepth--;
        }
        if (braceDepth <= 0) {
          currentClassName = null;
          currentClassId = null;
          insideClass = false;
          continue;
        }
      }
      if (insideClass && currentClassName && currentClassId) {
        const methodMatch = line.match(
          /^\s+(?:(?:public|private|protected|readonly|static|abstract)\s+)*(?:async\s+)?(\w+)\s*\(/
        );
        if (methodMatch) {
          const methodName = methodMatch[1];
          // Skip the constructor and control-flow keywords that the
          // method regex would otherwise mistake for method names.
          if (methodName === "constructor" || methodName === "if" || methodName === "for" || methodName === "while" || methodName === "switch")
            continue;
          const id = `method:${relativePath}:${currentClassName}.${methodName}`;
          const endLine = this.findClosingBrace(lines, i);
          results.push({
            node: {
              id,
              type: "method",
              name: methodName,
              path: relativePath,
              location: { fileId, startLine: i + 1, endLine },
              metadata: {
                className: currentClassName,
                exported: false,
                cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
                nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
                lineCount: endLine - i,
                parameterCount: this.countParameters(line)
              }
            },
            edge: { from: currentClassId, to: id, type: "contains" }
          });
        }
        continue;
      }
      const varMatch = line.match(/(?:export\s+)?(?:const|let|var)\s+(\w+)/);
      if (varMatch) {
        const name = varMatch[1];
        const id = `variable:${relativePath}:${name}`;
        results.push({
          node: {
            id,
            type: "variable",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine: i + 1 },
            metadata: { exported: line.includes("export") }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
      }
    }
    return results;
  }
  /**
   * Find the closing brace for a construct starting at the given line.
   * Uses a simple brace-counting heuristic. Returns 1-indexed line number.
   */
  findClosingBrace(lines, startIndex) {
    let depth = 0;
    for (let i = startIndex; i < lines.length; i++) {
      const line = lines[i];
      for (const ch of line) {
        if (ch === "{") depth++;
        if (ch === "}") depth--;
      }
      if (depth <= 0 && i > startIndex) {
        return i + 1;
      }
      if (depth === 0 && i === startIndex) {
        // Braces balanced on the declaration line itself: a one-line body.
        if (line.includes("{")) {
          return i + 1;
        }
      }
    }
    // No balanced close found (e.g. unbalanced braces): fall back to the
    // starting line.
    return startIndex + 1;
  }
  /**
   * Second pass: scan each file for identifiers matching known callable names,
   * then create file-to-file "calls" edges. Uses regex heuristic (not AST).
   */
  extractCallsEdges(nameToFiles, fileContents) {
    const edges = [];
    const seen = /* @__PURE__ */ new Set();
    for (const [filePath, content] of fileContents) {
      const callerFileId = `file:${filePath}`;
      // Fresh regex per file so /g lastIndex state never leaks across files.
      const callPattern = /\b([a-zA-Z_$][\w$]*)\s*\(/g;
      let match;
      while ((match = callPattern.exec(content)) !== null) {
        const name = match[1];
        const targetFiles = nameToFiles.get(name);
        if (!targetFiles) continue;
        for (const targetFile of targetFiles) {
          // No self-edges; dedupe each (caller, target) file pair.
          if (targetFile === filePath) continue;
          const targetFileId = `file:${targetFile}`;
          const key = `${callerFileId}|${targetFileId}`;
          if (seen.has(key)) continue;
          seen.add(key);
          edges.push({
            from: callerFileId,
            to: targetFileId,
            type: "calls",
            metadata: { confidence: "regex" }
          });
        }
      }
    }
    return edges;
  }
  // Emit "imports" edges for relative static imports that resolve to a
  // file on disk; bare-specifier (package) imports are ignored.
  async extractImports(content, fileId, relativePath, rootDir) {
    const edges = [];
    const importRegex = /import\s+(?:type\s+)?(?:\{[^}]*\}|[\w*]+)\s+from\s+['"]([^'"]+)['"]/g;
    let match;
    while ((match = importRegex.exec(content)) !== null) {
      const importPath = match[1];
      if (!importPath.startsWith(".")) continue;
      const resolvedPath = await this.resolveImportPath(relativePath, importPath, rootDir);
      if (resolvedPath) {
        const targetId = `file:${resolvedPath}`;
        const isTypeOnly = match[0].includes("import type");
        edges.push({
          from: fileId,
          to: targetId,
          type: "imports",
          metadata: { importType: isTypeOnly ? "type-only" : "static" }
        });
      }
    }
    return edges;
  }
  // Resolve a relative import specifier to a repo-relative file path by
  // trying the usual extensions, then index files; null when nothing on
  // disk matches.
  async resolveImportPath(fromFile, importPath, rootDir) {
    const fromDir = path.dirname(fromFile);
    const resolved = path.normalize(path.join(fromDir, importPath));
    const extensions = [".ts", ".tsx", ".js", ".jsx"];
    for (const ext of extensions) {
      // Strip a ".js" suffix first (TS sources often import "./x.js").
      const candidate = resolved.replace(/\.js$/, "") + ext;
      const fullPath = path.join(rootDir, candidate);
      try {
        await fs.access(fullPath);
        return candidate;
      } catch {
      }
    }
    for (const ext of extensions) {
      const candidate = path.join(resolved, `index${ext}`);
      const fullPath = path.join(rootDir, candidate);
      try {
        await fs.access(fullPath);
        return candidate;
      } catch {
      }
    }
    return null;
  }
  // Rough cyclomatic complexity: 1 + count of decision keywords/operators,
  // skipping `//` comment lines and block-comment continuation lines.
  computeCyclomaticComplexity(lines) {
    let complexity = 1;
    const decisionPattern = /\b(if|else\s+if|while|for|case)\b|\?\s*[^:?]|&&|\|\||catch\b/g;
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed.startsWith("//") || trimmed.startsWith("*")) continue;
      const matches = trimmed.match(decisionPattern);
      if (matches) complexity += matches.length;
    }
    return complexity;
  }
  // Deepest brace nesting within the body; the final -1 (clamped at zero)
  // excludes the construct's own outer braces.
  computeMaxNesting(lines) {
    let maxDepth = 0;
    let currentDepth = 0;
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed.startsWith("//") || trimmed.startsWith("*")) continue;
      for (const ch of trimmed) {
        if (ch === "{") {
          currentDepth++;
          if (currentDepth > maxDepth) maxDepth = currentDepth;
        } else if (ch === "}") {
          currentDepth--;
        }
      }
    }
    return Math.max(0, maxDepth - 1);
  }
  // Count parameters as top-level commas + 1 inside the first (...) of a
  // declaration line, ignoring commas nested inside generics or parens.
  countParameters(declarationLine) {
    const parenMatch = declarationLine.match(/\(([^)]*)\)/);
    if (!parenMatch || !parenMatch[1].trim()) return 0;
    let depth = 0;
    let count = 1;
    for (const ch of parenMatch[1]) {
      if (ch === "<" || ch === "(") depth++;
      else if (ch === ">" || ch === ")") depth--;
      else if (ch === "," && depth === 0) count++;
    }
    return count;
  }
  // Language tag recorded on file nodes, keyed purely on file extension.
  detectLanguage(filePath) {
    if (/\.tsx?$/.test(filePath)) return "typescript";
    if (/\.jsx?$/.test(filePath)) return "javascript";
    return "unknown";
  }
};
|
|
823
|
+
// Promise-returning wrapper around child_process.execFile; used by
// GitIngestor.runGit when no custom gitRunner is injected.
var execFileAsync = promisify(execFile);
|
|
824
|
+
var GitIngestor = class {
  /**
   * Ingests recent git history into the graph store: one `commit` node per
   * commit, `triggered_by` edges from known files to commits, and
   * `co_changes_with` edges between files that changed together.
   * @param store graph store exposing addNode/getNode/addEdge
   * @param gitRunner optional (rootDir, args) => Promise<string> override,
   *   mainly for tests; falls back to running the real `git` binary.
   */
  constructor(store, gitRunner) {
    this.store = store;
    this.gitRunner = gitRunner;
  }
  async ingest(rootDir) {
    const start = Date.now();
    const problems = [];
    let nodesAdded = 0;
    let nodesUpdated = 0;
    let edgesAdded = 0;
    let edgesUpdated = 0;
    let logText;
    try {
      logText = await this.runGit(rootDir, [
        "log",
        "--format=%H|%an|%ae|%aI|%s",
        "--name-only",
        "-n",
        "100"
      ]);
    } catch (err) {
      problems.push(`git log failed: ${err instanceof Error ? err.message : String(err)}`);
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: problems,
        durationMs: Date.now() - start
      };
    }
    const commits = this.parseGitLog(logText);
    for (const commit of commits) {
      const commitId = `commit:${commit.shortHash}`;
      this.store.addNode({
        id: commitId,
        type: "commit",
        name: commit.message,
        metadata: {
          author: commit.author,
          email: commit.email,
          date: commit.date,
          hash: commit.hash
        }
      });
      nodesAdded++;
      // Link touched files back to the commit — only files already in the graph.
      for (const file of commit.files) {
        const fileNodeId = `file:${file}`;
        if (!this.store.getNode(fileNodeId)) continue;
        this.store.addEdge({ from: fileNodeId, to: commitId, type: "triggered_by" });
        edgesAdded++;
      }
    }
    for (const { fileA, fileB, count } of this.computeCoChanges(commits)) {
      const fileAId = `file:${fileA}`;
      const fileBId = `file:${fileB}`;
      if (!this.store.getNode(fileAId) || !this.store.getNode(fileBId)) continue;
      this.store.addEdge({
        from: fileAId,
        to: fileBId,
        type: "co_changes_with",
        metadata: { count }
      });
      edgesAdded++;
    }
    return {
      nodesAdded,
      nodesUpdated,
      edgesAdded,
      edgesUpdated,
      errors: problems,
      durationMs: Date.now() - start
    };
  }
  // Runs git via the injected runner when present, otherwise the real binary.
  async runGit(rootDir, args) {
    if (this.gitRunner) return this.gitRunner(rootDir, args);
    const { stdout } = await execFileAsync("git", args, { cwd: rootDir });
    return stdout;
  }
  /**
   * Parses `git log --format=%H|%an|%ae|%aI|%s --name-only` output into
   * commit records. A header line is a pipe-separated record starting with a
   * hex hash; subsequent non-blank lines are the commit's file names.
   */
  parseGitLog(output) {
    if (!output.trim()) return [];
    const commits = [];
    let pending = null;
    // Emit the in-progress record (dropping the internal hasFiles flag).
    const flush = () => {
      if (!pending) return;
      const { hasFiles, ...commit } = pending;
      commits.push(commit);
      pending = null;
    };
    for (const rawLine of output.split("\n")) {
      const line = rawLine.trim();
      if (!line) {
        // Blank separator closes a record, but only once files were seen.
        if (pending && pending.hasFiles) flush();
        continue;
      }
      const fields = line.split("|");
      if (fields.length >= 5 && /^[0-9a-f]{7,40}$/.test(fields[0])) {
        flush();
        pending = {
          hash: fields[0],
          shortHash: fields[0].substring(0, 7),
          author: fields[1],
          email: fields[2],
          date: fields[3],
          // The subject itself may contain "|", so rejoin the remainder.
          message: fields.slice(4).join("|"),
          files: [],
          hasFiles: false
        };
      } else if (pending) {
        pending.files.push(line);
        pending.hasFiles = true;
      }
    }
    flush();
    return commits;
  }
  /**
   * Counts, per unordered file pair, how many commits changed both files;
   * returns only pairs seen in at least two commits.
   */
  computeCoChanges(commits) {
    const pairCounts = new Map();
    for (const commit of commits) {
      const sortedFiles = [...commit.files].sort();
      for (let i = 0; i < sortedFiles.length; i++) {
        for (let j = i + 1; j < sortedFiles.length; j++) {
          const key = `${sortedFiles[i]}||${sortedFiles[j]}`;
          pairCounts.set(key, (pairCounts.get(key) ?? 0) + 1);
        }
      }
    }
    const results = [];
    for (const [key, count] of pairCounts) {
      if (count < 2) continue;
      const [fileA, fileB] = key.split("||");
      results.push({ fileA, fileB, count });
    }
    return results;
  }
};
|
|
1001
|
+
var TopologicalLinker = class {
  /**
   * Derives structural graph information from already-ingested `file` nodes:
   * one `module` node per directory (with `contains` edges), plus detection
   * of import cycles among files.
   */
  constructor(store) {
    this.store = store;
  }
  link() {
    let edgesAdded = 0;
    const files = this.store.findNodes({ type: "file" });
    // Group file node ids by their containing directory.
    const byDirectory = new Map();
    for (const file of files) {
      if (!file.path) continue;
      const dir = path2.dirname(file.path);
      const bucket = byDirectory.get(dir) ?? [];
      bucket.push(file.id);
      byDirectory.set(dir, bucket);
    }
    for (const [dir, fileIds] of byDirectory) {
      if (fileIds.length === 0) continue;
      const moduleId = `module:${dir}`;
      const moduleName = dir === "." ? "root" : path2.basename(dir);
      this.store.addNode({
        id: moduleId,
        type: "module",
        name: moduleName,
        path: dir,
        metadata: { fileCount: fileIds.length }
      });
      for (const fileId of fileIds) {
        this.store.addEdge({ from: moduleId, to: fileId, type: "contains" });
        edgesAdded++;
      }
    }
    const cycles = this.detectCycles(files.map((f) => f.id));
    return { edgesAdded, cycles };
  }
  /**
   * DFS over `imports` edges; whenever the current path revisits a node on
   * the recursion stack, the closed path is recorded as a cycle.
   */
  detectCycles(fileIds) {
    const cycles = [];
    const finished = new Set();
    const onPath = new Set();
    const pathStack = [];
    const visit = (nodeId) => {
      if (onPath.has(nodeId)) {
        const firstIndex = pathStack.indexOf(nodeId);
        if (firstIndex !== -1) {
          // Record the cycle including the repeated node at both ends.
          cycles.push([...pathStack.slice(firstIndex), nodeId]);
        }
        return;
      }
      if (finished.has(nodeId)) return;
      finished.add(nodeId);
      onPath.add(nodeId);
      pathStack.push(nodeId);
      for (const edge of this.store.getEdges({ from: nodeId, type: "imports" })) {
        visit(edge.to);
      }
      pathStack.pop();
      onPath.delete(nodeId);
    };
    for (const fileId of fileIds) {
      if (!finished.has(fileId)) visit(fileId);
    }
    return cycles;
  }
};
|
|
1072
|
+
// Node types considered "code" when linking knowledge artifacts (ADRs,
// learnings, failures) to graph nodes by name/path mention.
var CODE_NODE_TYPES = ["file", "function", "class", "method", "interface", "variable"];
|
|
1073
|
+
// Short, stable content fingerprint: the first 8 hex chars of an MD5 digest.
// Used only for node-id derivation, not for anything security-sensitive.
function hash(text) {
  const digest = crypto.createHash("md5").update(text).digest("hex");
  return digest.slice(0, 8);
}
|
|
1076
|
+
// Combines any number of ingest results: numeric counters and durations are
// summed, error lists are concatenated in order.
function mergeResults(...results) {
  const combined = {
    nodesAdded: 0,
    nodesUpdated: 0,
    edgesAdded: 0,
    edgesUpdated: 0,
    errors: [],
    durationMs: 0
  };
  for (const result of results) {
    combined.nodesAdded += result.nodesAdded;
    combined.nodesUpdated += result.nodesUpdated;
    combined.edgesAdded += result.edgesAdded;
    combined.edgesUpdated += result.edgesUpdated;
    combined.errors.push(...result.errors);
    combined.durationMs += result.durationMs;
  }
  return combined;
}
|
|
1086
|
+
// A zeroed ingest result, optionally carrying an elapsed duration.
function emptyResult(durationMs = 0) {
  return {
    nodesAdded: 0,
    nodesUpdated: 0,
    edgesAdded: 0,
    edgesUpdated: 0,
    errors: [],
    durationMs
  };
}
|
|
1089
|
+
var KnowledgeIngestor = class {
  /**
   * Ingests project knowledge artifacts — ADR markdown files, the
   * `.harness/learnings.md` log, and `.harness/failures.md` — into the graph
   * store, linking each artifact to code nodes it mentions by name or path.
   * @param store graph store exposing addNode/addEdge/findNodes
   */
  constructor(store) {
    this.store = store;
  }
  /**
   * Parses every markdown file under `adrDir` as an ADR: the first `#`
   * heading becomes the node name, and `**Date:**` / `**Status:**` lines
   * populate metadata. Links ADRs to code via `documents` edges.
   */
  async ingestADRs(adrDir) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    let files;
    try {
      files = await this.findMarkdownFiles(adrDir);
    } catch {
      // Directory missing or unreadable: nothing to ingest.
      return emptyResult(Date.now() - start);
    }
    for (const filePath of files) {
      try {
        const content = await fs2.readFile(filePath, "utf-8");
        const filename = path3.basename(filePath, ".md");
        const titleMatch = content.match(/^#\s+(.+)$/m);
        const title = titleMatch ? titleMatch[1].trim() : filename;
        const dateMatch = content.match(/\*\*Date:\*\*\s*(.+)/);
        const statusMatch = content.match(/\*\*Status:\*\*\s*(.+)/);
        const date = dateMatch ? dateMatch[1].trim() : void 0;
        const status = statusMatch ? statusMatch[1].trim() : void 0;
        // FIX: the id was previously the literal string "adr:$(unknown)" —
        // "$()" is not template-literal interpolation — so every ADR collided
        // on a single node id while `filename` went unused. Derive the id
        // from the filename so each ADR gets a unique node.
        const nodeId = `adr:${filename}`;
        this.store.addNode({
          id: nodeId,
          type: "adr",
          name: title,
          path: filePath,
          metadata: { date, status }
        });
        nodesAdded++;
        edgesAdded += this.linkToCode(content, nodeId, "documents");
      } catch (err) {
        errors.push(`${filePath}: ${err instanceof Error ? err.message : String(err)}`);
      }
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  /**
   * Parses `.harness/learnings.md`: `## <date>` headings set the current
   * date; each `- ...` bullet becomes a `learning` node. Optional
   * `[skill:...]` / `[outcome:...]` tags populate metadata. Links learnings
   * to code via `applies_to` edges.
   */
  async ingestLearnings(projectPath) {
    const start = Date.now();
    const filePath = path3.join(projectPath, ".harness", "learnings.md");
    let content;
    try {
      content = await fs2.readFile(filePath, "utf-8");
    } catch {
      // No learnings file: nothing to ingest.
      return emptyResult(Date.now() - start);
    }
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const lines = content.split("\n");
    let currentDate;
    for (const line of lines) {
      const headingMatch = line.match(/^##\s+(\S+)/);
      if (headingMatch) {
        currentDate = headingMatch[1];
        continue;
      }
      const bulletMatch = line.match(/^-\s+(.+)/);
      if (!bulletMatch) continue;
      const text = bulletMatch[1];
      const skillMatch = text.match(/\[skill:([^\]]+)\]/);
      const outcomeMatch = text.match(/\[outcome:([^\]]+)\]/);
      const skill = skillMatch ? skillMatch[1] : void 0;
      const outcome = outcomeMatch ? outcomeMatch[1] : void 0;
      // Content hash keeps ids stable across re-ingests of the same bullet.
      const nodeId = `learning:${hash(text)}`;
      this.store.addNode({
        id: nodeId,
        type: "learning",
        name: text,
        metadata: { skill, outcome, date: currentDate }
      });
      nodesAdded++;
      edgesAdded += this.linkToCode(text, nodeId, "applies_to");
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  /**
   * Parses `.harness/failures.md`: each `## ` section with a
   * `**Description:**` line becomes a `failure` node (sections without a
   * description are skipped). Links failures to code via `caused_by` edges.
   */
  async ingestFailures(projectPath) {
    const start = Date.now();
    const filePath = path3.join(projectPath, ".harness", "failures.md");
    let content;
    try {
      content = await fs2.readFile(filePath, "utf-8");
    } catch {
      // No failures file: nothing to ingest.
      return emptyResult(Date.now() - start);
    }
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const sections = content.split(/^##\s+/m).filter((s) => s.trim());
    for (const section of sections) {
      const dateMatch = section.match(/\*\*Date:\*\*\s*(.+)/);
      const skillMatch = section.match(/\*\*Skill:\*\*\s*(.+)/);
      const typeMatch = section.match(/\*\*Type:\*\*\s*(.+)/);
      const descMatch = section.match(/\*\*Description:\*\*\s*(.+)/);
      const date = dateMatch ? dateMatch[1].trim() : void 0;
      const skill = skillMatch ? skillMatch[1].trim() : void 0;
      const failureType = typeMatch ? typeMatch[1].trim() : void 0;
      const description = descMatch ? descMatch[1].trim() : void 0;
      if (!description) continue;
      const nodeId = `failure:${hash(description)}`;
      this.store.addNode({
        id: nodeId,
        type: "failure",
        name: description,
        metadata: { date, skill, type: failureType }
      });
      nodesAdded++;
      edgesAdded += this.linkToCode(description, nodeId, "caused_by");
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  /**
   * Runs all three ingests in parallel and merges their results; the merged
   * durationMs is replaced by the wall-clock time of the whole operation.
   */
  async ingestAll(projectPath, opts) {
    const start = Date.now();
    const adrDir = opts?.adrDir ?? path3.join(projectPath, "docs", "adr");
    const [adrResult, learningsResult, failuresResult] = await Promise.all([
      this.ingestADRs(adrDir),
      this.ingestLearnings(projectPath),
      this.ingestFailures(projectPath)
    ]);
    const merged = mergeResults(adrResult, learningsResult, failuresResult);
    return { ...merged, durationMs: Date.now() - start };
  }
  /**
   * Adds one `edgeType` edge from `sourceNodeId` to every code node whose
   * name appears in `content` as a whole word (case-insensitive), or whose
   * multi-segment path appears verbatim. Returns the number of edges added.
   */
  linkToCode(content, sourceNodeId, edgeType) {
    let count = 0;
    for (const nodeType of CODE_NODE_TYPES) {
      const codeNodes = this.store.findNodes({ type: nodeType });
      for (const node of codeNodes) {
        let nameMatches = false;
        // Very short names (< 3 chars) would match far too often.
        if (node.name.length >= 3) {
          const escaped = node.name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
          const namePattern = new RegExp(`\\b${escaped}\\b`, "i");
          nameMatches = namePattern.test(content);
        }
        let pathMatches = false;
        // Only paths with at least one separator are distinctive enough.
        if (node.path && node.path.includes(path3.sep)) {
          pathMatches = content.includes(node.path);
        }
        if (nameMatches || pathMatches) {
          this.store.addEdge({
            from: sourceNodeId,
            to: node.id,
            type: edgeType
          });
          count++;
        }
      }
    }
    return count;
  }
  // Recursively collects all .md file paths under `dir`.
  async findMarkdownFiles(dir) {
    const results = [];
    const entries = await fs2.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path3.join(dir, entry.name);
      if (entry.isDirectory()) {
        results.push(...await this.findMarkdownFiles(fullPath));
      } else if (entry.isFile() && entry.name.endsWith(".md")) {
        results.push(fullPath);
      }
    }
    return results;
  }
};
|
|
1277
|
+
// Node types considered "code" by the standalone linkToCode helper used by
// the external connectors (Jira/Slack/Confluence/CI).
var CODE_NODE_TYPES2 = ["file", "function", "class", "method", "interface", "variable"];
|
|
1278
|
+
/**
 * Adds one `edgeType` edge from `sourceNodeId` to every code node whose name
 * occurs in `content` as a whole word (case-insensitive). With
 * `options.checkPaths`, a verbatim occurrence of the node's path also counts.
 * Returns the number of edges created.
 */
function linkToCode(store, content, sourceNodeId, edgeType, options) {
  let edgesCreated = 0;
  for (const nodeType of CODE_NODE_TYPES2) {
    for (const candidate of store.findNodes({ type: nodeType })) {
      // Very short names (< 3 chars) would match far too often.
      if (candidate.name.length < 3) continue;
      const escapedName = candidate.name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      const wordBoundary = new RegExp(`\\b${escapedName}\\b`, "i");
      let mentioned = wordBoundary.test(content);
      if (!mentioned && options?.checkPaths && candidate.path) {
        mentioned = content.includes(candidate.path);
      }
      if (!mentioned) continue;
      store.addEdge({ from: sourceNodeId, to: candidate.id, type: edgeType });
      edgesCreated++;
    }
  }
  return edgesCreated;
}
|
|
1298
|
+
var SyncManager = class {
  // connector name -> { connector, config }
  registrations = new Map();
  // JSON file persisting per-connector last-sync timestamp and result
  metadataPath;
  constructor(store, graphDir) {
    this.store = store;
    this.metadataPath = path4.join(graphDir, "sync-metadata.json");
  }
  registerConnector(connector, config) {
    this.registrations.set(connector.name, { connector, config });
  }
  /**
   * Runs one registered connector's ingest, records the outcome in the
   * sync-metadata file, and returns the ingest result. Unknown connector
   * names yield an error result rather than throwing.
   */
  async sync(connectorName) {
    const registration = this.registrations.get(connectorName);
    if (!registration) {
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: [`Connector "${connectorName}" not registered`],
        durationMs: 0
      };
    }
    const result = await registration.connector.ingest(this.store, registration.config);
    const metadata = await this.loadMetadata();
    metadata.connectors[connectorName] = {
      lastSyncTimestamp: new Date().toISOString(),
      lastResult: result
    };
    await this.saveMetadata(metadata);
    return result;
  }
  // Runs every registered connector sequentially and sums their results.
  async syncAll() {
    const totals = {
      nodesAdded: 0,
      nodesUpdated: 0,
      edgesAdded: 0,
      edgesUpdated: 0,
      errors: [],
      durationMs: 0
    };
    for (const name of this.registrations.keys()) {
      const result = await this.sync(name);
      totals.nodesAdded += result.nodesAdded;
      totals.nodesUpdated += result.nodesUpdated;
      totals.edgesAdded += result.edgesAdded;
      totals.edgesUpdated += result.edgesUpdated;
      totals.errors.push(...result.errors);
      totals.durationMs += result.durationMs;
    }
    return totals;
  }
  async getMetadata() {
    return this.loadMetadata();
  }
  async loadMetadata() {
    try {
      return JSON.parse(await fs3.readFile(this.metadataPath, "utf-8"));
    } catch {
      // Missing or unparsable metadata file: start from an empty record.
      return { connectors: {} };
    }
  }
  async saveMetadata(metadata) {
    await fs3.mkdir(path4.dirname(this.metadataPath), { recursive: true });
    await fs3.writeFile(this.metadataPath, JSON.stringify(metadata, null, 2), "utf-8");
  }
};
|
|
1366
|
+
var JiraConnector = class {
  name = "jira";
  source = "jira";
  httpClient;
  // httpClient is injectable for tests; defaults to global fetch.
  constructor(httpClient) {
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Pages through the Jira search API for the configured project/filters and
   * adds one `issue` node per result, linking issues to code via
   * `applies_to` edges. Credentials come from environment variables named by
   * config.apiKeyEnv / config.baseUrlEnv.
   * NOTE(review): the key is sent as-is in a Basic header, so it is
   * presumably expected to be pre-base64-encoded "email:token" — confirm.
   */
  async ingest(store, config) {
    const start = Date.now();
    const problems = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    // Uniform result builder capturing the counters accumulated so far.
    const done = (errorList) => ({
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors: errorList,
      durationMs: Date.now() - start
    });
    const apiKeyEnv = config.apiKeyEnv ?? "JIRA_API_KEY";
    const apiKey = process.env[apiKeyEnv];
    if (!apiKey) {
      return done([`Missing API key: environment variable "${apiKeyEnv}" is not set`]);
    }
    const baseUrlEnv = config.baseUrlEnv ?? "JIRA_BASE_URL";
    const baseUrl = process.env[baseUrlEnv];
    if (!baseUrl) {
      return done([`Missing base URL: environment variable "${baseUrlEnv}" is not set`]);
    }
    // Assemble the JQL query from the optional project and filter clauses.
    const clauses = [];
    if (config.project) {
      clauses.push(`project=${config.project}`);
    }
    if (config.filters?.status?.length) {
      clauses.push(`status IN (${config.filters.status.map((s) => `"${s}"`).join(",")})`);
    }
    if (config.filters?.labels?.length) {
      clauses.push(`labels IN (${config.filters.labels.map((l) => `"${l}"`).join(",")})`);
    }
    const jql = clauses.join(" AND ");
    const headers = {
      Authorization: `Basic ${apiKey}`,
      "Content-Type": "application/json"
    };
    let startAt = 0;
    const maxResults = 50;
    let total = Infinity;
    try {
      while (startAt < total) {
        const url = `${baseUrl}/rest/api/2/search?jql=${encodeURIComponent(jql)}&startAt=${startAt}&maxResults=${maxResults}`;
        const response = await this.httpClient(url, { headers });
        if (!response.ok) {
          return done(["Jira API request failed"]);
        }
        const data = await response.json();
        total = data.total;
        for (const issue of data.issues) {
          const nodeId = `issue:jira:${issue.key}`;
          store.addNode({
            id: nodeId,
            type: "issue",
            name: issue.fields.summary,
            metadata: {
              key: issue.key,
              status: issue.fields.status?.name,
              priority: issue.fields.priority?.name,
              assignee: issue.fields.assignee?.displayName,
              labels: issue.fields.labels ?? []
            }
          });
          nodesAdded++;
          const searchText = [issue.fields.summary, issue.fields.description ?? ""].join(" ");
          edgesAdded += linkToCode(store, searchText, nodeId, "applies_to");
        }
        startAt += maxResults;
      }
    } catch (err) {
      return done([`Jira API error: ${err instanceof Error ? err.message : String(err)}`]);
    }
    return done(problems);
  }
};
|
|
1474
|
+
var SlackConnector = class {
  name = "slack";
  source = "slack";
  httpClient;
  // httpClient is injectable for tests; defaults to global fetch.
  constructor(httpClient) {
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Fetches recent history for each configured Slack channel and adds one
   * `conversation` node per message, linking messages to code via
   * `references` edges (path mentions included).
   * NOTE(review): only the first page of conversations.history is read —
   * there is no cursor pagination; confirm whether that is intentional.
   */
  async ingest(store, config) {
    const start = Date.now();
    const problems = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const apiKeyEnv = config.apiKeyEnv ?? "SLACK_API_KEY";
    const apiKey = process.env[apiKeyEnv];
    if (!apiKey) {
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: [`Missing API key: environment variable "${apiKeyEnv}" is not set`],
        durationMs: Date.now() - start
      };
    }
    const channels = config.channels ?? [];
    // Unix-seconds lower bound derived from lookbackDays, when configured.
    const oldest = config.lookbackDays ? String(Math.floor((Date.now() - Number(config.lookbackDays) * 864e5) / 1e3)) : void 0;
    for (const channel of channels) {
      try {
        let url = `https://slack.com/api/conversations.history?channel=${encodeURIComponent(channel)}`;
        if (oldest) {
          url += `&oldest=${oldest}`;
        }
        const response = await this.httpClient(url, {
          headers: {
            Authorization: `Bearer ${apiKey}`,
            "Content-Type": "application/json"
          }
        });
        if (!response.ok) {
          problems.push(`Slack API request failed for channel ${channel}`);
          continue;
        }
        const data = await response.json();
        if (!data.ok) {
          problems.push(`Slack API error for channel ${channel}`);
          continue;
        }
        for (const message of data.messages) {
          const nodeId = `conversation:slack:${channel}:${message.ts}`;
          // slice() already returns the whole string when it is shorter
          // than 100 chars, so no length check is needed.
          const snippet = message.text.slice(0, 100);
          store.addNode({
            id: nodeId,
            type: "conversation",
            name: snippet,
            metadata: {
              author: message.user,
              channel,
              timestamp: message.ts
            }
          });
          nodesAdded++;
          edgesAdded += linkToCode(store, message.text, nodeId, "references", { checkPaths: true });
        }
      } catch (err) {
        problems.push(
          `Slack API error for channel ${channel}: ${err instanceof Error ? err.message : String(err)}`
        );
      }
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors: problems,
      durationMs: Date.now() - start
    };
  }
};
|
|
1553
|
+
var ConfluenceConnector = class {
  name = "confluence";
  source = "confluence";
  httpClient;
  // httpClient is injectable for tests; defaults to global fetch.
  constructor(httpClient) {
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Pages through the Confluence v2 pages API for the configured space and
   * adds one `document` node per page, linking pages to code via `documents`
   * edges based on title + storage-format body text.
   */
  async ingest(store, config) {
    const start = Date.now();
    const problems = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const apiKeyEnv = config.apiKeyEnv ?? "CONFLUENCE_API_KEY";
    const apiKey = process.env[apiKeyEnv];
    if (!apiKey) {
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: [`Missing API key: environment variable "${apiKeyEnv}" is not set`],
        durationMs: Date.now() - start
      };
    }
    const baseUrlEnv = config.baseUrlEnv ?? "CONFLUENCE_BASE_URL";
    const baseUrl = process.env[baseUrlEnv] ?? "";
    const spaceKey = config.spaceKey ?? "";
    try {
      // Follow the API's `_links.next` cursor until exhausted.
      let nextUrl = `${baseUrl}/wiki/api/v2/pages?spaceKey=${encodeURIComponent(spaceKey)}&limit=25&body-format=storage`;
      while (nextUrl) {
        const response = await this.httpClient(nextUrl, {
          headers: { Authorization: `Bearer ${apiKey}` }
        });
        if (!response.ok) {
          problems.push(`Confluence API error: status ${response.status}`);
          break;
        }
        const data = await response.json();
        for (const page of data.results) {
          const nodeId = `confluence:${page.id}`;
          store.addNode({
            id: nodeId,
            type: "document",
            name: page.title,
            metadata: {
              source: "confluence",
              spaceKey,
              pageId: page.id,
              status: page.status,
              url: page._links?.webui ?? ""
            }
          });
          nodesAdded++;
          const text = `${page.title} ${page.body?.storage?.value ?? ""}`;
          edgesAdded += linkToCode(store, text, nodeId, "documents");
        }
        nextUrl = data._links?.next ? `${baseUrl}${data._links.next}` : null;
      }
    } catch (err) {
      problems.push(`Confluence fetch error: ${err instanceof Error ? err.message : String(err)}`);
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors: problems,
      durationMs: Date.now() - start
    };
  }
};
|
|
1624
|
+
var CIConnector = class {
  name = "ci";
  source = "github-actions";
  httpClient;
  constructor(httpClient) {
    // Fall back to the global fetch when no custom HTTP client is injected.
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Pull recent GitHub Actions workflow runs into the graph store.
   *
   * Adds one "build" node per run, links it to a matching commit node when
   * one was ingested earlier, and records a "test_result" node plus a
   * "failed_in" edge for runs whose conclusion is "failure".
   *
   * Reads the token from the env var named by config.apiKeyEnv
   * (default "GITHUB_TOKEN"); config.repo is the "owner/name" slug and
   * config.maxRuns (default 10) caps how many runs are fetched.
   */
  async ingest(store, config) {
    const startedAt = Date.now();
    const problems = [];
    let addedNodes = 0;
    let addedEdges = 0;
    // Single place to assemble the ingest report returned on every path.
    const report = () => ({
      nodesAdded: addedNodes,
      nodesUpdated: 0,
      edgesAdded: addedEdges,
      edgesUpdated: 0,
      errors: problems,
      durationMs: Date.now() - startedAt
    });
    const tokenVar = config.apiKeyEnv ?? "GITHUB_TOKEN";
    const token = process.env[tokenVar];
    if (!token) {
      problems.push(`Missing API key: environment variable "${tokenVar}" is not set`);
      return report();
    }
    const repoSlug = config.repo ?? "";
    const runLimit = config.maxRuns ?? 10;
    try {
      const endpoint = `https://api.github.com/repos/${repoSlug}/actions/runs?per_page=${runLimit}`;
      const response = await this.httpClient(endpoint, {
        headers: { Authorization: `Bearer ${token}`, Accept: "application/vnd.github.v3+json" }
      });
      if (!response.ok) {
        problems.push(`GitHub Actions API error: status ${response.status}`);
        return report();
      }
      const payload = await response.json();
      for (const run of payload.workflow_runs) {
        const buildNodeId = `build:${run.id}`;
        store.addNode({
          id: buildNodeId,
          type: "build",
          name: `${run.name} #${run.id}`,
          metadata: {
            source: "github-actions",
            status: run.status,
            conclusion: run.conclusion,
            branch: run.head_branch,
            sha: run.head_sha,
            url: run.html_url,
            createdAt: run.created_at
          }
        });
        addedNodes += 1;
        // Connect the build to the commit that triggered it, if present.
        const commit = store.getNode(`commit:${run.head_sha}`);
        if (commit) {
          store.addEdge({ from: buildNodeId, to: commit.id, type: "triggered_by" });
          addedEdges += 1;
        }
        if (run.conclusion === "failure") {
          const failureId = `test_result:${run.id}`;
          store.addNode({
            id: failureId,
            type: "test_result",
            name: `Failed: ${run.name} #${run.id}`,
            metadata: {
              source: "github-actions",
              buildId: String(run.id),
              conclusion: "failure",
              branch: run.head_branch,
              sha: run.head_sha
            }
          });
          addedNodes += 1;
          store.addEdge({ from: failureId, to: buildNodeId, type: "failed_in" });
          addedEdges += 1;
        }
      }
    } catch (err) {
      problems.push(
        `GitHub Actions fetch error: ${err instanceof Error ? err.message : String(err)}`
      );
    }
    return report();
  }
};
|
|
1723
|
+
// Common English function words excluded from keyword extraction
// (articles, auxiliaries, modals, prepositions, conjunctions).
var STOP_WORDS = /* @__PURE__ */ new Set(
  ("the a an is are was were be been being have has had do does did " +
    "will would could should may might shall can to of in for on with " +
    "at by from as into about this that it not but and or if then so").split(" ")
);
|
|
1771
|
+
var FusionLayer = class {
  store;
  vectorStore;
  keywordWeight;
  semanticWeight;
  /**
   * Hybrid retrieval over the graph store: keyword matching against node
   * name/path/metadata, optionally fused with vector-store similarity.
   * Weights default to 0.6 keyword / 0.4 semantic.
   */
  constructor(store, vectorStore, keywordWeight = 0.6, semanticWeight = 0.4) {
    this.store = store;
    this.vectorStore = vectorStore;
    this.keywordWeight = keywordWeight;
    this.semanticWeight = semanticWeight;
  }
  /**
   * Score every node against the query and return the topK by fused score.
   * When no embedding or vector store is available, keyword score alone
   * ranks results (weight 1). Returns [] when the query yields no usable
   * keywords.
   */
  search(query, topK = 10, queryEmbedding) {
    const keywords = this.extractKeywords(query);
    if (keywords.length === 0) {
      return [];
    }
    const allNodes = this.store.findNodes({});
    const semanticScores = /* @__PURE__ */ new Map();
    if (queryEmbedding && this.vectorStore) {
      const vectorResults = this.vectorStore.search(queryEmbedding, allNodes.length);
      for (const vr of vectorResults) {
        semanticScores.set(vr.id, vr.score);
      }
    }
    // Only blend in the semantic weight when we actually got vector hits;
    // otherwise keyword score carries full weight.
    const hasSemanticScores = semanticScores.size > 0;
    const kwWeight = hasSemanticScores ? this.keywordWeight : 1;
    const semWeight = hasSemanticScores ? this.semanticWeight : 0;
    const results = [];
    for (const node of allNodes) {
      const kwScore = this.keywordScore(keywords, node);
      const semScore = semanticScores.get(node.id) ?? 0;
      const fusedScore = kwWeight * kwScore + semWeight * semScore;
      if (fusedScore > 0) {
        results.push({
          nodeId: node.id,
          node,
          score: fusedScore,
          signals: {
            keyword: kwScore,
            semantic: semScore
          }
        });
      }
    }
    results.sort((a, b) => b.score - a.score);
    return results.slice(0, topK);
  }
  /**
   * Tokenize the query: lowercase, split on whitespace/punctuation, drop
   * tokens shorter than 2 chars and stop words, dedupe preserving order.
   */
  extractKeywords(query) {
    const tokens = query.toLowerCase().split(/[\s\-_.,:;!?()[\]{}"'`/\\|@#$%^&*+=<>~]+/).filter((t) => t.length >= 2).filter((t) => !STOP_WORDS.has(t));
    return [...new Set(tokens)];
  }
  /**
   * Mean of per-keyword scores so multi-word queries are not biased toward
   * nodes matching only one term.
   */
  keywordScore(keywords, node) {
    if (keywords.length === 0) return 0;
    let totalScore = 0;
    for (const keyword of keywords) {
      totalScore += this.singleKeywordScore(keyword, node);
    }
    return totalScore / keywords.length;
  }
  /**
   * Tiered match score for one keyword: exact name 1.0, name substring 0.7,
   * path substring 0.5, metadata string-value substring 0.3, else 0.
   */
  singleKeywordScore(keyword, node) {
    const nameLower = node.name.toLowerCase();
    if (nameLower === keyword) {
      return 1;
    }
    if (nameLower.includes(keyword)) {
      return 0.7;
    }
    if (node.path && node.path.toLowerCase().includes(keyword)) {
      return 0.5;
    }
    // Fix: metadata is optional on graph nodes (other call sites in this
    // file use `node.metadata?.`); Object.values(undefined) would throw.
    for (const value of Object.values(node.metadata ?? {})) {
      if (typeof value === "string" && value.toLowerCase().includes(keyword)) {
        return 0.3;
      }
    }
    return 0;
  }
};
|
|
1849
|
+
// Node types that represent source-code entities (drift/dead-code analysis).
var CODE_NODE_TYPES3 = "file function class method interface variable".split(" ");
|
|
1850
|
+
var GraphEntropyAdapter = class {
  constructor(store) {
    this.store = store;
  }
  /**
   * Classify every `documents` edge as fresh, stale, or pointing at a code
   * node that no longer exists.
   *
   * An edge is stale when the code node was modified after the doc node, or
   * when either side lacks a lastModified timestamp (unknown age is treated
   * pessimistically). Edges whose target node is gone are reported in
   * missingTargets instead.
   */
  computeDriftData() {
    const staleEdges = [];
    const missingTargets = [];
    let freshEdges = 0;
    for (const edge of this.store.getEdges({ type: "documents" })) {
      const codeNode = this.store.getNode(edge.to);
      if (!codeNode) {
        missingTargets.push(edge.to);
        continue;
      }
      const docNode = this.store.getNode(edge.from);
      const codeLastModified = codeNode.lastModified;
      const docLastModified = docNode?.lastModified;
      // Stale when timestamps are incomplete, or the code is newer than the doc.
      const isStale = !(codeLastModified && docLastModified) || codeLastModified > docLastModified;
      if (isStale) {
        staleEdges.push({
          docNodeId: edge.from,
          codeNodeId: edge.to,
          edgeType: edge.type,
          codeLastModified,
          docLastModified
        });
      } else {
        freshEdges += 1;
      }
    }
    return { staleEdges, missingTargets, freshEdges };
  }
  /**
   * BFS from entry points to split code nodes into reachable vs unreachable.
   *
   * Entry points are file nodes named "index.ts" or any code node flagged
   * with metadata.entryPoint === true. Traversal follows outbound
   * `imports`, `calls`, and `contains` edges.
   */
  computeDeadCodeData() {
    const entryPoints = [];
    // CODE_NODE_TYPES3 lists "file" first, so file entry points come first.
    for (const nodeType of CODE_NODE_TYPES3) {
      for (const node of this.store.findNodes({ type: nodeType })) {
        const flagged = node.metadata?.entryPoint === true;
        if (nodeType === "file" ? node.name === "index.ts" || flagged : flagged) {
          entryPoints.push(node.id);
        }
      }
    }
    const visited = new Set();
    const queue = [...entryPoints];
    for (let head = 0; head < queue.length; head++) {
      const nodeId = queue[head];
      if (visited.has(nodeId)) continue;
      visited.add(nodeId);
      for (const edgeType of ["imports", "calls", "contains"]) {
        for (const edge of this.store.getEdges({ from: nodeId, type: edgeType })) {
          if (!visited.has(edge.to)) {
            queue.push(edge.to);
          }
        }
      }
    }
    const unreachableNodes = [];
    for (const nodeType of CODE_NODE_TYPES3) {
      for (const node of this.store.findNodes({ type: nodeType })) {
        if (visited.has(node.id)) continue;
        unreachableNodes.push({
          id: node.id,
          type: node.type,
          name: node.name,
          path: node.path
        });
      }
    }
    return {
      reachableNodeIds: visited,
      unreachableNodes,
      entryPoints
    };
  }
  /**
   * Tally all nodes and edges in the store, grouped by type.
   */
  computeSnapshotSummary() {
    const tallyByType = (items) => {
      const counts = {};
      for (const item of items) {
        counts[item.type] = (counts[item.type] ?? 0) + 1;
      }
      return counts;
    };
    return {
      nodeCount: this.store.nodeCount,
      edgeCount: this.store.edgeCount,
      nodesByType: tallyByType(this.store.findNodes({})),
      edgesByType: tallyByType(this.store.getEdges({}))
    };
  }
};
|
|
1992
|
+
var GraphComplexityAdapter = class {
  constructor(store) {
    this.store = store;
  }
  /**
   * Rank function/method nodes by hotspotScore = changeFrequency * complexity.
   *
   * changeFrequency is the number of `references` edges into the node's
   * containing file (cached per file); complexity comes from
   * metadata.cyclomaticComplexity, defaulting to 1. Hotspots are returned
   * sorted descending along with the 95th-percentile score.
   */
  computeComplexityHotspots() {
    const candidates = [
      ...this.store.findNodes({ type: "function" }),
      ...this.store.findNodes({ type: "method" })
    ];
    if (candidates.length === 0) {
      return { hotspots: [], percentile95Score: 0 };
    }
    const frequencyCache = new Map();
    const hotspots = [];
    for (const fnNode of candidates) {
      const fileId = this.findContainingFileId(fnNode.id);
      if (!fileId) continue;
      const complexity = fnNode.metadata?.cyclomaticComplexity ?? 1;
      let changeFrequency = frequencyCache.get(fileId);
      if (changeFrequency === undefined) {
        changeFrequency = this.store.getEdges({ to: fileId, type: "references" }).length;
        frequencyCache.set(fileId, changeFrequency);
      }
      hotspots.push({
        file: fnNode.path ?? fileId.replace(/^file:/, ""),
        function: fnNode.name,
        changeFrequency,
        complexity,
        hotspotScore: changeFrequency * complexity
      });
    }
    hotspots.sort((a, b) => b.hotspotScore - a.hotspotScore);
    const percentile95Score = this.computePercentile(
      hotspots.map((h) => h.hotspotScore),
      95
    );
    return { hotspots, percentile95Score };
  }
  // Walk inbound `contains` edges to the owning file, either directly
  // (file contains function) or via one enclosing class (file contains
  // class contains method). Returns undefined when no file is found.
  findContainingFileId(nodeId) {
    for (const edge of this.store.getEdges({ to: nodeId, type: "contains" })) {
      const parent = this.store.getNode(edge.from);
      if (parent?.type === "file") {
        return parent.id;
      }
      if (parent?.type === "class") {
        for (const classEdge of this.store.getEdges({ to: parent.id, type: "contains" })) {
          const grandparent = this.store.getNode(classEdge.from);
          if (grandparent?.type === "file") {
            return grandparent.id;
          }
        }
      }
    }
    return undefined;
  }
  /**
   * Nearest-rank percentile: the value at rank ceil(p/100 * n), taken from
   * the scores sorted ascending. Returns 0 for an empty list.
   */
  computePercentile(descendingScores, percentile) {
    const n = descendingScores.length;
    if (n === 0) return 0;
    const ascending = [...descendingScores].sort((a, b) => a - b);
    const rank = Math.ceil((percentile / 100) * n) - 1;
    return ascending[Math.min(rank, n - 1)];
  }
};
|
|
2068
|
+
var GraphCouplingAdapter = class {
  constructor(store) {
    this.store = store;
  }
  /**
   * Summarize import coupling per file node.
   *
   * For each file: fanOut (outbound `imports` edges), fanIn (inbound
   * `imports` edges), couplingRatio = fanOut / (fanIn + fanOut) rounded to
   * 2 decimals (0 when the file is isolated), and transitiveDepth (longest
   * outbound import chain).
   */
  computeCouplingData() {
    const fileNodes = this.store.findNodes({ type: "file" });
    if (fileNodes.length === 0) {
      return { files: [] };
    }
    const files = fileNodes.map((node) => {
      const fanOut = this.store.getEdges({ from: node.id, type: "imports" }).length;
      const fanIn = this.store.getEdges({ to: node.id, type: "imports" }).length;
      const degree = fanIn + fanOut;
      return {
        file: node.path ?? node.name,
        fanIn,
        fanOut,
        couplingRatio: degree === 0 ? 0 : Math.round((fanOut / degree) * 100) / 100,
        transitiveDepth: this.computeTransitiveDepth(node.id)
      };
    });
    return { files };
  }
  /**
   * Longest chain of outbound `imports` edges from startId, computed as a
   * level-order BFS; each node is visited at most once so cycles are safe.
   */
  computeTransitiveDepth(startId) {
    const seen = new Set([startId]);
    let frontier = [startId];
    let depth = 0;
    while (frontier.length > 0) {
      const next = [];
      for (const nodeId of frontier) {
        for (const edge of this.store.getEdges({ from: nodeId, type: "imports" })) {
          if (!seen.has(edge.to)) {
            seen.add(edge.to);
            next.push(edge.to);
          }
        }
      }
      if (next.length > 0) {
        depth += 1;
      }
      frontier = next;
    }
    return depth;
  }
};
|
|
2126
|
+
// Node types most relevant to each development phase; consumed by
// Assembler.computeBudget (weight boosting) and Assembler.filterForPhase.
var PHASE_NODE_TYPES = {
  // Writing code: concrete source entities.
  implement: ["file", "function", "class", "method", "interface", "variable"],
  // Reviewing: decisions, docs, lessons, and history.
  review: ["adr", "document", "learning", "commit"],
  // Debugging: recorded failures plus the code under suspicion.
  debug: ["failure", "learning", "function", "method"],
  // Planning: architecture-level artifacts.
  plan: ["adr", "document", "module", "layer"]
};
|
|
2132
|
+
// Source-code node types considered for documentation-coverage checks.
var CODE_NODE_TYPES4 = /* @__PURE__ */ new Set(
  "file function class interface method variable".split(" ")
);
|
|
2140
|
+
/**
 * Rough token-count estimate for a graph node, assuming ~4 characters per
 * token across its name, path, type, and JSON-serialized metadata.
 * Missing fields contribute zero characters.
 */
function estimateNodeTokens(node) {
  let chars = 0;
  for (const part of [node.name, node.path, node.type]) {
    chars += part?.length ?? 0;
  }
  if (node.metadata) {
    chars += JSON.stringify(node.metadata).length;
  }
  return Math.ceil(chars / 4);
}
|
|
2147
|
+
// Assembles graph context for an LLM prompt: hybrid search + graph expansion,
// token budgeting per node type, phase filtering, repo-map generation, and
// documentation-coverage reporting.
var Assembler = class {
  store;
  vectorStore;
  // Lazily-created FusionLayer; see getFusionLayer().
  fusionLayer;
  constructor(store, vectorStore) {
    this.store = store;
    this.vectorStore = vectorStore;
  }
  // Lazily build and memoize the FusionLayer over this store/vector store.
  getFusionLayer() {
    if (!this.fusionLayer) {
      this.fusionLayer = new FusionLayer(this.store, this.vectorStore);
    }
    return this.fusionLayer;
  }
  /**
   * Assemble context relevant to an intent string within a token budget.
   *
   * Searches for the top 10 nodes matching the intent, expands each hit to
   * depth 2 via ContextQL, scores expansion nodes at half the root hit's
   * score (first assignment wins), then keeps the highest-scored nodes
   * until the token budget is exhausted. Only edges whose both endpoints
   * survive the cut are returned. `truncated` is true when at least one
   * node was dropped for budget reasons.
   */
  assembleContext(intent, tokenBudget = 4e3) {
    const fusion = this.getFusionLayer();
    const topResults = fusion.search(intent, 10);
    if (topResults.length === 0) {
      // No hits: return an empty, non-truncated context.
      return {
        nodes: [],
        edges: [],
        tokenEstimate: 0,
        intent,
        truncated: false
      };
    }
    const contextQL = new ContextQL(this.store);
    const nodeMap = /* @__PURE__ */ new Map();
    const edgeSet = /* @__PURE__ */ new Set();
    const collectedEdges = [];
    const nodeScores = /* @__PURE__ */ new Map();
    for (const result of topResults) {
      // Root hits keep their full fusion score.
      nodeScores.set(result.nodeId, result.score);
      const expanded = contextQL.execute({
        rootNodeIds: [result.nodeId],
        maxDepth: 2
      });
      for (const node of expanded.nodes) {
        if (!nodeMap.has(node.id)) {
          nodeMap.set(node.id, node);
          // Expansion-only nodes inherit half the score of the hit that
          // first pulled them in; later hits do not overwrite it.
          if (!nodeScores.has(node.id)) {
            nodeScores.set(node.id, result.score * 0.5);
          }
        }
      }
      for (const edge of expanded.edges) {
        // Dedupe edges across expansions by (from, to, type).
        const key = `${edge.from}|${edge.to}|${edge.type}`;
        if (!edgeSet.has(key)) {
          edgeSet.add(key);
          collectedEdges.push(edge);
        }
      }
    }
    // Highest-scored nodes first, so budget truncation drops the least
    // relevant material.
    const sortedNodes = Array.from(nodeMap.values()).sort((a, b) => {
      return (nodeScores.get(b.id) ?? 0) - (nodeScores.get(a.id) ?? 0);
    });
    let tokenEstimate = 0;
    const keptNodes = [];
    let truncated = false;
    for (const node of sortedNodes) {
      const nodeTokens = estimateNodeTokens(node);
      // The first node is always kept even if it alone exceeds the budget,
      // so the result is never empty when there were hits.
      if (tokenEstimate + nodeTokens > tokenBudget && keptNodes.length > 0) {
        truncated = true;
        break;
      }
      tokenEstimate += nodeTokens;
      keptNodes.push(node);
    }
    // Drop edges that reference a node trimmed by the budget.
    const keptNodeIds = new Set(keptNodes.map((n) => n.id));
    const keptEdges = collectedEdges.filter(
      (e) => keptNodeIds.has(e.from) && keptNodeIds.has(e.to)
    );
    return {
      nodes: keptNodes,
      edges: keptEdges,
      tokenEstimate,
      intent,
      truncated
    };
  }
  /**
   * Compute a token budget allocation across node types.
   *
   * Allocates totalTokens proportionally to each type's node count; types
   * listed in PHASE_NODE_TYPES for the given phase are weighted double.
   * Also reports an edge-degree "density" per module node. The last type
   * receives the remainder so allocations sum exactly to totalTokens.
   */
  computeBudget(totalTokens, phase) {
    const allNodes = this.store.findNodes({});
    const typeCounts = {};
    for (const node of allNodes) {
      typeCounts[node.type] = (typeCounts[node.type] ?? 0) + 1;
    }
    // Module connectivity (in-degree + out-degree), keyed by module name.
    const density = {};
    const moduleNodes = this.store.findNodes({ type: "module" });
    for (const mod of moduleNodes) {
      const outEdges = this.store.getEdges({ from: mod.id });
      const inEdges = this.store.getEdges({ to: mod.id });
      density[mod.name] = outEdges.length + inEdges.length;
    }
    const boostTypes = phase ? PHASE_NODE_TYPES[phase] : void 0;
    const boostFactor = 2;
    let weightedTotal = 0;
    const weights = {};
    for (const [type, count] of Object.entries(typeCounts)) {
      const isBoosted = boostTypes?.includes(type);
      const weight = count * (isBoosted ? boostFactor : 1);
      weights[type] = weight;
      weightedTotal += weight;
    }
    const allocations = {};
    if (weightedTotal > 0) {
      let allocated = 0;
      const types = Object.keys(weights);
      for (let i = 0; i < types.length; i++) {
        const type = types[i];
        if (i === types.length - 1) {
          // Give the remainder to the last type so rounding errors don't
          // make the allocations drift from totalTokens.
          allocations[type] = totalTokens - allocated;
        } else {
          const share = Math.round(weights[type] / weightedTotal * totalTokens);
          allocations[type] = share;
          allocated += share;
        }
      }
    }
    return { total: totalTokens, allocations, density };
  }
  /**
   * Filter graph nodes relevant to a development phase.
   *
   * Unknown phases are warned about and fall back to the "implement" node
   * types. Returns the matching nodes plus the deduplicated file paths
   * they live in.
   */
  filterForPhase(phase) {
    const nodeTypes = PHASE_NODE_TYPES[phase];
    if (!nodeTypes) {
      console.warn(
        `[harness] Unknown phase "${phase}" in filterForPhase. Returning all code nodes.`
      );
    }
    const relevantTypes = nodeTypes ?? PHASE_NODE_TYPES["implement"] ?? [];
    const nodes = [];
    const filePathSet = /* @__PURE__ */ new Set();
    for (const type of relevantTypes) {
      const found = this.store.findNodes({ type });
      for (const node of found) {
        nodes.push(node);
        if (node.path) {
          filePathSet.add(node.path);
        }
      }
    }
    return {
      phase,
      nodes,
      filePaths: Array.from(filePathSet)
    };
  }
  /**
   * Generate a markdown repository map from graph structure.
   *
   * Lists modules (sorted by total edge degree) with their contained files
   * and per-file symbol counts, then up to five "entry point" files ranked
   * by outbound edge count (barrel `index.*` files are excluded).
   */
  generateMap() {
    const moduleNodes = this.store.findNodes({ type: "module" });
    const modulesWithEdgeCount = moduleNodes.map((mod) => {
      const outEdges = this.store.getEdges({ from: mod.id });
      const inEdges = this.store.getEdges({ to: mod.id });
      return { module: mod, edgeCount: outEdges.length + inEdges.length };
    });
    // Most-connected modules first.
    modulesWithEdgeCount.sort((a, b) => b.edgeCount - a.edgeCount);
    const lines = ["# Repository Structure", ""];
    if (modulesWithEdgeCount.length > 0) {
      lines.push("## Modules", "");
      for (const { module: mod, edgeCount } of modulesWithEdgeCount) {
        lines.push(`### ${mod.name} (${edgeCount} connections)`);
        lines.push("");
        const containsEdges = this.store.getEdges({ from: mod.id, type: "contains" });
        for (const edge of containsEdges) {
          const fileNode = this.store.getNode(edge.to);
          if (fileNode && fileNode.type === "file") {
            const symbolEdges = this.store.getEdges({ from: fileNode.id, type: "contains" });
            lines.push(`- ${fileNode.path ?? fileNode.name} (${symbolEdges.length} symbols)`);
          }
        }
        lines.push("");
      }
    }
    const fileNodes = this.store.findNodes({ type: "file" });
    // Barrel files (index.*) are re-export hubs, not meaningful entry points.
    const nonBarrelFiles = fileNodes.filter((n) => !n.name.startsWith("index."));
    const filesWithOutDegree = nonBarrelFiles.map((f) => {
      const outEdges = this.store.getEdges({ from: f.id });
      return { file: f, outDegree: outEdges.length };
    });
    filesWithOutDegree.sort((a, b) => b.outDegree - a.outDegree);
    const entryPoints = filesWithOutDegree.filter((f) => f.outDegree > 0).slice(0, 5);
    if (entryPoints.length > 0) {
      lines.push("## Entry Points", "");
      for (const { file, outDegree } of entryPoints) {
        lines.push(`- ${file.path ?? file.name} (${outDegree} outbound edges)`);
      }
      lines.push("");
    }
    return lines.join("\n");
  }
  /**
   * Check documentation coverage of code nodes.
   *
   * A code node (types in CODE_NODE_TYPES4) counts as documented when at
   * least one `documents` edge points at it. Returns the documented and
   * undocumented node id lists plus the coverage percentage (0 when there
   * are no code nodes).
   */
  checkCoverage() {
    const codeNodes = [];
    for (const type of CODE_NODE_TYPES4) {
      codeNodes.push(...this.store.findNodes({ type }));
    }
    const documented = [];
    const undocumented = [];
    for (const node of codeNodes) {
      const documentsEdges = this.store.getEdges({ to: node.id, type: "documents" });
      if (documentsEdges.length > 0) {
        documented.push(node.id);
      } else {
        undocumented.push(node.id);
      }
    }
    const totalCodeNodes = codeNodes.length;
    const coveragePercentage = totalCodeNodes > 0 ? documented.length / totalCodeNodes * 100 : 0;
    return {
      documented,
      undocumented,
      coveragePercentage,
      totalCodeNodes
    };
  }
};
|
|
2374
|
+
var GraphConstraintAdapter = class {
  constructor(store) {
    this.store = store;
  }
  /**
   * Flatten the stored `imports` edges into a file-path dependency graph.
   * Falls back to node ids when a path is unavailable; importType defaults
   * to "static" and line to 0 when edge metadata omits them.
   */
  computeDependencyGraph() {
    const nodes = this.store.findNodes({ type: "file" }).map((n) => n.path ?? n.id);
    const edges = this.store.getEdges({ type: "imports" }).map((edge) => ({
      from: this.store.getNode(edge.from)?.path ?? edge.from,
      to: this.store.getNode(edge.to)?.path ?? edge.to,
      importType: edge.metadata?.importType ?? "static",
      line: edge.metadata?.line ?? 0
    }));
    return { nodes, edges };
  }
  /**
   * Report import edges that cross architectural layers in a direction
   * the layer configuration does not allow. Edges within one layer, or
   * involving files that match no layer, are ignored.
   */
  computeLayerViolations(layers, rootDir) {
    const violations = [];
    for (const edge of this.computeDependencyGraph().edges) {
      const fromLayer = this.resolveLayer(relative2(rootDir, edge.from), layers);
      const toLayer = this.resolveLayer(relative2(rootDir, edge.to), layers);
      const crossesLayers = fromLayer && toLayer && fromLayer.name !== toLayer.name;
      if (!crossesLayers) continue;
      if (fromLayer.allowedDependencies.includes(toLayer.name)) continue;
      violations.push({
        file: edge.from,
        imports: edge.to,
        fromLayer: fromLayer.name,
        toLayer: toLayer.name,
        reason: "WRONG_LAYER",
        line: edge.line
      });
    }
    return violations;
  }
  /**
   * Match a file path against each layer's glob patterns; the first layer
   * with a matching pattern wins. Returns undefined when nothing matches.
   */
  resolveLayer(filePath, layers) {
    for (const layer of layers) {
      if (layer.patterns.some((pattern) => minimatch(filePath, pattern))) {
        return layer;
      }
    }
    return void 0;
  }
};
|
|
2427
|
+
// Derives review-feedback metrics from a knowledge-graph store.
// Store contract used here: findNodes(filter), getEdges(filter),
// getNode(id), and nodeCount/edgeCount properties.
var GraphFeedbackAdapter = class {
  /**
   * @param {object} store - Graph store exposing findNodes/getEdges/getNode
   *   plus nodeCount and edgeCount.
   */
  constructor(store) {
    this.store = store;
  }
  /**
   * Computes the blast radius of a set of changed files: which test-like
   * files import them, which docs document them, and how many inbound
   * import edges exist in total.
   *
   * @param {string[]} changedFiles - Repo paths of the changed files.
   * @returns {{affectedTests: Array<{testFile: string, coversFile: string}>,
   *   affectedDocs: Array<{docFile: string, documentsFile: string}>,
   *   impactScope: number}}
   */
  computeImpactData(changedFiles) {
    const affectedTests = [];
    const affectedDocs = [];
    let impactScope = 0;
    for (const changedPath of changedFiles) {
      const matches = this.store.findNodes({ path: changedPath });
      // Files not present in the graph contribute nothing.
      if (matches.length === 0) continue;
      const [target] = matches;
      // Every inbound import widens the impact scope; only importers whose
      // path looks test-like (simple substring heuristic) are reported as
      // affected tests.
      for (const importEdge of this.store.getEdges({ to: target.id, type: "imports" })) {
        const importer = this.store.getNode(importEdge.from);
        if (importer?.path && /test/i.test(importer.path)) {
          affectedTests.push({ testFile: importer.path, coversFile: changedPath });
        }
        impactScope++;
      }
      for (const docEdge of this.store.getEdges({ to: target.id, type: "documents" })) {
        const doc = this.store.getNode(docEdge.from);
        if (doc) {
          affectedDocs.push({ docFile: doc.path ?? doc.name, documentsFile: changedPath });
        }
      }
    }
    return { affectedTests, affectedDocs, impactScope };
  }
  /**
   * Summarizes graph health: sizes, constraint violations, and file nodes
   * that are undocumented or unreachable.
   *
   * A file node is "unreachable" when it has no inbound import edges and
   * is not an entry point (named index.ts, path ending in /index.ts, or
   * flagged via metadata.entryPoint).
   *
   * @returns {{graphExists: boolean, nodeCount: number, edgeCount: number,
   *   constraintViolations: number, undocumentedFiles: number,
   *   unreachableNodes: number}}
   */
  computeHarnessCheckData() {
    const { nodeCount, edgeCount } = this.store;
    const constraintViolations = this.store.getEdges({ type: "violates" }).length;
    const fileNodes = this.store.findNodes({ type: "file" });
    const lacksInbound = (node, type) =>
      this.store.getEdges({ to: node.id, type }).length === 0;
    const isEntryPoint = (node) =>
      node.name === "index.ts" ||
      node.path !== void 0 && node.path.endsWith("/index.ts") ||
      node.metadata?.entryPoint === true;
    const undocumentedFiles = fileNodes.filter(
      (node) => lacksInbound(node, "documents")
    ).length;
    const unreachableNodes = fileNodes.filter(
      (node) => lacksInbound(node, "imports") && !isEntryPoint(node)
    ).length;
    return {
      graphExists: true,
      nodeCount,
      edgeCount,
      constraintViolations,
      undocumentedFiles,
      unreachableNodes
    };
  }
};
|
|
2496
|
+
// Library version string, surfaced through the module's public exports.
var VERSION = "0.2.0";
|
|
2497
|
+
export {
|
|
2498
|
+
Assembler,
|
|
2499
|
+
CIConnector,
|
|
2500
|
+
CURRENT_SCHEMA_VERSION,
|
|
2501
|
+
CodeIngestor,
|
|
2502
|
+
ConfluenceConnector,
|
|
2503
|
+
ContextQL,
|
|
2504
|
+
EDGE_TYPES,
|
|
2505
|
+
FusionLayer,
|
|
2506
|
+
GitIngestor,
|
|
2507
|
+
GraphComplexityAdapter,
|
|
2508
|
+
GraphConstraintAdapter,
|
|
2509
|
+
GraphCouplingAdapter,
|
|
2510
|
+
GraphEdgeSchema,
|
|
2511
|
+
GraphEntropyAdapter,
|
|
2512
|
+
GraphFeedbackAdapter,
|
|
2513
|
+
GraphNodeSchema,
|
|
2514
|
+
GraphStore,
|
|
2515
|
+
JiraConnector,
|
|
2516
|
+
KnowledgeIngestor,
|
|
2517
|
+
NODE_TYPES,
|
|
2518
|
+
OBSERVABILITY_TYPES,
|
|
2519
|
+
SlackConnector,
|
|
2520
|
+
SyncManager,
|
|
2521
|
+
TopologicalLinker,
|
|
2522
|
+
VERSION,
|
|
2523
|
+
VectorStore,
|
|
2524
|
+
linkToCode,
|
|
2525
|
+
loadGraph,
|
|
2526
|
+
project,
|
|
2527
|
+
saveGraph
|
|
2528
|
+
};
|