@harness-engineering/cli 1.6.1 → 1.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. package/dist/agents/personas/planner.yaml +27 -0
  2. package/dist/agents/personas/verifier.yaml +30 -0
  3. package/dist/agents/skills/claude-code/enforce-architecture/SKILL.md +19 -0
  4. package/dist/agents/skills/claude-code/harness-accessibility/SKILL.md +274 -0
  5. package/dist/agents/skills/claude-code/harness-accessibility/skill.yaml +51 -0
  6. package/dist/agents/skills/claude-code/harness-autopilot/SKILL.md +111 -72
  7. package/dist/agents/skills/claude-code/harness-autopilot/skill.yaml +4 -2
  8. package/dist/agents/skills/claude-code/harness-dependency-health/skill.yaml +1 -1
  9. package/dist/agents/skills/claude-code/harness-design/SKILL.md +265 -0
  10. package/dist/agents/skills/claude-code/harness-design/skill.yaml +53 -0
  11. package/dist/agents/skills/claude-code/harness-design-mobile/SKILL.md +336 -0
  12. package/dist/agents/skills/claude-code/harness-design-mobile/skill.yaml +49 -0
  13. package/dist/agents/skills/claude-code/harness-design-system/SKILL.md +282 -0
  14. package/dist/agents/skills/claude-code/harness-design-system/skill.yaml +50 -0
  15. package/dist/agents/skills/claude-code/harness-design-web/SKILL.md +360 -0
  16. package/dist/agents/skills/claude-code/harness-design-web/skill.yaml +52 -0
  17. package/dist/agents/skills/claude-code/harness-hotspot-detector/skill.yaml +1 -1
  18. package/dist/agents/skills/claude-code/harness-impact-analysis/SKILL.md +16 -0
  19. package/dist/agents/skills/claude-code/harness-integrity/SKILL.md +19 -1
  20. package/dist/agents/skills/claude-code/harness-knowledge-mapper/skill.yaml +1 -1
  21. package/dist/agents/skills/claude-code/harness-onboarding/SKILL.md +19 -1
  22. package/dist/agents/skills/claude-code/harness-release-readiness/SKILL.md +13 -9
  23. package/dist/agents/skills/claude-code/harness-security-scan/skill.yaml +1 -1
  24. package/dist/agents/skills/claude-code/harness-verify/SKILL.md +26 -0
  25. package/dist/agents/skills/gemini-cli/harness-accessibility/SKILL.md +274 -0
  26. package/dist/agents/skills/gemini-cli/harness-accessibility/skill.yaml +51 -0
  27. package/dist/agents/skills/gemini-cli/harness-autopilot/SKILL.md +111 -72
  28. package/dist/agents/skills/gemini-cli/harness-autopilot/skill.yaml +4 -2
  29. package/dist/agents/skills/gemini-cli/harness-dependency-health/skill.yaml +1 -1
  30. package/dist/agents/skills/gemini-cli/harness-design/SKILL.md +265 -0
  31. package/dist/agents/skills/gemini-cli/harness-design/skill.yaml +53 -0
  32. package/dist/agents/skills/gemini-cli/harness-design-mobile/SKILL.md +336 -0
  33. package/dist/agents/skills/gemini-cli/harness-design-mobile/skill.yaml +49 -0
  34. package/dist/agents/skills/gemini-cli/harness-design-system/SKILL.md +282 -0
  35. package/dist/agents/skills/gemini-cli/harness-design-system/skill.yaml +50 -0
  36. package/dist/agents/skills/gemini-cli/harness-design-web/SKILL.md +360 -0
  37. package/dist/agents/skills/gemini-cli/harness-design-web/skill.yaml +52 -0
  38. package/dist/agents/skills/gemini-cli/harness-hotspot-detector/skill.yaml +1 -1
  39. package/dist/agents/skills/gemini-cli/harness-impact-analysis/SKILL.md +16 -0
  40. package/dist/agents/skills/gemini-cli/harness-knowledge-mapper/skill.yaml +1 -1
  41. package/dist/agents/skills/gemini-cli/harness-release-readiness/SKILL.md +13 -9
  42. package/dist/agents/skills/gemini-cli/harness-security-scan/skill.yaml +1 -1
  43. package/dist/agents/skills/node_modules/.bin/vitest +2 -2
  44. package/dist/agents/skills/shared/design-knowledge/anti-patterns/color.yaml +106 -0
  45. package/dist/agents/skills/shared/design-knowledge/anti-patterns/layout.yaml +109 -0
  46. package/dist/agents/skills/shared/design-knowledge/anti-patterns/motion.yaml +109 -0
  47. package/dist/agents/skills/shared/design-knowledge/anti-patterns/typography.yaml +112 -0
  48. package/dist/agents/skills/shared/design-knowledge/industries/creative.yaml +80 -0
  49. package/dist/agents/skills/shared/design-knowledge/industries/ecommerce.yaml +80 -0
  50. package/dist/agents/skills/shared/design-knowledge/industries/emerging-tech.yaml +83 -0
  51. package/dist/agents/skills/shared/design-knowledge/industries/fintech.yaml +80 -0
  52. package/dist/agents/skills/shared/design-knowledge/industries/healthcare.yaml +80 -0
  53. package/dist/agents/skills/shared/design-knowledge/industries/lifestyle.yaml +80 -0
  54. package/dist/agents/skills/shared/design-knowledge/industries/saas.yaml +80 -0
  55. package/dist/agents/skills/shared/design-knowledge/industries/services.yaml +80 -0
  56. package/dist/agents/skills/shared/design-knowledge/palettes/curated.yaml +234 -0
  57. package/dist/agents/skills/shared/design-knowledge/platform-rules/android.yaml +125 -0
  58. package/dist/agents/skills/shared/design-knowledge/platform-rules/flutter.yaml +144 -0
  59. package/dist/agents/skills/shared/design-knowledge/platform-rules/ios.yaml +106 -0
  60. package/dist/agents/skills/shared/design-knowledge/platform-rules/web.yaml +102 -0
  61. package/dist/agents/skills/shared/design-knowledge/typography/pairings.yaml +274 -0
  62. package/dist/bin/harness.js +3 -2
  63. package/dist/{chunk-3U5VZYR7.js → chunk-4WUGOJQ7.js} +6 -3
  64. package/dist/{chunk-O6NEKDYP.js → chunk-FFIX3QVG.js} +697 -349
  65. package/dist/chunk-GA6GN5J2.js +6150 -0
  66. package/dist/dist-C4J67MPP.js +242 -0
  67. package/dist/dist-N4D4QWFV.js +2809 -0
  68. package/dist/index.d.ts +79 -0
  69. package/dist/index.js +3 -2
  70. package/dist/validate-cross-check-WGXQ7K62.js +7 -0
  71. package/package.json +12 -8
  72. package/dist/validate-cross-check-LNIZ7KGZ.js +0 -6
@@ -0,0 +1,2809 @@
1
+ // ../graph/dist/index.mjs
2
+ import { z } from "zod";
3
+ import loki from "lokijs";
4
+ import { readFile, writeFile, mkdir, access } from "fs/promises";
5
+ import { join } from "path";
6
+ import * as fs from "fs/promises";
7
+ import * as path from "path";
8
+ import { execFile } from "child_process";
9
+ import { promisify } from "util";
10
+ import * as path2 from "path";
11
+ import * as fs2 from "fs/promises";
12
+ import * as path3 from "path";
13
+ import * as crypto from "crypto";
14
+ import * as fs3 from "fs/promises";
15
+ import * as path4 from "path";
16
+ import { minimatch } from "minimatch";
17
+ import { relative as relative2 } from "path";
18
+ import * as fs4 from "fs/promises";
19
+ import * as path5 from "path";
20
// Closed set of node categories recognized by the knowledge graph.
// Consumed by z.enum below, so contents and order form the schema contract.
var NODE_TYPES = [
  // Code
  "repository", "module", "file", "class", "interface", "function", "method", "variable",
  // Knowledge
  "adr", "decision", "learning", "failure", "issue", "document", "skill", "conversation",
  // VCS
  "commit", "build", "test_result",
  // Observability (future)
  "span", "metric", "log",
  // Structural
  "layer", "pattern", "constraint", "violation",
  // Design
  "design_token", "aesthetic_intent", "design_constraint"
];
// Closed set of edge categories; also a z.enum contract.
var EDGE_TYPES = [
  // Code relationships
  "contains", "imports", "calls", "implements", "inherits", "references",
  // Knowledge relationships
  "applies_to", "caused_by", "resolved_by", "documents", "violates", "specifies", "decided",
  // VCS relationships
  "co_changes_with", "triggered_by", "failed_in",
  // Execution relationships (future)
  "executed_by", "measured_by",
  // Design relationships
  "uses_token", "declares_intent", "violates_design", "platform_binding"
];
// Node types treated as observability noise (pruned by default in queries).
var OBSERVABILITY_TYPES = /* @__PURE__ */ new Set(["span", "metric", "log"]);
// Persisted-graph layout version; loadGraph rejects files with a different value.
var CURRENT_SCHEMA_VERSION = 1;
// Shape of a graph node. `location` uses 1-indexed line numbers.
var GraphNodeSchema = z.object({
  id: z.string(),
  type: z.enum(NODE_TYPES),
  name: z.string(),
  path: z.string().optional(),
  location: z
    .object({
      fileId: z.string(),
      startLine: z.number(),
      endLine: z.number(),
      startColumn: z.number().optional(),
      endColumn: z.number().optional()
    })
    .optional(),
  content: z.string().optional(),
  hash: z.string().optional(),
  metadata: z.record(z.unknown()),
  embedding: z.array(z.number()).optional(),
  lastModified: z.string().optional()
});
// Shape of a directed graph edge; `confidence` is constrained to [0, 1].
var GraphEdgeSchema = z.object({
  from: z.string(),
  to: z.string(),
  type: z.enum(EDGE_TYPES),
  confidence: z.number().min(0).max(1).optional(),
  metadata: z.record(z.unknown()).optional()
});
113
/**
 * Persist a graph snapshot to `dirPath` as two pretty-printed JSON files:
 * graph.json (nodes + edges) and metadata.json (schema version, timestamp,
 * counts). The directory is created if necessary; both files are written
 * concurrently.
 */
async function saveGraph(dirPath, nodes, edges) {
  await mkdir(dirPath, { recursive: true });
  const metadata = {
    schemaVersion: CURRENT_SCHEMA_VERSION,
    lastScanTimestamp: new Date().toISOString(),
    nodeCount: nodes.length,
    edgeCount: edges.length
  };
  const writes = [
    writeFile(join(dirPath, "graph.json"), JSON.stringify({ nodes, edges }, null, 2), "utf-8"),
    writeFile(join(dirPath, "metadata.json"), JSON.stringify(metadata, null, 2), "utf-8")
  ];
  await Promise.all(writes);
}
127
/**
 * Load a graph snapshot previously written by saveGraph.
 * Returns null when either file is missing or when the persisted
 * schemaVersion differs from CURRENT_SCHEMA_VERSION; otherwise returns the
 * parsed { nodes, edges } payload. JSON parse errors propagate to the caller.
 */
async function loadGraph(dirPath) {
  const metadataFile = join(dirPath, "metadata.json");
  const graphFile = join(dirPath, "graph.json");
  try {
    await access(metadataFile);
    await access(graphFile);
  } catch {
    return null;
  }
  const metadata = JSON.parse(await readFile(metadataFile, "utf-8"));
  if (metadata.schemaVersion !== CURRENT_SCHEMA_VERSION) {
    return null;
  }
  return JSON.parse(await readFile(graphFile, "utf-8"));
}
144
// Keys that would allow prototype pollution if copied onto a target object.
var POISONED_KEYS = /* @__PURE__ */ new Set(["__proto__", "constructor", "prototype"]);
/**
 * Shallow-copy own enumerable string keys of `source` onto `target`,
 * mutating `target` in place while skipping prototype-pollution vectors.
 */
function safeMerge(target, source) {
  for (const key of Object.keys(source)) {
    if (POISONED_KEYS.has(key)) continue;
    target[key] = source[key];
  }
}
152
/**
 * In-memory graph store backed by LokiJS collections: a `nodes` collection
 * unique on `id`, and an `edges` collection indexed on from/to/type.
 * Documents handed back to callers are stripped of Loki bookkeeping fields.
 */
var GraphStore = class {
  db;
  nodes;
  edges;
  constructor() {
    this.db = new loki("graph.db");
    this.nodes = this.db.addCollection("nodes", {
      unique: ["id"],
      indices: ["type", "name"]
    });
    this.edges = this.db.addCollection("edges", {
      indices: ["from", "to", "type"]
    });
  }
  // --- Node operations ---
  /** Insert a node, or shallow-merge into the stored copy when the id exists. */
  addNode(node) {
    const current = this.nodes.by("id", node.id);
    if (!current) {
      this.nodes.insert({ ...node });
      return;
    }
    safeMerge(current, node);
    this.nodes.update(current);
  }
  /** Add every node in the given list via addNode. */
  batchAddNodes(nodes) {
    for (const n of nodes) {
      this.addNode(n);
    }
  }
  /** Look up a single node by id; returns null when absent. */
  getNode(id) {
    const stored = this.nodes.by("id", id);
    return stored ? this.stripLokiMeta(stored) : null;
  }
  /** Find nodes matching any supplied combination of type / name / path. */
  findNodes(query) {
    const selector = {};
    for (const field of ["type", "name", "path"]) {
      if (query[field] !== void 0) selector[field] = query[field];
    }
    return this.nodes.find(selector).map((d) => this.stripLokiMeta(d));
  }
  /** Delete a node and every edge that touches it (either endpoint). */
  removeNode(id) {
    const stored = this.nodes.by("id", id);
    if (stored) {
      this.nodes.remove(stored);
    }
    const touching = this.edges.find({
      $or: [{ from: id }, { to: id }]
    });
    for (const e of touching) {
      this.edges.remove(e);
    }
  }
  // --- Edge operations ---
  /**
   * Insert an edge. A duplicate (from, to, type) is only updated when the
   * incoming edge carries metadata; otherwise the existing edge is kept as-is.
   */
  addEdge(edge) {
    const current = this.edges.findOne({
      from: edge.from,
      to: edge.to,
      type: edge.type
    });
    if (!current) {
      this.edges.insert({ ...edge });
      return;
    }
    if (edge.metadata) {
      safeMerge(current, edge);
      this.edges.update(current);
    }
  }
  /** Add every edge in the given list via addEdge. */
  batchAddEdges(edges) {
    for (const e of edges) {
      this.addEdge(e);
    }
  }
  /** Find edges matching any supplied combination of from / to / type. */
  getEdges(query) {
    const selector = {};
    for (const field of ["from", "to", "type"]) {
      if (query[field] !== void 0) selector[field] = query[field];
    }
    return this.edges.find(selector).map((d) => this.stripLokiMeta(d));
  }
  /** Distinct nodes connected to nodeId in the requested direction(s). */
  getNeighbors(nodeId, direction = "both") {
    const ids = new Set();
    if (direction === "outbound" || direction === "both") {
      for (const e of this.edges.find({ from: nodeId })) {
        ids.add(e.to);
      }
    }
    if (direction === "inbound" || direction === "both") {
      for (const e of this.edges.find({ to: nodeId })) {
        ids.add(e.from);
      }
    }
    const neighbors = [];
    for (const id of ids) {
      const node = this.getNode(id);
      if (node) neighbors.push(node);
    }
    return neighbors;
  }
  // --- Counts ---
  get nodeCount() {
    return this.nodes.count();
  }
  get edgeCount() {
    return this.edges.count();
  }
  // --- Clear ---
  /** Remove all nodes and edges. */
  clear() {
    this.nodes.clear();
    this.edges.clear();
  }
  // --- Persistence ---
  /** Write the full graph to disk via saveGraph. */
  async save(dirPath) {
    const strip = (d) => this.stripLokiMeta(d);
    const allNodes = this.nodes.find().map(strip);
    const allEdges = this.edges.find().map(strip);
    await saveGraph(dirPath, allNodes, allEdges);
  }
  /** Replace the in-memory graph with a snapshot loaded via loadGraph. */
  async load(dirPath) {
    const data = await loadGraph(dirPath);
    if (!data) return false;
    this.clear();
    for (const n of data.nodes) {
      this.nodes.insert({ ...n });
    }
    for (const e of data.edges) {
      this.edges.insert({ ...e });
    }
    return true;
  }
  // --- Internal ---
  /** Drop Loki's bookkeeping fields ($loki, meta) before exposing a document. */
  stripLokiMeta(doc) {
    const { $loki, meta, ...plain } = doc;
    return plain;
  }
};
290
/**
 * Cosine similarity of two numeric vectors, iterated over `a`'s length.
 * Returns 0 when either vector has zero magnitude (avoids division by zero).
 */
function cosineSimilarity(a, b) {
  let dot = 0;
  let sumSqA = 0;
  let sumSqB = 0;
  const n = a.length;
  for (let i = 0; i < n; i++) {
    const x = a[i];
    const y = b[i];
    dot += x * y;
    sumSqA += x * x;
    sumSqB += y * y;
  }
  const magnitude = Math.sqrt(sumSqA) * Math.sqrt(sumSqB);
  if (magnitude === 0) return 0;
  return dot / magnitude;
}
302
/**
 * Fixed-dimension in-memory vector index over a Map of id -> number[].
 * Brute-force cosine-similarity search; no approximate indexing.
 */
var VectorStore = class _VectorStore {
  dimensions;
  vectors = /* @__PURE__ */ new Map();
  constructor(dimensions) {
    this.dimensions = dimensions;
  }
  /** Number of vectors currently stored. */
  get size() {
    return this.vectors.size;
  }
  /** Throws when `actual` differs from the configured dimensionality. */
  assertDimensions(actual) {
    if (actual !== this.dimensions) {
      throw new Error(`Dimension mismatch: expected ${this.dimensions}, got ${actual}`);
    }
  }
  /** Store `vector` under `id`, validating its dimensionality first. */
  add(id, vector) {
    this.assertDimensions(vector.length);
    this.vectors.set(id, vector);
  }
  /** Remove a vector by id. Returns true if the vector existed. */
  remove(id) {
    return this.vectors.delete(id);
  }
  /** Check whether a vector with the given id exists. */
  has(id) {
    return this.vectors.has(id);
  }
  /** Remove all stored vectors. */
  clear() {
    this.vectors.clear();
  }
  /**
   * Top-K most similar vectors to `query`, sorted by descending cosine
   * similarity. Returns fewer than topK entries when the store is smaller.
   */
  search(query, topK) {
    this.assertDimensions(query.length);
    const scored = [...this.vectors].map(([id, vector]) => ({
      id,
      score: cosineSimilarity(query, vector)
    }));
    scored.sort((x, y) => y.score - x.score);
    return scored.slice(0, topK);
  }
  /** Serialize to a plain { dimensions, vectors } object for persistence. */
  serialize() {
    const vectors = [...this.vectors].map(([id, vector]) => ({ id, vector: [...vector] }));
    return { dimensions: this.dimensions, vectors };
  }
  /** Rebuild a store from serialize() output (re-validates every vector). */
  static deserialize(data) {
    const restored = new _VectorStore(data.dimensions);
    for (const { id, vector } of data.vectors) {
      restored.add(id, vector);
    }
    return restored;
  }
};
364
// Graph traversal / context query engine. Runs a breadth-first expansion from
// a set of root node ids with depth, edge-type, and node-type filters, then
// back-fills edges between all returned nodes.
// NOTE(review): the traversal's bookkeeping is order-sensitive — neighbors are
// marked visited BEFORE the prune checks, and the `visited` fast-path below
// re-adds edges pointing at previously seen nodes even if those nodes were
// pruned from the result set. Preserved as-is; confirm intent before changing.
var ContextQL = class {
  store;
  // `store` must expose getNode(id) and getEdges({from|to}) (duck-typed;
  // GraphStore above satisfies this).
  constructor(store) {
    this.store = store;
  }
  // Execute a query described by `params`:
  //   rootNodeIds       - starting nodes (missing ids are silently skipped)
  //   maxDepth          - hop limit (default 3)
  //   bidirectional     - also follow inbound edges (default false)
  //   pruneObservability- drop span/metric/log nodes (default true)
  //   includeEdges / includeTypes / excludeTypes - optional filters
  // Returns { nodes, edges, stats }.
  execute(params) {
    const maxDepth = params.maxDepth ?? 3;
    const bidirectional = params.bidirectional ?? false;
    const pruneObservability = params.pruneObservability ?? true;
    // Ids ever reached (including pruned ones); feeds stats.totalTraversed.
    const visited = /* @__PURE__ */ new Set();
    const resultNodeMap = /* @__PURE__ */ new Map();
    const resultEdges = [];
    // Dedup set keyed on from|to|type so each edge appears once in the output.
    const edgeSet = /* @__PURE__ */ new Set();
    let pruned = 0;
    let depthReached = 0;
    const edgeKey = (e) => `${e.from}|${e.to}|${e.type}`;
    const addEdge = (edge) => {
      const key = edgeKey(edge);
      if (!edgeSet.has(key)) {
        edgeSet.add(key);
        resultEdges.push(edge);
      }
    };
    // Seed the BFS queue with resolvable roots at depth 0.
    const queue = [];
    for (const rootId of params.rootNodeIds) {
      const node = this.store.getNode(rootId);
      if (node) {
        visited.add(rootId);
        resultNodeMap.set(rootId, node);
        queue.push({ id: rootId, depth: 0 });
      }
    }
    // Index-based queue walk (avoids O(n) shift()).
    let head = 0;
    while (head < queue.length) {
      const entry = queue[head++];
      const { id: currentId, depth } = entry;
      if (depth >= maxDepth) continue;
      const nextDepth = depth + 1;
      // NOTE(review): depthReached advances even if every neighbor at this
      // depth ends up filtered out.
      if (nextDepth > depthReached) depthReached = nextDepth;
      const outEdges = this.store.getEdges({ from: currentId });
      const inEdges = bidirectional ? this.store.getEdges({ to: currentId }) : [];
      const allEdges = [
        ...outEdges.map((e) => ({ edge: e, neighborId: e.to })),
        ...inEdges.map((e) => ({ edge: e, neighborId: e.from }))
      ];
      for (const { edge, neighborId } of allEdges) {
        // Edge-type filter applies before any node handling.
        if (params.includeEdges && !params.includeEdges.includes(edge.type)) {
          continue;
        }
        // Already-seen neighbor: record the connecting edge, don't re-enqueue.
        if (visited.has(neighborId)) {
          addEdge(edge);
          continue;
        }
        const neighbor = this.store.getNode(neighborId);
        if (!neighbor) continue;
        // Marked visited before pruning, so a pruned node is never revisited.
        visited.add(neighborId);
        if (pruneObservability && OBSERVABILITY_TYPES.has(neighbor.type)) {
          pruned++;
          continue;
        }
        if (params.includeTypes && !params.includeTypes.includes(neighbor.type)) {
          pruned++;
          continue;
        }
        if (params.excludeTypes && params.excludeTypes.includes(neighbor.type)) {
          pruned++;
          continue;
        }
        resultNodeMap.set(neighborId, neighbor);
        addEdge(edge);
        queue.push({ id: neighborId, depth: nextDepth });
      }
    }
    // Back-fill: include any edge between two returned nodes, even if the
    // traversal never walked it (e.g. cross-links between sibling branches).
    const resultNodeIds = new Set(resultNodeMap.keys());
    for (const nodeId of resultNodeIds) {
      const outEdges = this.store.getEdges({ from: nodeId });
      for (const edge of outEdges) {
        if (resultNodeIds.has(edge.to)) {
          addEdge(edge);
        }
      }
    }
    return {
      nodes: Array.from(resultNodeMap.values()),
      edges: resultEdges,
      stats: {
        totalTraversed: visited.size,
        totalReturned: resultNodeMap.size,
        pruned,
        depthReached
      }
    };
  }
};
458
/**
 * Project nodes down to the fields named in `spec.fields`, preserving field
 * order and skipping fields a node lacks. Without a spec, returns shallow
 * copies of every node unchanged.
 */
function project(nodes, spec) {
  if (!spec) {
    return nodes.map((n) => ({ ...n }));
  }
  return nodes.map(
    (node) => Object.fromEntries(
      spec.fields.filter((field) => field in node).map((field) => [field, node[field]])
    )
  );
}
470
// Heuristic source-code ingestor: walks a directory tree and populates the
// graph store with file / class / interface / function / method / variable
// nodes plus "contains", "imports", and "calls" edges. All symbol extraction
// is regex- and line-based (no AST), so it is deliberately approximate:
// arrow functions, multi-line signatures, and nested declarations are not
// modelled. Preserved byte-for-byte; comments only.
var CodeIngestor = class {
  // `store` must expose addNode/addEdge/getNode (GraphStore satisfies this).
  constructor(store) {
    this.store = store;
  }
  // Ingest every source file under rootDir. Per-file failures are collected
  // into the returned `errors` array instead of aborting the scan.
  // Returns { nodesAdded, nodesUpdated, edgesAdded, edgesUpdated, errors, durationMs }.
  async ingest(rootDir) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const files = await this.findSourceFiles(rootDir);
    // callable name -> set of relative paths declaring it (for the calls pass)
    const nameToFiles = /* @__PURE__ */ new Map();
    // relative path -> raw file content (reused by extractCallsEdges)
    const fileContents = /* @__PURE__ */ new Map();
    for (const filePath of files) {
      try {
        const relativePath = path.relative(rootDir, filePath);
        const content = await fs.readFile(filePath, "utf-8");
        const stat2 = await fs.stat(filePath);
        const fileId = `file:${relativePath}`;
        fileContents.set(relativePath, content);
        const fileNode = {
          id: fileId,
          type: "file",
          name: path.basename(filePath),
          path: relativePath,
          metadata: { language: this.detectLanguage(filePath) },
          lastModified: stat2.mtime.toISOString()
        };
        this.store.addNode(fileNode);
        nodesAdded++;
        const symbols = this.extractSymbols(content, fileId, relativePath);
        for (const { node, edge } of symbols) {
          this.store.addNode(node);
          this.store.addEdge(edge);
          nodesAdded++;
          edgesAdded++;
          // Remember declared callables so the second pass can link call sites.
          if (node.type === "function" || node.type === "method") {
            let files2 = nameToFiles.get(node.name);
            if (!files2) {
              files2 = /* @__PURE__ */ new Set();
              nameToFiles.set(node.name, files2);
            }
            files2.add(relativePath);
          }
        }
        const imports = await this.extractImports(content, fileId, relativePath, rootDir);
        for (const edge of imports) {
          this.store.addEdge(edge);
          edgesAdded++;
        }
      } catch (err) {
        errors.push(`${filePath}: ${err instanceof Error ? err.message : String(err)}`);
      }
    }
    // Second pass: file-to-file "calls" edges based on collected callable names.
    const callsEdges = this.extractCallsEdges(nameToFiles, fileContents);
    for (const edge of callsEdges) {
      this.store.addEdge(edge);
      edgesAdded++;
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  // Recursively collect .ts/.tsx/.js/.jsx files, skipping node_modules and
  // dist directories and .d.ts declaration files.
  async findSourceFiles(dir) {
    const results = [];
    const entries = await fs.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path.join(dir, entry.name);
      if (entry.isDirectory() && entry.name !== "node_modules" && entry.name !== "dist") {
        results.push(...await this.findSourceFiles(fullPath));
      } else if (entry.isFile() && /\.(ts|tsx|js|jsx)$/.test(entry.name) && !entry.name.endsWith(".d.ts")) {
        results.push(fullPath);
      }
    }
    return results;
  }
  // Single-pass, line-oriented symbol scan. Maintains a small state machine
  // (insideClass / braceDepth) so that method declarations are attributed to
  // the enclosing class. Returns { node, edge } pairs; `edge` is always a
  // "contains" edge from the file (or class) to the symbol.
  extractSymbols(content, fileId, relativePath) {
    const results = [];
    const lines = content.split("\n");
    let currentClassName = null;
    let currentClassId = null;
    let braceDepth = 0;
    let insideClass = false;
    for (let i = 0; i < lines.length; i++) {
      const line = lines[i];
      // Named function declarations (optionally exported / async).
      const fnMatch = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)/);
      if (fnMatch) {
        const name = fnMatch[1];
        const id = `function:${relativePath}:${name}`;
        const endLine = this.findClosingBrace(lines, i);
        results.push({
          node: {
            id,
            type: "function",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine },
            metadata: {
              exported: line.includes("export"),
              cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
              nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
              lineCount: endLine - i,
              parameterCount: this.countParameters(line)
            }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
        // A top-level function ends any pending class association.
        if (!insideClass) {
          currentClassName = null;
          currentClassId = null;
        }
        continue;
      }
      // Class declarations: start method attribution and brace tracking.
      const classMatch = line.match(/(?:export\s+)?class\s+(\w+)/);
      if (classMatch) {
        const name = classMatch[1];
        const id = `class:${relativePath}:${name}`;
        const endLine = this.findClosingBrace(lines, i);
        results.push({
          node: {
            id,
            type: "class",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine },
            metadata: { exported: line.includes("export") }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
        currentClassName = name;
        currentClassId = id;
        insideClass = true;
        // Seed brace depth from the declaration line itself.
        braceDepth = 0;
        for (const ch of line) {
          if (ch === "{") braceDepth++;
          if (ch === "}") braceDepth--;
        }
        continue;
      }
      // Interface declarations (TypeScript); also resets class tracking.
      const ifaceMatch = line.match(/(?:export\s+)?interface\s+(\w+)/);
      if (ifaceMatch) {
        const name = ifaceMatch[1];
        const id = `interface:${relativePath}:${name}`;
        const endLine = this.findClosingBrace(lines, i);
        results.push({
          node: {
            id,
            type: "interface",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine },
            metadata: { exported: line.includes("export") }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
        currentClassName = null;
        currentClassId = null;
        insideClass = false;
        continue;
      }
      if (insideClass) {
        // Keep tracking depth; returning to depth 0 closes the current class.
        for (const ch of line) {
          if (ch === "{") braceDepth++;
          if (ch === "}") braceDepth--;
        }
        if (braceDepth <= 0) {
          currentClassName = null;
          currentClassId = null;
          insideClass = false;
          continue;
        }
      }
      // Method declarations inside the current class body.
      if (insideClass && currentClassName && currentClassId) {
        const methodMatch = line.match(
          /^\s+(?:(?:public|private|protected|readonly|static|abstract)\s+)*(?:async\s+)?(\w+)\s*\(/
        );
        if (methodMatch) {
          const methodName = methodMatch[1];
          // Filter keywords that look like method calls to this regex.
          if (methodName === "constructor" || methodName === "if" || methodName === "for" || methodName === "while" || methodName === "switch")
            continue;
          const id = `method:${relativePath}:${currentClassName}.${methodName}`;
          const endLine = this.findClosingBrace(lines, i);
          results.push({
            node: {
              id,
              type: "method",
              name: methodName,
              path: relativePath,
              location: { fileId, startLine: i + 1, endLine },
              metadata: {
                className: currentClassName,
                exported: false,
                cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
                nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
                lineCount: endLine - i,
                parameterCount: this.countParameters(line)
              }
            },
            edge: { from: currentClassId, to: id, type: "contains" }
          });
        }
        continue;
      }
      // Top-level variable declarations (const/let/var).
      const varMatch = line.match(/(?:export\s+)?(?:const|let|var)\s+(\w+)/);
      if (varMatch) {
        const name = varMatch[1];
        const id = `variable:${relativePath}:${name}`;
        results.push({
          node: {
            id,
            type: "variable",
            name,
            path: relativePath,
            location: { fileId, startLine: i + 1, endLine: i + 1 },
            metadata: { exported: line.includes("export") }
          },
          edge: { from: fileId, to: id, type: "contains" }
        });
      }
    }
    return results;
  }
  /**
   * Find the closing brace for a construct starting at the given line.
   * Uses a simple brace-counting heuristic. Returns 1-indexed line number.
   * Falls back to the start line + 1 when no closing brace is found.
   */
  findClosingBrace(lines, startIndex) {
    let depth = 0;
    for (let i = startIndex; i < lines.length; i++) {
      const line = lines[i];
      for (const ch of line) {
        if (ch === "{") depth++;
        if (ch === "}") depth--;
      }
      if (depth <= 0 && i > startIndex) {
        return i + 1;
      }
      // Opening line that also closed itself (single-line body).
      if (depth === 0 && i === startIndex) {
        if (line.includes("{")) {
          return i + 1;
        }
      }
    }
    return startIndex + 1;
  }
  /**
   * Second pass: scan each file for identifiers matching known callable names,
   * then create file-to-file "calls" edges. Uses regex heuristic (not AST).
   */
  extractCallsEdges(nameToFiles, fileContents) {
    const edges = [];
    // Dedup on caller|target so each file pair yields at most one edge.
    const seen = /* @__PURE__ */ new Set();
    for (const [filePath, content] of fileContents) {
      const callerFileId = `file:${filePath}`;
      const callPattern = /\b([a-zA-Z_$][\w$]*)\s*\(/g;
      let match;
      while ((match = callPattern.exec(content)) !== null) {
        const name = match[1];
        const targetFiles = nameToFiles.get(name);
        if (!targetFiles) continue;
        for (const targetFile of targetFiles) {
          // Skip self-calls within the same file.
          if (targetFile === filePath) continue;
          const targetFileId = `file:${targetFile}`;
          const key = `${callerFileId}|${targetFileId}`;
          if (seen.has(key)) continue;
          seen.add(key);
          edges.push({
            from: callerFileId,
            to: targetFileId,
            type: "calls",
            metadata: { confidence: "regex" }
          });
        }
      }
    }
    return edges;
  }
  // Extract "imports" edges for relative import statements only (bare module
  // specifiers are ignored). Type-only imports are tagged in metadata.
  async extractImports(content, fileId, relativePath, rootDir) {
    const edges = [];
    const importRegex = /import\s+(?:type\s+)?(?:\{[^}]*\}|[\w*]+)\s+from\s+['"]([^'"]+)['"]/g;
    let match;
    while ((match = importRegex.exec(content)) !== null) {
      const importPath = match[1];
      if (!importPath.startsWith(".")) continue;
      const resolvedPath = await this.resolveImportPath(relativePath, importPath, rootDir);
      if (resolvedPath) {
        const targetId = `file:${resolvedPath}`;
        const isTypeOnly = match[0].includes("import type");
        edges.push({
          from: fileId,
          to: targetId,
          type: "imports",
          metadata: { importType: isTypeOnly ? "type-only" : "static" }
        });
      }
    }
    return edges;
  }
  // Resolve a relative import specifier to an on-disk file, trying each
  // source extension directly (with any ".js" suffix stripped, to handle
  // ESM-style ".js" imports of ".ts" sources) and then index.<ext> files.
  // Returns the rootDir-relative path, or null when nothing matches.
  async resolveImportPath(fromFile, importPath, rootDir) {
    const fromDir = path.dirname(fromFile);
    const resolved = path.normalize(path.join(fromDir, importPath));
    const extensions = [".ts", ".tsx", ".js", ".jsx"];
    for (const ext of extensions) {
      const candidate = resolved.replace(/\.js$/, "") + ext;
      const fullPath = path.join(rootDir, candidate);
      try {
        await fs.access(fullPath);
        return candidate;
      } catch {
      }
    }
    for (const ext of extensions) {
      const candidate = path.join(resolved, `index${ext}`);
      const fullPath = path.join(rootDir, candidate);
      try {
        await fs.access(fullPath);
        return candidate;
      } catch {
      }
    }
    return null;
  }
  // Approximate cyclomatic complexity: 1 + number of decision-point tokens
  // (branches, loops, ternaries, logical operators, catch), skipping lines
  // that look like comments.
  computeCyclomaticComplexity(lines) {
    let complexity = 1;
    const decisionPattern = /\b(if|else\s+if|while|for|case)\b|\?\s*[^:?]|&&|\|\||catch\b/g;
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed.startsWith("//") || trimmed.startsWith("*")) continue;
      const matches = trimmed.match(decisionPattern);
      if (matches) complexity += matches.length;
    }
    return complexity;
  }
  // Maximum brace-nesting depth within the given lines, minus one so the
  // construct's own body braces don't count as nesting.
  computeMaxNesting(lines) {
    let maxDepth = 0;
    let currentDepth = 0;
    for (const line of lines) {
      const trimmed = line.trim();
      if (trimmed.startsWith("//") || trimmed.startsWith("*")) continue;
      for (const ch of trimmed) {
        if (ch === "{") {
          currentDepth++;
          if (currentDepth > maxDepth) maxDepth = currentDepth;
        } else if (ch === "}") {
          currentDepth--;
        }
      }
    }
    return Math.max(0, maxDepth - 1);
  }
  // Count top-level commas inside the first parenthesized group of a
  // declaration line, ignoring commas nested in generics or sub-parens.
  // Assumes the full parameter list fits on one line — TODO confirm.
  countParameters(declarationLine) {
    const parenMatch = declarationLine.match(/\(([^)]*)\)/);
    if (!parenMatch || !parenMatch[1].trim()) return 0;
    let depth = 0;
    let count = 1;
    for (const ch of parenMatch[1]) {
      if (ch === "<" || ch === "(") depth++;
      else if (ch === ">" || ch === ")") depth--;
      else if (ch === "," && depth === 0) count++;
    }
    return count;
  }
  // Map a file extension to a coarse language tag for file-node metadata.
  detectLanguage(filePath) {
    if (/\.tsx?$/.test(filePath)) return "typescript";
    if (/\.jsx?$/.test(filePath)) return "javascript";
    return "unknown";
  }
};
842
+ var execFileAsync = promisify(execFile);
843
var GitIngestor = class {
  /**
   * Ingests recent git history into the graph store.
   * @param store graph store providing addNode/addEdge/getNode
   * @param gitRunner optional (rootDir, args) => Promise<string> used instead
   *   of spawning a real `git` process (handy for tests)
   */
  constructor(store, gitRunner) {
    this.store = store;
    this.gitRunner = gitRunner;
  }
  /**
   * Reads the last 100 commits, creates one "commit" node per commit, links
   * already-known file nodes to the commits that touched them, and records
   * "co_changes_with" edges for file pairs that changed together >= 2 times.
   * @param rootDir repository root to run git in
   * @returns ingest counters plus any errors encountered
   */
  async ingest(rootDir) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let nodesUpdated = 0;
    let edgesAdded = 0;
    let edgesUpdated = 0;
    let output;
    try {
      output = await this.runGit(rootDir, [
        "log",
        "--format=%H|%an|%ae|%aI|%s",
        "--name-only",
        "-n",
        "100"
      ]);
    } catch (err) {
      errors.push(`git log failed: ${err instanceof Error ? err.message : String(err)}`);
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors,
        durationMs: Date.now() - start
      };
    }
    const commits = this.parseGitLog(output);
    for (const commit of commits) {
      const nodeId = `commit:${commit.shortHash}`;
      this.store.addNode({
        id: nodeId,
        type: "commit",
        name: commit.message,
        metadata: {
          author: commit.author,
          email: commit.email,
          date: commit.date,
          hash: commit.hash
        }
      });
      nodesAdded++;
      // Only link files that already exist as nodes in the graph.
      for (const file of commit.files) {
        const fileNodeId = `file:${file}`;
        if (this.store.getNode(fileNodeId)) {
          this.store.addEdge({
            from: fileNodeId,
            to: nodeId,
            type: "triggered_by"
          });
          edgesAdded++;
        }
      }
    }
    for (const { fileA, fileB, count } of this.computeCoChanges(commits)) {
      const fileAId = `file:${fileA}`;
      const fileBId = `file:${fileB}`;
      if (this.store.getNode(fileAId) && this.store.getNode(fileBId)) {
        this.store.addEdge({
          from: fileAId,
          to: fileBId,
          type: "co_changes_with",
          metadata: { count }
        });
        edgesAdded++;
      }
    }
    return {
      nodesAdded,
      nodesUpdated,
      edgesAdded,
      edgesUpdated,
      errors,
      durationMs: Date.now() - start
    };
  }
  /** Runs git via the injected runner when present, else a real subprocess. */
  async runGit(rootDir, args) {
    if (this.gitRunner) {
      return this.gitRunner(rootDir, args);
    }
    const { stdout } = await execFileAsync("git", args, { cwd: rootDir });
    return stdout;
  }
  /**
   * Copies the accumulated parser state into a plain commit record.
   * (Previously this object literal was duplicated verbatim at three push
   * sites in parseGitLog; extracted here so they cannot drift apart.)
   */
  finalizeCommit(current) {
    return {
      hash: current.hash,
      shortHash: current.shortHash,
      author: current.author,
      email: current.email,
      date: current.date,
      message: current.message,
      files: current.files
    };
  }
  /**
   * Parses `git log --format=%H|%an|%ae|%aI|%s --name-only` output.
   * A header line (hex hash + 4 more "|"-separated fields) starts a commit;
   * subsequent non-blank lines are file paths. A blank line flushes the
   * current commit only once it has files, so the blank separator directly
   * after a header does not emit an empty commit.
   */
  parseGitLog(output) {
    if (!output.trim()) return [];
    const commits = [];
    let current = null;
    for (const line of output.split("\n")) {
      const trimmed = line.trim();
      if (!trimmed) {
        if (current && current.hasFiles) {
          commits.push(this.finalizeCommit(current));
          current = null;
        }
        continue;
      }
      const parts = trimmed.split("|");
      if (parts.length >= 5 && /^[0-9a-f]{7,40}$/.test(parts[0])) {
        if (current) {
          commits.push(this.finalizeCommit(current));
        }
        current = {
          hash: parts[0],
          shortHash: parts[0].substring(0, 7),
          author: parts[1],
          email: parts[2],
          date: parts[3],
          message: parts.slice(4).join("|"),
          // subject may itself contain "|"
          files: [],
          hasFiles: false
        };
      } else if (current) {
        current.files.push(trimmed);
        current.hasFiles = true;
      }
    }
    if (current) {
      commits.push(this.finalizeCommit(current));
    }
    return commits;
  }
  /**
   * Counts, across all commits, how often each (sorted) pair of files changed
   * in the same commit; pairs seen at least twice are reported as co-changes.
   */
  computeCoChanges(commits) {
    const pairCounts = /* @__PURE__ */ new Map();
    for (const commit of commits) {
      const files = [...commit.files].sort();
      for (let i = 0; i < files.length; i++) {
        for (let j = i + 1; j < files.length; j++) {
          const key = `${files[i]}||${files[j]}`;
          pairCounts.set(key, (pairCounts.get(key) ?? 0) + 1);
        }
      }
    }
    const results = [];
    for (const [key, count] of pairCounts) {
      if (count >= 2) {
        const [fileA, fileB] = key.split("||");
        results.push({ fileA, fileB, count });
      }
    }
    return results;
  }
};
1020
var TopologicalLinker = class {
  /** Derives directory-level "module" nodes and reports import cycles. */
  constructor(store) {
    this.store = store;
  }
  /**
   * Groups file nodes by their directory, adds one "module" node per
   * directory with "contains" edges to its files, then runs cycle detection
   * over "imports" edges between all file nodes.
   * @returns the number of edges added and any import cycles found
   */
  link() {
    let edgesAdded = 0;
    const files = this.store.findNodes({ type: "file" });
    const byDirectory = /* @__PURE__ */ new Map();
    for (const file of files) {
      if (!file.path) continue;
      const dir = path2.dirname(file.path);
      const bucket = byDirectory.get(dir) ?? [];
      bucket.push(file.id);
      byDirectory.set(dir, bucket);
    }
    for (const [dir, fileIds] of byDirectory) {
      if (fileIds.length < 1) continue;
      const moduleId = `module:${dir}`;
      this.store.addNode({
        id: moduleId,
        type: "module",
        name: dir === "." ? "root" : path2.basename(dir),
        path: dir,
        metadata: { fileCount: fileIds.length }
      });
      for (const fileId of fileIds) {
        this.store.addEdge({ from: moduleId, to: fileId, type: "contains" });
        edgesAdded++;
      }
    }
    const cycles = this.detectCycles(files.map((f) => f.id));
    return { edgesAdded, cycles };
  }
  /**
   * Iterative-start depth-first search over "imports" edges. Hitting a node
   * already on the current recursion stack closes a cycle, which is recorded
   * with the repeated node at both ends.
   */
  detectCycles(fileIds) {
    const cycles = [];
    const finished = /* @__PURE__ */ new Set();
    const onStack = /* @__PURE__ */ new Set();
    const trail = [];
    const walk = (nodeId) => {
      if (onStack.has(nodeId)) {
        const at = trail.indexOf(nodeId);
        if (at !== -1) {
          cycles.push([...trail.slice(at), nodeId]);
        }
        return;
      }
      if (finished.has(nodeId)) return;
      finished.add(nodeId);
      onStack.add(nodeId);
      trail.push(nodeId);
      for (const edge of this.store.getEdges({ from: nodeId, type: "imports" })) {
        walk(edge.to);
      }
      trail.pop();
      onStack.delete(nodeId);
    };
    for (const fileId of fileIds) {
      if (!finished.has(fileId)) walk(fileId);
    }
    return cycles;
  }
};
1091
/** Short stable content key: the first 8 hex chars of the MD5 of `text`. */
function hash(text) {
  const digest = crypto.createHash("md5").update(text).digest("hex");
  return digest.slice(0, 8);
}
1094
/** Sums the counters of several ingest results and concatenates their errors. */
function mergeResults(...results) {
  const total = {
    nodesAdded: 0,
    nodesUpdated: 0,
    edgesAdded: 0,
    edgesUpdated: 0,
    errors: [],
    durationMs: 0
  };
  for (const r of results) {
    total.nodesAdded += r.nodesAdded;
    total.nodesUpdated += r.nodesUpdated;
    total.edgesAdded += r.edgesAdded;
    total.edgesUpdated += r.edgesUpdated;
    total.errors.push(...r.errors);
    total.durationMs += r.durationMs;
  }
  return total;
}
1104
/** A zeroed ingest result, optionally carrying an elapsed duration. */
function emptyResult(durationMs = 0) {
  return {
    nodesAdded: 0,
    nodesUpdated: 0,
    edgesAdded: 0,
    edgesUpdated: 0,
    errors: [],
    durationMs
  };
}
1107
+ var CODE_NODE_TYPES = ["file", "function", "class", "method", "interface", "variable"];
1108
var KnowledgeIngestor = class {
  /** @param store graph store providing addNode/addEdge/findNodes */
  constructor(store) {
    this.store = store;
  }
  /**
   * Scans `adrDir` recursively for markdown ADR files and stores one "adr"
   * node per file, keyed by the file's basename. The first "# " heading (or
   * the filename) becomes the node name; **Date:** / **Status:** fields land
   * in metadata, and the full text is matched against code nodes to create
   * "documents" edges. A missing/unreadable directory yields an empty result.
   */
  async ingestADRs(adrDir) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    let files;
    try {
      files = await this.findMarkdownFiles(adrDir);
    } catch {
      return emptyResult(Date.now() - start);
    }
    for (const filePath of files) {
      try {
        const content = await fs2.readFile(filePath, "utf-8");
        const filename = path3.basename(filePath, ".md");
        const titleMatch = content.match(/^#\s+(.+)$/m);
        const title = titleMatch ? titleMatch[1].trim() : filename;
        const dateMatch = content.match(/\*\*Date:\*\*\s*(.+)/);
        const statusMatch = content.match(/\*\*Status:\*\*\s*(.+)/);
        const date = dateMatch ? dateMatch[1].trim() : void 0;
        const status = statusMatch ? statusMatch[1].trim() : void 0;
        // FIX: this previously was the literal string `adr:$(unknown)` (no
        // interpolation), so every ADR file produced the same node id and
        // the nodes overwrote each other. Key by filename instead.
        const nodeId = `adr:${filename}`;
        this.store.addNode({
          id: nodeId,
          type: "adr",
          name: title,
          path: filePath,
          metadata: { date, status }
        });
        nodesAdded++;
        edgesAdded += this.linkToCode(content, nodeId, "documents");
      } catch (err) {
        errors.push(`${filePath}: ${err instanceof Error ? err.message : String(err)}`);
      }
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  /**
   * Parses .harness/learnings.md. "## <token>" headings set the current date
   * for subsequent bullets; each "- ..." bullet becomes a "learning" node
   * whose id hashes the bullet text, with optional [skill:...] and
   * [outcome:...] tags captured in metadata and the text linked to code
   * nodes via "applies_to" edges. A missing file yields an empty result.
   */
  async ingestLearnings(projectPath) {
    const start = Date.now();
    const filePath = path3.join(projectPath, ".harness", "learnings.md");
    let content;
    try {
      content = await fs2.readFile(filePath, "utf-8");
    } catch {
      return emptyResult(Date.now() - start);
    }
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const lines = content.split("\n");
    let currentDate;
    for (const line of lines) {
      const headingMatch = line.match(/^##\s+(\S+)/);
      if (headingMatch) {
        currentDate = headingMatch[1];
        continue;
      }
      const bulletMatch = line.match(/^-\s+(.+)/);
      if (!bulletMatch) continue;
      const text = bulletMatch[1];
      const skillMatch = text.match(/\[skill:([^\]]+)\]/);
      const outcomeMatch = text.match(/\[outcome:([^\]]+)\]/);
      const skill = skillMatch ? skillMatch[1] : void 0;
      const outcome = outcomeMatch ? outcomeMatch[1] : void 0;
      const nodeId = `learning:${hash(text)}`;
      this.store.addNode({
        id: nodeId,
        type: "learning",
        name: text,
        metadata: { skill, outcome, date: currentDate }
      });
      nodesAdded++;
      edgesAdded += this.linkToCode(text, nodeId, "applies_to");
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  /**
   * Parses .harness/failures.md. Each "## " section with a **Description:**
   * line becomes a "failure" node whose id hashes the description; Date,
   * Skill and Type fields land in metadata, and the description is linked to
   * code nodes via "caused_by" edges. Sections without a description are
   * skipped; a missing file yields an empty result.
   */
  async ingestFailures(projectPath) {
    const start = Date.now();
    const filePath = path3.join(projectPath, ".harness", "failures.md");
    let content;
    try {
      content = await fs2.readFile(filePath, "utf-8");
    } catch {
      return emptyResult(Date.now() - start);
    }
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const sections = content.split(/^##\s+/m).filter((s) => s.trim());
    for (const section of sections) {
      const dateMatch = section.match(/\*\*Date:\*\*\s*(.+)/);
      const skillMatch = section.match(/\*\*Skill:\*\*\s*(.+)/);
      const typeMatch = section.match(/\*\*Type:\*\*\s*(.+)/);
      const descMatch = section.match(/\*\*Description:\*\*\s*(.+)/);
      const date = dateMatch ? dateMatch[1].trim() : void 0;
      const skill = skillMatch ? skillMatch[1].trim() : void 0;
      const failureType = typeMatch ? typeMatch[1].trim() : void 0;
      const description = descMatch ? descMatch[1].trim() : void 0;
      if (!description) continue;
      const nodeId = `failure:${hash(description)}`;
      this.store.addNode({
        id: nodeId,
        type: "failure",
        name: description,
        metadata: { date, skill, type: failureType }
      });
      nodesAdded++;
      edgesAdded += this.linkToCode(description, nodeId, "caused_by");
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
  /**
   * Runs ADR, learnings and failures ingestion concurrently and merges their
   * results; durationMs reflects wall-clock time, not the per-part sum.
   * @param opts.adrDir overrides the default <project>/docs/adr location
   */
  async ingestAll(projectPath, opts) {
    const start = Date.now();
    const adrDir = opts?.adrDir ?? path3.join(projectPath, "docs", "adr");
    const [adrResult, learningsResult, failuresResult] = await Promise.all([
      this.ingestADRs(adrDir),
      this.ingestLearnings(projectPath),
      this.ingestFailures(projectPath)
    ]);
    const merged = mergeResults(adrResult, learningsResult, failuresResult);
    return { ...merged, durationMs: Date.now() - start };
  }
  /**
   * Creates `edgeType` edges from `sourceNodeId` to every code node whose
   * name (>= 3 chars) appears in `content` as a whole word (case-insensitive)
   * or whose multi-segment path appears verbatim in the content.
   * @returns number of edges created
   */
  linkToCode(content, sourceNodeId, edgeType) {
    let count = 0;
    for (const nodeType of CODE_NODE_TYPES) {
      const codeNodes = this.store.findNodes({ type: nodeType });
      for (const node of codeNodes) {
        let nameMatches = false;
        if (node.name.length >= 3) {
          const escaped = node.name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
          const namePattern = new RegExp(`\\b${escaped}\\b`, "i");
          nameMatches = namePattern.test(content);
        }
        let pathMatches = false;
        // Only paths containing a separator are distinctive enough to match.
        if (node.path && node.path.includes(path3.sep)) {
          pathMatches = content.includes(node.path);
        }
        if (nameMatches || pathMatches) {
          this.store.addEdge({
            from: sourceNodeId,
            to: node.id,
            type: edgeType
          });
          count++;
        }
      }
    }
    return count;
  }
  /** Recursively collects all .md file paths under `dir`. */
  async findMarkdownFiles(dir) {
    const results = [];
    const entries = await fs2.readdir(dir, { withFileTypes: true });
    for (const entry of entries) {
      const fullPath = path3.join(dir, entry.name);
      if (entry.isDirectory()) {
        results.push(...await this.findMarkdownFiles(fullPath));
      } else if (entry.isFile() && entry.name.endsWith(".md")) {
        results.push(fullPath);
      }
    }
    return results;
  }
};
1296
+ var CODE_NODE_TYPES2 = ["file", "function", "class", "method", "interface", "variable"];
1297
/**
 * Defensively strips prompt-injection markers from third-party text before it
 * is stored in the graph: role-style tags, "system:"-style headings, and
 * common instruction-override phrasing. Output is capped at `maxLength`
 * characters, with an ellipsis appended when truncated.
 */
function sanitizeExternalText(text, maxLength = 2e3) {
  const roleTags = /<\/?(?:system|instruction|prompt|role|context|tool_call|function_call|assistant|human|user)[^>]*>/gi;
  const promptHeadings = /^#{1,3}\s*(?:system|instruction|prompt)\s*[::]\s*/gim;
  const overridePhrases = /(?:ignore|disregard|forget)\s+(?:all\s+)?(?:previous|prior|above)\s+(?:instructions?|prompts?|context)/gi;
  const rolePlay = /you\s+are\s+now\s+(?:a\s+)?(?:helpful\s+)?(?:an?\s+)?(?:assistant|system|ai|bot|agent|tool)\b/gi;
  let cleaned = text
    .replace(roleTags, "")
    .replace(promptHeadings, "")
    .replace(overridePhrases, "[filtered]")
    .replace(rolePlay, "[filtered]");
  if (cleaned.length > maxLength) {
    cleaned = cleaned.slice(0, maxLength) + "\u2026";
  }
  return cleaned;
}
1313
/**
 * Creates `edgeType` edges from `sourceNodeId` to every code node whose name
 * appears in `content` as a whole word (case-insensitive). With
 * `options.checkPaths`, a node's path appearing verbatim also counts.
 * Names shorter than 3 characters are skipped to avoid noise matches.
 * @returns number of edges created
 */
function linkToCode(store, content, sourceNodeId, edgeType, options) {
  let created = 0;
  for (const nodeType of CODE_NODE_TYPES2) {
    for (const candidate of store.findNodes({ type: nodeType })) {
      if (candidate.name.length < 3) continue;
      const escapedName = candidate.name.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
      const wholeWord = new RegExp(`\\b${escapedName}\\b`, "i");
      const byName = wholeWord.test(content);
      const byPath = !byName && Boolean(options?.checkPaths && candidate.path && content.includes(candidate.path));
      if (byName || byPath) {
        store.addEdge({ from: sourceNodeId, to: candidate.id, type: edgeType });
        created++;
      }
    }
  }
  return created;
}
1333
var SyncManager = class {
  /** Registered connectors keyed by connector name. */
  registrations = /* @__PURE__ */ new Map();
  /** Absolute path of the JSON file recording per-connector sync state. */
  metadataPath;
  /**
   * @param store graph store handed to each connector's ingest()
   * @param graphDir directory that holds sync-metadata.json
   */
  constructor(store, graphDir) {
    this.store = store;
    this.metadataPath = path4.join(graphDir, "sync-metadata.json");
  }
  /** Makes a connector available to sync()/syncAll() under its name. */
  registerConnector(connector, config) {
    this.registrations.set(connector.name, { connector, config });
  }
  /**
   * Runs one registered connector and records its outcome in the metadata
   * file. An unknown name produces an error result instead of throwing.
   */
  async sync(connectorName) {
    const entry = this.registrations.get(connectorName);
    if (!entry) {
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: [`Connector "${connectorName}" not registered`],
        durationMs: 0
      };
    }
    const result = await entry.connector.ingest(this.store, entry.config);
    const metadata = await this.loadMetadata();
    metadata.connectors[connectorName] = {
      lastSyncTimestamp: (/* @__PURE__ */ new Date()).toISOString(),
      lastResult: result
    };
    await this.saveMetadata(metadata);
    return result;
  }
  /** Runs every registered connector in registration order, merging counters. */
  async syncAll() {
    const combined = {
      nodesAdded: 0,
      nodesUpdated: 0,
      edgesAdded: 0,
      edgesUpdated: 0,
      errors: [],
      durationMs: 0
    };
    for (const name of this.registrations.keys()) {
      const result = await this.sync(name);
      combined.nodesAdded += result.nodesAdded;
      combined.nodesUpdated += result.nodesUpdated;
      combined.edgesAdded += result.edgesAdded;
      combined.edgesUpdated += result.edgesUpdated;
      combined.errors.push(...result.errors);
      combined.durationMs += result.durationMs;
    }
    return combined;
  }
  /** Exposes the persisted sync metadata (an empty record when absent). */
  async getMetadata() {
    return this.loadMetadata();
  }
  /** Loads the metadata file; a missing/unreadable file yields a fresh record. */
  async loadMetadata() {
    try {
      return JSON.parse(await fs3.readFile(this.metadataPath, "utf-8"));
    } catch {
      return { connectors: {} };
    }
  }
  /** Writes the metadata file, creating its directory when needed. */
  async saveMetadata(metadata) {
    await fs3.mkdir(path4.dirname(this.metadataPath), { recursive: true });
    await fs3.writeFile(this.metadataPath, JSON.stringify(metadata, null, 2), "utf-8");
  }
};
1401
var JiraConnector = class {
  name = "jira";
  source = "jira";
  httpClient;
  /** @param httpClient optional fetch-compatible function (injectable for tests) */
  constructor(httpClient) {
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Pages through the Jira search API (50 issues per page) and stores one
   * "issue" node per issue, linking each to code nodes mentioned in its
   * summary/description. Credentials come from env vars named by
   * config.apiKeyEnv / config.baseUrlEnv.
   */
  async ingest(store, config) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    // Shared result shape; counters are read at call time.
    const asResult = (errs) => ({
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors: errs,
      durationMs: Date.now() - start
    });
    const apiKeyEnv = config.apiKeyEnv ?? "JIRA_API_KEY";
    const apiKey = process.env[apiKeyEnv];
    if (!apiKey) {
      return asResult([`Missing API key: environment variable "${apiKeyEnv}" is not set`]);
    }
    const baseUrlEnv = config.baseUrlEnv ?? "JIRA_BASE_URL";
    const baseUrl = process.env[baseUrlEnv];
    if (!baseUrl) {
      return asResult([`Missing base URL: environment variable "${baseUrlEnv}" is not set`]);
    }
    // Build the JQL query from the optional project and filter settings.
    const clauses = [];
    if (config.project) clauses.push(`project=${config.project}`);
    const filters = config.filters;
    if (filters?.status?.length) {
      clauses.push(`status IN (${filters.status.map((s) => `"${s}"`).join(",")})`);
    }
    if (filters?.labels?.length) {
      clauses.push(`labels IN (${filters.labels.map((l) => `"${l}"`).join(",")})`);
    }
    const jql = clauses.join(" AND ");
    const headers = {
      Authorization: `Basic ${apiKey}`,
      "Content-Type": "application/json"
    };
    const pageSize = 50;
    let startAt = 0;
    let total = Infinity;
    try {
      while (startAt < total) {
        const url = `${baseUrl}/rest/api/2/search?jql=${encodeURIComponent(jql)}&startAt=${startAt}&maxResults=${pageSize}`;
        const response = await this.httpClient(url, { headers });
        if (!response.ok) {
          return asResult(["Jira API request failed"]);
        }
        const data = await response.json();
        total = data.total;
        for (const issue of data.issues) {
          const nodeId = `issue:jira:${issue.key}`;
          store.addNode({
            id: nodeId,
            type: "issue",
            name: sanitizeExternalText(issue.fields.summary, 500),
            metadata: {
              key: issue.key,
              status: issue.fields.status?.name,
              priority: issue.fields.priority?.name,
              assignee: issue.fields.assignee?.displayName,
              labels: issue.fields.labels ?? []
            }
          });
          nodesAdded++;
          // Link the issue to code nodes mentioned in its text.
          const searchText = sanitizeExternalText(
            [issue.fields.summary, issue.fields.description ?? ""].join(" ")
          );
          edgesAdded += linkToCode(store, searchText, nodeId, "applies_to");
        }
        startAt += pageSize;
      }
    } catch (err) {
      return asResult([`Jira API error: ${err instanceof Error ? err.message : String(err)}`]);
    }
    return asResult(errors);
  }
};
1511
var SlackConnector = class {
  name = "slack";
  source = "slack";
  httpClient;
  /** @param httpClient optional fetch-compatible function (injectable for tests) */
  constructor(httpClient) {
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Fetches recent history for each configured channel and stores one
   * "conversation" node per message. Message text is sanitized and linked to
   * code nodes it mentions (paths included). Per-channel failures are
   * collected into `errors` and do not stop the remaining channels.
   */
  async ingest(store, config) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const apiKeyEnv = config.apiKeyEnv ?? "SLACK_API_KEY";
    const apiKey = process.env[apiKeyEnv];
    if (!apiKey) {
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: [`Missing API key: environment variable "${apiKeyEnv}" is not set`],
        durationMs: Date.now() - start
      };
    }
    // Optional lookback window, expressed as a Unix-seconds "oldest" bound.
    const oldest = config.lookbackDays
      ? String(Math.floor((Date.now() - Number(config.lookbackDays) * 864e5) / 1e3))
      : void 0;
    for (const channel of config.channels ?? []) {
      try {
        const base = `https://slack.com/api/conversations.history?channel=${encodeURIComponent(channel)}`;
        const url = oldest ? `${base}&oldest=${oldest}` : base;
        const response = await this.httpClient(url, {
          headers: {
            Authorization: `Bearer ${apiKey}`,
            "Content-Type": "application/json"
          }
        });
        if (!response.ok) {
          errors.push(`Slack API request failed for channel ${channel}`);
          continue;
        }
        const data = await response.json();
        if (!data.ok) {
          errors.push(`Slack API error for channel ${channel}`);
          continue;
        }
        for (const message of data.messages) {
          const nodeId = `conversation:slack:${channel}:${message.ts}`;
          const sanitizedText = sanitizeExternalText(message.text);
          store.addNode({
            id: nodeId,
            type: "conversation",
            // Keep node names short: at most the first 100 sanitized chars.
            name: sanitizedText.length > 100 ? sanitizedText.slice(0, 100) : sanitizedText,
            metadata: {
              author: message.user,
              channel,
              timestamp: message.ts
            }
          });
          nodesAdded++;
          edgesAdded += linkToCode(store, sanitizedText, nodeId, "references", {
            checkPaths: true
          });
        }
      } catch (err) {
        errors.push(
          `Slack API error for channel ${channel}: ${err instanceof Error ? err.message : String(err)}`
        );
      }
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
};
1593
var ConfluenceConnector = class {
  name = "confluence";
  source = "confluence";
  httpClient;
  /** @param httpClient optional fetch-compatible function (injectable for tests) */
  constructor(httpClient) {
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Pages through the Confluence v2 pages API for one space, storing a
   * "document" node per page and linking it to code nodes mentioned in the
   * title or body. Credentials and base URL come from env vars named by
   * config.apiKeyEnv / config.baseUrlEnv.
   */
  async ingest(store, config) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    const apiKeyEnv = config.apiKeyEnv ?? "CONFLUENCE_API_KEY";
    const apiKey = process.env[apiKeyEnv];
    if (!apiKey) {
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: [`Missing API key: environment variable "${apiKeyEnv}" is not set`],
        durationMs: Date.now() - start
      };
    }
    const baseUrlEnv = config.baseUrlEnv ?? "CONFLUENCE_BASE_URL";
    const baseUrl = process.env[baseUrlEnv];
    // FIX: previously the base URL defaulted to "" and the connector tried to
    // fetch a relative URL that can only fail. Fail fast with a clear error,
    // consistent with JiraConnector's base-URL validation.
    if (!baseUrl) {
      return {
        nodesAdded: 0,
        nodesUpdated: 0,
        edgesAdded: 0,
        edgesUpdated: 0,
        errors: [`Missing base URL: environment variable "${baseUrlEnv}" is not set`],
        durationMs: Date.now() - start
      };
    }
    const spaceKey = config.spaceKey ?? "";
    try {
      let nextUrl = `${baseUrl}/wiki/api/v2/pages?spaceKey=${encodeURIComponent(spaceKey)}&limit=25&body-format=storage`;
      while (nextUrl) {
        const response = await this.httpClient(nextUrl, {
          headers: { Authorization: `Bearer ${apiKey}` }
        });
        if (!response.ok) {
          errors.push(`Confluence API error: status ${response.status}`);
          break;
        }
        const data = await response.json();
        for (const page of data.results) {
          const nodeId = `confluence:${page.id}`;
          store.addNode({
            id: nodeId,
            type: "document",
            name: sanitizeExternalText(page.title, 500),
            metadata: {
              source: "confluence",
              spaceKey,
              pageId: page.id,
              status: page.status,
              url: page._links?.webui ?? ""
            }
          });
          nodesAdded++;
          const text = sanitizeExternalText(`${page.title} ${page.body?.storage?.value ?? ""}`);
          edgesAdded += linkToCode(store, text, nodeId, "documents");
        }
        // Follow cursor-style pagination links until exhausted.
        nextUrl = data._links?.next ? `${baseUrl}${data._links.next}` : null;
      }
    } catch (err) {
      errors.push(`Confluence fetch error: ${err instanceof Error ? err.message : String(err)}`);
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    };
  }
};
1664
var CIConnector = class {
  name = "ci";
  source = "github-actions";
  httpClient;
  /** @param httpClient optional fetch-compatible function (injectable for tests) */
  constructor(httpClient) {
    this.httpClient = httpClient ?? ((url, options) => fetch(url, options));
  }
  /**
   * Fetches recent GitHub Actions workflow runs for config.repo and stores a
   * "build" node per run. Each run is linked to its commit node when one
   * exists, and a failed run additionally gets a "test_result" node with a
   * "failed_in" edge back to the build.
   */
  async ingest(store, config) {
    const start = Date.now();
    const errors = [];
    let nodesAdded = 0;
    let edgesAdded = 0;
    // Shared result shape; counters are read at call time.
    const result = () => ({
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded,
      edgesUpdated: 0,
      errors,
      durationMs: Date.now() - start
    });
    const apiKeyEnv = config.apiKeyEnv ?? "GITHUB_TOKEN";
    const apiKey = process.env[apiKeyEnv];
    if (!apiKey) {
      errors.push(`Missing API key: environment variable "${apiKeyEnv}" is not set`);
      return result();
    }
    const repo = config.repo ?? "";
    const maxRuns = config.maxRuns ?? 10;
    try {
      const response = await this.httpClient(
        `https://api.github.com/repos/${repo}/actions/runs?per_page=${maxRuns}`,
        { headers: { Authorization: `Bearer ${apiKey}`, Accept: "application/vnd.github.v3+json" } }
      );
      if (!response.ok) {
        errors.push(`GitHub Actions API error: status ${response.status}`);
        return result();
      }
      const data = await response.json();
      for (const run of data.workflow_runs) {
        const buildId = `build:${run.id}`;
        const safeName = sanitizeExternalText(run.name, 200);
        store.addNode({
          id: buildId,
          type: "build",
          name: `${safeName} #${run.id}`,
          metadata: {
            source: "github-actions",
            status: run.status,
            conclusion: run.conclusion,
            branch: run.head_branch,
            sha: run.head_sha,
            url: run.html_url,
            createdAt: run.created_at
          }
        });
        nodesAdded++;
        // FIX: GitIngestor keys commit nodes by the 7-char short hash, but
        // GitHub reports the full sha, so the full-sha lookup never matched.
        // Try the full sha first (forward-compatible), then the short hash.
        const commitNode =
          store.getNode(`commit:${run.head_sha}`) ??
          store.getNode(`commit:${String(run.head_sha).slice(0, 7)}`);
        if (commitNode) {
          store.addEdge({ from: buildId, to: commitNode.id, type: "triggered_by" });
          edgesAdded++;
        }
        if (run.conclusion === "failure") {
          const testResultId = `test_result:${run.id}`;
          store.addNode({
            id: testResultId,
            type: "test_result",
            name: `Failed: ${safeName} #${run.id}`,
            metadata: {
              source: "github-actions",
              buildId: String(run.id),
              conclusion: "failure",
              branch: run.head_branch,
              sha: run.head_sha
            }
          });
          nodesAdded++;
          store.addEdge({ from: testResultId, to: buildId, type: "failed_in" });
          edgesAdded++;
        }
      }
    } catch (err) {
      errors.push(
        `GitHub Actions fetch error: ${err instanceof Error ? err.message : String(err)}`
      );
    }
    return result();
  }
};
1764
// Common English function words ignored by FusionLayer keyword extraction.
var STOP_WORDS = /* @__PURE__ */ new Set(
  (
    "the a an is are was were be been being have has had do does did " +
    "will would could should may might shall can to of in for on with " +
    "at by from as into about this that it not but and or if then so"
  ).split(" ")
);
1812
var FusionLayer = class {
  store;
  vectorStore;
  keywordWeight;
  semanticWeight;
  /**
   * Hybrid retrieval over the graph store: keyword scoring blended with
   * optional vector-store semantic scores.
   * @param keywordWeight weight of the keyword signal when both signals exist
   * @param semanticWeight weight of the semantic signal when both signals exist
   */
  constructor(store, vectorStore, keywordWeight = 0.6, semanticWeight = 0.4) {
    this.store = store;
    this.vectorStore = vectorStore;
    this.keywordWeight = keywordWeight;
    this.semanticWeight = semanticWeight;
  }
  /**
   * Scores every node against the query and returns the top `topK` matches.
   * When no embedding (or no vector store) is available the keyword signal
   * gets full weight; otherwise the two signals are blended by the
   * configured weights. Nodes scoring zero are omitted.
   */
  search(query, topK = 10, queryEmbedding) {
    const keywords = this.extractKeywords(query);
    if (keywords.length === 0) {
      return [];
    }
    const allNodes = this.store.findNodes({});
    const semanticScores = /* @__PURE__ */ new Map();
    if (queryEmbedding && this.vectorStore) {
      for (const vr of this.vectorStore.search(queryEmbedding, allNodes.length)) {
        semanticScores.set(vr.id, vr.score);
      }
    }
    const blendSemantic = semanticScores.size > 0;
    const kw = blendSemantic ? this.keywordWeight : 1;
    const sem = blendSemantic ? this.semanticWeight : 0;
    const scored = [];
    for (const node of allNodes) {
      const keywordSignal = this.keywordScore(keywords, node);
      const semanticSignal = semanticScores.get(node.id) ?? 0;
      const combined = kw * keywordSignal + sem * semanticSignal;
      if (combined > 0) {
        scored.push({
          nodeId: node.id,
          node,
          score: combined,
          signals: { keyword: keywordSignal, semantic: semanticSignal }
        });
      }
    }
    return scored.sort((a, b) => b.score - a.score).slice(0, topK);
  }
  /** Lowercased, deduplicated query tokens; stop words and 1-char tokens dropped. */
  extractKeywords(query) {
    const seen = /* @__PURE__ */ new Set();
    for (const token of query.toLowerCase().split(/[\s\-_.,:;!?()[\]{}"'`/\\|@#$%^&*+=<>~]+/)) {
      if (token.length >= 2 && !STOP_WORDS.has(token)) {
        seen.add(token);
      }
    }
    return [...seen];
  }
  /** Mean of the per-keyword scores (0 when there are no keywords). */
  keywordScore(keywords, node) {
    if (keywords.length === 0) return 0;
    let sum = 0;
    for (const keyword of keywords) {
      sum += this.singleKeywordScore(keyword, node);
    }
    return sum / keywords.length;
  }
  /**
   * Tiered match score for one keyword: exact name (1.0), name substring
   * (0.7), path substring (0.5), metadata string substring (0.3), else 0.
   */
  singleKeywordScore(keyword, node) {
    const nameLower = node.name.toLowerCase();
    if (nameLower === keyword) return 1;
    if (nameLower.includes(keyword)) return 0.7;
    if (node.path?.toLowerCase().includes(keyword)) return 0.5;
    const metadataHit = Object.values(node.metadata).some(
      (value) => typeof value === "string" && value.toLowerCase().includes(keyword)
    );
    return metadataHit ? 0.3 : 0;
  }
};
1890
// Node types that represent source code (as opposed to docs/commits/etc.);
// used by the dead-code reachability analysis below.
var CODE_NODE_TYPES3 = ["file", "function", "class", "method", "interface", "variable"];
var GraphEntropyAdapter = class {
  // Read-only adapter: computes doc-drift, dead-code and snapshot metrics
  // from the graph store without mutating it.
  constructor(store) {
    this.store = store;
  }
  /**
   * Find all `documents` edges and classify them as stale or missing-target.
   *
   * 1. Find all `documents` edges in the graph
   * 2. For each edge, check if the target code node still exists in the store
   * 3. If target doesn't exist -> add to missingTargets
   * 4. If target exists -> compare lastModified timestamps to determine staleness
   */
  computeDriftData() {
    const documentsEdges = this.store.getEdges({ type: "documents" });
    const staleEdges = [];
    const missingTargets = [];
    let freshEdges = 0;
    for (const edge of documentsEdges) {
      // `documents` edges point doc -> code: edge.from is the doc node,
      // edge.to is the documented code node.
      const codeNode = this.store.getNode(edge.to);
      if (!codeNode) {
        // Documented code node no longer exists in the graph.
        missingTargets.push(edge.to);
        continue;
      }
      const docNode = this.store.getNode(edge.from);
      const codeLastModified = codeNode.lastModified;
      const docLastModified = docNode?.lastModified;
      if (codeLastModified && docLastModified) {
        if (codeLastModified > docLastModified) {
          // Code changed after the doc was last touched -> stale.
          staleEdges.push({
            docNodeId: edge.from,
            codeNodeId: edge.to,
            edgeType: edge.type,
            codeLastModified,
            docLastModified
          });
        } else {
          freshEdges++;
        }
      } else {
        // Either timestamp is missing -> conservatively classify as stale.
        staleEdges.push({
          docNodeId: edge.from,
          codeNodeId: edge.to,
          edgeType: edge.type,
          codeLastModified,
          docLastModified
        });
      }
    }
    return { staleEdges, missingTargets, freshEdges };
  }
  /**
   * BFS from entry points to find reachable vs unreachable code nodes.
   *
   * 1. Entry points: file nodes named `index.ts` or with metadata `entryPoint: true`
   * 2. BFS following `imports` and `calls` edges (outbound only)
   * 3. Unreachable = code nodes NOT in visited set
   */
  computeDeadCodeData() {
    const allFileNodes = this.store.findNodes({ type: "file" });
    const entryPoints = [];
    // File-level entry points: index.ts files or explicit entryPoint metadata.
    for (const node of allFileNodes) {
      if (node.name === "index.ts" || node.metadata?.entryPoint === true) {
        entryPoints.push(node.id);
      }
    }
    // Non-file code nodes may also be flagged as entry points via metadata.
    for (const nodeType of CODE_NODE_TYPES3) {
      if (nodeType === "file") continue;
      const nodes = this.store.findNodes({ type: nodeType });
      for (const node of nodes) {
        if (node.metadata?.entryPoint === true) {
          entryPoints.push(node.id);
        }
      }
    }
    const visited = /* @__PURE__ */ new Set();
    // BFS with an index-based head pointer (avoids O(n) Array#shift per pop).
    const queue = [...entryPoints];
    let head = 0;
    while (head < queue.length) {
      const nodeId = queue[head++];
      if (visited.has(nodeId)) continue;
      visited.add(nodeId);
      const importEdges = this.store.getEdges({ from: nodeId, type: "imports" });
      for (const edge of importEdges) {
        if (!visited.has(edge.to)) {
          queue.push(edge.to);
        }
      }
      const callEdges = this.store.getEdges({ from: nodeId, type: "calls" });
      for (const edge of callEdges) {
        if (!visited.has(edge.to)) {
          queue.push(edge.to);
        }
      }
      // `contains` edges keep symbols inside a reachable file reachable too.
      const containsEdges = this.store.getEdges({ from: nodeId, type: "contains" });
      for (const edge of containsEdges) {
        if (!visited.has(edge.to)) {
          queue.push(edge.to);
        }
      }
    }
    // Any code-typed node the traversal never reached is reported unreachable.
    const unreachableNodes = [];
    for (const nodeType of CODE_NODE_TYPES3) {
      const nodes = this.store.findNodes({ type: nodeType });
      for (const node of nodes) {
        if (!visited.has(node.id)) {
          unreachableNodes.push({
            id: node.id,
            type: node.type,
            name: node.name,
            path: node.path
          });
        }
      }
    }
    return {
      reachableNodeIds: visited,
      unreachableNodes,
      entryPoints
    };
  }
  /**
   * Count all nodes and edges by type.
   */
  computeSnapshotSummary() {
    const nodesByType = {};
    const edgesByType = {};
    const allNodes = this.store.findNodes({});
    for (const node of allNodes) {
      nodesByType[node.type] = (nodesByType[node.type] ?? 0) + 1;
    }
    const allEdges = this.store.getEdges({});
    for (const edge of allEdges) {
      edgesByType[edge.type] = (edgesByType[edge.type] ?? 0) + 1;
    }
    // Totals come from the store's own counters rather than re-counting.
    return {
      nodeCount: this.store.nodeCount,
      edgeCount: this.store.edgeCount,
      nodesByType,
      edgesByType
    };
  }
};
var GraphComplexityAdapter = class {
  constructor(store) {
    this.store = store;
  }
  /**
   * Rank function/method nodes by hotspot score.
   *
   * Hotspot score = (number of `references` edges into the containing file,
   * i.e. change frequency) x (the node's cyclomatic complexity, default 1).
   * Results are sorted descending by score; the 95th-percentile score is
   * returned alongside for thresholding.
   */
  computeComplexityHotspots() {
    const callableNodes = [
      ...this.store.findNodes({ type: "function" }),
      ...this.store.findNodes({ type: "method" })
    ];
    if (callableNodes.length === 0) {
      return { hotspots: [], percentile95Score: 0 };
    }
    // Cache change frequency per file so shared files are only counted once.
    const changeFreqByFile = new Map();
    const hotspots = [];
    for (const fn of callableNodes) {
      const complexity = fn.metadata?.cyclomaticComplexity ?? 1;
      const fileId = this.findContainingFileId(fn.id);
      if (!fileId) continue;
      let changeFrequency = changeFreqByFile.get(fileId);
      if (changeFrequency === void 0) {
        changeFrequency = this.store.getEdges({ to: fileId, type: "references" }).length;
        changeFreqByFile.set(fileId, changeFrequency);
      }
      hotspots.push({
        file: fn.path ?? fileId.replace(/^file:/, ""),
        function: fn.name,
        changeFrequency,
        complexity,
        hotspotScore: changeFrequency * complexity
      });
    }
    hotspots.sort((a, b) => b.hotspotScore - a.hotspotScore);
    const percentile95Score = this.computePercentile(
      hotspots.map((h) => h.hotspotScore),
      95
    );
    return { hotspots, percentile95Score };
  }
  // Resolve the file containing a function/method node by following inbound
  // `contains` edges either directly (file -> fn) or through a class
  // (file -> class -> method). Returns undefined when no file is found.
  findContainingFileId(fnId) {
    for (const edge of this.store.getEdges({ to: fnId, type: "contains" })) {
      const owner = this.store.getNode(edge.from);
      if (owner?.type === "file") {
        return owner.id;
      }
      if (owner?.type === "class") {
        for (const classEdge of this.store.getEdges({ to: owner.id, type: "contains" })) {
          const grandparent = this.store.getNode(classEdge.from);
          if (grandparent?.type === "file") {
            return grandparent.id;
          }
        }
      }
    }
    return void 0;
  }
  // Nearest-rank percentile over a score list (input ordering irrelevant).
  computePercentile(descendingScores, percentile) {
    if (descendingScores.length === 0) return 0;
    const ascending = [...descendingScores].sort((a, b) => a - b);
    const rank = Math.ceil(percentile / 100 * ascending.length) - 1;
    return ascending[Math.min(rank, ascending.length - 1)];
  }
};
var GraphCouplingAdapter = class {
  constructor(store) {
    this.store = store;
  }
  /**
   * Compute coupling metrics for every file node in the graph.
   *
   * Per file:
   * - fanOut: outbound `imports` edges
   * - fanIn: inbound `imports` edges
   * - couplingRatio: fanOut / (fanIn + fanOut) rounded to 2 decimals (0 when both are 0)
   * - transitiveDepth: longest chain of outbound `imports` edges (BFS)
   */
  computeCouplingData() {
    const fileNodes = this.store.findNodes({ type: "file" });
    const files = fileNodes.map((node) => {
      const fanOut = this.store.getEdges({ from: node.id, type: "imports" }).length;
      const fanIn = this.store.getEdges({ to: node.id, type: "imports" }).length;
      const total = fanIn + fanOut;
      return {
        file: node.path ?? node.name,
        fanIn,
        fanOut,
        couplingRatio: total === 0 ? 0 : Math.round(fanOut / total * 100) / 100,
        transitiveDepth: this.computeTransitiveDepth(node.id)
      };
    });
    return { files };
  }
  /**
   * BFS along outbound `imports` edges; returns the maximum depth reached.
   */
  computeTransitiveDepth(startId) {
    const seen = new Set([startId]);
    // Frontier entries are [nodeId, depth]; index-based head avoids shift().
    const frontier = [[startId, 0]];
    let deepest = 0;
    for (let i = 0; i < frontier.length; i++) {
      const [current, depth] = frontier[i];
      if (depth > deepest) {
        deepest = depth;
      }
      for (const edge of this.store.getEdges({ from: current, type: "imports" })) {
        if (!seen.has(edge.to)) {
          seen.add(edge.to);
          frontier.push([edge.to, depth + 1]);
        }
      }
    }
    return deepest;
  }
};
// Node types considered most relevant during each development phase; consulted
// when boosting token-budget weights and when filtering context by phase.
var PHASE_NODE_TYPES = {
  implement: ["file", "function", "class", "method", "interface", "variable"],
  review: ["adr", "document", "learning", "commit"],
  debug: ["failure", "learning", "function", "method"],
  plan: ["adr", "document", "module", "layer"]
};
// Set of node types counted as "code" for documentation-coverage checks.
var CODE_NODE_TYPES4 = /* @__PURE__ */ new Set([
  "file",
  "function",
  "class",
  "interface",
  "method",
  "variable"
]);
/**
 * Rough token-count estimate for a graph node: total characters of its name,
 * path and type plus the JSON-serialized metadata, at ~4 characters per token
 * (rounded up).
 */
function estimateNodeTokens(node) {
  const textParts = [node.name, node.path, node.type];
  let totalChars = textParts.reduce((sum, part) => sum + (part?.length ?? 0), 0);
  if (node.metadata) {
    totalChars += JSON.stringify(node.metadata).length;
  }
  return Math.ceil(totalChars / 4);
}
var Assembler = class {
  // Backing graph store queried for nodes and edges.
  store;
  // Optional vector store used for semantic search (may be undefined).
  vectorStore;
  // Lazily created FusionLayer; see getFusionLayer().
  fusionLayer;
  constructor(store, vectorStore) {
    this.store = store;
    this.vectorStore = vectorStore;
  }
  // Lazy accessor: builds the FusionLayer on first use and caches it.
  getFusionLayer() {
    if (!this.fusionLayer) {
      this.fusionLayer = new FusionLayer(this.store, this.vectorStore);
    }
    return this.fusionLayer;
  }
  /**
   * Assemble context relevant to an intent string within a token budget.
   * Seeds from fused search results, expands 2 hops via ContextQL, then trims
   * nodes by descending score until the budget is reached.
   */
  assembleContext(intent, tokenBudget = 4e3) {
    const fusion = this.getFusionLayer();
    // Top-10 fused keyword/semantic matches seed the expansion.
    const topResults = fusion.search(intent, 10);
    if (topResults.length === 0) {
      return {
        nodes: [],
        edges: [],
        tokenEstimate: 0,
        intent,
        truncated: false
      };
    }
    const contextQL = new ContextQL(this.store);
    const nodeMap = /* @__PURE__ */ new Map();
    const edgeSet = /* @__PURE__ */ new Set();
    const collectedEdges = [];
    const nodeScores = /* @__PURE__ */ new Map();
    for (const result of topResults) {
      nodeScores.set(result.nodeId, result.score);
      // Expand each seed node up to 2 hops to pull in related context.
      const expanded = contextQL.execute({
        rootNodeIds: [result.nodeId],
        maxDepth: 2
      });
      for (const node of expanded.nodes) {
        if (!nodeMap.has(node.id)) {
          nodeMap.set(node.id, node);
          if (!nodeScores.has(node.id)) {
            // Expansion-only nodes inherit half the seed's score.
            nodeScores.set(node.id, result.score * 0.5);
          }
        }
      }
      for (const edge of expanded.edges) {
        // Dedupe edges by (from, to, type).
        const key = `${edge.from}|${edge.to}|${edge.type}`;
        if (!edgeSet.has(key)) {
          edgeSet.add(key);
          collectedEdges.push(edge);
        }
      }
    }
    // Highest-scored nodes are kept first when trimming to the budget.
    const sortedNodes = Array.from(nodeMap.values()).sort((a, b) => {
      return (nodeScores.get(b.id) ?? 0) - (nodeScores.get(a.id) ?? 0);
    });
    let tokenEstimate = 0;
    const keptNodes = [];
    let truncated = false;
    for (const node of sortedNodes) {
      const nodeTokens = estimateNodeTokens(node);
      // Stop at the budget, but always keep at least one node.
      if (tokenEstimate + nodeTokens > tokenBudget && keptNodes.length > 0) {
        truncated = true;
        break;
      }
      tokenEstimate += nodeTokens;
      keptNodes.push(node);
    }
    // Keep only edges whose both endpoints survived the budget cut.
    const keptNodeIds = new Set(keptNodes.map((n) => n.id));
    const keptEdges = collectedEdges.filter(
      (e) => keptNodeIds.has(e.from) && keptNodeIds.has(e.to)
    );
    return {
      nodes: keptNodes,
      edges: keptEdges,
      tokenEstimate,
      intent,
      truncated
    };
  }
  /**
   * Compute a token budget allocation across node types, proportional to the
   * number of nodes of each type. Types relevant to `phase` (if given) are
   * double-weighted. Also reports per-module edge density.
   */
  computeBudget(totalTokens, phase) {
    const allNodes = this.store.findNodes({});
    const typeCounts = {};
    for (const node of allNodes) {
      typeCounts[node.type] = (typeCounts[node.type] ?? 0) + 1;
    }
    // Module connectivity (in + out degree), reported alongside allocations.
    const density = {};
    const moduleNodes = this.store.findNodes({ type: "module" });
    for (const mod of moduleNodes) {
      const outEdges = this.store.getEdges({ from: mod.id });
      const inEdges = this.store.getEdges({ to: mod.id });
      density[mod.name] = outEdges.length + inEdges.length;
    }
    // Node types listed for the phase receive double weight.
    const boostTypes = phase ? PHASE_NODE_TYPES[phase] : void 0;
    const boostFactor = 2;
    let weightedTotal = 0;
    const weights = {};
    for (const [type, count] of Object.entries(typeCounts)) {
      const isBoosted = boostTypes?.includes(type);
      const weight = count * (isBoosted ? boostFactor : 1);
      weights[type] = weight;
      weightedTotal += weight;
    }
    const allocations = {};
    if (weightedTotal > 0) {
      let allocated = 0;
      const types = Object.keys(weights);
      for (let i = 0; i < types.length; i++) {
        const type = types[i];
        if (i === types.length - 1) {
          // Last type absorbs rounding remainder so shares sum to totalTokens.
          allocations[type] = totalTokens - allocated;
        } else {
          const share = Math.round(weights[type] / weightedTotal * totalTokens);
          allocations[type] = share;
          allocated += share;
        }
      }
    }
    return { total: totalTokens, allocations, density };
  }
  /**
   * Filter graph nodes relevant to a development phase. Unknown phases log a
   * warning and fall back to the "implement" node types.
   */
  filterForPhase(phase) {
    const nodeTypes = PHASE_NODE_TYPES[phase];
    if (!nodeTypes) {
      console.warn(
        `[harness] Unknown phase "${phase}" in filterForPhase. Returning all code nodes.`
      );
    }
    const relevantTypes = nodeTypes ?? PHASE_NODE_TYPES["implement"] ?? [];
    const nodes = [];
    const filePathSet = /* @__PURE__ */ new Set();
    for (const type of relevantTypes) {
      const found = this.store.findNodes({ type });
      for (const node of found) {
        nodes.push(node);
        if (node.path) {
          filePathSet.add(node.path);
        }
      }
    }
    return {
      phase,
      nodes,
      filePaths: Array.from(filePathSet)
    };
  }
  /**
   * Generate a markdown repository map from graph structure: modules ordered
   * by connectivity (with their contained files), then the top non-barrel
   * files by outbound degree as "Entry Points".
   */
  generateMap() {
    const moduleNodes = this.store.findNodes({ type: "module" });
    const modulesWithEdgeCount = moduleNodes.map((mod) => {
      const outEdges = this.store.getEdges({ from: mod.id });
      const inEdges = this.store.getEdges({ to: mod.id });
      return { module: mod, edgeCount: outEdges.length + inEdges.length };
    });
    modulesWithEdgeCount.sort((a, b) => b.edgeCount - a.edgeCount);
    const lines = ["# Repository Structure", ""];
    if (modulesWithEdgeCount.length > 0) {
      lines.push("## Modules", "");
      for (const { module: mod, edgeCount } of modulesWithEdgeCount) {
        lines.push(`### ${mod.name} (${edgeCount} connections)`);
        lines.push("");
        // List the files each module contains, with their symbol counts.
        const containsEdges = this.store.getEdges({ from: mod.id, type: "contains" });
        for (const edge of containsEdges) {
          const fileNode = this.store.getNode(edge.to);
          if (fileNode && fileNode.type === "file") {
            const symbolEdges = this.store.getEdges({ from: fileNode.id, type: "contains" });
            lines.push(`- ${fileNode.path ?? fileNode.name} (${symbolEdges.length} symbols)`);
          }
        }
        lines.push("");
      }
    }
    // "Entry points" here = top-5 non-barrel files with the most outbound edges.
    const fileNodes = this.store.findNodes({ type: "file" });
    const nonBarrelFiles = fileNodes.filter((n) => !n.name.startsWith("index."));
    const filesWithOutDegree = nonBarrelFiles.map((f) => {
      const outEdges = this.store.getEdges({ from: f.id });
      return { file: f, outDegree: outEdges.length };
    });
    filesWithOutDegree.sort((a, b) => b.outDegree - a.outDegree);
    const entryPoints = filesWithOutDegree.filter((f) => f.outDegree > 0).slice(0, 5);
    if (entryPoints.length > 0) {
      lines.push("## Entry Points", "");
      for (const { file, outDegree } of entryPoints) {
        lines.push(`- ${file.path ?? file.name} (${outDegree} outbound edges)`);
      }
      lines.push("");
    }
    return lines.join("\n");
  }
  /**
   * Check documentation coverage of code nodes. A node is documented when at
   * least one `documents` edge targets it.
   */
  checkCoverage() {
    const codeNodes = [];
    for (const type of CODE_NODE_TYPES4) {
      codeNodes.push(...this.store.findNodes({ type }));
    }
    const documented = [];
    const undocumented = [];
    for (const node of codeNodes) {
      const documentsEdges = this.store.getEdges({ to: node.id, type: "documents" });
      if (documentsEdges.length > 0) {
        documented.push(node.id);
      } else {
        undocumented.push(node.id);
      }
    }
    const totalCodeNodes = codeNodes.length;
    const coveragePercentage = totalCodeNodes > 0 ? documented.length / totalCodeNodes * 100 : 0;
    return {
      documented,
      undocumented,
      coveragePercentage,
      totalCodeNodes
    };
  }
};
var GraphConstraintAdapter = class {
  /**
   * Checks architectural layering constraints against the file import graph.
   */
  constructor(store) {
    this.store = store;
  }
  /**
   * Project the graph's file nodes and `imports` edges into a plain
   * path-based dependency graph (falling back to node ids where a path is
   * missing). Edge metadata supplies importType (default "static") and line
   * number (default 0).
   */
  computeDependencyGraph() {
    const fileNodes = this.store.findNodes({ type: "file" });
    const nodes = fileNodes.map((fileNode) => fileNode.path ?? fileNode.id);
    const edges = this.store.getEdges({ type: "imports" }).map((edge) => {
      const sourcePath = this.store.getNode(edge.from)?.path ?? edge.from;
      const targetPath = this.store.getNode(edge.to)?.path ?? edge.to;
      return {
        from: sourcePath,
        to: targetPath,
        importType: edge.metadata?.importType ?? "static",
        line: edge.metadata?.line ?? 0
      };
    });
    return { nodes, edges };
  }
  /**
   * Report imports that cross layers not allowed by the layer configuration.
   * Edges whose endpoints resolve to no layer, or to the same layer, are skipped.
   */
  computeLayerViolations(layers, rootDir) {
    const { edges } = this.computeDependencyGraph();
    const violations = [];
    for (const edge of edges) {
      const fromLayer = this.resolveLayer(relative2(rootDir, edge.from), layers);
      const toLayer = this.resolveLayer(relative2(rootDir, edge.to), layers);
      if (!fromLayer || !toLayer) continue;
      if (fromLayer.name === toLayer.name) continue;
      if (fromLayer.allowedDependencies.includes(toLayer.name)) continue;
      violations.push({
        file: edge.from,
        imports: edge.to,
        fromLayer: fromLayer.name,
        toLayer: toLayer.name,
        reason: "WRONG_LAYER",
        line: edge.line
      });
    }
    return violations;
  }
  /**
   * First layer whose glob patterns match the file path, or undefined.
   */
  resolveLayer(filePath, layers) {
    return layers.find(
      (layer) => layer.patterns.some((pattern) => minimatch(filePath, pattern))
    );
  }
};
/**
 * True when `obj` looks like a W3C DTCG design token: a non-null object
 * carrying both `$value` and `$type` members.
 */
function isDTCGToken(obj) {
  if (typeof obj !== "object" || obj === null) {
    return false;
  }
  return "$value" in obj && "$type" in obj;
}
/**
 * Read a UTF-8 text file, resolving to null (instead of rejecting) when the
 * file is missing or unreadable.
 */
async function readFileOrNull(filePath) {
  let content = null;
  try {
    content = await fs4.readFile(filePath, "utf-8");
  } catch {
    // A missing/unreadable file is an expected condition; signal it with null.
  }
  return content;
}
/**
 * Parse JSON text, returning `{ data }` on success or `{ error }` with a
 * file-qualified message on failure. Never throws.
 */
function parseJsonOrError(content, filePath) {
  try {
    const data = JSON.parse(content);
    return { data };
  } catch (err) {
    const reason = err instanceof Error ? err.message : String(err);
    return { error: `Failed to parse ${filePath}: ${reason}` };
  }
}
/**
 * Recursively walk a DTCG token tree, adding one `design_token` node to the
 * store for every token leaf (an object with `$value` and `$type`).
 * Keys starting with `$` are DTCG metadata and are skipped. `topGroup` pins
 * the first-level group name for all nested tokens. Returns the number of
 * token nodes added.
 */
function walkDTCGTokens(store, obj, groupPath, topGroup, tokensPath) {
  let added = 0;
  for (const [key, value] of Object.entries(obj)) {
    if (key.startsWith("$")) continue;
    if (typeof value !== "object" || value === null) continue;
    if ("$value" in value && "$type" in value) {
      // Token leaf: node id/name is the dotted path from the root.
      const tokenPath = [...groupPath, key].join(".");
      store.addNode({
        id: `design_token:${tokenPath}`,
        type: "design_token",
        name: tokenPath,
        path: tokensPath,
        metadata: {
          tokenType: value.$type,
          value: value.$value,
          group: topGroup || groupPath[0] || key,
          ...value.$description ? { description: value.$description } : {}
        }
      });
      added++;
    } else {
      // Plain group: recurse, remembering the first-level group name.
      added += walkDTCGTokens(
        store,
        value,
        [...groupPath, key],
        topGroup || key,
        tokensPath
      );
    }
  }
  return added;
}
/**
 * Pull aesthetic-direction fields out of DESIGN.md content.
 * Recognizes inline `**Style:**`, `**Tone:**` and `**Differentiator:**`
 * labels plus a `level: strict|standard|permissive` line for strictness.
 * Only fields actually present appear on the result.
 */
function parseAestheticDirection(content) {
  const result = {};
  const fieldPatterns = [
    ["style", /\*\*Style:\*\*\s*(.+)/],
    ["tone", /\*\*Tone:\*\*\s*(.+)/],
    ["differentiator", /\*\*Differentiator:\*\*\s*(.+)/],
    ["strictness", /^level:\s*(strict|standard|permissive)\s*$/m]
  ];
  for (const [field, pattern] of fieldPatterns) {
    const match = content.match(pattern);
    if (match) {
      result[field] = match[1].trim();
    }
  }
  return result;
}
/**
 * Collect the `- ` bullet items under a `## Anti-Patterns` heading
 * (case-insensitive). Collection stops at the next `##` heading; returns []
 * when the section is absent.
 */
function parseAntiPatterns(content) {
  const patterns = [];
  let collecting = false;
  for (const line of content.split("\n")) {
    if (!collecting) {
      collecting = /^##\s+Anti-Patterns/i.test(line);
      continue;
    }
    if (/^##\s+/.test(line)) {
      break;
    }
    const bullet = line.match(/^-\s+(.+)/);
    if (bullet) {
      patterns.push(bullet[1].trim());
    }
  }
  return patterns;
}
var DesignIngestor = class {
  /**
   * Ingests design artifacts (DTCG tokens.json, DESIGN.md intent) into the
   * graph store.
   */
  constructor(store) {
    this.store = store;
  }
  /**
   * Ingest a DTCG tokens file. A missing file yields an empty result;
   * malformed JSON is reported via `errors` without throwing.
   */
  async ingestTokens(tokensPath) {
    const startedAt = Date.now();
    const content = await readFileOrNull(tokensPath);
    if (content === null) {
      return emptyResult(Date.now() - startedAt);
    }
    const parsed = parseJsonOrError(content, tokensPath);
    if ("error" in parsed) {
      return { ...emptyResult(Date.now() - startedAt), errors: [parsed.error] };
    }
    return {
      nodesAdded: walkDTCGTokens(this.store, parsed.data, [], "", tokensPath),
      nodesUpdated: 0,
      edgesAdded: 0,
      edgesUpdated: 0,
      errors: [],
      durationMs: Date.now() - startedAt
    };
  }
  /**
   * Ingest DESIGN.md: one `aesthetic_intent` node for the parsed direction
   * plus one `design_constraint` node per anti-pattern bullet.
   */
  async ingestDesignIntent(designPath) {
    const startedAt = Date.now();
    const content = await readFileOrNull(designPath);
    if (content === null) {
      return emptyResult(Date.now() - startedAt);
    }
    const direction = parseAestheticDirection(content);
    // Only copy fields that were actually found (truthy) in the document.
    const metadata = {
      ...direction.style ? { style: direction.style } : {},
      ...direction.tone ? { tone: direction.tone } : {},
      ...direction.differentiator ? { differentiator: direction.differentiator } : {},
      ...direction.strictness ? { strictness: direction.strictness } : {}
    };
    this.store.addNode({
      id: "aesthetic_intent:project",
      type: "aesthetic_intent",
      name: "project",
      path: designPath,
      metadata
    });
    let nodesAdded = 1;
    for (const rule of parseAntiPatterns(content)) {
      this.store.addNode({
        id: `design_constraint:${hash(rule)}`,
        type: "design_constraint",
        name: rule,
        path: designPath,
        metadata: { rule, severity: "warn", scope: "project" }
      });
      nodesAdded++;
    }
    return {
      nodesAdded,
      nodesUpdated: 0,
      edgesAdded: 0,
      edgesUpdated: 0,
      errors: [],
      durationMs: Date.now() - startedAt
    };
  }
  /**
   * Ingest tokens.json and DESIGN.md from a design directory in parallel and
   * merge the two results; durationMs covers the combined run.
   */
  async ingestAll(designDir) {
    const startedAt = Date.now();
    const [tokensResult, intentResult] = await Promise.all([
      this.ingestTokens(path5.join(designDir, "tokens.json")),
      this.ingestDesignIntent(path5.join(designDir, "DESIGN.md"))
    ]);
    return { ...mergeResults(tokensResult, intentResult), durationMs: Date.now() - startedAt };
  }
};
var DesignConstraintAdapter = class {
  /**
   * Checks source text against ingested design tokens, flagging hardcoded
   * colors and fonts that bypass the token system.
   */
  constructor(store) {
    this.store = store;
  }
  /**
   * DESIGN-001: flag hex color literals in `source` that are not present in
   * the store's `color` design tokens (comparison is case-insensitive).
   * Repeated occurrences of the same hex value are each reported.
   */
  checkForHardcodedColors(source, file, strictness) {
    const severity = this.mapSeverity(strictness);
    const tokenColors = /* @__PURE__ */ new Set();
    for (const node of this.store.findNodes({ type: "design_token" })) {
      if (node.metadata.tokenType === "color" && typeof node.metadata.value === "string") {
        tokenColors.add(node.metadata.value.toLowerCase());
      }
    }
    const violations = [];
    // Fresh regex per call: /g regexes keep lastIndex state between calls.
    const hexPattern = /#[0-9a-fA-F]{3,8}\b/g;
    let match;
    while ((match = hexPattern.exec(source)) !== null) {
      const hexValue = match[0];
      if (tokenColors.has(hexValue.toLowerCase())) continue;
      violations.push({
        code: "DESIGN-001",
        file,
        message: `Hardcoded color ${hexValue} is not in the design token set`,
        severity,
        value: hexValue
      });
    }
    return violations;
  }
  /**
   * DESIGN-002: flag font families set via `fontFamily:`/`font-family:` that
   * do not appear in any typography token. Each family is reported at most
   * once (case-insensitive dedupe).
   */
  checkForHardcodedFonts(source, file, strictness) {
    const severity = this.mapSeverity(strictness);
    const tokenFonts = /* @__PURE__ */ new Set();
    for (const node of this.store.findNodes({ type: "design_token" })) {
      if (node.metadata.tokenType !== "typography") continue;
      const value = node.metadata.value;
      if (typeof value === "object" && value !== null && "fontFamily" in value) {
        tokenFonts.add(value.fontFamily.toLowerCase());
      }
    }
    const violations = [];
    const reported = /* @__PURE__ */ new Set();
    for (const pattern of [/fontFamily:\s*['"]([^'"]+)['"]/g, /font-family:\s*['"]([^'"]+)['"]/g]) {
      let match;
      while ((match = pattern.exec(source)) !== null) {
        const fontName = match[1];
        const lower = fontName.toLowerCase();
        if (reported.has(lower)) continue;
        reported.add(lower);
        if (tokenFonts.has(lower)) continue;
        violations.push({
          code: "DESIGN-002",
          file,
          message: `Hardcoded font family "${fontName}" is not in the design token set`,
          severity,
          value: fontName
        });
      }
    }
    return violations;
  }
  /**
   * Run all design checks over `source` and concatenate the violations.
   */
  checkAll(source, file, strictness) {
    return [
      ...this.checkForHardcodedColors(source, file, strictness),
      ...this.checkForHardcodedFonts(source, file, strictness)
    ];
  }
  /**
   * Map a strictness level to a violation severity.
   * Fix: the original switch had no default case, so an unrecognized
   * strictness string produced `severity: undefined` on every violation;
   * unknown levels now fall back to "warn" (same as "standard").
   */
  mapSeverity(strictness = "standard") {
    switch (strictness) {
      case "permissive":
        return "info";
      case "strict":
        return "error";
      case "standard":
      default:
        return "warn";
    }
  }
};
+ var GraphFeedbackAdapter = class {
2707
+ constructor(store) {
2708
+ this.store = store;
2709
+ }
2710
+ computeImpactData(changedFiles) {
2711
+ const affectedTests = [];
2712
+ const affectedDocs = [];
2713
+ let impactScope = 0;
2714
+ for (const filePath of changedFiles) {
2715
+ const fileNodes = this.store.findNodes({ path: filePath });
2716
+ if (fileNodes.length === 0) continue;
2717
+ const fileNode = fileNodes[0];
2718
+ const inboundImports = this.store.getEdges({ to: fileNode.id, type: "imports" });
2719
+ for (const edge of inboundImports) {
2720
+ const importerNode = this.store.getNode(edge.from);
2721
+ if (importerNode?.path && /test/i.test(importerNode.path)) {
2722
+ affectedTests.push({
2723
+ testFile: importerNode.path,
2724
+ coversFile: filePath
2725
+ });
2726
+ }
2727
+ impactScope++;
2728
+ }
2729
+ const docsEdges = this.store.getEdges({ to: fileNode.id, type: "documents" });
2730
+ for (const edge of docsEdges) {
2731
+ const docNode = this.store.getNode(edge.from);
2732
+ if (docNode) {
2733
+ affectedDocs.push({
2734
+ docFile: docNode.path ?? docNode.name,
2735
+ documentsFile: filePath
2736
+ });
2737
+ }
2738
+ }
2739
+ }
2740
+ return { affectedTests, affectedDocs, impactScope };
2741
+ }
2742
+ computeHarnessCheckData() {
2743
+ const nodeCount = this.store.nodeCount;
2744
+ const edgeCount = this.store.edgeCount;
2745
+ const violatesEdges = this.store.getEdges({ type: "violates" });
2746
+ const constraintViolations = violatesEdges.length;
2747
+ const fileNodes = this.store.findNodes({ type: "file" });
2748
+ let undocumentedFiles = 0;
2749
+ for (const node of fileNodes) {
2750
+ const docsEdges = this.store.getEdges({ to: node.id, type: "documents" });
2751
+ if (docsEdges.length === 0) {
2752
+ undocumentedFiles++;
2753
+ }
2754
+ }
2755
+ let unreachableNodes = 0;
2756
+ for (const node of fileNodes) {
2757
+ const inboundImports = this.store.getEdges({ to: node.id, type: "imports" });
2758
+ if (inboundImports.length === 0) {
2759
+ const isEntryPoint = node.name === "index.ts" || node.path !== void 0 && node.path.endsWith("/index.ts") || node.metadata?.entryPoint === true;
2760
+ if (!isEntryPoint) {
2761
+ unreachableNodes++;
2762
+ }
2763
+ }
2764
+ }
2765
+ return {
2766
+ graphExists: true,
2767
+ nodeCount,
2768
+ edgeCount,
2769
+ constraintViolations,
2770
+ undocumentedFiles,
2771
+ unreachableNodes
2772
+ };
2773
+ }
2774
+ };
2775
// Public version constant exported from this module.
var VERSION = "0.2.0";
+ export {
2777
+ Assembler,
2778
+ CIConnector,
2779
+ CURRENT_SCHEMA_VERSION,
2780
+ CodeIngestor,
2781
+ ConfluenceConnector,
2782
+ ContextQL,
2783
+ DesignConstraintAdapter,
2784
+ DesignIngestor,
2785
+ EDGE_TYPES,
2786
+ FusionLayer,
2787
+ GitIngestor,
2788
+ GraphComplexityAdapter,
2789
+ GraphConstraintAdapter,
2790
+ GraphCouplingAdapter,
2791
+ GraphEdgeSchema,
2792
+ GraphEntropyAdapter,
2793
+ GraphFeedbackAdapter,
2794
+ GraphNodeSchema,
2795
+ GraphStore,
2796
+ JiraConnector,
2797
+ KnowledgeIngestor,
2798
+ NODE_TYPES,
2799
+ OBSERVABILITY_TYPES,
2800
+ SlackConnector,
2801
+ SyncManager,
2802
+ TopologicalLinker,
2803
+ VERSION,
2804
+ VectorStore,
2805
+ linkToCode,
2806
+ loadGraph,
2807
+ project,
2808
+ saveGraph
2809
+ };