@harness-engineering/graph 0.3.1 → 0.3.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -34,11 +34,14 @@ __export(index_exports, {
34
34
  CIConnector: () => CIConnector,
35
35
  CURRENT_SCHEMA_VERSION: () => CURRENT_SCHEMA_VERSION,
36
36
  CodeIngestor: () => CodeIngestor,
37
+ ConflictPredictor: () => ConflictPredictor,
37
38
  ConfluenceConnector: () => ConfluenceConnector,
38
39
  ContextQL: () => ContextQL,
39
40
  DesignConstraintAdapter: () => DesignConstraintAdapter,
40
41
  DesignIngestor: () => DesignIngestor,
41
42
  EDGE_TYPES: () => EDGE_TYPES,
43
+ EntityExtractor: () => EntityExtractor,
44
+ EntityResolver: () => EntityResolver,
42
45
  FusionLayer: () => FusionLayer,
43
46
  GitIngestor: () => GitIngestor,
44
47
  GraphAnomalyAdapter: () => GraphAnomalyAdapter,
@@ -50,15 +53,21 @@ __export(index_exports, {
50
53
  GraphFeedbackAdapter: () => GraphFeedbackAdapter,
51
54
  GraphNodeSchema: () => GraphNodeSchema,
52
55
  GraphStore: () => GraphStore,
56
+ INTENTS: () => INTENTS,
57
+ IntentClassifier: () => IntentClassifier,
53
58
  JiraConnector: () => JiraConnector,
54
59
  KnowledgeIngestor: () => KnowledgeIngestor,
55
60
  NODE_TYPES: () => NODE_TYPES,
56
61
  OBSERVABILITY_TYPES: () => OBSERVABILITY_TYPES,
62
+ ResponseFormatter: () => ResponseFormatter,
57
63
  SlackConnector: () => SlackConnector,
58
64
  SyncManager: () => SyncManager,
65
+ TaskIndependenceAnalyzer: () => TaskIndependenceAnalyzer,
59
66
  TopologicalLinker: () => TopologicalLinker,
60
67
  VERSION: () => VERSION,
61
68
  VectorStore: () => VectorStore,
69
+ askGraph: () => askGraph,
70
+ groupNodesByImpact: () => groupNodesByImpact,
62
71
  linkToCode: () => linkToCode,
63
72
  loadGraph: () => loadGraph,
64
73
  project: () => project,
@@ -425,6 +434,53 @@ var VectorStore = class _VectorStore {
425
434
  };
426
435
 
427
436
  // src/query/ContextQL.ts
437
+ function edgeKey(e) {
438
+ return `${e.from}|${e.to}|${e.type}`;
439
+ }
440
+ function addEdge(state, edge) {
441
+ const key = edgeKey(edge);
442
+ if (!state.edgeSet.has(key)) {
443
+ state.edgeSet.add(key);
444
+ state.resultEdges.push(edge);
445
+ }
446
+ }
447
+ function shouldPruneNode(neighbor, pruneObservability, params) {
448
+ if (pruneObservability && OBSERVABILITY_TYPES.has(neighbor.type)) return true;
449
+ if (params.includeTypes && !params.includeTypes.includes(neighbor.type)) return true;
450
+ if (params.excludeTypes && params.excludeTypes.includes(neighbor.type)) return true;
451
+ return false;
452
+ }
453
+ function isEdgeExcluded(edge, params) {
454
+ return !!(params.includeEdges && !params.includeEdges.includes(edge.type));
455
+ }
456
+ function processNeighbor(store, edge, neighborId, nextDepth, queue, state, pruneObservability, params) {
457
+ if (isEdgeExcluded(edge, params)) return;
458
+ if (state.visited.has(neighborId)) {
459
+ addEdge(state, edge);
460
+ return;
461
+ }
462
+ const neighbor = store.getNode(neighborId);
463
+ if (!neighbor) return;
464
+ state.visited.add(neighborId);
465
+ if (shouldPruneNode(neighbor, pruneObservability, params)) {
466
+ state.pruned++;
467
+ return;
468
+ }
469
+ state.resultNodeMap.set(neighborId, neighbor);
470
+ addEdge(state, edge);
471
+ queue.push({ id: neighborId, depth: nextDepth });
472
+ }
473
+ function addCrossEdges(store, state) {
474
+ const resultNodeIds = new Set(state.resultNodeMap.keys());
475
+ for (const nodeId of resultNodeIds) {
476
+ const outEdges = store.getEdges({ from: nodeId });
477
+ for (const edge of outEdges) {
478
+ if (resultNodeIds.has(edge.to)) {
479
+ addEdge(state, edge);
480
+ }
481
+ }
482
+ }
483
+ }
428
484
  var ContextQL = class {
429
485
  store;
430
486
  constructor(store) {
@@ -434,89 +490,69 @@ var ContextQL = class {
434
490
  const maxDepth = params.maxDepth ?? 3;
435
491
  const bidirectional = params.bidirectional ?? false;
436
492
  const pruneObservability = params.pruneObservability ?? true;
437
- const visited = /* @__PURE__ */ new Set();
438
- const resultNodeMap = /* @__PURE__ */ new Map();
439
- const resultEdges = [];
440
- const edgeSet = /* @__PURE__ */ new Set();
441
- let pruned = 0;
442
- let depthReached = 0;
443
- const edgeKey = (e) => `${e.from}|${e.to}|${e.type}`;
444
- const addEdge = (edge) => {
445
- const key = edgeKey(edge);
446
- if (!edgeSet.has(key)) {
447
- edgeSet.add(key);
448
- resultEdges.push(edge);
449
- }
493
+ const state = {
494
+ visited: /* @__PURE__ */ new Set(),
495
+ resultNodeMap: /* @__PURE__ */ new Map(),
496
+ resultEdges: [],
497
+ edgeSet: /* @__PURE__ */ new Set(),
498
+ pruned: 0,
499
+ depthReached: 0
450
500
  };
451
501
  const queue = [];
452
- for (const rootId of params.rootNodeIds) {
502
+ this.seedRootNodes(params.rootNodeIds, state, queue);
503
+ this.runBFS(queue, maxDepth, bidirectional, pruneObservability, params, state);
504
+ addCrossEdges(this.store, state);
505
+ return {
506
+ nodes: Array.from(state.resultNodeMap.values()),
507
+ edges: state.resultEdges,
508
+ stats: {
509
+ totalTraversed: state.visited.size,
510
+ totalReturned: state.resultNodeMap.size,
511
+ pruned: state.pruned,
512
+ depthReached: state.depthReached
513
+ }
514
+ };
515
+ }
516
+ seedRootNodes(rootNodeIds, state, queue) {
517
+ for (const rootId of rootNodeIds) {
453
518
  const node = this.store.getNode(rootId);
454
519
  if (node) {
455
- visited.add(rootId);
456
- resultNodeMap.set(rootId, node);
520
+ state.visited.add(rootId);
521
+ state.resultNodeMap.set(rootId, node);
457
522
  queue.push({ id: rootId, depth: 0 });
458
523
  }
459
524
  }
525
+ }
526
+ runBFS(queue, maxDepth, bidirectional, pruneObservability, params, state) {
460
527
  let head = 0;
461
528
  while (head < queue.length) {
462
529
  const entry = queue[head++];
463
530
  const { id: currentId, depth } = entry;
464
531
  if (depth >= maxDepth) continue;
465
532
  const nextDepth = depth + 1;
466
- if (nextDepth > depthReached) depthReached = nextDepth;
467
- const outEdges = this.store.getEdges({ from: currentId });
468
- const inEdges = bidirectional ? this.store.getEdges({ to: currentId }) : [];
469
- const allEdges = [
470
- ...outEdges.map((e) => ({ edge: e, neighborId: e.to })),
471
- ...inEdges.map((e) => ({ edge: e, neighborId: e.from }))
472
- ];
533
+ if (nextDepth > state.depthReached) state.depthReached = nextDepth;
534
+ const allEdges = this.gatherEdges(currentId, bidirectional);
473
535
  for (const { edge, neighborId } of allEdges) {
474
- if (params.includeEdges && !params.includeEdges.includes(edge.type)) {
475
- continue;
476
- }
477
- if (visited.has(neighborId)) {
478
- addEdge(edge);
479
- continue;
480
- }
481
- const neighbor = this.store.getNode(neighborId);
482
- if (!neighbor) continue;
483
- visited.add(neighborId);
484
- if (pruneObservability && OBSERVABILITY_TYPES.has(neighbor.type)) {
485
- pruned++;
486
- continue;
487
- }
488
- if (params.includeTypes && !params.includeTypes.includes(neighbor.type)) {
489
- pruned++;
490
- continue;
491
- }
492
- if (params.excludeTypes && params.excludeTypes.includes(neighbor.type)) {
493
- pruned++;
494
- continue;
495
- }
496
- resultNodeMap.set(neighborId, neighbor);
497
- addEdge(edge);
498
- queue.push({ id: neighborId, depth: nextDepth });
499
- }
500
- }
501
- const resultNodeIds = new Set(resultNodeMap.keys());
502
- for (const nodeId of resultNodeIds) {
503
- const outEdges = this.store.getEdges({ from: nodeId });
504
- for (const edge of outEdges) {
505
- if (resultNodeIds.has(edge.to)) {
506
- addEdge(edge);
507
- }
536
+ processNeighbor(
537
+ this.store,
538
+ edge,
539
+ neighborId,
540
+ nextDepth,
541
+ queue,
542
+ state,
543
+ pruneObservability,
544
+ params
545
+ );
508
546
  }
509
547
  }
510
- return {
511
- nodes: Array.from(resultNodeMap.values()),
512
- edges: resultEdges,
513
- stats: {
514
- totalTraversed: visited.size,
515
- totalReturned: resultNodeMap.size,
516
- pruned,
517
- depthReached
518
- }
519
- };
548
+ }
549
+ gatherEdges(nodeId, bidirectional) {
550
+ const outEdges = this.store.getEdges({ from: nodeId });
551
+ const inEdges = bidirectional ? this.store.getEdges({ to: nodeId }) : [];
552
+ return [
553
+ ...outEdges.map((e) => ({ edge: e, neighborId: e.to })),
554
+ ...inEdges.map((e) => ({ edge: e, neighborId: e.from }))
555
+ ];
520
556
  }
521
557
  };
522
558
 
@@ -534,9 +570,50 @@ function project(nodes, spec) {
534
570
  });
535
571
  }
536
572
 
573
+ // src/query/groupImpact.ts
574
+ var TEST_TYPES = /* @__PURE__ */ new Set(["test_result"]);
575
+ var DOC_TYPES = /* @__PURE__ */ new Set(["adr", "decision", "document", "learning"]);
576
+ var CODE_TYPES = /* @__PURE__ */ new Set([
577
+ "file",
578
+ "module",
579
+ "class",
580
+ "interface",
581
+ "function",
582
+ "method",
583
+ "variable"
584
+ ]);
585
+ function groupNodesByImpact(nodes, excludeId) {
586
+ const tests = [];
587
+ const docs = [];
588
+ const code = [];
589
+ const other = [];
590
+ for (const node of nodes) {
591
+ if (excludeId && node.id === excludeId) continue;
592
+ if (TEST_TYPES.has(node.type)) {
593
+ tests.push(node);
594
+ } else if (DOC_TYPES.has(node.type)) {
595
+ docs.push(node);
596
+ } else if (CODE_TYPES.has(node.type)) {
597
+ code.push(node);
598
+ } else {
599
+ other.push(node);
600
+ }
601
+ }
602
+ return { tests, docs, code, other };
603
+ }
604
+
537
605
  // src/ingest/CodeIngestor.ts
538
606
  var fs = __toESM(require("fs/promises"));
539
607
  var path = __toESM(require("path"));
608
+ var SKIP_METHOD_NAMES = /* @__PURE__ */ new Set(["constructor", "if", "for", "while", "switch"]);
609
+ function countBraces(line) {
610
+ let net = 0;
611
+ for (const ch of line) {
612
+ if (ch === "{") net++;
613
+ else if (ch === "}") net--;
614
+ }
615
+ return net;
616
+ }
540
617
  var CodeIngestor = class {
541
618
  constructor(store) {
542
619
  this.store = store;
@@ -551,41 +628,9 @@ var CodeIngestor = class {
551
628
  const fileContents = /* @__PURE__ */ new Map();
552
629
  for (const filePath of files) {
553
630
  try {
554
- const relativePath = path.relative(rootDir, filePath).replace(/\\/g, "/");
555
- const content = await fs.readFile(filePath, "utf-8");
556
- const stat2 = await fs.stat(filePath);
557
- const fileId = `file:${relativePath}`;
558
- fileContents.set(relativePath, content);
559
- const fileNode = {
560
- id: fileId,
561
- type: "file",
562
- name: path.basename(filePath),
563
- path: relativePath,
564
- metadata: { language: this.detectLanguage(filePath) },
565
- lastModified: stat2.mtime.toISOString()
566
- };
567
- this.store.addNode(fileNode);
568
- nodesAdded++;
569
- const symbols = this.extractSymbols(content, fileId, relativePath);
570
- for (const { node, edge } of symbols) {
571
- this.store.addNode(node);
572
- this.store.addEdge(edge);
573
- nodesAdded++;
574
- edgesAdded++;
575
- if (node.type === "function" || node.type === "method") {
576
- let files2 = nameToFiles.get(node.name);
577
- if (!files2) {
578
- files2 = /* @__PURE__ */ new Set();
579
- nameToFiles.set(node.name, files2);
580
- }
581
- files2.add(relativePath);
582
- }
583
- }
584
- const imports = await this.extractImports(content, fileId, relativePath, rootDir);
585
- for (const edge of imports) {
586
- this.store.addEdge(edge);
587
- edgesAdded++;
588
- }
631
+ const result = await this.processFile(filePath, rootDir, nameToFiles, fileContents);
632
+ nodesAdded += result.nodesAdded;
633
+ edgesAdded += result.edgesAdded;
589
634
  } catch (err) {
590
635
  errors.push(`${filePath}: ${err instanceof Error ? err.message : String(err)}`);
591
636
  }
@@ -604,6 +649,48 @@ var CodeIngestor = class {
604
649
  durationMs: Date.now() - start
605
650
  };
606
651
  }
652
+ async processFile(filePath, rootDir, nameToFiles, fileContents) {
653
+ let nodesAdded = 0;
654
+ let edgesAdded = 0;
655
+ const relativePath = path.relative(rootDir, filePath).replace(/\\/g, "/");
656
+ const content = await fs.readFile(filePath, "utf-8");
657
+ const stat2 = await fs.stat(filePath);
658
+ const fileId = `file:${relativePath}`;
659
+ fileContents.set(relativePath, content);
660
+ const fileNode = {
661
+ id: fileId,
662
+ type: "file",
663
+ name: path.basename(filePath),
664
+ path: relativePath,
665
+ metadata: { language: this.detectLanguage(filePath) },
666
+ lastModified: stat2.mtime.toISOString()
667
+ };
668
+ this.store.addNode(fileNode);
669
+ nodesAdded++;
670
+ const symbols = this.extractSymbols(content, fileId, relativePath);
671
+ for (const { node, edge } of symbols) {
672
+ this.store.addNode(node);
673
+ this.store.addEdge(edge);
674
+ nodesAdded++;
675
+ edgesAdded++;
676
+ this.trackCallable(node, relativePath, nameToFiles);
677
+ }
678
+ const imports = await this.extractImports(content, fileId, relativePath, rootDir);
679
+ for (const edge of imports) {
680
+ this.store.addEdge(edge);
681
+ edgesAdded++;
682
+ }
683
+ return { nodesAdded, edgesAdded };
684
+ }
685
+ trackCallable(node, relativePath, nameToFiles) {
686
+ if (node.type !== "function" && node.type !== "method") return;
687
+ let files = nameToFiles.get(node.name);
688
+ if (!files) {
689
+ files = /* @__PURE__ */ new Set();
690
+ nameToFiles.set(node.name, files);
691
+ }
692
+ files.add(relativePath);
693
+ }
607
694
  async findSourceFiles(dir) {
608
695
  const results = [];
609
696
  const entries = await fs.readdir(dir, { withFileTypes: true });
@@ -620,149 +707,152 @@ var CodeIngestor = class {
620
707
  extractSymbols(content, fileId, relativePath) {
621
708
  const results = [];
622
709
  const lines = content.split("\n");
623
- let currentClassName = null;
624
- let currentClassId = null;
625
- let braceDepth = 0;
626
- let insideClass = false;
710
+ const ctx = { className: null, classId: null, insideClass: false, braceDepth: 0 };
627
711
  for (let i = 0; i < lines.length; i++) {
628
712
  const line = lines[i];
629
- const fnMatch = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)/);
630
- if (fnMatch) {
631
- const name = fnMatch[1];
632
- const id = `function:${relativePath}:${name}`;
633
- const endLine = this.findClosingBrace(lines, i);
634
- results.push({
635
- node: {
636
- id,
637
- type: "function",
638
- name,
639
- path: relativePath,
640
- location: { fileId, startLine: i + 1, endLine },
641
- metadata: {
642
- exported: line.includes("export"),
643
- cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
644
- nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
645
- lineCount: endLine - i,
646
- parameterCount: this.countParameters(line)
647
- }
648
- },
649
- edge: { from: fileId, to: id, type: "contains" }
650
- });
651
- if (!insideClass) {
652
- currentClassName = null;
653
- currentClassId = null;
654
- }
655
- continue;
656
- }
657
- const classMatch = line.match(/(?:export\s+)?class\s+(\w+)/);
658
- if (classMatch) {
659
- const name = classMatch[1];
660
- const id = `class:${relativePath}:${name}`;
661
- const endLine = this.findClosingBrace(lines, i);
662
- results.push({
663
- node: {
664
- id,
665
- type: "class",
666
- name,
667
- path: relativePath,
668
- location: { fileId, startLine: i + 1, endLine },
669
- metadata: { exported: line.includes("export") }
670
- },
671
- edge: { from: fileId, to: id, type: "contains" }
672
- });
673
- currentClassName = name;
674
- currentClassId = id;
675
- insideClass = true;
676
- braceDepth = 0;
677
- for (const ch of line) {
678
- if (ch === "{") braceDepth++;
679
- if (ch === "}") braceDepth--;
680
- }
681
- continue;
682
- }
683
- const ifaceMatch = line.match(/(?:export\s+)?interface\s+(\w+)/);
684
- if (ifaceMatch) {
685
- const name = ifaceMatch[1];
686
- const id = `interface:${relativePath}:${name}`;
687
- const endLine = this.findClosingBrace(lines, i);
688
- results.push({
689
- node: {
690
- id,
691
- type: "interface",
692
- name,
693
- path: relativePath,
694
- location: { fileId, startLine: i + 1, endLine },
695
- metadata: { exported: line.includes("export") }
696
- },
697
- edge: { from: fileId, to: id, type: "contains" }
698
- });
699
- currentClassName = null;
700
- currentClassId = null;
701
- insideClass = false;
702
- continue;
703
- }
704
- if (insideClass) {
705
- for (const ch of line) {
706
- if (ch === "{") braceDepth++;
707
- if (ch === "}") braceDepth--;
708
- }
709
- if (braceDepth <= 0) {
710
- currentClassName = null;
711
- currentClassId = null;
712
- insideClass = false;
713
- continue;
714
- }
715
- }
716
- if (insideClass && currentClassName && currentClassId) {
717
- const methodMatch = line.match(
718
- /^\s+(?:(?:public|private|protected|readonly|static|abstract)\s+)*(?:async\s+)?(\w+)\s*\(/
719
- );
720
- if (methodMatch) {
721
- const methodName = methodMatch[1];
722
- if (methodName === "constructor" || methodName === "if" || methodName === "for" || methodName === "while" || methodName === "switch")
723
- continue;
724
- const id = `method:${relativePath}:${currentClassName}.${methodName}`;
725
- const endLine = this.findClosingBrace(lines, i);
726
- results.push({
727
- node: {
728
- id,
729
- type: "method",
730
- name: methodName,
731
- path: relativePath,
732
- location: { fileId, startLine: i + 1, endLine },
733
- metadata: {
734
- className: currentClassName,
735
- exported: false,
736
- cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
737
- nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
738
- lineCount: endLine - i,
739
- parameterCount: this.countParameters(line)
740
- }
741
- },
742
- edge: { from: currentClassId, to: id, type: "contains" }
743
- });
744
- }
745
- continue;
746
- }
747
- const varMatch = line.match(/(?:export\s+)?(?:const|let|var)\s+(\w+)/);
748
- if (varMatch) {
749
- const name = varMatch[1];
750
- const id = `variable:${relativePath}:${name}`;
751
- results.push({
752
- node: {
753
- id,
754
- type: "variable",
755
- name,
756
- path: relativePath,
757
- location: { fileId, startLine: i + 1, endLine: i + 1 },
758
- metadata: { exported: line.includes("export") }
759
- },
760
- edge: { from: fileId, to: id, type: "contains" }
761
- });
762
- }
713
+ if (this.tryExtractFunction(line, lines, i, fileId, relativePath, ctx, results)) continue;
714
+ if (this.tryExtractClass(line, lines, i, fileId, relativePath, ctx, results)) continue;
715
+ if (this.tryExtractInterface(line, lines, i, fileId, relativePath, ctx, results)) continue;
716
+ if (this.updateClassContext(line, ctx)) continue;
717
+ if (this.tryExtractMethod(line, lines, i, fileId, relativePath, ctx, results)) continue;
718
+ if (ctx.insideClass) continue;
719
+ this.tryExtractVariable(line, i, fileId, relativePath, results);
763
720
  }
764
721
  return results;
765
722
  }
723
+ tryExtractFunction(line, lines, i, fileId, relativePath, ctx, results) {
724
+ const fnMatch = line.match(/(?:export\s+)?(?:async\s+)?function\s+(\w+)/);
725
+ if (!fnMatch) return false;
726
+ const name = fnMatch[1];
727
+ const id = `function:${relativePath}:${name}`;
728
+ const endLine = this.findClosingBrace(lines, i);
729
+ results.push({
730
+ node: {
731
+ id,
732
+ type: "function",
733
+ name,
734
+ path: relativePath,
735
+ location: { fileId, startLine: i + 1, endLine },
736
+ metadata: {
737
+ exported: line.includes("export"),
738
+ cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
739
+ nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
740
+ lineCount: endLine - i,
741
+ parameterCount: this.countParameters(line)
742
+ }
743
+ },
744
+ edge: { from: fileId, to: id, type: "contains" }
745
+ });
746
+ if (!ctx.insideClass) {
747
+ ctx.className = null;
748
+ ctx.classId = null;
749
+ }
750
+ return true;
751
+ }
752
+ tryExtractClass(line, lines, i, fileId, relativePath, ctx, results) {
753
+ const classMatch = line.match(/(?:export\s+)?class\s+(\w+)/);
754
+ if (!classMatch) return false;
755
+ const name = classMatch[1];
756
+ const id = `class:${relativePath}:${name}`;
757
+ const endLine = this.findClosingBrace(lines, i);
758
+ results.push({
759
+ node: {
760
+ id,
761
+ type: "class",
762
+ name,
763
+ path: relativePath,
764
+ location: { fileId, startLine: i + 1, endLine },
765
+ metadata: { exported: line.includes("export") }
766
+ },
767
+ edge: { from: fileId, to: id, type: "contains" }
768
+ });
769
+ ctx.className = name;
770
+ ctx.classId = id;
771
+ ctx.insideClass = true;
772
+ ctx.braceDepth = countBraces(line);
773
+ return true;
774
+ }
775
+ tryExtractInterface(line, lines, i, fileId, relativePath, ctx, results) {
776
+ const ifaceMatch = line.match(/(?:export\s+)?interface\s+(\w+)/);
777
+ if (!ifaceMatch) return false;
778
+ const name = ifaceMatch[1];
779
+ const id = `interface:${relativePath}:${name}`;
780
+ const endLine = this.findClosingBrace(lines, i);
781
+ results.push({
782
+ node: {
783
+ id,
784
+ type: "interface",
785
+ name,
786
+ path: relativePath,
787
+ location: { fileId, startLine: i + 1, endLine },
788
+ metadata: { exported: line.includes("export") }
789
+ },
790
+ edge: { from: fileId, to: id, type: "contains" }
791
+ });
792
+ ctx.className = null;
793
+ ctx.classId = null;
794
+ ctx.insideClass = false;
795
+ return true;
796
+ }
797
+ /** Update brace tracking; returns true when line is consumed (class ended or tracked). */
798
+ updateClassContext(line, ctx) {
799
+ if (!ctx.insideClass) return false;
800
+ ctx.braceDepth += countBraces(line);
801
+ if (ctx.braceDepth <= 0) {
802
+ ctx.className = null;
803
+ ctx.classId = null;
804
+ ctx.insideClass = false;
805
+ return true;
806
+ }
807
+ return false;
808
+ }
809
+ tryExtractMethod(line, lines, i, fileId, relativePath, ctx, results) {
810
+ if (!ctx.insideClass || !ctx.className || !ctx.classId) return false;
811
+ const methodMatch = line.match(
812
+ /^\s+(?:(?:public|private|protected|readonly|static|abstract)\s+)*(?:async\s+)?(\w+)\s*\(/
813
+ );
814
+ if (!methodMatch) return false;
815
+ const methodName = methodMatch[1];
816
+ if (SKIP_METHOD_NAMES.has(methodName)) return false;
817
+ const id = `method:${relativePath}:${ctx.className}.${methodName}`;
818
+ const endLine = this.findClosingBrace(lines, i);
819
+ results.push({
820
+ node: {
821
+ id,
822
+ type: "method",
823
+ name: methodName,
824
+ path: relativePath,
825
+ location: { fileId, startLine: i + 1, endLine },
826
+ metadata: {
827
+ className: ctx.className,
828
+ exported: false,
829
+ cyclomaticComplexity: this.computeCyclomaticComplexity(lines.slice(i, endLine)),
830
+ nestingDepth: this.computeMaxNesting(lines.slice(i, endLine)),
831
+ lineCount: endLine - i,
832
+ parameterCount: this.countParameters(line)
833
+ }
834
+ },
835
+ edge: { from: ctx.classId, to: id, type: "contains" }
836
+ });
837
+ return true;
838
+ }
839
+ tryExtractVariable(line, i, fileId, relativePath, results) {
840
+ const varMatch = line.match(/(?:export\s+)?(?:const|let|var)\s+(\w+)/);
841
+ if (!varMatch) return;
842
+ const name = varMatch[1];
843
+ const id = `variable:${relativePath}:${name}`;
844
+ results.push({
845
+ node: {
846
+ id,
847
+ type: "variable",
848
+ name,
849
+ path: relativePath,
850
+ location: { fileId, startLine: i + 1, endLine: i + 1 },
851
+ metadata: { exported: line.includes("export") }
852
+ },
853
+ edge: { from: fileId, to: id, type: "contains" }
854
+ });
855
+ }
766
856
  /**
767
857
  * Find the closing brace for a construct starting at the given line.
768
858
  * Uses a simple brace-counting heuristic. Returns 1-indexed line number.
@@ -1382,17 +1472,33 @@ var KnowledgeIngestor = class {
1382
1472
 
1383
1473
  // src/ingest/connectors/ConnectorUtils.ts
1384
1474
  var CODE_NODE_TYPES2 = ["file", "function", "class", "method", "interface", "variable"];
1475
+ var SANITIZE_RULES = [
1476
+ // Strip XML/HTML-like instruction tags that could be interpreted as system prompts
1477
+ {
1478
+ pattern: /<\/?(?:system|instruction|prompt|role|context|tool_call|function_call|assistant|human|user)[^>]*>/gi,
1479
+ replacement: ""
1480
+ },
1481
+ // Strip markdown-style system prompt markers (including trailing space)
1482
+ {
1483
+ pattern: /^#{1,3}\s*(?:system|instruction|prompt)\s*[::]\s*/gim,
1484
+ replacement: ""
1485
+ },
1486
+ // Strip common injection prefixes
1487
+ {
1488
+ pattern: /(?:ignore|disregard|forget)\s+(?:all\s+)?(?:previous|prior|above)\s+(?:instructions?|prompts?|context)/gi,
1489
+ replacement: "[filtered]"
1490
+ },
1491
+ // Strip "you are now" re-roling attempts (only when followed by AI/agent role words)
1492
+ {
1493
+ pattern: /you\s+are\s+now\s+(?:a\s+)?(?:helpful\s+)?(?:an?\s+)?(?:assistant|system|ai|bot|agent|tool)\b/gi,
1494
+ replacement: "[filtered]"
1495
+ }
1496
+ ];
1385
1497
  function sanitizeExternalText(text, maxLength = 2e3) {
1386
- let sanitized = text.replace(
1387
- /<\/?(?:system|instruction|prompt|role|context|tool_call|function_call|assistant|human|user)[^>]*>/gi,
1388
- ""
1389
- ).replace(/^#{1,3}\s*(?:system|instruction|prompt)\s*[::]\s*/gim, "").replace(
1390
- /(?:ignore|disregard|forget)\s+(?:all\s+)?(?:previous|prior|above)\s+(?:instructions?|prompts?|context)/gi,
1391
- "[filtered]"
1392
- ).replace(
1393
- /you\s+are\s+now\s+(?:a\s+)?(?:helpful\s+)?(?:an?\s+)?(?:assistant|system|ai|bot|agent|tool)\b/gi,
1394
- "[filtered]"
1395
- );
1498
+ let sanitized = text;
1499
+ for (const rule of SANITIZE_RULES) {
1500
+ sanitized = sanitized.replace(rule.pattern, rule.replacement);
1501
+ }
1396
1502
  if (sanitized.length > maxLength) {
1397
1503
  sanitized = sanitized.slice(0, maxLength) + "\u2026";
1398
1504
  }
@@ -1492,6 +1598,28 @@ var SyncManager = class {
1492
1598
  };
1493
1599
 
1494
1600
  // src/ingest/connectors/JiraConnector.ts
1601
+ function buildIngestResult(nodesAdded, edgesAdded, errors, start) {
1602
+ return {
1603
+ nodesAdded,
1604
+ nodesUpdated: 0,
1605
+ edgesAdded,
1606
+ edgesUpdated: 0,
1607
+ errors,
1608
+ durationMs: Date.now() - start
1609
+ };
1610
+ }
1611
+ function buildJql(config) {
1612
+ const project2 = config.project;
1613
+ let jql = project2 ? `project=${project2}` : "";
1614
+ const filters = config.filters;
1615
+ if (filters?.status?.length) {
1616
+ jql += `${jql ? " AND " : ""}status IN (${filters.status.map((s) => `"${s}"`).join(",")})`;
1617
+ }
1618
+ if (filters?.labels?.length) {
1619
+ jql += `${jql ? " AND " : ""}labels IN (${filters.labels.map((l) => `"${l}"`).join(",")})`;
1620
+ }
1621
+ return jql;
1622
+ }
1495
1623
  var JiraConnector = class {
1496
1624
  name = "jira";
1497
1625
  source = "jira";
@@ -1501,105 +1629,81 @@ var JiraConnector = class {
1501
1629
  }
1502
1630
  async ingest(store, config) {
1503
1631
  const start = Date.now();
1504
- const errors = [];
1505
1632
  let nodesAdded = 0;
1506
1633
  let edgesAdded = 0;
1507
1634
  const apiKeyEnv = config.apiKeyEnv ?? "JIRA_API_KEY";
1508
1635
  const apiKey = process.env[apiKeyEnv];
1509
1636
  if (!apiKey) {
1510
- return {
1511
- nodesAdded: 0,
1512
- nodesUpdated: 0,
1513
- edgesAdded: 0,
1514
- edgesUpdated: 0,
1515
- errors: [`Missing API key: environment variable "${apiKeyEnv}" is not set`],
1516
- durationMs: Date.now() - start
1517
- };
1637
+ return buildIngestResult(
1638
+ 0,
1639
+ 0,
1640
+ [`Missing API key: environment variable "${apiKeyEnv}" is not set`],
1641
+ start
1642
+ );
1518
1643
  }
1519
1644
  const baseUrlEnv = config.baseUrlEnv ?? "JIRA_BASE_URL";
1520
1645
  const baseUrl = process.env[baseUrlEnv];
1521
1646
  if (!baseUrl) {
1522
- return {
1523
- nodesAdded: 0,
1524
- nodesUpdated: 0,
1525
- edgesAdded: 0,
1526
- edgesUpdated: 0,
1527
- errors: [`Missing base URL: environment variable "${baseUrlEnv}" is not set`],
1528
- durationMs: Date.now() - start
1529
- };
1530
- }
1531
- const project2 = config.project;
1532
- let jql = project2 ? `project=${project2}` : "";
1533
- const filters = config.filters;
1534
- if (filters?.status?.length) {
1535
- jql += `${jql ? " AND " : ""}status IN (${filters.status.map((s) => `"${s}"`).join(",")})`;
1536
- }
1537
- if (filters?.labels?.length) {
1538
- jql += `${jql ? " AND " : ""}labels IN (${filters.labels.map((l) => `"${l}"`).join(",")})`;
1647
+ return buildIngestResult(
1648
+ 0,
1649
+ 0,
1650
+ [`Missing base URL: environment variable "${baseUrlEnv}" is not set`],
1651
+ start
1652
+ );
1539
1653
  }
1654
+ const jql = buildJql(config);
1540
1655
  const headers = {
1541
1656
  Authorization: `Basic ${apiKey}`,
1542
1657
  "Content-Type": "application/json"
1543
1658
  };
1544
- let startAt = 0;
1545
- const maxResults = 50;
1546
- let total = Infinity;
1547
1659
  try {
1660
+ let startAt = 0;
1661
+ const maxResults = 50;
1662
+ let total = Infinity;
1548
1663
  while (startAt < total) {
1549
1664
  const url = `${baseUrl}/rest/api/2/search?jql=${encodeURIComponent(jql)}&startAt=${startAt}&maxResults=${maxResults}`;
1550
1665
  const response = await this.httpClient(url, { headers });
1551
1666
  if (!response.ok) {
1552
- return {
1553
- nodesAdded,
1554
- nodesUpdated: 0,
1555
- edgesAdded,
1556
- edgesUpdated: 0,
1557
- errors: ["Jira API request failed"],
1558
- durationMs: Date.now() - start
1559
- };
1667
+ return buildIngestResult(nodesAdded, edgesAdded, ["Jira API request failed"], start);
1560
1668
  }
1561
1669
  const data = await response.json();
1562
1670
  total = data.total;
1563
1671
  for (const issue of data.issues) {
1564
- const nodeId = `issue:jira:${issue.key}`;
1565
- store.addNode({
1566
- id: nodeId,
1567
- type: "issue",
1568
- name: sanitizeExternalText(issue.fields.summary, 500),
1569
- metadata: {
1570
- key: issue.key,
1571
- status: issue.fields.status?.name,
1572
- priority: issue.fields.priority?.name,
1573
- assignee: issue.fields.assignee?.displayName,
1574
- labels: issue.fields.labels ?? []
1575
- }
1576
- });
1577
- nodesAdded++;
1578
- const searchText = sanitizeExternalText(
1579
- [issue.fields.summary, issue.fields.description ?? ""].join(" ")
1580
- );
1581
- edgesAdded += linkToCode(store, searchText, nodeId, "applies_to");
1672
+ const counts = this.processIssue(store, issue);
1673
+ nodesAdded += counts.nodesAdded;
1674
+ edgesAdded += counts.edgesAdded;
1582
1675
  }
1583
1676
  startAt += maxResults;
1584
1677
  }
1585
1678
  } catch (err) {
1586
- return {
1679
+ return buildIngestResult(
1587
1680
  nodesAdded,
1588
- nodesUpdated: 0,
1589
1681
  edgesAdded,
1590
- edgesUpdated: 0,
1591
- errors: [`Jira API error: ${err instanceof Error ? err.message : String(err)}`],
1592
- durationMs: Date.now() - start
1593
- };
1682
+ [`Jira API error: ${err instanceof Error ? err.message : String(err)}`],
1683
+ start
1684
+ );
1594
1685
  }
1595
- return {
1596
- nodesAdded,
1597
- nodesUpdated: 0,
1598
- edgesAdded,
1599
- edgesUpdated: 0,
1600
- errors,
1601
- durationMs: Date.now() - start
1602
- };
1686
+ return buildIngestResult(nodesAdded, edgesAdded, [], start);
1687
+ }
1688
+ processIssue(store, issue) {
1689
+ const nodeId = `issue:jira:${issue.key}`;
1690
+ store.addNode({
1691
+ id: nodeId,
1692
+ type: "issue",
1693
+ name: sanitizeExternalText(issue.fields.summary, 500),
1694
+ metadata: {
1695
+ key: issue.key,
1696
+ status: issue.fields.status?.name,
1697
+ priority: issue.fields.priority?.name,
1698
+ assignee: issue.fields.assignee?.displayName,
1699
+ labels: issue.fields.labels ?? []
1700
+ }
1701
+ });
1702
+ const searchText = sanitizeExternalText(
1703
+ [issue.fields.summary, issue.fields.description ?? ""].join(" ")
1704
+ );
1705
+ const edgesAdded = linkToCode(store, searchText, nodeId, "applies_to");
1706
+ return { nodesAdded: 1, edgesAdded };
1603
1707
  }
1604
1708
  };
1605
1709
 
@@ -1632,44 +1736,10 @@ var SlackConnector = class {
1632
1736
  const oldest = config.lookbackDays ? String(Math.floor((Date.now() - Number(config.lookbackDays) * 864e5) / 1e3)) : void 0;
1633
1737
  for (const channel of channels) {
1634
1738
  try {
1635
- let url = `https://slack.com/api/conversations.history?channel=${encodeURIComponent(channel)}`;
1636
- if (oldest) {
1637
- url += `&oldest=${oldest}`;
1638
- }
1639
- const response = await this.httpClient(url, {
1640
- headers: {
1641
- Authorization: `Bearer ${apiKey}`,
1642
- "Content-Type": "application/json"
1643
- }
1644
- });
1645
- if (!response.ok) {
1646
- errors.push(`Slack API request failed for channel ${channel}`);
1647
- continue;
1648
- }
1649
- const data = await response.json();
1650
- if (!data.ok) {
1651
- errors.push(`Slack API error for channel ${channel}`);
1652
- continue;
1653
- }
1654
- for (const message of data.messages) {
1655
- const nodeId = `conversation:slack:${channel}:${message.ts}`;
1656
- const sanitizedText = sanitizeExternalText(message.text);
1657
- const snippet = sanitizedText.length > 100 ? sanitizedText.slice(0, 100) : sanitizedText;
1658
- store.addNode({
1659
- id: nodeId,
1660
- type: "conversation",
1661
- name: snippet,
1662
- metadata: {
1663
- author: message.user,
1664
- channel,
1665
- timestamp: message.ts
1666
- }
1667
- });
1668
- nodesAdded++;
1669
- edgesAdded += linkToCode(store, sanitizedText, nodeId, "references", {
1670
- checkPaths: true
1671
- });
1672
- }
1739
+ const result = await this.processChannel(store, channel, apiKey, oldest);
1740
+ nodesAdded += result.nodesAdded;
1741
+ edgesAdded += result.edgesAdded;
1742
+ errors.push(...result.errors);
1673
1743
  } catch (err) {
1674
1744
  errors.push(
1675
1745
  `Slack API error for channel ${channel}: ${err instanceof Error ? err.message : String(err)}`
@@ -1685,6 +1755,52 @@ var SlackConnector = class {
1685
1755
  durationMs: Date.now() - start
1686
1756
  };
1687
1757
  }
1758
+ async processChannel(store, channel, apiKey, oldest) {
1759
+ const errors = [];
1760
+ let nodesAdded = 0;
1761
+ let edgesAdded = 0;
1762
+ let url = `https://slack.com/api/conversations.history?channel=${encodeURIComponent(channel)}`;
1763
+ if (oldest) {
1764
+ url += `&oldest=${oldest}`;
1765
+ }
1766
+ const response = await this.httpClient(url, {
1767
+ headers: {
1768
+ Authorization: `Bearer ${apiKey}`,
1769
+ "Content-Type": "application/json"
1770
+ }
1771
+ });
1772
+ if (!response.ok) {
1773
+ return {
1774
+ nodesAdded: 0,
1775
+ edgesAdded: 0,
1776
+ errors: [`Slack API request failed for channel ${channel}`]
1777
+ };
1778
+ }
1779
+ const data = await response.json();
1780
+ if (!data.ok) {
1781
+ return { nodesAdded: 0, edgesAdded: 0, errors: [`Slack API error for channel ${channel}`] };
1782
+ }
1783
+ for (const message of data.messages) {
1784
+ const nodeId = `conversation:slack:${channel}:${message.ts}`;
1785
+ const sanitizedText = sanitizeExternalText(message.text);
1786
+ const snippet = sanitizedText.length > 100 ? sanitizedText.slice(0, 100) : sanitizedText;
1787
+ store.addNode({
1788
+ id: nodeId,
1789
+ type: "conversation",
1790
+ name: snippet,
1791
+ metadata: {
1792
+ author: message.user,
1793
+ channel,
1794
+ timestamp: message.ts
1795
+ }
1796
+ });
1797
+ nodesAdded++;
1798
+ edgesAdded += linkToCode(store, sanitizedText, nodeId, "references", {
1799
+ checkPaths: true
1800
+ });
1801
+ }
1802
+ return { nodesAdded, edgesAdded, errors };
1803
+ }
1688
1804
  };
1689
1805
 
1690
1806
  // src/ingest/connectors/ConfluenceConnector.ts
@@ -1716,36 +1832,10 @@ var ConfluenceConnector = class {
1716
1832
  const baseUrl = process.env[baseUrlEnv] ?? "";
1717
1833
  const spaceKey = config.spaceKey ?? "";
1718
1834
  try {
1719
- let nextUrl = `${baseUrl}/wiki/api/v2/pages?spaceKey=${encodeURIComponent(spaceKey)}&limit=25&body-format=storage`;
1720
- while (nextUrl) {
1721
- const response = await this.httpClient(nextUrl, {
1722
- headers: { Authorization: `Bearer ${apiKey}` }
1723
- });
1724
- if (!response.ok) {
1725
- errors.push(`Confluence API error: status ${response.status}`);
1726
- break;
1727
- }
1728
- const data = await response.json();
1729
- for (const page of data.results) {
1730
- const nodeId = `confluence:${page.id}`;
1731
- store.addNode({
1732
- id: nodeId,
1733
- type: "document",
1734
- name: sanitizeExternalText(page.title, 500),
1735
- metadata: {
1736
- source: "confluence",
1737
- spaceKey,
1738
- pageId: page.id,
1739
- status: page.status,
1740
- url: page._links?.webui ?? ""
1741
- }
1742
- });
1743
- nodesAdded++;
1744
- const text = sanitizeExternalText(`${page.title} ${page.body?.storage?.value ?? ""}`);
1745
- edgesAdded += linkToCode(store, text, nodeId, "documents");
1746
- }
1747
- nextUrl = data._links?.next ? `${baseUrl}${data._links.next}` : null;
1748
- }
1835
+ const result = await this.fetchAllPages(store, baseUrl, apiKey, spaceKey);
1836
+ nodesAdded = result.nodesAdded;
1837
+ edgesAdded = result.edgesAdded;
1838
+ errors.push(...result.errors);
1749
1839
  } catch (err) {
1750
1840
  errors.push(`Confluence fetch error: ${err instanceof Error ? err.message : String(err)}`);
1751
1841
  }
@@ -1758,6 +1848,47 @@ var ConfluenceConnector = class {
1758
1848
  durationMs: Date.now() - start
1759
1849
  };
1760
1850
  }
1851
+ async fetchAllPages(store, baseUrl, apiKey, spaceKey) {
1852
+ const errors = [];
1853
+ let nodesAdded = 0;
1854
+ let edgesAdded = 0;
1855
+ let nextUrl = `${baseUrl}/wiki/api/v2/pages?spaceKey=${encodeURIComponent(spaceKey)}&limit=25&body-format=storage`;
1856
+ while (nextUrl) {
1857
+ const response = await this.httpClient(nextUrl, {
1858
+ headers: { Authorization: `Bearer ${apiKey}` }
1859
+ });
1860
+ if (!response.ok) {
1861
+ errors.push(`Confluence API error: status ${response.status}`);
1862
+ break;
1863
+ }
1864
+ const data = await response.json();
1865
+ for (const page of data.results) {
1866
+ const counts = this.processPage(store, page, spaceKey);
1867
+ nodesAdded += counts.nodesAdded;
1868
+ edgesAdded += counts.edgesAdded;
1869
+ }
1870
+ nextUrl = data._links?.next ? `${baseUrl}${data._links.next}` : null;
1871
+ }
1872
+ return { nodesAdded, edgesAdded, errors };
1873
+ }
1874
+ processPage(store, page, spaceKey) {
1875
+ const nodeId = `confluence:${page.id}`;
1876
+ store.addNode({
1877
+ id: nodeId,
1878
+ type: "document",
1879
+ name: sanitizeExternalText(page.title, 500),
1880
+ metadata: {
1881
+ source: "confluence",
1882
+ spaceKey,
1883
+ pageId: page.id,
1884
+ status: page.status,
1885
+ url: page._links?.webui ?? ""
1886
+ }
1887
+ });
1888
+ const text = sanitizeExternalText(`${page.title} ${page.body?.storage?.value ?? ""}`);
1889
+ const edgesAdded = linkToCode(store, text, nodeId, "documents");
1890
+ return { nodesAdded: 1, edgesAdded };
1891
+ }
1761
1892
  };
1762
1893
 
1763
1894
  // src/ingest/connectors/CIConnector.ts
@@ -2050,22 +2181,25 @@ var GraphEntropyAdapter = class {
2050
2181
  * 3. Unreachable = code nodes NOT in visited set
2051
2182
  */
2052
2183
  computeDeadCodeData() {
2053
- const allFileNodes = this.store.findNodes({ type: "file" });
2184
+ const entryPoints = this.findEntryPoints();
2185
+ const visited = this.bfsFromEntryPoints(entryPoints);
2186
+ const unreachableNodes = this.collectUnreachableNodes(visited);
2187
+ return { reachableNodeIds: visited, unreachableNodes, entryPoints };
2188
+ }
2189
+ findEntryPoints() {
2054
2190
  const entryPoints = [];
2055
- for (const node of allFileNodes) {
2056
- if (node.name === "index.ts" || node.metadata?.entryPoint === true) {
2057
- entryPoints.push(node.id);
2058
- }
2059
- }
2060
2191
  for (const nodeType of CODE_NODE_TYPES3) {
2061
- if (nodeType === "file") continue;
2062
2192
  const nodes = this.store.findNodes({ type: nodeType });
2063
2193
  for (const node of nodes) {
2064
- if (node.metadata?.entryPoint === true) {
2194
+ const isIndexFile = nodeType === "file" && node.name === "index.ts";
2195
+ if (isIndexFile || node.metadata?.entryPoint === true) {
2065
2196
  entryPoints.push(node.id);
2066
2197
  }
2067
2198
  }
2068
2199
  }
2200
+ return entryPoints;
2201
+ }
2202
+ bfsFromEntryPoints(entryPoints) {
2069
2203
  const visited = /* @__PURE__ */ new Set();
2070
2204
  const queue = [...entryPoints];
2071
2205
  let head = 0;
@@ -2073,25 +2207,22 @@ var GraphEntropyAdapter = class {
2073
2207
  const nodeId = queue[head++];
2074
2208
  if (visited.has(nodeId)) continue;
2075
2209
  visited.add(nodeId);
2076
- const importEdges = this.store.getEdges({ from: nodeId, type: "imports" });
2077
- for (const edge of importEdges) {
2078
- if (!visited.has(edge.to)) {
2079
- queue.push(edge.to);
2080
- }
2081
- }
2082
- const callEdges = this.store.getEdges({ from: nodeId, type: "calls" });
2083
- for (const edge of callEdges) {
2084
- if (!visited.has(edge.to)) {
2085
- queue.push(edge.to);
2086
- }
2087
- }
2088
- const containsEdges = this.store.getEdges({ from: nodeId, type: "contains" });
2089
- for (const edge of containsEdges) {
2210
+ this.enqueueOutboundEdges(nodeId, visited, queue);
2211
+ }
2212
+ return visited;
2213
+ }
2214
+ enqueueOutboundEdges(nodeId, visited, queue) {
2215
+ const edgeTypes = ["imports", "calls", "contains"];
2216
+ for (const edgeType of edgeTypes) {
2217
+ const edges = this.store.getEdges({ from: nodeId, type: edgeType });
2218
+ for (const edge of edges) {
2090
2219
  if (!visited.has(edge.to)) {
2091
2220
  queue.push(edge.to);
2092
2221
  }
2093
2222
  }
2094
2223
  }
2224
+ }
2225
+ collectUnreachableNodes(visited) {
2095
2226
  const unreachableNodes = [];
2096
2227
  for (const nodeType of CODE_NODE_TYPES3) {
2097
2228
  const nodes = this.store.findNodes({ type: nodeType });
@@ -2106,11 +2237,7 @@ var GraphEntropyAdapter = class {
2106
2237
  }
2107
2238
  }
2108
2239
  }
2109
- return {
2110
- reachableNodeIds: visited,
2111
- unreachableNodes,
2112
- entryPoints
2113
- };
2240
+ return unreachableNodes;
2114
2241
  }
2115
2242
  /**
2116
2243
  * Count all nodes and edges by type.
@@ -2161,33 +2288,9 @@ var GraphComplexityAdapter = class {
2161
2288
  const hotspots = [];
2162
2289
  for (const fnNode of functionNodes) {
2163
2290
  const complexity = fnNode.metadata?.cyclomaticComplexity ?? 1;
2164
- const containsEdges = this.store.getEdges({ to: fnNode.id, type: "contains" });
2165
- let fileId;
2166
- for (const edge of containsEdges) {
2167
- const sourceNode = this.store.getNode(edge.from);
2168
- if (sourceNode?.type === "file") {
2169
- fileId = sourceNode.id;
2170
- break;
2171
- }
2172
- if (sourceNode?.type === "class") {
2173
- const classContainsEdges = this.store.getEdges({ to: sourceNode.id, type: "contains" });
2174
- for (const classEdge of classContainsEdges) {
2175
- const parentNode = this.store.getNode(classEdge.from);
2176
- if (parentNode?.type === "file") {
2177
- fileId = parentNode.id;
2178
- break;
2179
- }
2180
- }
2181
- if (fileId) break;
2182
- }
2183
- }
2291
+ const fileId = this.findContainingFileId(fnNode.id);
2184
2292
  if (!fileId) continue;
2185
- let changeFrequency = fileChangeFrequency.get(fileId);
2186
- if (changeFrequency === void 0) {
2187
- const referencesEdges = this.store.getEdges({ to: fileId, type: "references" });
2188
- changeFrequency = referencesEdges.length;
2189
- fileChangeFrequency.set(fileId, changeFrequency);
2190
- }
2293
+ const changeFrequency = this.getChangeFrequency(fileId, fileChangeFrequency);
2191
2294
  const hotspotScore = changeFrequency * complexity;
2192
2295
  const filePath = fnNode.path ?? fileId.replace(/^file:/, "");
2193
2296
  hotspots.push({
@@ -2205,6 +2308,39 @@ var GraphComplexityAdapter = class {
2205
2308
  );
2206
2309
  return { hotspots, percentile95Score };
2207
2310
  }
2311
+ /**
2312
+ * Walk the 'contains' edges to find the file node that contains a given function/method.
2313
+ * For methods, walks through the intermediate class node.
2314
+ */
2315
+ findContainingFileId(nodeId) {
2316
+ const containsEdges = this.store.getEdges({ to: nodeId, type: "contains" });
2317
+ for (const edge of containsEdges) {
2318
+ const sourceNode = this.store.getNode(edge.from);
2319
+ if (sourceNode?.type === "file") return sourceNode.id;
2320
+ if (sourceNode?.type === "class") {
2321
+ const fileId = this.findParentFileOfClass(sourceNode.id);
2322
+ if (fileId) return fileId;
2323
+ }
2324
+ }
2325
+ return void 0;
2326
+ }
2327
+ findParentFileOfClass(classNodeId) {
2328
+ const classContainsEdges = this.store.getEdges({ to: classNodeId, type: "contains" });
2329
+ for (const classEdge of classContainsEdges) {
2330
+ const parentNode = this.store.getNode(classEdge.from);
2331
+ if (parentNode?.type === "file") return parentNode.id;
2332
+ }
2333
+ return void 0;
2334
+ }
2335
+ getChangeFrequency(fileId, cache) {
2336
+ let freq = cache.get(fileId);
2337
+ if (freq === void 0) {
2338
+ const referencesEdges = this.store.getEdges({ to: fileId, type: "references" });
2339
+ freq = referencesEdges.length;
2340
+ cache.set(fileId, freq);
2341
+ }
2342
+ return freq;
2343
+ }
2208
2344
  computePercentile(descendingScores, percentile) {
2209
2345
  if (descendingScores.length === 0) return 0;
2210
2346
  const ascending = [...descendingScores].sort((a, b) => a - b);
@@ -2511,6 +2647,689 @@ var GraphAnomalyAdapter = class {
2511
2647
  }
2512
2648
  };
2513
2649
 
2650
+ // src/nlq/types.ts
2651
+ var INTENTS = ["impact", "find", "relationships", "explain", "anomaly"];
2652
+
2653
+ // src/nlq/IntentClassifier.ts
2654
+ var SIGNAL_WEIGHTS = {
2655
+ keyword: 0.35,
2656
+ questionWord: 0.2,
2657
+ verbPattern: 0.45
2658
+ };
2659
+ var INTENT_SIGNALS = {
2660
+ impact: {
2661
+ keywords: [
2662
+ "break",
2663
+ "affect",
2664
+ "impact",
2665
+ "change",
2666
+ "depend",
2667
+ "blast",
2668
+ "radius",
2669
+ "risk",
2670
+ "delete",
2671
+ "remove"
2672
+ ],
2673
+ questionWords: ["what", "if"],
2674
+ verbPatterns: [
2675
+ /what\s+(breaks|happens|is affected)/,
2676
+ /if\s+i\s+(change|modify|remove|delete)/,
2677
+ /blast\s+radius/,
2678
+ /what\s+(depend|relies)/
2679
+ ]
2680
+ },
2681
+ find: {
2682
+ keywords: ["find", "where", "locate", "search", "list", "all", "every"],
2683
+ questionWords: ["where"],
2684
+ verbPatterns: [
2685
+ /where\s+is/,
2686
+ /find\s+(the|all|every)/,
2687
+ /show\s+me/,
2688
+ /show\s+(all|every|the)/,
2689
+ /locate\s+/,
2690
+ /list\s+(all|every|the)/
2691
+ ]
2692
+ },
2693
+ relationships: {
2694
+ keywords: [
2695
+ "connect",
2696
+ "call",
2697
+ "import",
2698
+ "use",
2699
+ "depend",
2700
+ "link",
2701
+ "neighbor",
2702
+ "caller",
2703
+ "callee"
2704
+ ],
2705
+ questionWords: ["what", "who"],
2706
+ verbPatterns: [/connects?\s+to/, /depends?\s+on/, /\bcalls?\b/, /\bimports?\b/]
2707
+ },
2708
+ explain: {
2709
+ keywords: ["describe", "explain", "tell", "about", "overview", "summary", "work"],
2710
+ questionWords: ["what", "how"],
2711
+ verbPatterns: [
2712
+ /what\s+is\s+\w/,
2713
+ /describe\s+/,
2714
+ /tell\s+me\s+about/,
2715
+ /how\s+does/,
2716
+ /overview\s+of/,
2717
+ /give\s+me\s+/
2718
+ ]
2719
+ },
2720
+ anomaly: {
2721
+ keywords: [
2722
+ "wrong",
2723
+ "problem",
2724
+ "anomaly",
2725
+ "smell",
2726
+ "issue",
2727
+ "outlier",
2728
+ "hotspot",
2729
+ "suspicious",
2730
+ "risk"
2731
+ ],
2732
+ questionWords: ["what"],
2733
+ verbPatterns: [
2734
+ /what.*(wrong|problem|smell)/,
2735
+ /find.*(issue|anomal|problem)/,
2736
+ /code\s+smell/,
2737
+ /suspicious/,
2738
+ /hotspot/
2739
+ ]
2740
+ }
2741
+ };
2742
+ var IntentClassifier = class {
2743
+ /**
2744
+ * Classify a natural language question into an intent.
2745
+ *
2746
+ * @param question - The natural language question to classify
2747
+ * @returns ClassificationResult with intent, confidence, and per-signal scores
2748
+ */
2749
+ classify(question) {
2750
+ const normalized = question.toLowerCase().trim();
2751
+ const scores = [];
2752
+ for (const intent of INTENTS) {
2753
+ const signals = this.scoreIntent(normalized, INTENT_SIGNALS[intent]);
2754
+ const confidence = this.combineSignals(signals);
2755
+ scores.push({ intent, confidence, signals });
2756
+ }
2757
+ scores.sort((a, b) => b.confidence - a.confidence);
2758
+ const best = scores[0];
2759
+ return {
2760
+ intent: best.intent,
2761
+ confidence: best.confidence,
2762
+ signals: best.signals
2763
+ };
2764
+ }
2765
+ /**
2766
+ * Score individual signals for an intent against the normalized query.
2767
+ */
2768
+ scoreIntent(normalized, signalSet) {
2769
+ return {
2770
+ keyword: this.scoreKeywords(normalized, signalSet.keywords),
2771
+ questionWord: this.scoreQuestionWord(normalized, signalSet.questionWords),
2772
+ verbPattern: this.scoreVerbPatterns(normalized, signalSet.verbPatterns)
2773
+ };
2774
+ }
2775
+ /**
2776
+ * Score keyword signal: uses word-stem matching (checks if any word in the
2777
+ * query starts with the keyword). Saturates at 2 matches to avoid penalizing
2778
+ * intents with many keywords when only a few appear in the query.
2779
+ */
2780
+ scoreKeywords(normalized, keywords) {
2781
+ if (keywords.length === 0) return 0;
2782
+ const words = normalized.split(/\s+/);
2783
+ let matched = 0;
2784
+ for (const keyword of keywords) {
2785
+ if (words.some((w) => w.startsWith(keyword))) {
2786
+ matched++;
2787
+ }
2788
+ }
2789
+ return Math.min(matched / 2, 1);
2790
+ }
2791
+ /**
2792
+ * Score question-word signal: 1.0 if the query starts with a matching
2793
+ * question word, 0 otherwise.
2794
+ */
2795
+ scoreQuestionWord(normalized, questionWords) {
2796
+ const firstWord = normalized.split(/\s+/)[0] ?? "";
2797
+ return questionWords.includes(firstWord) ? 1 : 0;
2798
+ }
2799
+ /**
2800
+ * Score verb-pattern signal: any matching pattern yields a strong score.
2801
+ * Multiple matches increase score but saturate quickly.
2802
+ */
2803
+ scoreVerbPatterns(normalized, patterns) {
2804
+ if (patterns.length === 0) return 0;
2805
+ let matched = 0;
2806
+ for (const pattern of patterns) {
2807
+ if (pattern.test(normalized)) {
2808
+ matched++;
2809
+ }
2810
+ }
2811
+ return matched === 0 ? 0 : Math.min(0.6 + matched * 0.2, 1);
2812
+ }
2813
+ /**
2814
+ * Combine individual signal scores into a single confidence score
2815
+ * using additive weighted scoring. Each signal contributes weight * score,
2816
+ * and the total weights sum to 1.0 so the result is naturally bounded [0, 1].
2817
+ */
2818
+ combineSignals(signals) {
2819
+ let total = 0;
2820
+ for (const key of Object.keys(signals)) {
2821
+ const weight = SIGNAL_WEIGHTS[key];
2822
+ total += signals[key] * weight;
2823
+ }
2824
+ return total;
2825
+ }
2826
+ };
2827
+
2828
+ // src/nlq/EntityExtractor.ts
2829
+ var INTENT_KEYWORDS = /* @__PURE__ */ new Set([
2830
+ // impact
2831
+ "break",
2832
+ "breaks",
2833
+ "affect",
2834
+ "affects",
2835
+ "affected",
2836
+ "impact",
2837
+ "change",
2838
+ "depend",
2839
+ "depends",
2840
+ "blast",
2841
+ "radius",
2842
+ "risk",
2843
+ "delete",
2844
+ "remove",
2845
+ "modify",
2846
+ "happens",
2847
+ // find
2848
+ "find",
2849
+ "where",
2850
+ "locate",
2851
+ "search",
2852
+ "list",
2853
+ "all",
2854
+ "every",
2855
+ "show",
2856
+ // relationships
2857
+ "connect",
2858
+ "connects",
2859
+ "call",
2860
+ "calls",
2861
+ "import",
2862
+ "imports",
2863
+ "use",
2864
+ "uses",
2865
+ "link",
2866
+ "neighbor",
2867
+ "caller",
2868
+ "callers",
2869
+ "callee",
2870
+ "callees",
2871
+ // explain
2872
+ "describe",
2873
+ "explain",
2874
+ "tell",
2875
+ "about",
2876
+ "overview",
2877
+ "summary",
2878
+ "work",
2879
+ "works",
2880
+ // anomaly
2881
+ "wrong",
2882
+ "problem",
2883
+ "problems",
2884
+ "anomaly",
2885
+ "anomalies",
2886
+ "smell",
2887
+ "smells",
2888
+ "issue",
2889
+ "issues",
2890
+ "outlier",
2891
+ "hotspot",
2892
+ "hotspots",
2893
+ "suspicious"
2894
+ ]);
2895
+ var STOP_WORDS2 = /* @__PURE__ */ new Set([
2896
+ "a",
2897
+ "an",
2898
+ "the",
2899
+ "is",
2900
+ "are",
2901
+ "was",
2902
+ "were",
2903
+ "be",
2904
+ "been",
2905
+ "being",
2906
+ "have",
2907
+ "has",
2908
+ "had",
2909
+ "do",
2910
+ "does",
2911
+ "did",
2912
+ "will",
2913
+ "would",
2914
+ "could",
2915
+ "should",
2916
+ "may",
2917
+ "might",
2918
+ "shall",
2919
+ "can",
2920
+ "need",
2921
+ "must",
2922
+ "i",
2923
+ "me",
2924
+ "my",
2925
+ "we",
2926
+ "our",
2927
+ "you",
2928
+ "your",
2929
+ "he",
2930
+ "she",
2931
+ "it",
2932
+ "its",
2933
+ "they",
2934
+ "them",
2935
+ "their",
2936
+ "this",
2937
+ "that",
2938
+ "these",
2939
+ "those",
2940
+ "and",
2941
+ "or",
2942
+ "but",
2943
+ "if",
2944
+ "then",
2945
+ "else",
2946
+ "when",
2947
+ "while",
2948
+ "for",
2949
+ "of",
2950
+ "at",
2951
+ "by",
2952
+ "to",
2953
+ "in",
2954
+ "on",
2955
+ "with",
2956
+ "from",
2957
+ "up",
2958
+ "out",
2959
+ "not",
2960
+ "no",
2961
+ "nor",
2962
+ "so",
2963
+ "too",
2964
+ "very",
2965
+ "just",
2966
+ "also",
2967
+ "what",
2968
+ "who",
2969
+ "how",
2970
+ "which",
2971
+ "where",
2972
+ "why",
2973
+ "there",
2974
+ "here",
2975
+ "any",
2976
+ "some",
2977
+ "each",
2978
+ "than",
2979
+ "like",
2980
+ "get",
2981
+ "give",
2982
+ "go",
2983
+ "make",
2984
+ "see",
2985
+ "know",
2986
+ "take"
2987
+ ]);
2988
+ var PASCAL_OR_CAMEL_RE = /\b([A-Z][a-z]+[A-Za-z]*[a-z][A-Za-z]*|[a-z]+[A-Z][A-Za-z]*)\b/g;
2989
+ var FILE_PATH_RE = /(?:\.\/|[a-zA-Z0-9_-]+\/)[a-zA-Z0-9_\-./]+\.[a-zA-Z]{1,10}/g;
2990
+ var QUOTED_RE = /["']([^"']+)["']/g;
2991
+ function isSkippableWord(cleaned, allConsumed) {
2992
+ if (allConsumed.has(cleaned)) return true;
2993
+ const lower = cleaned.toLowerCase();
2994
+ if (STOP_WORDS2.has(lower)) return true;
2995
+ if (INTENT_KEYWORDS.has(lower)) return true;
2996
+ if (cleaned === cleaned.toUpperCase() && /^[A-Z]+$/.test(cleaned)) return true;
2997
+ return false;
2998
+ }
2999
+ function buildConsumedSet(quotedConsumed, casingConsumed, pathConsumed) {
3000
+ const quotedWords = /* @__PURE__ */ new Set();
3001
+ for (const q of quotedConsumed) {
3002
+ for (const w of q.split(/\s+/)) {
3003
+ if (w.length > 0) quotedWords.add(w);
3004
+ }
3005
+ }
3006
+ return /* @__PURE__ */ new Set([...quotedConsumed, ...quotedWords, ...casingConsumed, ...pathConsumed]);
3007
+ }
3008
+ var EntityExtractor = class {
3009
+ /**
3010
+ * Extract candidate entity mentions from a natural language query.
3011
+ *
3012
+ * @param query - The natural language query to extract entities from
3013
+ * @returns Array of raw entity strings in priority order, deduplicated
3014
+ */
3015
+ extract(query) {
3016
+ const trimmed = query.trim();
3017
+ if (trimmed.length === 0) return [];
3018
+ const seen = /* @__PURE__ */ new Set();
3019
+ const result = [];
3020
+ const add = (entity) => {
3021
+ if (!seen.has(entity)) {
3022
+ seen.add(entity);
3023
+ result.push(entity);
3024
+ }
3025
+ };
3026
+ const quotedConsumed = /* @__PURE__ */ new Set();
3027
+ for (const match of trimmed.matchAll(QUOTED_RE)) {
3028
+ const inner = match[1].trim();
3029
+ if (inner.length > 0) {
3030
+ add(inner);
3031
+ quotedConsumed.add(inner);
3032
+ }
3033
+ }
3034
+ const casingConsumed = /* @__PURE__ */ new Set();
3035
+ for (const match of trimmed.matchAll(PASCAL_OR_CAMEL_RE)) {
3036
+ const token = match[0];
3037
+ if (!quotedConsumed.has(token)) {
3038
+ add(token);
3039
+ casingConsumed.add(token);
3040
+ }
3041
+ }
3042
+ const pathConsumed = /* @__PURE__ */ new Set();
3043
+ for (const match of trimmed.matchAll(FILE_PATH_RE)) {
3044
+ const path6 = match[0];
3045
+ add(path6);
3046
+ pathConsumed.add(path6);
3047
+ }
3048
+ const allConsumed = buildConsumedSet(quotedConsumed, casingConsumed, pathConsumed);
3049
+ const words = trimmed.split(/\s+/);
3050
+ for (const raw of words) {
3051
+ const cleaned = raw.replace(/^[^a-zA-Z0-9]+|[^a-zA-Z0-9]+$/g, "");
3052
+ if (cleaned.length === 0) continue;
3053
+ if (isSkippableWord(cleaned, allConsumed)) continue;
3054
+ add(cleaned);
3055
+ }
3056
+ return result;
3057
+ }
3058
+ };
3059
+
3060
+ // src/nlq/EntityResolver.ts
3061
+ var EntityResolver = class {
3062
+ store;
3063
+ fusion;
3064
+ constructor(store, fusion) {
3065
+ this.store = store;
3066
+ this.fusion = fusion;
3067
+ }
3068
+ /**
3069
+ * Resolve an array of raw entity strings to graph nodes.
3070
+ *
3071
+ * @param raws - Raw entity strings from EntityExtractor
3072
+ * @returns Array of ResolvedEntity for each successfully resolved raw string
3073
+ */
3074
+ resolve(raws) {
3075
+ const results = [];
3076
+ for (const raw of raws) {
3077
+ const resolved = this.resolveOne(raw);
3078
+ if (resolved !== void 0) {
3079
+ results.push(resolved);
3080
+ }
3081
+ }
3082
+ return results;
3083
+ }
3084
+ resolveOne(raw) {
3085
+ const exactMatches = this.store.findNodes({ name: raw });
3086
+ if (exactMatches.length > 0) {
3087
+ const node = exactMatches[0];
3088
+ return {
3089
+ raw,
3090
+ nodeId: node.id,
3091
+ node,
3092
+ confidence: 1,
3093
+ method: "exact"
3094
+ };
3095
+ }
3096
+ if (this.fusion) {
3097
+ const fusionResults = this.fusion.search(raw, 5);
3098
+ if (fusionResults.length > 0 && fusionResults[0].score > 0.5) {
3099
+ const top = fusionResults[0];
3100
+ return {
3101
+ raw,
3102
+ nodeId: top.nodeId,
3103
+ node: top.node,
3104
+ confidence: top.score,
3105
+ method: "fusion"
3106
+ };
3107
+ }
3108
+ }
3109
+ if (raw.length < 3) return void 0;
3110
+ const isPathLike = raw.includes("/");
3111
+ const fileNodes = this.store.findNodes({ type: "file" });
3112
+ for (const node of fileNodes) {
3113
+ if (!node.path) continue;
3114
+ if (isPathLike && node.path.includes(raw)) {
3115
+ return { raw, nodeId: node.id, node, confidence: 0.6, method: "path" };
3116
+ }
3117
+ const basename4 = node.path.split("/").pop() ?? "";
3118
+ if (basename4.includes(raw)) {
3119
+ return { raw, nodeId: node.id, node, confidence: 0.6, method: "path" };
3120
+ }
3121
+ if (raw.length >= 4 && node.path.includes(raw)) {
3122
+ return { raw, nodeId: node.id, node, confidence: 0.6, method: "path" };
3123
+ }
3124
+ }
3125
+ return void 0;
3126
+ }
3127
+ };
3128
+
3129
+ // src/nlq/ResponseFormatter.ts
3130
+ var ResponseFormatter = class {
3131
+ /**
3132
+ * Format graph operation results into a human-readable summary.
3133
+ *
3134
+ * @param intent - The classified intent
3135
+ * @param entities - Resolved entities from the query
3136
+ * @param data - Raw result data (shape varies per intent)
3137
+ * @param query - Original natural language query (optional)
3138
+ * @returns Human-readable summary string
3139
+ */
3140
+ format(intent, entities, data, query) {
3141
+ if (data === null || data === void 0) {
3142
+ return "No results found.";
3143
+ }
3144
+ const firstEntity = entities[0];
3145
+ const entityName = firstEntity?.raw ?? "the target";
3146
+ switch (intent) {
3147
+ case "impact":
3148
+ return this.formatImpact(entityName, data);
3149
+ case "find":
3150
+ return this.formatFind(data, query);
3151
+ case "relationships":
3152
+ return this.formatRelationships(entityName, entities, data);
3153
+ case "explain":
3154
+ return this.formatExplain(entityName, entities, data);
3155
+ case "anomaly":
3156
+ return this.formatAnomaly(data);
3157
+ default:
3158
+ return `Processed results for "${entityName}".`;
3159
+ }
3160
+ }
3161
+ formatImpact(entityName, data) {
3162
+ const d = data;
3163
+ const code = this.safeArrayLength(d?.code);
3164
+ const tests = this.safeArrayLength(d?.tests);
3165
+ const docs = this.safeArrayLength(d?.docs);
3166
+ return `Changing **${entityName}** affects ${this.p(code, "code file")}, ${this.p(tests, "test")}, and ${this.p(docs, "doc")}.`;
3167
+ }
3168
+ formatFind(data, query) {
3169
+ const count = Array.isArray(data) ? data.length : 0;
3170
+ if (query) {
3171
+ return `Found ${this.p(count, "match", "matches")} for "${query}".`;
3172
+ }
3173
+ return `Found ${this.p(count, "match", "matches")}.`;
3174
+ }
3175
+ formatRelationships(entityName, entities, data) {
3176
+ const d = data;
3177
+ const edges = Array.isArray(d?.edges) ? d.edges : [];
3178
+ const firstEntity = entities[0];
3179
+ const rootId = firstEntity?.nodeId ?? "";
3180
+ let outbound = 0;
3181
+ let inbound = 0;
3182
+ for (const edge of edges) {
3183
+ if (edge.from === rootId) outbound++;
3184
+ if (edge.to === rootId) inbound++;
3185
+ }
3186
+ return `**${entityName}** has ${outbound} outbound and ${inbound} inbound relationships.`;
3187
+ }
3188
+ formatExplain(entityName, entities, data) {
3189
+ const d = data;
3190
+ const context = Array.isArray(d?.context) ? d.context : [];
3191
+ const firstEntity = entities[0];
3192
+ const nodeType = firstEntity?.node.type ?? "node";
3193
+ const path6 = firstEntity?.node.path ?? "unknown";
3194
+ let neighborCount = 0;
3195
+ const firstContext = context[0];
3196
+ if (firstContext && Array.isArray(firstContext.nodes)) {
3197
+ neighborCount = firstContext.nodes.length;
3198
+ }
3199
+ return `**${entityName}** is a ${nodeType} at \`${path6}\`. Connected to ${neighborCount} nodes.`;
3200
+ }
3201
+ formatAnomaly(data) {
3202
+ const d = data;
3203
+ const outliers = Array.isArray(d?.statisticalOutliers) ? d.statisticalOutliers : [];
3204
+ const artPoints = Array.isArray(d?.articulationPoints) ? d.articulationPoints : [];
3205
+ const count = outliers.length + artPoints.length;
3206
+ if (count === 0) {
3207
+ return "Found 0 anomalies.";
3208
+ }
3209
+ const topItems = [
3210
+ ...outliers.slice(0, 2).map((o) => o.nodeId ?? "unknown outlier"),
3211
+ ...artPoints.slice(0, 1).map((a) => a.nodeId ?? "unknown bottleneck")
3212
+ ].join(", ");
3213
+ return `Found ${this.p(count, "anomaly", "anomalies")}: ${topItems}.`;
3214
+ }
3215
+ safeArrayLength(value) {
3216
+ return Array.isArray(value) ? value.length : 0;
3217
+ }
3218
+ p(count, singular, plural) {
3219
+ const word = count === 1 ? singular : plural ?? singular + "s";
3220
+ return `${count} ${word}`;
3221
+ }
3222
+ };
3223
+
3224
+ // src/nlq/index.ts
3225
+ var ENTITY_REQUIRED_INTENTS = /* @__PURE__ */ new Set(["impact", "relationships", "explain"]);
3226
+ var classifier = new IntentClassifier();
3227
+ var extractor = new EntityExtractor();
3228
+ var formatter = new ResponseFormatter();
3229
+ async function askGraph(store, question) {
3230
+ const fusion = new FusionLayer(store);
3231
+ const resolver = new EntityResolver(store, fusion);
3232
+ const classification = classifier.classify(question);
3233
+ if (classification.confidence < 0.3) {
3234
+ return {
3235
+ intent: classification.intent,
3236
+ intentConfidence: classification.confidence,
3237
+ entities: [],
3238
+ summary: "I'm not sure what you're asking. Try rephrasing your question.",
3239
+ data: null,
3240
+ suggestions: [
3241
+ 'Try "what breaks if I change <name>?" for impact analysis',
3242
+ 'Try "where is <name>?" to find entities',
3243
+ 'Try "what calls <name>?" for relationships',
3244
+ 'Try "what is <name>?" for explanations',
3245
+ 'Try "what looks wrong?" for anomaly detection'
3246
+ ]
3247
+ };
3248
+ }
3249
+ const rawEntities = extractor.extract(question);
3250
+ const entities = resolver.resolve(rawEntities);
3251
+ if (ENTITY_REQUIRED_INTENTS.has(classification.intent) && entities.length === 0) {
3252
+ return {
3253
+ intent: classification.intent,
3254
+ intentConfidence: classification.confidence,
3255
+ entities: [],
3256
+ summary: "Could not find any matching nodes in the graph for your query. Try using exact class names, function names, or file paths.",
3257
+ data: null
3258
+ };
3259
+ }
3260
+ let data;
3261
+ try {
3262
+ data = executeOperation(store, classification.intent, entities, question, fusion);
3263
+ } catch (err) {
3264
+ return {
3265
+ intent: classification.intent,
3266
+ intentConfidence: classification.confidence,
3267
+ entities,
3268
+ summary: `An error occurred while querying the graph: ${err instanceof Error ? err.message : String(err)}`,
3269
+ data: null
3270
+ };
3271
+ }
3272
+ const summary = formatter.format(classification.intent, entities, data, question);
3273
+ return {
3274
+ intent: classification.intent,
3275
+ intentConfidence: classification.confidence,
3276
+ entities,
3277
+ summary,
3278
+ data
3279
+ };
3280
+ }
3281
+ function executeOperation(store, intent, entities, question, fusion) {
3282
+ const cql = new ContextQL(store);
3283
+ switch (intent) {
3284
+ case "impact": {
3285
+ const rootId = entities[0].nodeId;
3286
+ const result = cql.execute({
3287
+ rootNodeIds: [rootId],
3288
+ bidirectional: true,
3289
+ maxDepth: 3
3290
+ });
3291
+ return groupNodesByImpact(result.nodes, rootId);
3292
+ }
3293
+ case "find": {
3294
+ return fusion.search(question, 10);
3295
+ }
3296
+ case "relationships": {
3297
+ const rootId = entities[0].nodeId;
3298
+ const result = cql.execute({
3299
+ rootNodeIds: [rootId],
3300
+ bidirectional: true,
3301
+ maxDepth: 1
3302
+ });
3303
+ return { nodes: result.nodes, edges: result.edges };
3304
+ }
3305
+ case "explain": {
3306
+ const searchResults = fusion.search(question, 10);
3307
+ const contextBlocks = [];
3308
+ const rootIds = entities.length > 0 ? [entities[0].nodeId] : searchResults.slice(0, 3).map((r) => r.nodeId);
3309
+ for (const rootId of rootIds) {
3310
+ const expanded = cql.execute({
3311
+ rootNodeIds: [rootId],
3312
+ maxDepth: 2
3313
+ });
3314
+ const matchingResult = searchResults.find((r) => r.nodeId === rootId);
3315
+ contextBlocks.push({
3316
+ rootNode: rootId,
3317
+ score: matchingResult?.score ?? 1,
3318
+ nodes: expanded.nodes,
3319
+ edges: expanded.edges
3320
+ });
3321
+ }
3322
+ return { searchResults, context: contextBlocks };
3323
+ }
3324
+ case "anomaly": {
3325
+ const adapter = new GraphAnomalyAdapter(store);
3326
+ return adapter.detect();
3327
+ }
3328
+ default:
3329
+ return null;
3330
+ }
3331
+ }
3332
+
2514
3333
  // src/context/Assembler.ts
2515
3334
  var PHASE_NODE_TYPES = {
2516
3335
  implement: ["file", "function", "class", "method", "interface", "variable"],
@@ -2554,14 +3373,20 @@ var Assembler = class {
2554
3373
  const fusion = this.getFusionLayer();
2555
3374
  const topResults = fusion.search(intent, 10);
2556
3375
  if (topResults.length === 0) {
2557
- return {
2558
- nodes: [],
2559
- edges: [],
2560
- tokenEstimate: 0,
2561
- intent,
2562
- truncated: false
2563
- };
3376
+ return { nodes: [], edges: [], tokenEstimate: 0, intent, truncated: false };
2564
3377
  }
3378
+ const { nodeMap, collectedEdges, nodeScores } = this.expandSearchResults(topResults);
3379
+ const sortedNodes = Array.from(nodeMap.values()).sort((a, b) => {
3380
+ return (nodeScores.get(b.id) ?? 0) - (nodeScores.get(a.id) ?? 0);
3381
+ });
3382
+ const { keptNodes, tokenEstimate, truncated } = this.truncateToFit(sortedNodes, tokenBudget);
3383
+ const keptNodeIds = new Set(keptNodes.map((n) => n.id));
3384
+ const keptEdges = collectedEdges.filter(
3385
+ (e) => keptNodeIds.has(e.from) && keptNodeIds.has(e.to)
3386
+ );
3387
+ return { nodes: keptNodes, edges: keptEdges, tokenEstimate, intent, truncated };
3388
+ }
3389
+ expandSearchResults(topResults) {
2565
3390
  const contextQL = new ContextQL(this.store);
2566
3391
  const nodeMap = /* @__PURE__ */ new Map();
2567
3392
  const edgeSet = /* @__PURE__ */ new Set();
@@ -2589,9 +3414,9 @@ var Assembler = class {
2589
3414
  }
2590
3415
  }
2591
3416
  }
2592
- const sortedNodes = Array.from(nodeMap.values()).sort((a, b) => {
2593
- return (nodeScores.get(b.id) ?? 0) - (nodeScores.get(a.id) ?? 0);
2594
- });
3417
+ return { nodeMap, collectedEdges, nodeScores };
3418
+ }
3419
+ truncateToFit(sortedNodes, tokenBudget) {
2595
3420
  let tokenEstimate = 0;
2596
3421
  const keptNodes = [];
2597
3422
  let truncated = false;
@@ -2604,17 +3429,7 @@ var Assembler = class {
2604
3429
  tokenEstimate += nodeTokens;
2605
3430
  keptNodes.push(node);
2606
3431
  }
2607
- const keptNodeIds = new Set(keptNodes.map((n) => n.id));
2608
- const keptEdges = collectedEdges.filter(
2609
- (e) => keptNodeIds.has(e.from) && keptNodeIds.has(e.to)
2610
- );
2611
- return {
2612
- nodes: keptNodes,
2613
- edges: keptEdges,
2614
- tokenEstimate,
2615
- intent,
2616
- truncated
2617
- };
3432
+ return { keptNodes, tokenEstimate, truncated };
2618
3433
  }
2619
3434
  /**
2620
3435
  * Compute a token budget allocation across node types.
@@ -2787,8 +3602,8 @@ var GraphConstraintAdapter = class {
2787
3602
  const { edges } = this.computeDependencyGraph();
2788
3603
  const violations = [];
2789
3604
  for (const edge of edges) {
2790
- const fromRelative = (0, import_node_path2.relative)(rootDir, edge.from);
2791
- const toRelative = (0, import_node_path2.relative)(rootDir, edge.to);
3605
+ const fromRelative = (0, import_node_path2.relative)(rootDir, edge.from).replaceAll("\\", "/");
3606
+ const toRelative = (0, import_node_path2.relative)(rootDir, edge.to).replaceAll("\\", "/");
2792
3607
  const fromLayer = this.resolveLayer(fromRelative, layers);
2793
3608
  const toLayer = this.resolveLayer(toRelative, layers);
2794
3609
  if (!fromLayer || !toLayer) continue;
@@ -3133,6 +3948,447 @@ var GraphFeedbackAdapter = class {
3133
3948
  }
3134
3949
  };
3135
3950
 
3951
// src/independence/TaskIndependenceAnalyzer.ts
var DEFAULT_EDGE_TYPES = ["imports", "calls", "references"];
var TaskIndependenceAnalyzer = class {
  store;
  /**
   * @param store - optional GraphStore. When absent (or depth is 0) the
   *   analysis degrades to comparing declared file lists only.
   */
  constructor(store) {
    this.store = store;
  }
  /**
   * Decide which tasks can run in parallel by intersecting each pair's
   * declared files and (when a graph is available) their transitively
   * reachable files. Returns per-pair overlap detail, connected groups of
   * conflicting tasks, and a human-readable verdict.
   */
  analyze(params) {
    const { tasks } = params;
    const depth = params.depth ?? 1;
    const edgeTypes = params.edgeTypes ?? DEFAULT_EDGE_TYPES;
    this.validate(tasks);
    const graphMode = this.store != null && depth > 0;
    const analysisLevel = graphMode ? "graph-expanded" : "file-only";
    // Per task: the files it declares, and the files it can reach through
    // the graph (each mapped back to the declared file it was reached via).
    const declared = new Map();
    const reachable = new Map();
    for (const task of tasks) {
      declared.set(task.id, new Set(task.files));
      reachable.set(
        task.id,
        graphMode ? this.expandViaGraph(task.files, depth, edgeTypes) : new Map()
      );
    }
    const taskIds = tasks.map((t) => t.id);
    const pairs = [];
    taskIds.forEach((idA, i) => {
      for (const idB of taskIds.slice(i + 1)) {
        pairs.push(this.computePairOverlap(
          idA,
          idB,
          declared.get(idA),
          declared.get(idB),
          reachable.get(idA),
          reachable.get(idB)
        ));
      }
    });
    const groups = this.buildGroups(taskIds, pairs);
    return {
      tasks: taskIds,
      analysisLevel,
      depth,
      pairs,
      groups,
      verdict: this.generateVerdict(taskIds, groups, analysisLevel)
    };
  }
  // --- Private methods ---
  // Reject inputs the analysis cannot meaningfully process.
  validate(tasks) {
    if (tasks.length < 2) {
      throw new Error("At least 2 tasks are required for independence analysis");
    }
    const seen = new Set();
    for (const { id, files } of tasks) {
      if (seen.has(id)) {
        throw new Error(`Duplicate task ID: "${id}"`);
      }
      seen.add(id);
      if (files.length === 0) {
        throw new Error(`Task "${id}" has an empty files array`);
      }
    }
  }
  // Walk the graph from each declared file and collect every *other* file
  // node reached within `depth` hops over the given edge types. Returns a
  // Map of reached file -> declared file it was first reached from.
  expandViaGraph(files, depth, edgeTypes) {
    const discovered = new Map();
    const cql = new ContextQL(this.store);
    const declaredSet = new Set(files);
    for (const seed of files) {
      const nodeId = `file:${seed}`;
      // Files missing from the graph simply contribute nothing.
      if (!this.store.getNode(nodeId)) continue;
      const { nodes } = cql.execute({
        rootNodeIds: [nodeId],
        maxDepth: depth,
        includeEdges: edgeTypes,
        includeTypes: ["file"]
      });
      for (const node of nodes) {
        const filePath = node.path ?? node.id.replace(/^file:/, "");
        if (!declaredSet.has(filePath) && !discovered.has(filePath)) {
          discovered.set(filePath, seed);
        }
      }
    }
    return discovered;
  }
  // Intersect two tasks' file footprints. "direct" = both declare the file;
  // "transitive" = one side only reaches it through the graph.
  computePairOverlap(idA, idB, origA, origB, expandedA, expandedB) {
    const overlaps = [];
    const direct = new Set();
    for (const file of origA) {
      if (origB.has(file)) {
        direct.add(file);
        overlaps.push({ file, type: "direct" });
      }
    }
    const transitive = new Set();
    const addTransitive = (file, via) => {
      if (direct.has(file) || transitive.has(file)) return;
      transitive.add(file);
      overlaps.push({ file, type: "transitive", via });
    };
    // A reaches a file B declares…
    for (const [file, via] of expandedA) {
      if (origB.has(file)) addTransitive(file, via);
    }
    // …B reaches a file A declares…
    for (const [file, via] of expandedB) {
      if (origA.has(file)) addTransitive(file, via);
    }
    // …or both merely reach the same file (attributed to A's route).
    for (const [file, viaA] of expandedA) {
      if (expandedB.has(file)) addTransitive(file, viaA);
    }
    return {
      taskA: idA,
      taskB: idB,
      independent: overlaps.length === 0,
      overlaps
    };
  }
  // Union-find (union by rank, path compression) over conflicting pairs;
  // each resulting group must be serialized internally but groups can run
  // in parallel with each other.
  buildGroups(taskIds, pairs) {
    const parent = new Map(taskIds.map((id) => [id, id]));
    const rank = new Map(taskIds.map((id) => [id, 0]));
    const find = (x) => {
      let root = x;
      while (parent.get(root) !== root) root = parent.get(root);
      for (let cur = x; cur !== root; ) {
        const next = parent.get(cur);
        parent.set(cur, root);
        cur = next;
      }
      return root;
    };
    const union = (a, b) => {
      const rootA = find(a);
      const rootB = find(b);
      if (rootA === rootB) return;
      const rankA = rank.get(rootA);
      const rankB = rank.get(rootB);
      if (rankA < rankB) {
        parent.set(rootA, rootB);
        return;
      }
      parent.set(rootB, rootA);
      if (rankA === rankB) rank.set(rootA, rankA + 1);
    };
    for (const { taskA, taskB, independent } of pairs) {
      if (!independent) union(taskA, taskB);
    }
    // Group membership preserves original task order.
    const byRoot = new Map();
    for (const id of taskIds) {
      const root = find(id);
      if (!byRoot.has(root)) byRoot.set(root, []);
      byRoot.get(root).push(id);
    }
    return [...byRoot.values()];
  }
  // One-line summary of the parallelization outcome.
  generateVerdict(taskIds, groups, analysisLevel) {
    const total = taskIds.length;
    const groupCount = groups.length;
    let verdict;
    if (groupCount === 1) {
      verdict = `All ${total} tasks conflict \u2014 must run serially.`;
    } else if (groupCount === total) {
      verdict = `All ${total} tasks are independent \u2014 can all run in parallel.`;
    } else {
      verdict = `${total} tasks form ${groupCount} independent groups \u2014 ${groupCount} parallel waves possible.`;
    }
    if (analysisLevel === "file-only") {
      verdict += " Graph unavailable \u2014 transitive dependencies not checked.";
    }
    return verdict;
  }
};
4148
+
4149
// src/independence/ConflictPredictor.ts
// Builds on TaskIndependenceAnalyzer: keeps its overlap detection, then
// grades each conflicting pair low/medium/high using churn and coupling
// signals from the graph, and regroups tasks by high-severity conflicts only.
var ConflictPredictor = class {
  store;
  // store may be null/undefined; severity then falls back to overlap type only.
  constructor(store) {
    this.store = store;
  }
  /**
   * Run independence analysis, then classify every conflicting pair.
   * Returns { tasks, analysisLevel, depth, conflicts, groups, summary, verdict }.
   * Throws whatever TaskIndependenceAnalyzer.analyze throws on invalid input.
   */
  predict(params) {
    const analyzer = new TaskIndependenceAnalyzer(this.store);
    const result = analyzer.analyze(params);
    // file -> change frequency, and file -> (fanIn + fanOut), from the graph.
    const churnMap = /* @__PURE__ */ new Map();
    const couplingMap = /* @__PURE__ */ new Map();
    // Infinity sentinels: with no graph data, no file can ever cross a
    // threshold, so transitive overlaps all classify as "low".
    let churnThreshold = Infinity;
    let couplingThreshold = Infinity;
    if (this.store != null) {
      const complexityResult = new GraphComplexityAdapter(this.store).computeComplexityHotspots();
      for (const hotspot of complexityResult.hotspots) {
        // A file can appear in several hotspots; keep its max frequency.
        const existing = churnMap.get(hotspot.file);
        if (existing === void 0 || hotspot.changeFrequency > existing) {
          churnMap.set(hotspot.file, hotspot.changeFrequency);
        }
      }
      const couplingResult = new GraphCouplingAdapter(this.store).computeCouplingData();
      for (const fileData of couplingResult.files) {
        couplingMap.set(fileData.file, fileData.fanIn + fileData.fanOut);
      }
      // "High" churn/coupling means at or above the 80th percentile.
      churnThreshold = this.computePercentile(Array.from(churnMap.values()), 80);
      couplingThreshold = this.computePercentile(Array.from(couplingMap.values()), 80);
    }
    const conflicts = [];
    for (const pair of result.pairs) {
      if (pair.independent) continue;
      const { severity, reason, mitigation } = this.classifyPair(
        pair.taskA,
        pair.taskB,
        pair.overlaps,
        churnMap,
        couplingMap,
        churnThreshold,
        couplingThreshold
      );
      conflicts.push({
        taskA: pair.taskA,
        taskB: pair.taskB,
        severity,
        reason,
        mitigation,
        overlaps: pair.overlaps
      });
    }
    const taskIds = result.tasks;
    // Unlike the base analyzer, only high-severity conflicts force
    // serialization here; medium/low conflicts still allow parallelism.
    const groups = this.buildHighSeverityGroups(taskIds, conflicts);
    const regrouped = !this.groupsEqual(result.groups, groups);
    let highCount = 0;
    let mediumCount = 0;
    let lowCount = 0;
    for (const c of conflicts) {
      if (c.severity === "high") highCount++;
      else if (c.severity === "medium") mediumCount++;
      else lowCount++;
    }
    const verdict = this.generateVerdict(
      taskIds,
      groups,
      result.analysisLevel,
      highCount,
      mediumCount,
      lowCount,
      regrouped
    );
    return {
      tasks: taskIds,
      analysisLevel: result.analysisLevel,
      depth: result.depth,
      conflicts,
      groups,
      summary: {
        high: highCount,
        medium: mediumCount,
        low: lowCount,
        regrouped
      },
      verdict
    };
  }
  // --- Private helpers ---
  // Grade one conflicting pair. Direct overlaps are always "high";
  // transitive overlaps are "medium" when the shared file is high-churn or
  // highly-coupled (>= 80th percentile), otherwise "low". The pair takes the
  // max severity seen, with reason/mitigation from the FIRST overlap that
  // reached that max.
  classifyPair(taskA, taskB, overlaps, churnMap, couplingMap, churnThreshold, couplingThreshold) {
    let maxSeverity = "low";
    let primaryReason = "";
    let primaryMitigation = "";
    for (const overlap of overlaps) {
      let overlapSeverity;
      let reason;
      let mitigation;
      if (overlap.type === "direct") {
        overlapSeverity = "high";
        reason = `Both tasks write to ${overlap.file}`;
        mitigation = `Serialize: run ${taskA} before ${taskB}`;
      } else {
        const churn = churnMap.get(overlap.file);
        const coupling = couplingMap.get(overlap.file);
        const via = overlap.via ?? "unknown";
        // Infinity guards keep "no graph data" from ever reading as high churn.
        if (churn !== void 0 && churn >= churnThreshold && churnThreshold !== Infinity) {
          overlapSeverity = "medium";
          reason = `Transitive overlap on high-churn file ${overlap.file} (via ${via})`;
          mitigation = `Review: ${overlap.file} changes frequently \u2014 coordinate edits between ${taskA} and ${taskB}`;
        } else if (coupling !== void 0 && coupling >= couplingThreshold && couplingThreshold !== Infinity) {
          overlapSeverity = "medium";
          reason = `Transitive overlap on highly-coupled file ${overlap.file} (via ${via})`;
          mitigation = `Review: ${overlap.file} has high coupling \u2014 coordinate edits between ${taskA} and ${taskB}`;
        } else {
          overlapSeverity = "low";
          reason = `Transitive overlap on ${overlap.file} (via ${via}) \u2014 low risk`;
          mitigation = `Info: transitive overlap unlikely to cause conflicts`;
        }
      }
      if (this.severityRank(overlapSeverity) > this.severityRank(maxSeverity)) {
        maxSeverity = overlapSeverity;
        primaryReason = reason;
        primaryMitigation = mitigation;
      } else if (primaryReason === "") {
        // First overlap ties the initial "low" rank; still record its
        // reason so a pair of only-low overlaps gets a non-empty reason.
        primaryReason = reason;
        primaryMitigation = mitigation;
      }
    }
    return { severity: maxSeverity, reason: primaryReason, mitigation: primaryMitigation };
  }
  // Numeric ordering for severity comparison. Exhaustive over the three
  // values classifyPair produces (no default case needed).
  severityRank(severity) {
    switch (severity) {
      case "high":
        return 3;
      case "medium":
        return 2;
      case "low":
        return 1;
    }
  }
  // Nearest-rank percentile; Infinity for an empty sample (see sentinel
  // meaning in predict()).
  computePercentile(values, percentile) {
    if (values.length === 0) return Infinity;
    const sorted = [...values].sort((a, b) => a - b);
    const index = Math.ceil(percentile / 100 * sorted.length) - 1;
    return sorted[Math.min(index, sorted.length - 1)];
  }
  // Union-find over tasks, merging only pairs with a "high" conflict.
  // Mirrors TaskIndependenceAnalyzer.buildGroups but with a stricter
  // merge criterion; group order follows original task order.
  buildHighSeverityGroups(taskIds, conflicts) {
    const parent = /* @__PURE__ */ new Map();
    const rank = /* @__PURE__ */ new Map();
    for (const id of taskIds) {
      parent.set(id, id);
      rank.set(id, 0);
    }
    // find with path compression.
    const find = (x) => {
      let root = x;
      while (parent.get(root) !== root) {
        root = parent.get(root);
      }
      let current = x;
      while (current !== root) {
        const next = parent.get(current);
        parent.set(current, root);
        current = next;
      }
      return root;
    };
    // union by rank.
    const union = (a, b) => {
      const rootA = find(a);
      const rootB = find(b);
      if (rootA === rootB) return;
      const rankA = rank.get(rootA);
      const rankB = rank.get(rootB);
      if (rankA < rankB) {
        parent.set(rootA, rootB);
      } else if (rankA > rankB) {
        parent.set(rootB, rootA);
      } else {
        parent.set(rootB, rootA);
        rank.set(rootA, rankA + 1);
      }
    };
    for (const conflict of conflicts) {
      if (conflict.severity === "high") {
        union(conflict.taskA, conflict.taskB);
      }
    }
    const groupMap = /* @__PURE__ */ new Map();
    for (const id of taskIds) {
      const root = find(id);
      let group = groupMap.get(root);
      if (group === void 0) {
        group = [];
        groupMap.set(root, group);
      }
      group.push(id);
    }
    return Array.from(groupMap.values());
  }
  // Order-insensitive equality of two groupings: each group is sorted,
  // then groups are sorted by first member before element-wise comparison.
  groupsEqual(a, b) {
    if (a.length !== b.length) return false;
    const normalize2 = (groups) => groups.map((g) => [...g].sort()).sort((x, y) => {
      const xFirst = x[0];
      const yFirst = y[0];
      return xFirst.localeCompare(yFirst);
    });
    const normA = normalize2(a);
    const normB = normalize2(b);
    for (let i = 0; i < normA.length; i++) {
      const groupA = normA[i];
      const groupB = normB[i];
      if (groupA.length !== groupB.length) return false;
      for (let j = 0; j < groupA.length; j++) {
        if (groupA[j] !== groupB[j]) return false;
      }
    }
    return true;
  }
  // Compose the human-readable summary sentence(s).
  generateVerdict(taskIds, groups, analysisLevel, highCount, mediumCount, lowCount, regrouped) {
    const total = taskIds.length;
    const groupCount = groups.length;
    const parts = [];
    const conflictParts = [];
    if (highCount > 0) conflictParts.push(`${highCount} high`);
    if (mediumCount > 0) conflictParts.push(`${mediumCount} medium`);
    if (lowCount > 0) conflictParts.push(`${lowCount} low`);
    if (conflictParts.length === 0) {
      parts.push(`${total} tasks have no conflicts \u2014 can all run in parallel.`);
    } else {
      parts.push(`${total} tasks have ${conflictParts.join(", ")} severity conflicts.`);
    }
    if (groupCount === 1) {
      parts.push(`All tasks must run serially.`);
    } else if (groupCount === total) {
      parts.push(`${groupCount} parallel groups (all independent).`);
    } else {
      parts.push(`${groupCount} parallel groups possible.`);
    }
    if (regrouped) {
      parts.push(`Tasks were regrouped due to high-severity conflicts.`);
    }
    if (analysisLevel === "file-only") {
      parts.push(`Graph unavailable \u2014 severity based on file overlaps only.`);
    }
    return parts.join(" ");
  }
};
4391
+
3136
4392
  // src/index.ts
3137
4393
  var VERSION = "0.2.0";
3138
4394
  // Annotate the CommonJS export names for ESM import in node:
@@ -3141,11 +4397,14 @@ var VERSION = "0.2.0";
3141
4397
  CIConnector,
3142
4398
  CURRENT_SCHEMA_VERSION,
3143
4399
  CodeIngestor,
4400
+ ConflictPredictor,
3144
4401
  ConfluenceConnector,
3145
4402
  ContextQL,
3146
4403
  DesignConstraintAdapter,
3147
4404
  DesignIngestor,
3148
4405
  EDGE_TYPES,
4406
+ EntityExtractor,
4407
+ EntityResolver,
3149
4408
  FusionLayer,
3150
4409
  GitIngestor,
3151
4410
  GraphAnomalyAdapter,
@@ -3157,15 +4416,21 @@ var VERSION = "0.2.0";
3157
4416
  GraphFeedbackAdapter,
3158
4417
  GraphNodeSchema,
3159
4418
  GraphStore,
4419
+ INTENTS,
4420
+ IntentClassifier,
3160
4421
  JiraConnector,
3161
4422
  KnowledgeIngestor,
3162
4423
  NODE_TYPES,
3163
4424
  OBSERVABILITY_TYPES,
4425
+ ResponseFormatter,
3164
4426
  SlackConnector,
3165
4427
  SyncManager,
4428
+ TaskIndependenceAnalyzer,
3166
4429
  TopologicalLinker,
3167
4430
  VERSION,
3168
4431
  VectorStore,
4432
+ askGraph,
4433
+ groupNodesByImpact,
3169
4434
  linkToCode,
3170
4435
  loadGraph,
3171
4436
  project,