@harness-engineering/graph 0.3.1 → 0.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -34,11 +34,14 @@ __export(index_exports, {
34
34
  CIConnector: () => CIConnector,
35
35
  CURRENT_SCHEMA_VERSION: () => CURRENT_SCHEMA_VERSION,
36
36
  CodeIngestor: () => CodeIngestor,
37
+ ConflictPredictor: () => ConflictPredictor,
37
38
  ConfluenceConnector: () => ConfluenceConnector,
38
39
  ContextQL: () => ContextQL,
39
40
  DesignConstraintAdapter: () => DesignConstraintAdapter,
40
41
  DesignIngestor: () => DesignIngestor,
41
42
  EDGE_TYPES: () => EDGE_TYPES,
43
+ EntityExtractor: () => EntityExtractor,
44
+ EntityResolver: () => EntityResolver,
42
45
  FusionLayer: () => FusionLayer,
43
46
  GitIngestor: () => GitIngestor,
44
47
  GraphAnomalyAdapter: () => GraphAnomalyAdapter,
@@ -50,15 +53,21 @@ __export(index_exports, {
50
53
  GraphFeedbackAdapter: () => GraphFeedbackAdapter,
51
54
  GraphNodeSchema: () => GraphNodeSchema,
52
55
  GraphStore: () => GraphStore,
56
+ INTENTS: () => INTENTS,
57
+ IntentClassifier: () => IntentClassifier,
53
58
  JiraConnector: () => JiraConnector,
54
59
  KnowledgeIngestor: () => KnowledgeIngestor,
55
60
  NODE_TYPES: () => NODE_TYPES,
56
61
  OBSERVABILITY_TYPES: () => OBSERVABILITY_TYPES,
62
+ ResponseFormatter: () => ResponseFormatter,
57
63
  SlackConnector: () => SlackConnector,
58
64
  SyncManager: () => SyncManager,
65
+ TaskIndependenceAnalyzer: () => TaskIndependenceAnalyzer,
59
66
  TopologicalLinker: () => TopologicalLinker,
60
67
  VERSION: () => VERSION,
61
68
  VectorStore: () => VectorStore,
69
+ askGraph: () => askGraph,
70
+ groupNodesByImpact: () => groupNodesByImpact,
62
71
  linkToCode: () => linkToCode,
63
72
  loadGraph: () => loadGraph,
64
73
  project: () => project,
@@ -534,6 +543,38 @@ function project(nodes, spec) {
534
543
  });
535
544
  }
536
545
 
546
// src/query/groupImpact.ts
var TEST_TYPES = new Set(["test_result"]);
var DOC_TYPES = new Set(["adr", "decision", "document", "learning"]);
var CODE_TYPES = new Set([
  "file",
  "module",
  "class",
  "interface",
  "function",
  "method",
  "variable"
]);
/**
 * Bucket graph nodes by the kind of impact they represent: tests, docs,
 * code artifacts, or anything else. Optionally skips one node (typically
 * the root of the traversal) by id.
 *
 * @param nodes - Graph nodes to classify (each read for `id` and `type`)
 * @param excludeId - Optional node id to omit from every bucket
 * @returns `{ tests, docs, code, other }` arrays preserving input order
 */
function groupNodesByImpact(nodes, excludeId) {
  const buckets = { tests: [], docs: [], code: [], other: [] };
  for (const node of nodes) {
    if (excludeId && node.id === excludeId) continue;
    if (TEST_TYPES.has(node.type)) {
      buckets.tests.push(node);
    } else if (DOC_TYPES.has(node.type)) {
      buckets.docs.push(node);
    } else if (CODE_TYPES.has(node.type)) {
      buckets.code.push(node);
    } else {
      buckets.other.push(node);
    }
  }
  return buckets;
}
537
578
  // src/ingest/CodeIngestor.ts
538
579
  var fs = __toESM(require("fs/promises"));
539
580
  var path = __toESM(require("path"));
@@ -2511,6 +2552,687 @@ var GraphAnomalyAdapter = class {
2511
2552
  }
2512
2553
  };
2513
2554
 
2555
// src/nlq/types.ts
var INTENTS = ["impact", "find", "relationships", "explain", "anomaly"];

// src/nlq/IntentClassifier.ts
// Relative weight of each signal; the weights sum to 1.0 so the combined
// confidence is naturally bounded to [0, 1].
var SIGNAL_WEIGHTS = {
  keyword: 0.35,
  questionWord: 0.2,
  verbPattern: 0.45
};
// Per-intent heuristics: stem keywords, leading question words, and
// phrase regexes (all matched against the lowercased query).
var INTENT_SIGNALS = {
  impact: {
    keywords: ["break", "affect", "impact", "change", "depend", "blast", "radius", "risk", "delete", "remove"],
    questionWords: ["what", "if"],
    verbPatterns: [
      /what\s+(breaks|happens|is affected)/,
      /if\s+i\s+(change|modify|remove|delete)/,
      /blast\s+radius/,
      /what\s+(depend|relies)/
    ]
  },
  find: {
    keywords: ["find", "where", "locate", "search", "list", "all", "every"],
    questionWords: ["where"],
    verbPatterns: [
      /where\s+is/,
      /find\s+(the|all|every)/,
      /show\s+me/,
      /show\s+(all|every|the)/,
      /locate\s+/,
      /list\s+(all|every|the)/
    ]
  },
  relationships: {
    keywords: ["connect", "call", "import", "use", "depend", "link", "neighbor", "caller", "callee"],
    questionWords: ["what", "who"],
    verbPatterns: [/connects?\s+to/, /depends?\s+on/, /\bcalls?\b/, /\bimports?\b/]
  },
  explain: {
    keywords: ["describe", "explain", "tell", "about", "overview", "summary", "work"],
    questionWords: ["what", "how"],
    verbPatterns: [
      /what\s+is\s+\w/,
      /describe\s+/,
      /tell\s+me\s+about/,
      /how\s+does/,
      /overview\s+of/,
      /give\s+me\s+/
    ]
  },
  anomaly: {
    keywords: ["wrong", "problem", "anomaly", "smell", "issue", "outlier", "hotspot", "suspicious", "risk"],
    questionWords: ["what"],
    verbPatterns: [
      /what.*(wrong|problem|smell)/,
      /find.*(issue|anomal|problem)/,
      /code\s+smell/,
      /suspicious/,
      /hotspot/
    ]
  }
};
var IntentClassifier = class {
  /**
   * Classify a natural language question into one of the INTENTS.
   * Every intent is scored via three weighted heuristics (keyword stems,
   * leading question word, phrase regexes); the highest-scoring intent wins.
   *
   * @param question - The natural language question to classify
   * @returns ClassificationResult with intent, confidence, and per-signal scores
   */
  classify(question) {
    const normalized = question.toLowerCase().trim();
    const ranked = INTENTS.map((intent) => {
      const signals = this.scoreIntent(normalized, INTENT_SIGNALS[intent]);
      return { intent, confidence: this.combineSignals(signals), signals };
    });
    ranked.sort((a, b) => b.confidence - a.confidence);
    const { intent, confidence, signals } = ranked[0];
    return { intent, confidence, signals };
  }
  // Score each of the three signal types for one intent.
  scoreIntent(normalized, signalSet) {
    return {
      keyword: this.scoreKeywords(normalized, signalSet.keywords),
      questionWord: this.scoreQuestionWord(normalized, signalSet.questionWords),
      verbPattern: this.scoreVerbPatterns(normalized, signalSet.verbPatterns)
    };
  }
  // Word-stem matching: a keyword counts when any query word starts with it.
  // Saturates at 2 matches so intents with long keyword lists are not
  // penalized when only a couple of keywords appear in the query.
  scoreKeywords(normalized, keywords) {
    if (keywords.length === 0) return 0;
    const words = normalized.split(/\s+/);
    const hits = keywords.filter((kw) => words.some((w) => w.startsWith(kw))).length;
    return Math.min(hits / 2, 1);
  }
  // 1.0 when the query's first word is one of the intent's question words.
  scoreQuestionWord(normalized, questionWords) {
    const [first = ""] = normalized.split(/\s+/);
    return questionWords.includes(first) ? 1 : 0;
  }
  // Any pattern hit yields a strong base score (0.6); additional hits add
  // 0.2 each and saturate at 1.0.
  scoreVerbPatterns(normalized, patterns) {
    if (patterns.length === 0) return 0;
    const hits = patterns.filter((pattern) => pattern.test(normalized)).length;
    return hits === 0 ? 0 : Math.min(0.6 + hits * 0.2, 1);
  }
  // Weighted sum of the per-signal scores (weights sum to 1.0).
  combineSignals(signals) {
    return Object.keys(signals).reduce(
      (total, key) => total + signals[key] * SIGNAL_WEIGHTS[key],
      0
    );
  }
};
2733
// src/nlq/EntityExtractor.ts
// Words that signal intent rather than name an entity; never extracted.
var INTENT_KEYWORDS = new Set([
  // impact
  "break", "breaks", "affect", "affects", "affected", "impact", "change",
  "depend", "depends", "blast", "radius", "risk", "delete", "remove",
  "modify", "happens",
  // find
  "find", "where", "locate", "search", "list", "all", "every", "show",
  // relationships
  "connect", "connects", "call", "calls", "import", "imports", "use",
  "uses", "link", "neighbor", "caller", "callers", "callee", "callees",
  // explain
  "describe", "explain", "tell", "about", "overview", "summary", "work", "works",
  // anomaly
  "wrong", "problem", "problems", "anomaly", "anomalies", "smell", "smells",
  "issue", "issues", "outlier", "hotspot", "hotspots", "suspicious"
]);
// Common English function words filtered out of the fallback word scan.
var STOP_WORDS2 = new Set([
  "a", "an", "the", "is", "are", "was", "were", "be", "been", "being",
  "have", "has", "had", "do", "does", "did", "will", "would", "could",
  "should", "may", "might", "shall", "can", "need", "must",
  "i", "me", "my", "we", "our", "you", "your", "he", "she", "it", "its",
  "they", "them", "their", "this", "that", "these", "those",
  "and", "or", "but", "if", "then", "else", "when", "while",
  "for", "of", "at", "by", "to", "in", "on", "with", "from", "up", "out",
  "not", "no", "nor", "so", "too", "very", "just", "also",
  "what", "who", "how", "which", "where", "why", "there", "here",
  "any", "some", "each", "than", "like",
  "get", "give", "go", "make", "see", "know", "take"
]);
// PascalCase or camelCase identifiers.
var PASCAL_OR_CAMEL_RE = /\b([A-Z][a-z]+[A-Za-z]*[a-z][A-Za-z]*|[a-z]+[A-Z][A-Za-z]*)\b/g;
// Relative file paths ending in a dot-extension (e.g. src/foo/bar.ts).
var FILE_PATH_RE = /(?:\.\/|[a-zA-Z0-9_-]+\/)[a-zA-Z0-9_\-./]+\.[a-zA-Z]{1,10}/g;
// Anything wrapped in single or double quotes.
var QUOTED_RE = /["']([^"']+)["']/g;
var EntityExtractor = class {
  /**
   * Extract candidate entity mentions from a natural language query, in
   * priority order: quoted phrases, Pascal/camelCase identifiers, file
   * paths, then any remaining words that are not stop words, intent
   * keywords, or all-caps acronyms. Results are deduplicated.
   *
   * @param query - The natural language query to extract entities from
   * @returns Array of raw entity strings in priority order
   */
  extract(query) {
    const text = query.trim();
    if (text.length === 0) return [];
    const ordered = [];
    const seen = new Set();
    const push = (candidate) => {
      if (seen.has(candidate)) return;
      seen.add(candidate);
      ordered.push(candidate);
    };
    // 1. Quoted phrases take top priority.
    const quoted = new Set();
    for (const m of text.matchAll(QUOTED_RE)) {
      const inner = m[1].trim();
      if (inner.length > 0) {
        push(inner);
        quoted.add(inner);
      }
    }
    // 2. Pascal/camelCase identifiers not already captured by quoting.
    const cased = new Set();
    for (const m of text.matchAll(PASCAL_OR_CAMEL_RE)) {
      if (!quoted.has(m[0])) {
        push(m[0]);
        cased.add(m[0]);
      }
    }
    // 3. File paths.
    const paths = new Set();
    for (const m of text.matchAll(FILE_PATH_RE)) {
      push(m[0]);
      paths.add(m[0]);
    }
    // 4. Fallback: leftover words minus noise. Individual words inside
    //    quoted phrases also count as already consumed.
    const consumed = new Set([...quoted, ...cased, ...paths]);
    for (const phrase of quoted) {
      for (const word of phrase.split(/\s+/)) {
        if (word.length > 0) consumed.add(word);
      }
    }
    for (const token of text.split(/\s+/)) {
      const cleaned = token.replace(/^[^a-zA-Z0-9]+|[^a-zA-Z0-9]+$/g, "");
      if (cleaned.length === 0 || consumed.has(cleaned)) continue;
      const lower = cleaned.toLowerCase();
      if (STOP_WORDS2.has(lower) || INTENT_KEYWORDS.has(lower)) continue;
      if (/^[A-Z]+$/.test(cleaned)) continue; // all-caps acronyms (e.g. "API")
      push(cleaned);
    }
    return ordered;
  }
};
2963
// src/nlq/EntityResolver.ts
var EntityResolver = class {
  store;
  fusion;
  /**
   * @param store - Graph store used for exact-name and file-path lookups
   * @param fusion - Optional fusion search layer for fuzzy resolution
   */
  constructor(store, fusion) {
    this.store = store;
    this.fusion = fusion;
  }
  /**
   * Resolve raw entity strings to graph nodes. Strings that cannot be
   * resolved are silently dropped from the result.
   *
   * @param raws - Raw entity strings from EntityExtractor
   * @returns A ResolvedEntity for each successfully resolved string
   */
  resolve(raws) {
    const resolved = [];
    for (const raw of raws) {
      const entity = this.resolveOne(raw);
      if (entity !== undefined) resolved.push(entity);
    }
    return resolved;
  }
  // Resolution strategy, in decreasing order of confidence:
  // 1. exact node-name match (confidence 1.0)
  // 2. fusion search, when available and the top hit scores above 0.5
  // 3. substring match against file paths/basenames (confidence 0.6),
  //    only for strings of length >= 3
  resolveOne(raw) {
    const byName = this.store.findNodes({ name: raw });
    if (byName.length > 0) {
      const node = byName[0];
      return { raw, nodeId: node.id, node, confidence: 1, method: "exact" };
    }
    if (this.fusion) {
      const hits = this.fusion.search(raw, 5);
      if (hits.length > 0 && hits[0].score > 0.5) {
        const best = hits[0];
        return {
          raw,
          nodeId: best.nodeId,
          node: best.node,
          confidence: best.score,
          method: "fusion"
        };
      }
    }
    // Too short to match paths meaningfully.
    if (raw.length < 3) return undefined;
    const pathLike = raw.includes("/");
    for (const node of this.store.findNodes({ type: "file" })) {
      if (!node.path) continue;
      const basename = node.path.split("/").pop() ?? "";
      const matches =
        (pathLike && node.path.includes(raw)) ||
        basename.includes(raw) ||
        (raw.length >= 4 && node.path.includes(raw));
      if (matches) {
        return { raw, nodeId: node.id, node, confidence: 0.6, method: "path" };
      }
    }
    return undefined;
  }
};
3032
// src/nlq/ResponseFormatter.ts
var ResponseFormatter = class {
  /**
   * Render the raw result of a graph operation as a one-line, human-readable
   * summary. The expected shape of `data` depends on `intent`; missing or
   * malformed data degrades gracefully to zero counts.
   *
   * @param intent - The classified intent
   * @param entities - Resolved entities from the query
   * @param data - Raw result data (shape varies per intent)
   * @param query - Original natural language query (optional)
   * @returns Human-readable summary string
   */
  format(intent, entities, data, query) {
    if (data == null) {
      return "No results found.";
    }
    const entityName = entities[0]?.raw ?? "the target";
    switch (intent) {
      case "impact":
        return this.formatImpact(entityName, data);
      case "find":
        return this.formatFind(data, query);
      case "relationships":
        return this.formatRelationships(entityName, entities, data);
      case "explain":
        return this.formatExplain(entityName, entities, data);
      case "anomaly":
        return this.formatAnomaly(data);
      default:
        return `Processed results for "${entityName}".`;
    }
  }
  // Counts the code/test/doc buckets produced by groupNodesByImpact.
  formatImpact(entityName, data) {
    const codeCount = this.safeArrayLength(data?.code);
    const testCount = this.safeArrayLength(data?.tests);
    const docCount = this.safeArrayLength(data?.docs);
    return `Changing **${entityName}** affects ${this.p(codeCount, "code file")}, ${this.p(testCount, "test")}, and ${this.p(docCount, "doc")}.`;
  }
  formatFind(data, query) {
    const hits = this.p(Array.isArray(data) ? data.length : 0, "match", "matches");
    return query ? `Found ${hits} for "${query}".` : `Found ${hits}.`;
  }
  // Counts edges touching the first resolved entity, per direction.
  formatRelationships(entityName, entities, data) {
    const edges = Array.isArray(data?.edges) ? data.edges : [];
    const rootId = entities[0]?.nodeId ?? "";
    const outbound = edges.filter((edge) => edge.from === rootId).length;
    const inbound = edges.filter((edge) => edge.to === rootId).length;
    return `**${entityName}** has ${outbound} outbound and ${inbound} inbound relationships.`;
  }
  formatExplain(entityName, entities, data) {
    const context = Array.isArray(data?.context) ? data.context : [];
    const first = entities[0];
    const nodeType = first?.node.type ?? "node";
    const nodePath = first?.node.path ?? "unknown";
    const head = context[0];
    const neighborCount = head && Array.isArray(head.nodes) ? head.nodes.length : 0;
    return `**${entityName}** is a ${nodeType} at \`${nodePath}\`. Connected to ${neighborCount} nodes.`;
  }
  // Summarizes outliers + articulation points, naming up to three of them.
  formatAnomaly(data) {
    const outliers = Array.isArray(data?.statisticalOutliers) ? data.statisticalOutliers : [];
    const bottlenecks = Array.isArray(data?.articulationPoints) ? data.articulationPoints : [];
    const total = outliers.length + bottlenecks.length;
    if (total === 0) {
      return "Found 0 anomalies.";
    }
    const highlights = [
      ...outliers.slice(0, 2).map((o) => o.nodeId ?? "unknown outlier"),
      ...bottlenecks.slice(0, 1).map((a) => a.nodeId ?? "unknown bottleneck")
    ].join(", ");
    return `Found ${this.p(total, "anomaly", "anomalies")}: ${highlights}.`;
  }
  safeArrayLength(value) {
    return Array.isArray(value) ? value.length : 0;
  }
  // Simple pluralizer: default plural appends "s" to the singular.
  p(count, singular, plural) {
    return `${count} ${count === 1 ? singular : plural ?? singular + "s"}`;
  }
};
3127
// src/nlq/index.ts
// Intents that cannot run without at least one resolved entity.
var ENTITY_REQUIRED_INTENTS = new Set(["impact", "relationships", "explain"]);
// Stateless pipeline helpers shared across calls.
var classifier = new IntentClassifier();
var extractor = new EntityExtractor();
var formatter = new ResponseFormatter();
/**
 * Answer a natural language question about the graph.
 * Pipeline: classify intent -> extract + resolve entities -> execute the
 * matching graph operation -> format a human-readable summary.
 * Low-confidence classifications (< 0.3) and entity-required intents with
 * no resolvable entities return early with guidance instead of data.
 *
 * @param store - Graph store to query
 * @param question - Natural language question
 */
async function askGraph(store, question) {
  const fusion = new FusionLayer(store);
  const resolver = new EntityResolver(store, fusion);
  const classification = classifier.classify(question);
  const { intent, confidence: intentConfidence } = classification;
  if (intentConfidence < 0.3) {
    return {
      intent,
      intentConfidence,
      entities: [],
      summary: "I'm not sure what you're asking. Try rephrasing your question.",
      data: null,
      suggestions: [
        'Try "what breaks if I change <name>?" for impact analysis',
        'Try "where is <name>?" to find entities',
        'Try "what calls <name>?" for relationships',
        'Try "what is <name>?" for explanations',
        'Try "what looks wrong?" for anomaly detection'
      ]
    };
  }
  const entities = resolver.resolve(extractor.extract(question));
  if (ENTITY_REQUIRED_INTENTS.has(intent) && entities.length === 0) {
    return {
      intent,
      intentConfidence,
      entities: [],
      summary: "Could not find any matching nodes in the graph for your query. Try using exact class names, function names, or file paths.",
      data: null
    };
  }
  let data;
  try {
    data = executeOperation(store, intent, entities, question, fusion);
  } catch (err) {
    const detail = err instanceof Error ? err.message : String(err);
    return {
      intent,
      intentConfidence,
      entities,
      summary: `An error occurred while querying the graph: ${detail}`,
      data: null
    };
  }
  return {
    intent,
    intentConfidence,
    entities,
    summary: formatter.format(intent, entities, data, question),
    data
  };
}
// Run the graph operation matching the classified intent. The shape of the
// returned data varies per intent; ResponseFormatter renders each shape.
function executeOperation(store, intent, entities, question, fusion) {
  const cql = new ContextQL(store);
  switch (intent) {
    case "impact": {
      // Walk 3 hops in both directions, then bucket what was reached.
      const rootId = entities[0].nodeId;
      const result = cql.execute({
        rootNodeIds: [rootId],
        bidirectional: true,
        maxDepth: 3
      });
      return groupNodesByImpact(result.nodes, rootId);
    }
    case "find":
      return fusion.search(question, 10);
    case "relationships": {
      // Immediate neighborhood only.
      const rootId = entities[0].nodeId;
      const result = cql.execute({
        rootNodeIds: [rootId],
        bidirectional: true,
        maxDepth: 1
      });
      return { nodes: result.nodes, edges: result.edges };
    }
    case "explain": {
      const searchResults = fusion.search(question, 10);
      // Prefer the resolved entity as root; otherwise use the top search hits.
      const rootIds = entities.length > 0
        ? [entities[0].nodeId]
        : searchResults.slice(0, 3).map((r) => r.nodeId);
      const context = rootIds.map((rootId) => {
        const expanded = cql.execute({ rootNodeIds: [rootId], maxDepth: 2 });
        return {
          rootNode: rootId,
          score: searchResults.find((r) => r.nodeId === rootId)?.score ?? 1,
          nodes: expanded.nodes,
          edges: expanded.edges
        };
      });
      return { searchResults, context };
    }
    case "anomaly":
      return new GraphAnomalyAdapter(store).detect();
    default:
      return null;
  }
}
2514
3236
  // src/context/Assembler.ts
2515
3237
  var PHASE_NODE_TYPES = {
2516
3238
  implement: ["file", "function", "class", "method", "interface", "variable"],
@@ -3133,6 +3855,447 @@ var GraphFeedbackAdapter = class {
3133
3855
  }
3134
3856
  };
3135
3857
 
3858
// src/independence/TaskIndependenceAnalyzer.ts
var DEFAULT_EDGE_TYPES = ["imports", "calls", "references"];
var TaskIndependenceAnalyzer = class {
  store;
  constructor(store) {
    this.store = store;
  }
  /**
   * Analyze a set of tasks (each a list of files it touches) for pairwise
   * independence. When a graph store is available and depth > 0, each
   * task's file set is expanded through graph edges so transitive overlaps
   * are detected ("graph-expanded"); otherwise only direct file overlaps
   * are checked ("file-only").
   *
   * @param params - { tasks, depth?, edgeTypes? }
   * @returns { tasks, analysisLevel, depth, pairs, groups, verdict }
   * @throws Error on fewer than 2 tasks, duplicate ids, or empty file lists
   */
  analyze(params) {
    const { tasks } = params;
    const depth = params.depth ?? 1;
    const edgeTypes = params.edgeTypes ?? DEFAULT_EDGE_TYPES;
    this.validate(tasks);
    const useGraph = this.store != null && depth > 0;
    const analysisLevel = useGraph ? "graph-expanded" : "file-only";
    const originalFiles = new Map();
    const expandedFiles = new Map();
    for (const task of tasks) {
      originalFiles.set(task.id, new Set(task.files));
      expandedFiles.set(
        task.id,
        useGraph ? this.expandViaGraph(task.files, depth, edgeTypes) : new Map()
      );
    }
    const taskIds = tasks.map((t) => t.id);
    const pairs = [];
    for (let a = 0; a < taskIds.length; a++) {
      for (let b = a + 1; b < taskIds.length; b++) {
        const idA = taskIds[a];
        const idB = taskIds[b];
        pairs.push(
          this.computePairOverlap(
            idA,
            idB,
            originalFiles.get(idA),
            originalFiles.get(idB),
            expandedFiles.get(idA),
            expandedFiles.get(idB)
          )
        );
      }
    }
    const groups = this.buildGroups(taskIds, pairs);
    const verdict = this.generateVerdict(taskIds, groups, analysisLevel);
    return { tasks: taskIds, analysisLevel, depth, pairs, groups, verdict };
  }
  // --- Private methods ---
  // Reject task sets that cannot be analyzed: fewer than two tasks,
  // duplicate IDs, or a task with no files.
  validate(tasks) {
    if (tasks.length < 2) {
      throw new Error("At least 2 tasks are required for independence analysis");
    }
    const ids = new Set();
    for (const task of tasks) {
      if (ids.has(task.id)) {
        throw new Error(`Duplicate task ID: "${task.id}"`);
      }
      ids.add(task.id);
      if (task.files.length === 0) {
        throw new Error(`Task "${task.id}" has an empty files array`);
      }
    }
  }
  // Expand a task's files through the graph. Returns a map of reachable
  // file path -> the task file it was first reached from; files already in
  // the task's own set are excluded.
  expandViaGraph(files, depth, edgeTypes) {
    const reached = new Map();
    const cql = new ContextQL(this.store);
    const ownFiles = new Set(files);
    for (const file of files) {
      const rootId = `file:${file}`;
      if (!this.store.getNode(rootId)) continue;
      const traversal = cql.execute({
        rootNodeIds: [rootId],
        maxDepth: depth,
        includeEdges: edgeTypes,
        includeTypes: ["file"]
      });
      for (const found of traversal.nodes) {
        const filePath = found.path ?? found.id.replace(/^file:/, "");
        if (!ownFiles.has(filePath) && !reached.has(filePath)) {
          reached.set(filePath, file);
        }
      }
    }
    return reached;
  }
  // Collect overlapping files for one task pair. "direct" = both tasks edit
  // the file; "transitive" = the overlap appears only via graph expansion.
  computePairOverlap(idA, idB, origA, origB, expandedA, expandedB) {
    const overlaps = [];
    const direct = new Set();
    for (const file of origA) {
      if (origB.has(file)) {
        overlaps.push({ file, type: "direct" });
        direct.add(file);
      }
    }
    const transitive = new Set();
    const addTransitive = (file, via) => {
      if (direct.has(file) || transitive.has(file)) return;
      transitive.add(file);
      overlaps.push({ file, type: "transitive", via });
    };
    for (const [file, via] of expandedA) {
      if (origB.has(file)) addTransitive(file, via);
    }
    for (const [file, via] of expandedB) {
      if (origA.has(file)) addTransitive(file, via);
    }
    for (const [file, via] of expandedA) {
      if (expandedB.has(file)) addTransitive(file, via);
    }
    return {
      taskA: idA,
      taskB: idB,
      independent: overlaps.length === 0,
      overlaps
    };
  }
  // Union-find over tasks: every conflicting pair is merged into one group.
  // Group order follows first appearance in taskIds.
  buildGroups(taskIds, pairs) {
    const parent = new Map();
    const rank = new Map();
    for (const id of taskIds) {
      parent.set(id, id);
      rank.set(id, 0);
    }
    const find = (x) => {
      let root = x;
      while (parent.get(root) !== root) root = parent.get(root);
      // Path compression: repoint every node on the walk at the root.
      let node = x;
      while (node !== root) {
        const up = parent.get(node);
        parent.set(node, root);
        node = up;
      }
      return root;
    };
    const union = (a, b) => {
      const rootA = find(a);
      const rootB = find(b);
      if (rootA === rootB) return;
      const rankA = rank.get(rootA);
      const rankB = rank.get(rootB);
      if (rankA < rankB) {
        parent.set(rootA, rootB);
      } else {
        parent.set(rootB, rootA);
        if (rankA === rankB) rank.set(rootA, rankA + 1);
      }
    };
    for (const pair of pairs) {
      if (!pair.independent) union(pair.taskA, pair.taskB);
    }
    const byRoot = new Map();
    for (const id of taskIds) {
      const root = find(id);
      const members = byRoot.get(root);
      if (members) {
        members.push(id);
      } else {
        byRoot.set(root, [id]);
      }
    }
    return [...byRoot.values()];
  }
  // Human-readable summary of the grouping result.
  generateVerdict(taskIds, groups, analysisLevel) {
    const total = taskIds.length;
    const groupCount = groups.length;
    let verdict;
    if (groupCount === 1) {
      verdict = `All ${total} tasks conflict \u2014 must run serially.`;
    } else if (groupCount === total) {
      verdict = `All ${total} tasks are independent \u2014 can all run in parallel.`;
    } else {
      verdict = `${total} tasks form ${groupCount} independent groups \u2014 ${groupCount} parallel waves possible.`;
    }
    if (analysisLevel === "file-only") {
      verdict += " Graph unavailable \u2014 transitive dependencies not checked.";
    }
    return verdict;
  }
};
+ // src/independence/ConflictPredictor.ts
4057
+ var ConflictPredictor = class {
4058
+ store;
4059
+ constructor(store) {
4060
+ this.store = store;
4061
+ }
4062
+ predict(params) {
4063
+ const analyzer = new TaskIndependenceAnalyzer(this.store);
4064
+ const result = analyzer.analyze(params);
4065
+ const churnMap = /* @__PURE__ */ new Map();
4066
+ const couplingMap = /* @__PURE__ */ new Map();
4067
+ let churnThreshold = Infinity;
4068
+ let couplingThreshold = Infinity;
4069
+ if (this.store != null) {
4070
+ const complexityResult = new GraphComplexityAdapter(this.store).computeComplexityHotspots();
4071
+ for (const hotspot of complexityResult.hotspots) {
4072
+ const existing = churnMap.get(hotspot.file);
4073
+ if (existing === void 0 || hotspot.changeFrequency > existing) {
4074
+ churnMap.set(hotspot.file, hotspot.changeFrequency);
4075
+ }
4076
+ }
4077
+ const couplingResult = new GraphCouplingAdapter(this.store).computeCouplingData();
4078
+ for (const fileData of couplingResult.files) {
4079
+ couplingMap.set(fileData.file, fileData.fanIn + fileData.fanOut);
4080
+ }
4081
+ churnThreshold = this.computePercentile(Array.from(churnMap.values()), 80);
4082
+ couplingThreshold = this.computePercentile(Array.from(couplingMap.values()), 80);
4083
+ }
4084
+ const conflicts = [];
4085
+ for (const pair of result.pairs) {
4086
+ if (pair.independent) continue;
4087
+ const { severity, reason, mitigation } = this.classifyPair(
4088
+ pair.taskA,
4089
+ pair.taskB,
4090
+ pair.overlaps,
4091
+ churnMap,
4092
+ couplingMap,
4093
+ churnThreshold,
4094
+ couplingThreshold
4095
+ );
4096
+ conflicts.push({
4097
+ taskA: pair.taskA,
4098
+ taskB: pair.taskB,
4099
+ severity,
4100
+ reason,
4101
+ mitigation,
4102
+ overlaps: pair.overlaps
4103
+ });
4104
+ }
4105
+ const taskIds = result.tasks;
4106
+ const groups = this.buildHighSeverityGroups(taskIds, conflicts);
4107
+ const regrouped = !this.groupsEqual(result.groups, groups);
4108
+ let highCount = 0;
4109
+ let mediumCount = 0;
4110
+ let lowCount = 0;
4111
+ for (const c of conflicts) {
4112
+ if (c.severity === "high") highCount++;
4113
+ else if (c.severity === "medium") mediumCount++;
4114
+ else lowCount++;
4115
+ }
4116
+ const verdict = this.generateVerdict(
4117
+ taskIds,
4118
+ groups,
4119
+ result.analysisLevel,
4120
+ highCount,
4121
+ mediumCount,
4122
+ lowCount,
4123
+ regrouped
4124
+ );
4125
+ return {
4126
+ tasks: taskIds,
4127
+ analysisLevel: result.analysisLevel,
4128
+ depth: result.depth,
4129
+ conflicts,
4130
+ groups,
4131
+ summary: {
4132
+ high: highCount,
4133
+ medium: mediumCount,
4134
+ low: lowCount,
4135
+ regrouped
4136
+ },
4137
+ verdict
4138
+ };
4139
+ }
4140
+ // --- Private helpers ---
4141
+ classifyPair(taskA, taskB, overlaps, churnMap, couplingMap, churnThreshold, couplingThreshold) {
4142
+ let maxSeverity = "low";
4143
+ let primaryReason = "";
4144
+ let primaryMitigation = "";
4145
+ for (const overlap of overlaps) {
4146
+ let overlapSeverity;
4147
+ let reason;
4148
+ let mitigation;
4149
+ if (overlap.type === "direct") {
4150
+ overlapSeverity = "high";
4151
+ reason = `Both tasks write to ${overlap.file}`;
4152
+ mitigation = `Serialize: run ${taskA} before ${taskB}`;
4153
+ } else {
4154
+ const churn = churnMap.get(overlap.file);
4155
+ const coupling = couplingMap.get(overlap.file);
4156
+ const via = overlap.via ?? "unknown";
4157
+ if (churn !== void 0 && churn >= churnThreshold && churnThreshold !== Infinity) {
4158
+ overlapSeverity = "medium";
4159
+ reason = `Transitive overlap on high-churn file ${overlap.file} (via ${via})`;
4160
+ mitigation = `Review: ${overlap.file} changes frequently \u2014 coordinate edits between ${taskA} and ${taskB}`;
4161
+ } else if (coupling !== void 0 && coupling >= couplingThreshold && couplingThreshold !== Infinity) {
4162
+ overlapSeverity = "medium";
4163
+ reason = `Transitive overlap on highly-coupled file ${overlap.file} (via ${via})`;
4164
+ mitigation = `Review: ${overlap.file} has high coupling \u2014 coordinate edits between ${taskA} and ${taskB}`;
4165
+ } else {
4166
+ overlapSeverity = "low";
4167
+ reason = `Transitive overlap on ${overlap.file} (via ${via}) \u2014 low risk`;
4168
+ mitigation = `Info: transitive overlap unlikely to cause conflicts`;
4169
+ }
4170
+ }
4171
+ if (this.severityRank(overlapSeverity) > this.severityRank(maxSeverity)) {
4172
+ maxSeverity = overlapSeverity;
4173
+ primaryReason = reason;
4174
+ primaryMitigation = mitigation;
4175
+ } else if (primaryReason === "") {
4176
+ primaryReason = reason;
4177
+ primaryMitigation = mitigation;
4178
+ }
4179
+ }
4180
+ return { severity: maxSeverity, reason: primaryReason, mitigation: primaryMitigation };
4181
+ }
4182
+ severityRank(severity) {
4183
+ switch (severity) {
4184
+ case "high":
4185
+ return 3;
4186
+ case "medium":
4187
+ return 2;
4188
+ case "low":
4189
+ return 1;
4190
+ }
4191
+ }
4192
+ computePercentile(values, percentile) {
4193
+ if (values.length === 0) return Infinity;
4194
+ const sorted = [...values].sort((a, b) => a - b);
4195
+ const index = Math.ceil(percentile / 100 * sorted.length) - 1;
4196
+ return sorted[Math.min(index, sorted.length - 1)];
4197
+ }
4198
+ buildHighSeverityGroups(taskIds, conflicts) {
4199
+ const parent = /* @__PURE__ */ new Map();
4200
+ const rank = /* @__PURE__ */ new Map();
4201
+ for (const id of taskIds) {
4202
+ parent.set(id, id);
4203
+ rank.set(id, 0);
4204
+ }
4205
+ const find = (x) => {
4206
+ let root = x;
4207
+ while (parent.get(root) !== root) {
4208
+ root = parent.get(root);
4209
+ }
4210
+ let current = x;
4211
+ while (current !== root) {
4212
+ const next = parent.get(current);
4213
+ parent.set(current, root);
4214
+ current = next;
4215
+ }
4216
+ return root;
4217
+ };
4218
+ const union = (a, b) => {
4219
+ const rootA = find(a);
4220
+ const rootB = find(b);
4221
+ if (rootA === rootB) return;
4222
+ const rankA = rank.get(rootA);
4223
+ const rankB = rank.get(rootB);
4224
+ if (rankA < rankB) {
4225
+ parent.set(rootA, rootB);
4226
+ } else if (rankA > rankB) {
4227
+ parent.set(rootB, rootA);
4228
+ } else {
4229
+ parent.set(rootB, rootA);
4230
+ rank.set(rootA, rankA + 1);
4231
+ }
4232
+ };
4233
+ for (const conflict of conflicts) {
4234
+ if (conflict.severity === "high") {
4235
+ union(conflict.taskA, conflict.taskB);
4236
+ }
4237
+ }
4238
+ const groupMap = /* @__PURE__ */ new Map();
4239
+ for (const id of taskIds) {
4240
+ const root = find(id);
4241
+ let group = groupMap.get(root);
4242
+ if (group === void 0) {
4243
+ group = [];
4244
+ groupMap.set(root, group);
4245
+ }
4246
+ group.push(id);
4247
+ }
4248
+ return Array.from(groupMap.values());
4249
+ }
4250
+ groupsEqual(a, b) {
4251
+ if (a.length !== b.length) return false;
4252
+ const normalize2 = (groups) => groups.map((g) => [...g].sort()).sort((x, y) => {
4253
+ const xFirst = x[0];
4254
+ const yFirst = y[0];
4255
+ return xFirst.localeCompare(yFirst);
4256
+ });
4257
+ const normA = normalize2(a);
4258
+ const normB = normalize2(b);
4259
+ for (let i = 0; i < normA.length; i++) {
4260
+ const groupA = normA[i];
4261
+ const groupB = normB[i];
4262
+ if (groupA.length !== groupB.length) return false;
4263
+ for (let j = 0; j < groupA.length; j++) {
4264
+ if (groupA[j] !== groupB[j]) return false;
4265
+ }
4266
+ }
4267
+ return true;
4268
+ }
4269
+ generateVerdict(taskIds, groups, analysisLevel, highCount, mediumCount, lowCount, regrouped) {
4270
+ const total = taskIds.length;
4271
+ const groupCount = groups.length;
4272
+ const parts = [];
4273
+ const conflictParts = [];
4274
+ if (highCount > 0) conflictParts.push(`${highCount} high`);
4275
+ if (mediumCount > 0) conflictParts.push(`${mediumCount} medium`);
4276
+ if (lowCount > 0) conflictParts.push(`${lowCount} low`);
4277
+ if (conflictParts.length === 0) {
4278
+ parts.push(`${total} tasks have no conflicts \u2014 can all run in parallel.`);
4279
+ } else {
4280
+ parts.push(`${total} tasks have ${conflictParts.join(", ")} severity conflicts.`);
4281
+ }
4282
+ if (groupCount === 1) {
4283
+ parts.push(`All tasks must run serially.`);
4284
+ } else if (groupCount === total) {
4285
+ parts.push(`${groupCount} parallel groups (all independent).`);
4286
+ } else {
4287
+ parts.push(`${groupCount} parallel groups possible.`);
4288
+ }
4289
+ if (regrouped) {
4290
+ parts.push(`Tasks were regrouped due to high-severity conflicts.`);
4291
+ }
4292
+ if (analysisLevel === "file-only") {
4293
+ parts.push(`Graph unavailable \u2014 severity based on file overlaps only.`);
4294
+ }
4295
+ return parts.join(" ");
4296
+ }
4297
+ };
4298
+
3136
4299
  // src/index.ts
3137
4300
  var VERSION = "0.2.0";
3138
4301
  // Annotate the CommonJS export names for ESM import in node:
@@ -3141,11 +4304,14 @@ var VERSION = "0.2.0";
3141
4304
  CIConnector,
3142
4305
  CURRENT_SCHEMA_VERSION,
3143
4306
  CodeIngestor,
4307
+ ConflictPredictor,
3144
4308
  ConfluenceConnector,
3145
4309
  ContextQL,
3146
4310
  DesignConstraintAdapter,
3147
4311
  DesignIngestor,
3148
4312
  EDGE_TYPES,
4313
+ EntityExtractor,
4314
+ EntityResolver,
3149
4315
  FusionLayer,
3150
4316
  GitIngestor,
3151
4317
  GraphAnomalyAdapter,
@@ -3157,15 +4323,21 @@ var VERSION = "0.2.0";
3157
4323
  GraphFeedbackAdapter,
3158
4324
  GraphNodeSchema,
3159
4325
  GraphStore,
4326
+ INTENTS,
4327
+ IntentClassifier,
3160
4328
  JiraConnector,
3161
4329
  KnowledgeIngestor,
3162
4330
  NODE_TYPES,
3163
4331
  OBSERVABILITY_TYPES,
4332
+ ResponseFormatter,
3164
4333
  SlackConnector,
3165
4334
  SyncManager,
4335
+ TaskIndependenceAnalyzer,
3166
4336
  TopologicalLinker,
3167
4337
  VERSION,
3168
4338
  VectorStore,
4339
+ askGraph,
4340
+ groupNodesByImpact,
3169
4341
  linkToCode,
3170
4342
  loadGraph,
3171
4343
  project,