dino-spec 7.5.0 → 8.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/README.md +6 -2
  2. package/dist/core/agents/index.d.ts +3 -1
  3. package/dist/core/agents/index.d.ts.map +1 -1
  4. package/dist/core/agents/index.js +4 -1
  5. package/dist/core/agents/index.js.map +1 -1
  6. package/dist/core/agents/recursive/aggregator.d.ts +19 -0
  7. package/dist/core/agents/recursive/aggregator.d.ts.map +1 -0
  8. package/dist/core/agents/recursive/aggregator.js +556 -0
  9. package/dist/core/agents/recursive/aggregator.js.map +1 -0
  10. package/dist/core/agents/recursive/budget-allocator.d.ts +77 -0
  11. package/dist/core/agents/recursive/budget-allocator.d.ts.map +1 -0
  12. package/dist/core/agents/recursive/budget-allocator.js +386 -0
  13. package/dist/core/agents/recursive/budget-allocator.js.map +1 -0
  14. package/dist/core/agents/recursive/checkpoint-chain.d.ts +61 -0
  15. package/dist/core/agents/recursive/checkpoint-chain.d.ts.map +1 -0
  16. package/dist/core/agents/recursive/checkpoint-chain.js +444 -0
  17. package/dist/core/agents/recursive/checkpoint-chain.js.map +1 -0
  18. package/dist/core/agents/recursive/complexity-analyzer.d.ts +33 -0
  19. package/dist/core/agents/recursive/complexity-analyzer.d.ts.map +1 -0
  20. package/dist/core/agents/recursive/complexity-analyzer.js +370 -0
  21. package/dist/core/agents/recursive/complexity-analyzer.js.map +1 -0
  22. package/dist/core/agents/recursive/index.d.ts +17 -0
  23. package/dist/core/agents/recursive/index.d.ts.map +1 -0
  24. package/dist/core/agents/recursive/index.js +24 -0
  25. package/dist/core/agents/recursive/index.js.map +1 -0
  26. package/dist/core/agents/recursive/orchestrator.d.ts +67 -0
  27. package/dist/core/agents/recursive/orchestrator.d.ts.map +1 -0
  28. package/dist/core/agents/recursive/orchestrator.js +638 -0
  29. package/dist/core/agents/recursive/orchestrator.js.map +1 -0
  30. package/dist/core/agents/recursive/spawn-controller.d.ts +60 -0
  31. package/dist/core/agents/recursive/spawn-controller.d.ts.map +1 -0
  32. package/dist/core/agents/recursive/spawn-controller.js +333 -0
  33. package/dist/core/agents/recursive/spawn-controller.js.map +1 -0
  34. package/dist/core/agents/recursive/types.d.ts +528 -0
  35. package/dist/core/agents/recursive/types.d.ts.map +1 -0
  36. package/dist/core/agents/recursive/types.js +46 -0
  37. package/dist/core/agents/recursive/types.js.map +1 -0
  38. package/dist/mcp/server.d.ts +10 -1
  39. package/dist/mcp/server.d.ts.map +1 -1
  40. package/dist/mcp/server.js +320 -2
  41. package/dist/mcp/server.js.map +1 -1
  42. package/package.json +1 -1
package/README.md CHANGED
@@ -10,15 +10,16 @@
10
10
  <__|-|__|
11
11
  ```
12
12
 
13
- **dino-spec** brings structured, context-aware development to [Claude Code](https://claude.ai/code). It automatically maintains session state, provides 23 MCP tools, and enables intelligent context retrieval.
13
+ **dino-spec** brings structured, context-aware development to [Claude Code](https://claude.ai/code). It automatically maintains session state, provides 33 MCP tools, and enables intelligent context retrieval.
14
14
 
15
15
  ## Features
16
16
 
17
- - **MCP Integration** - 23 tools + 19 resources auto-configured during init
17
+ - **MCP Integration** - 33 tools + 19 resources auto-configured during init
18
18
  - **Session State** - Automatic tracking of focus, test/build/lint status, blockers
19
19
  - **Persistent Memory** - Decisions, patterns, learnings survive across sessions
20
20
  - **Workflow Skills** - Phases from discovery to completion (`/dino.*`)
21
21
  - **Task Delegation** - Specialized agents with context handoffs
22
+ - **Recursive Agents** - Self-spawning agents with complexity-based depth control (v8.0.0)
22
23
  - **Recursive Retrieval** - RLM-style query decomposition with quality feedback
23
24
  - **Dynamic Partitioning** - Intelligent codebase segmentation (directory, feature, dependency, time)
24
25
  - **Context REPL** - Declarative DSL for model-driven exploration
@@ -99,6 +100,9 @@ Key tools available via Claude Code:
99
100
  | `dino_context_repl` | Declarative context queries |
100
101
  | `dino_recursive_query` | RLM-style retrieval |
101
102
  | `dino_partition` | Intelligent codebase partitioning |
103
+ | `dino_agent_analyze` | Analyze task complexity for spawn decisions |
104
+ | `dino_agent_spawn_decision` | Get recursive agent spawn recommendation |
105
+ | `dino_agent_tree` | Display agent execution tree |
102
106
  | `dino_task_create` | Create task in registry |
103
107
  | `dino_plan_create` | Create implementation plan |
104
108
 
@@ -1,14 +1,16 @@
1
1
  /**
2
- * Agent Coordination Module - v7.2.0
2
+ * Agent Coordination Module - v8.0.0
3
3
  *
4
4
  * Multi-agent coordination features:
5
5
  * - Action space masking (tool availability by phase/role)
6
6
  * - Parallel agent orchestration (task coordination)
7
7
  * - Agent context budgets (token management)
8
+ * - Recursive agents (v8.0.0): self-spawning agents with depth control
8
9
  */
9
10
  export type { ToolCategory, ToolDefinition, MaskingSessionState, MaskingTaskState, AgentMaskingContext, ToolAvailability, ActionMask, ConflictStrategy, ParallelExecutionRequest, AgentSpawnContext, AgentExecutionResult, ConflictType, Conflict, ConflictDetection, ParallelExecutionResult, ExecutionWave, ParallelExecutionPlan, AgentBudgetStatus, TaskComplexity, AgentBudget, BudgetAllocationRequest, BudgetWarning, BudgetHandoff, BudgetRecovery, BudgetStats, } from './types.js';
10
11
  export { TOOL_DEFINITIONS, PHASE_RESTRICTIONS, ROLE_PERMISSIONS, checkToolAvailability, generateActionMask, getRecommendedTools, createDefaultContext, formatActionMask, } from './action-mask.js';
11
12
  export { ROLE_BASE_BUDGETS, COMPLEXITY_MULTIPLIERS, HANDOFF_RESERVE_PERCENT, MIN_USEFUL_BUDGET, AgentBudgetManager, budgetManager, calculateRecommendedBudget, formatBudget, hasSufficientBudget, } from './budgets.js';
12
13
  export { getChildTasks, identifyParallelizableTasks, createParallelPlan, createSpawnContext, formatSpawnContext, detectConflicts, resolveConflicts, synthesizeResults, prepareParallelExecution, completeParallelExecution, formatParallelPlan, } from './orchestrator.js';
13
14
  export { runBootstrap, type InitContext } from './initializer.js';
15
+ export * from './recursive/index.js';
14
16
  //# sourceMappingURL=index.d.ts.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/core/agents/index.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAGH,YAAY,EAEV,YAAY,EACZ,cAAc,EAEd,mBAAmB,EACnB,gBAAgB,EAChB,mBAAmB,EACnB,gBAAgB,EAChB,UAAU,EAEV,gBAAgB,EAChB,wBAAwB,EACxB,iBAAiB,EACjB,oBAAoB,EACpB,YAAY,EACZ,QAAQ,EACR,iBAAiB,EACjB,uBAAuB,EACvB,aAAa,EACb,qBAAqB,EAErB,iBAAiB,EACjB,cAAc,EACd,WAAW,EACX,uBAAuB,EACvB,aAAa,EACb,aAAa,EACb,cAAc,EACd,WAAW,GACZ,MAAM,YAAY,CAAC;AAGpB,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,GACjB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EACL,iBAAiB,EACjB,sBAAsB,EACtB,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,EAClB,aAAa,EACb,0BAA0B,EAC1B,YAAY,EACZ,mBAAmB,GACpB,MAAM,cAAc,CAAC;AAGtB,OAAO,EACL,aAAa,EACb,2BAA2B,EAC3B,kBAAkB,EAClB,kBAAkB,EAClB,kBAAkB,EAClB,eAAe,EACf,gBAAgB,EAChB,iBAAiB,EACjB,wBAAwB,EACxB,yBAAyB,EACzB,kBAAkB,GACnB,MAAM,mBAAmB,CAAC;AAG3B,OAAO,EAAE,YAAY,EAAE,KAAK,WAAW,EAAE,MAAM,kBAAkB,CAAC"}
1
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/core/agents/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAGH,YAAY,EAEV,YAAY,EACZ,cAAc,EAEd,mBAAmB,EACnB,gBAAgB,EAChB,mBAAmB,EACnB,gBAAgB,EAChB,UAAU,EAEV,gBAAgB,EAChB,wBAAwB,EACxB,iBAAiB,EACjB,oBAAoB,EACpB,YAAY,EACZ,QAAQ,EACR,iBAAiB,EACjB,uBAAuB,EACvB,aAAa,EACb,qBAAqB,EAErB,iBAAiB,EACjB,cAAc,EACd,WAAW,EACX,uBAAuB,EACvB,aAAa,EACb,aAAa,EACb,cAAc,EACd,WAAW,GACZ,MAAM,YAAY,CAAC;AAGpB,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,GACjB,MAAM,kBAAkB,CAAC;AAG1B,OAAO,EACL,iBAAiB,EACjB,sBAAsB,EACtB,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,EAClB,aAAa,EACb,0BAA0B,EAC1B,YAAY,EACZ,mBAAmB,GACpB,MAAM,cAAc,CAAC;AAGtB,OAAO,EACL,aAAa,EACb,2BAA2B,EAC3B,kBAAkB,EAClB,kBAAkB,EAClB,kBAAkB,EAClB,eAAe,EACf,gBAAgB,EAChB,iBAAiB,EACjB,wBAAwB,EACxB,yBAAyB,EACzB,kBAAkB,GACnB,MAAM,mBAAmB,CAAC;AAG3B,OAAO,EAAE,YAAY,EAAE,KAAK,WAAW,EAAE,MAAM,kBAAkB,CAAC;AAGlE,cAAc,sBAAsB,CAAC"}
@@ -1,10 +1,11 @@
1
1
  /**
2
- * Agent Coordination Module - v7.2.0
2
+ * Agent Coordination Module - v8.0.0
3
3
  *
4
4
  * Multi-agent coordination features:
5
5
  * - Action space masking (tool availability by phase/role)
6
6
  * - Parallel agent orchestration (task coordination)
7
7
  * - Agent context budgets (token management)
8
+ * - Recursive agents (v8.0.0): self-spawning agents with depth control
8
9
  */
9
10
  // Action masking
10
11
  export { TOOL_DEFINITIONS, PHASE_RESTRICTIONS, ROLE_PERMISSIONS, checkToolAvailability, generateActionMask, getRecommendedTools, createDefaultContext, formatActionMask, } from './action-mask.js';
@@ -14,4 +15,6 @@ export { ROLE_BASE_BUDGETS, COMPLEXITY_MULTIPLIERS, HANDOFF_RESERVE_PERCENT, MIN
14
15
  export { getChildTasks, identifyParallelizableTasks, createParallelPlan, createSpawnContext, formatSpawnContext, detectConflicts, resolveConflicts, synthesizeResults, prepareParallelExecution, completeParallelExecution, formatParallelPlan, } from './orchestrator.js';
15
16
  // Re-export initializer for completeness
16
17
  export { runBootstrap } from './initializer.js';
18
+ // v8.0.0: Recursive agents
19
+ export * from './recursive/index.js';
17
20
  //# sourceMappingURL=index.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/core/agents/index.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAmCH,iBAAiB;AACjB,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,GACjB,MAAM,kBAAkB,CAAC;AAE1B,UAAU;AACV,OAAO,EACL,iBAAiB,EACjB,sBAAsB,EACtB,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,EAClB,aAAa,EACb,0BAA0B,EAC1B,YAAY,EACZ,mBAAmB,GACpB,MAAM,cAAc,CAAC;AAEtB,gBAAgB;AAChB,OAAO,EACL,aAAa,EACb,2BAA2B,EAC3B,kBAAkB,EAClB,kBAAkB,EAClB,kBAAkB,EAClB,eAAe,EACf,gBAAgB,EAChB,iBAAiB,EACjB,wBAAwB,EACxB,yBAAyB,EACzB,kBAAkB,GACnB,MAAM,mBAAmB,CAAC;AAE3B,yCAAyC;AACzC,OAAO,EAAE,YAAY,EAAoB,MAAM,kBAAkB,CAAC"}
1
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/core/agents/index.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAmCH,iBAAiB;AACjB,OAAO,EACL,gBAAgB,EAChB,kBAAkB,EAClB,gBAAgB,EAChB,qBAAqB,EACrB,kBAAkB,EAClB,mBAAmB,EACnB,oBAAoB,EACpB,gBAAgB,GACjB,MAAM,kBAAkB,CAAC;AAE1B,UAAU;AACV,OAAO,EACL,iBAAiB,EACjB,sBAAsB,EACtB,uBAAuB,EACvB,iBAAiB,EACjB,kBAAkB,EAClB,aAAa,EACb,0BAA0B,EAC1B,YAAY,EACZ,mBAAmB,GACpB,MAAM,cAAc,CAAC;AAEtB,gBAAgB;AAChB,OAAO,EACL,aAAa,EACb,2BAA2B,EAC3B,kBAAkB,EAClB,kBAAkB,EAClB,kBAAkB,EAClB,eAAe,EACf,gBAAgB,EAChB,iBAAiB,EACjB,wBAAwB,EACxB,yBAAyB,EACzB,kBAAkB,GACnB,MAAM,mBAAmB,CAAC;AAE3B,yCAAyC;AACzC,OAAO,EAAE,YAAY,EAAoB,MAAM,kBAAkB,CAAC;AAElE,2BAA2B;AAC3B,cAAc,sBAAsB,CAAC"}
@@ -0,0 +1,19 @@
1
+ /**
2
+ * Quality-Weighted Aggregator - v8.0.0
3
+ *
4
+ * Aggregates results from child agents:
5
+ * - Quality-weighted merging of findings
6
+ * - Deduplication and similarity-based merging
7
+ * - Multi-level aggregation (leaf → branch → root)
8
+ * - Configurable aggregation strategies
9
+ */
10
+ import type { RecursiveCheckpoint, AggregationConfig, AggregationResult } from './types.js';
11
+ /**
12
+ * Aggregate results from multiple child checkpoints
13
+ */
14
+ export declare function aggregateCheckpoints(checkpoints: RecursiveCheckpoint[], config?: AggregationConfig): AggregationResult;
15
+ /**
16
+ * Aggregate results from a single level of the tree
17
+ */
18
+ export declare function aggregateLevel(childResults: AggregationResult[], config?: AggregationConfig): AggregationResult;
19
+ //# sourceMappingURL=aggregator.d.ts.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"aggregator.d.ts","sourceRoot":"","sources":["../../../../src/core/agents/recursive/aggregator.ts"],"names":[],"mappings":"AAAA;;;;;;;;GAQG;AAIH,OAAO,KAAK,EACV,mBAAmB,EACnB,iBAAiB,EACjB,iBAAiB,EAIlB,MAAM,YAAY,CAAC;AAyCpB;;GAEG;AACH,wBAAgB,oBAAoB,CAClC,WAAW,EAAE,mBAAmB,EAAE,EAClC,MAAM,GAAE,iBAA8C,GACrD,iBAAiB,CA+EnB;AAED;;GAEG;AACH,wBAAgB,cAAc,CAC5B,YAAY,EAAE,iBAAiB,EAAE,EACjC,MAAM,GAAE,iBAA8C,GACrD,iBAAiB,CA+DnB"}
@@ -0,0 +1,556 @@
1
+ /**
2
+ * Quality-Weighted Aggregator - v8.0.0
3
+ *
4
+ * Aggregates results from child agents:
5
+ * - Quality-weighted merging of findings
6
+ * - Deduplication and similarity-based merging
7
+ * - Multi-level aggregation (leaf → branch → root)
8
+ * - Configurable aggregation strategies
9
+ */
10
+ import { DEFAULT_AGGREGATION_CONFIG } from './types.js';
11
+ // =============================================================================
12
+ // Constants
13
+ // =============================================================================
14
+ /**
15
+ * Quality weight for aggregation scoring
16
+ */
17
+ const QUALITY_WEIGHT = 0.6;
18
+ /**
19
+ * Priority weight for aggregation scoring
20
+ */
21
+ const PRIORITY_WEIGHT = 0.4;
22
+ /**
23
+ * Finding type importance for deduplication
24
+ */
25
+ const FINDING_TYPE_WEIGHTS = {
26
+ issue: 1.0, // Issues are most important
27
+ pattern: 0.9, // Patterns are very important
28
+ dependency: 0.8, // Dependencies are important
29
+ opportunity: 0.7, // Opportunities are nice to have
30
+ note: 0.5, // Notes are lowest priority
31
+ };
32
+ /**
33
+ * Confidence multipliers
34
+ */
35
+ const CONFIDENCE_MULTIPLIERS = {
36
+ high: 1.0,
37
+ medium: 0.7,
38
+ low: 0.4,
39
+ };
40
+ // =============================================================================
41
+ // Core Functions
42
+ // =============================================================================
43
+ /**
44
+ * Aggregate results from multiple child checkpoints
45
+ */
46
+ export function aggregateCheckpoints(checkpoints, config = DEFAULT_AGGREGATION_CONFIG) {
47
+ if (checkpoints.length === 0) {
48
+ return createEmptyResult('No checkpoints to aggregate');
49
+ }
50
+ try {
51
+ // Extract and weight all findings
52
+ const weightedFindings = extractWeightedFindings(checkpoints);
53
+ // Apply aggregation strategy
54
+ let aggregatedFindings;
55
+ switch (config.strategy) {
56
+ case 'quality-weighted':
57
+ aggregatedFindings = aggregateQualityWeighted(weightedFindings, config);
58
+ break;
59
+ case 'union-all':
60
+ aggregatedFindings = aggregateUnionAll(weightedFindings, config);
61
+ break;
62
+ case 'priority-based':
63
+ aggregatedFindings = aggregatePriorityBased(weightedFindings, config);
64
+ break;
65
+ case 'consensus':
66
+ aggregatedFindings = aggregateConsensus(weightedFindings, config);
67
+ break;
68
+ default:
69
+ aggregatedFindings = aggregateQualityWeighted(weightedFindings, config);
70
+ }
71
+ // Apply deduplication if enabled
72
+ const { findings: dedupedFindings, duplicatesRemoved } = config.deduplicate
73
+ ? deduplicateFindings(aggregatedFindings)
74
+ : { findings: aggregatedFindings, duplicatesRemoved: 0 };
75
+ // Apply similarity merging if enabled
76
+ const { findings: mergedFindings, mergeCount } = config.mergeSimilar
77
+ ? mergeSimilarFindings(dedupedFindings, config.similarityThreshold)
78
+ : { findings: dedupedFindings, mergeCount: 0 };
79
+ // Filter by minimum quality
80
+ const qualityFiltered = filterByQuality(mergedFindings, config.minQuality, checkpoints);
81
+ // Limit to max findings
82
+ const finalFindings = qualityFiltered.slice(0, config.maxFindings);
83
+ // Aggregate files
84
+ const aggregatedFiles = aggregateFiles(checkpoints, config);
85
+ // Combine suggestions
86
+ const suggestions = aggregateSuggestions(checkpoints);
87
+ // Calculate combined quality
88
+ const quality = calculateAggregatedQuality(checkpoints, finalFindings);
89
+ // Generate summary
90
+ const summary = generateAggregatedSummary(checkpoints, finalFindings);
91
+ // Calculate statistics
92
+ const stats = {
93
+ sourceCount: checkpoints.length,
94
+ totalFindingsInput: weightedFindings.length,
95
+ findingsOutput: finalFindings.length,
96
+ duplicatesRemoved,
97
+ findingsMerged: mergeCount,
98
+ lowQualityFiltered: mergedFindings.length - qualityFiltered.length,
99
+ qualityImprovement: calculateQualityImprovement(checkpoints, quality),
100
+ };
101
+ return {
102
+ success: true,
103
+ findings: finalFindings,
104
+ files: aggregatedFiles,
105
+ suggestions,
106
+ quality,
107
+ stats,
108
+ summary,
109
+ };
110
+ }
111
+ catch (error) {
112
+ return createEmptyResult(`Aggregation failed: ${error instanceof Error ? error.message : 'Unknown error'}`);
113
+ }
114
+ }
115
+ /**
116
+ * Aggregate results from a single level of the tree
117
+ */
118
+ export function aggregateLevel(childResults, config = DEFAULT_AGGREGATION_CONFIG) {
119
+ if (childResults.length === 0) {
120
+ return createEmptyResult('No child results to aggregate');
121
+ }
122
+ // Combine findings from all child results
123
+ const allFindings = [];
124
+ for (const result of childResults) {
125
+ for (const finding of result.findings) {
126
+ allFindings.push({
127
+ finding,
128
+ sourceAgentId: 'child-result',
129
+ qualityWeight: result.quality.score,
130
+ priorityWeight: 0.5,
131
+ combinedWeight: result.quality.score * 0.7 + 0.3,
132
+ });
133
+ }
134
+ }
135
+ // Apply quality-weighted aggregation
136
+ const aggregatedFindings = aggregateQualityWeighted(allFindings, config);
137
+ // Combine files
138
+ const allFiles = [];
139
+ for (const result of childResults) {
140
+ allFiles.push(...result.files);
141
+ }
142
+ const dedupedFiles = deduplicateFiles(allFiles).slice(0, config.maxFiles);
143
+ // Combine suggestions
144
+ const allSuggestions = [];
145
+ for (const result of childResults) {
146
+ allSuggestions.push(...result.suggestions);
147
+ }
148
+ const uniqueSuggestions = deduplicateSuggestions(allSuggestions);
149
+ // Calculate combined quality
150
+ const avgQuality = childResults.reduce((sum, r) => sum + r.quality.score, 0) / childResults.length;
151
+ const quality = createQualityScore(avgQuality);
152
+ // Generate summary
153
+ const summary = `Aggregated from ${childResults.length} child results: ${aggregatedFindings.length} findings`;
154
+ // Calculate stats
155
+ const stats = {
156
+ sourceCount: childResults.length,
157
+ totalFindingsInput: allFindings.length,
158
+ findingsOutput: aggregatedFindings.length,
159
+ duplicatesRemoved: allFindings.length - aggregatedFindings.length,
160
+ findingsMerged: 0,
161
+ lowQualityFiltered: 0,
162
+ qualityImprovement: 0,
163
+ };
164
+ return {
165
+ success: true,
166
+ findings: aggregatedFindings,
167
+ files: dedupedFiles,
168
+ suggestions: uniqueSuggestions,
169
+ quality,
170
+ stats,
171
+ summary,
172
+ };
173
+ }
174
+ // =============================================================================
175
+ // Aggregation Strategies
176
+ // =============================================================================
177
+ /**
178
+ * Quality-weighted aggregation (default)
179
+ */
180
+ function aggregateQualityWeighted(findings, config) {
181
+ // Sort by combined weight
182
+ const sorted = [...findings].sort((a, b) => b.combinedWeight - a.combinedWeight);
183
+ // Take top findings
184
+ return sorted.slice(0, config.maxFindings).map(wf => wf.finding);
185
+ }
186
+ /**
187
+ * Union all findings (with deduplication)
188
+ */
189
+ function aggregateUnionAll(findings, config) {
190
+ return findings.slice(0, config.maxFindings * 2).map(wf => wf.finding);
191
+ }
192
+ /**
193
+ * Priority-based aggregation
194
+ */
195
+ function aggregatePriorityBased(findings, config) {
196
+ // Sort by priority weight then quality weight
197
+ const sorted = [...findings].sort((a, b) => {
198
+ if (b.priorityWeight !== a.priorityWeight) {
199
+ return b.priorityWeight - a.priorityWeight;
200
+ }
201
+ return b.qualityWeight - a.qualityWeight;
202
+ });
203
+ return sorted.slice(0, config.maxFindings).map(wf => wf.finding);
204
+ }
205
+ /**
206
+ * Consensus aggregation (only findings from multiple sources)
207
+ */
208
+ function aggregateConsensus(findings, config) {
209
+ // Group similar findings
210
+ const groups = groupSimilarFindings(findings, config.similarityThreshold);
211
+ // Only keep findings with multiple sources
212
+ const consensusGroups = groups.filter(g => g.length >= 2);
213
+ // Take best from each group
214
+ const results = [];
215
+ for (const group of consensusGroups) {
216
+ // Sort by combined weight
217
+ group.sort((a, b) => b.combinedWeight - a.combinedWeight);
218
+ results.push(group[0].finding);
219
+ }
220
+ return results.slice(0, config.maxFindings);
221
+ }
222
+ // =============================================================================
223
+ // Finding Processing
224
+ // =============================================================================
225
+ /**
226
+ * Extract weighted findings from checkpoints
227
+ */
228
+ function extractWeightedFindings(checkpoints) {
229
+ const weighted = [];
230
+ for (const cp of checkpoints) {
231
+ const qualityWeight = cp.quality.score;
232
+ for (const finding of cp.checkpoint.findings) {
233
+ // Calculate finding-specific weight
234
+ const typeWeight = FINDING_TYPE_WEIGHTS[finding.type];
235
+ const confWeight = CONFIDENCE_MULTIPLIERS[finding.confidence];
236
+ const findingWeight = typeWeight * confWeight;
237
+ // Calculate priority based on agent context
238
+ const priorityWeight = findingWeight;
239
+ // Combined weight
240
+ const combinedWeight = qualityWeight * QUALITY_WEIGHT +
241
+ priorityWeight * PRIORITY_WEIGHT;
242
+ weighted.push({
243
+ finding,
244
+ sourceAgentId: cp.agentId,
245
+ qualityWeight,
246
+ priorityWeight,
247
+ combinedWeight,
248
+ });
249
+ }
250
+ }
251
+ return weighted;
252
+ }
253
+ /**
254
+ * Deduplicate findings based on description similarity
255
+ */
256
+ function deduplicateFindings(findings) {
257
+ const seen = new Set();
258
+ const deduplicated = [];
259
+ for (const finding of findings) {
260
+ // Normalize for comparison
261
+ const normalized = normalizeFindingForComparison(finding);
262
+ if (!seen.has(normalized)) {
263
+ seen.add(normalized);
264
+ deduplicated.push(finding);
265
+ }
266
+ }
267
+ return {
268
+ findings: deduplicated,
269
+ duplicatesRemoved: findings.length - deduplicated.length,
270
+ };
271
+ }
272
+ /**
273
+ * Merge similar findings
274
+ */
275
+ function mergeSimilarFindings(findings, threshold) {
276
+ if (findings.length <= 1) {
277
+ return { findings, mergeCount: 0 };
278
+ }
279
+ const merged = [];
280
+ const used = new Set();
281
+ let mergeCount = 0;
282
+ for (let i = 0; i < findings.length; i++) {
283
+ if (used.has(i))
284
+ continue;
285
+ const current = findings[i];
286
+ const similar = [current];
287
+ used.add(i);
288
+ // Find similar findings
289
+ for (let j = i + 1; j < findings.length; j++) {
290
+ if (used.has(j))
291
+ continue;
292
+ const similarity = calculateFindingSimilarity(current, findings[j]);
293
+ if (similarity >= threshold) {
294
+ similar.push(findings[j]);
295
+ used.add(j);
296
+ mergeCount++;
297
+ }
298
+ }
299
+ // Merge similar findings
300
+ if (similar.length > 1) {
301
+ merged.push(mergeFindingGroup(similar));
302
+ }
303
+ else {
304
+ merged.push(current);
305
+ }
306
+ }
307
+ return { findings: merged, mergeCount };
308
+ }
309
+ /**
310
+ * Filter findings by quality threshold
311
+ */
312
+ function filterByQuality(findings, minQuality, checkpoints) {
313
+ // Build a map of finding to source checkpoint quality
314
+ const findingQualities = new Map();
315
+ for (const cp of checkpoints) {
316
+ for (const finding of cp.checkpoint.findings) {
317
+ findingQualities.set(finding, cp.quality.score);
318
+ }
319
+ }
320
+ return findings.filter(f => {
321
+ const quality = findingQualities.get(f);
322
+ return quality === undefined || quality >= minQuality;
323
+ });
324
+ }
325
+ // =============================================================================
326
+ // File Processing
327
+ // =============================================================================
328
+ /**
329
+ * Aggregate files from checkpoints
330
+ */
331
+ function aggregateFiles(checkpoints, config) {
332
+ const allFiles = [];
333
+ for (const cp of checkpoints) {
334
+ allFiles.push(...cp.checkpoint.nextContext.relevantFiles);
335
+ }
336
+ // Deduplicate and take top files
337
+ return deduplicateFiles(allFiles).slice(0, config.maxFiles);
338
+ }
339
+ /**
340
+ * Deduplicate files by path
341
+ */
342
+ function deduplicateFiles(files) {
343
+ const byPath = new Map();
344
+ for (const file of files) {
345
+ if (!byPath.has(file.path)) {
346
+ byPath.set(file.path, file);
347
+ }
348
+ else {
349
+ // Keep the one with longer summary (more info)
350
+ const existing = byPath.get(file.path);
351
+ if (file.summary.length > existing.summary.length) {
352
+ byPath.set(file.path, file);
353
+ }
354
+ }
355
+ }
356
+ return Array.from(byPath.values());
357
+ }
358
+ // =============================================================================
359
+ // Suggestion Processing
360
+ // =============================================================================
361
+ /**
362
+ * Aggregate suggestions from checkpoints
363
+ */
364
+ function aggregateSuggestions(checkpoints) {
365
+ const allSuggestions = [];
366
+ for (const cp of checkpoints) {
367
+ allSuggestions.push(...cp.checkpoint.nextContext.suggestions);
368
+ }
369
+ return deduplicateSuggestions(allSuggestions);
370
+ }
371
+ /**
372
+ * Deduplicate suggestions
373
+ */
374
+ function deduplicateSuggestions(suggestions) {
375
+ const seen = new Set();
376
+ const unique = [];
377
+ for (const suggestion of suggestions) {
378
+ const normalized = suggestion.toLowerCase().trim();
379
+ if (!seen.has(normalized)) {
380
+ seen.add(normalized);
381
+ unique.push(suggestion);
382
+ }
383
+ }
384
+ return unique.slice(0, 5);
385
+ }
386
+ // =============================================================================
387
+ // Quality Calculation
388
+ // =============================================================================
389
+ /**
390
+ * Calculate aggregated quality score
391
+ */
392
+ function calculateAggregatedQuality(checkpoints, finalFindings) {
393
+ if (checkpoints.length === 0) {
394
+ return createQualityScore(0);
395
+ }
396
+ // Average quality from sources
397
+ const avgSourceQuality = checkpoints.reduce((sum, cp) => sum + cp.quality.score, 0) / checkpoints.length;
398
+ // Bonus for finding coverage
399
+ const totalSourceFindings = checkpoints.reduce((sum, cp) => sum + cp.checkpoint.findings.length, 0);
400
+ const coverageRatio = totalSourceFindings > 0
401
+ ? finalFindings.length / totalSourceFindings
402
+ : 0;
403
+ const coverageBonus = Math.min(0.1, coverageRatio * 0.2);
404
+ // Combined score
405
+ const finalScore = Math.min(1, avgSourceQuality + coverageBonus);
406
+ return createQualityScore(finalScore);
407
+ }
408
+ /**
409
+ * Calculate quality improvement from aggregation
410
+ */
411
+ function calculateQualityImprovement(checkpoints, aggregatedQuality) {
412
+ if (checkpoints.length === 0)
413
+ return 0;
414
+ const avgSourceQuality = checkpoints.reduce((sum, cp) => sum + cp.quality.score, 0) / checkpoints.length;
415
+ return aggregatedQuality.score - avgSourceQuality;
416
+ }
417
+ /**
418
+ * Create a quality score object
419
+ */
420
+ function createQualityScore(score) {
421
+ return {
422
+ score: Math.max(0, Math.min(1, score)),
423
+ dimensions: {
424
+ relevance: score,
425
+ completeness: score * 0.9,
426
+ consistency: score * 0.95,
427
+ novelty: score * 0.5,
428
+ },
429
+ trend: 'stable',
430
+ confidence: Math.min(1, score + 0.2),
431
+ action: score >= 0.6 ? 'continue' : score >= 0.3 ? 'refine' : 'stop',
432
+ };
433
+ }
434
+ // =============================================================================
435
+ // Helper Functions
436
+ // =============================================================================
437
+ /**
438
+ * Normalize finding for comparison
439
+ */
440
+ function normalizeFindingForComparison(finding) {
441
+ return `${finding.type}:${finding.description.toLowerCase().trim()}:${finding.location || ''}`;
442
+ }
443
+ /**
444
+ * Calculate similarity between two findings (0-1)
445
+ */
446
+ function calculateFindingSimilarity(a, b) {
447
+ // Same type is required for similarity
448
+ if (a.type !== b.type)
449
+ return 0;
450
+ // Same location is a strong signal
451
+ if (a.location && b.location && a.location === b.location) {
452
+ return 0.8;
453
+ }
454
+ // Compare descriptions using token overlap
455
+ const tokensA = new Set(a.description.toLowerCase().split(/\s+/));
456
+ const tokensB = new Set(b.description.toLowerCase().split(/\s+/));
457
+ const intersection = new Set([...tokensA].filter(t => tokensB.has(t)));
458
+ const union = new Set([...tokensA, ...tokensB]);
459
+ const jaccardSimilarity = intersection.size / union.size;
460
+ return jaccardSimilarity;
461
+ }
462
+ /**
463
+ * Group similar findings together
464
+ */
465
+ function groupSimilarFindings(findings, threshold) {
466
+ const groups = [];
467
+ const used = new Set();
468
+ for (let i = 0; i < findings.length; i++) {
469
+ if (used.has(i))
470
+ continue;
471
+ const group = [findings[i]];
472
+ used.add(i);
473
+ for (let j = i + 1; j < findings.length; j++) {
474
+ if (used.has(j))
475
+ continue;
476
+ const similarity = calculateFindingSimilarity(findings[i].finding, findings[j].finding);
477
+ if (similarity >= threshold) {
478
+ group.push(findings[j]);
479
+ used.add(j);
480
+ }
481
+ }
482
+ groups.push(group);
483
+ }
484
+ return groups;
485
+ }
486
+ /**
487
+ * Merge a group of similar findings into one
488
+ */
489
+ function mergeFindingGroup(findings) {
490
+ if (findings.length === 0) {
491
+ throw new Error('Cannot merge empty group');
492
+ }
493
+ if (findings.length === 1) {
494
+ return findings[0];
495
+ }
496
+ // Use the highest confidence finding as base
497
+ const sorted = [...findings].sort((a, b) => {
498
+ const confA = CONFIDENCE_MULTIPLIERS[a.confidence];
499
+ const confB = CONFIDENCE_MULTIPLIERS[b.confidence];
500
+ return confB - confA;
501
+ });
502
+ const base = sorted[0];
503
+ // Merge locations if different
504
+ const locations = new Set(findings.map(f => f.location).filter(Boolean));
505
+ const mergedLocation = locations.size > 1
506
+ ? Array.from(locations).join(', ')
507
+ : base.location;
508
+ return {
509
+ type: base.type,
510
+ description: base.description,
511
+ location: mergedLocation,
512
+ confidence: base.confidence,
513
+ };
514
+ }
515
+ /**
516
+ * Generate summary for aggregated results
517
+ */
518
+ function generateAggregatedSummary(checkpoints, findings) {
519
+ const sourceCount = checkpoints.length;
520
+ const findingCount = findings.length;
521
+ const avgQuality = checkpoints.reduce((sum, cp) => sum + cp.quality.score, 0) / Math.max(1, checkpoints.length);
522
+ const issueCount = findings.filter(f => f.type === 'issue').length;
523
+ const patternCount = findings.filter(f => f.type === 'pattern').length;
524
+ let summary = `Aggregated ${findingCount} findings from ${sourceCount} agents (${(avgQuality * 100).toFixed(0)}% avg quality)`;
525
+ if (issueCount > 0) {
526
+ summary += `. Found ${issueCount} issue${issueCount > 1 ? 's' : ''}`;
527
+ }
528
+ if (patternCount > 0) {
529
+ summary += `. Identified ${patternCount} pattern${patternCount > 1 ? 's' : ''}`;
530
+ }
531
+ return summary;
532
+ }
533
+ /**
534
+ * Create empty result with error message
535
+ */
536
+ function createEmptyResult(error) {
537
+ return {
538
+ success: false,
539
+ findings: [],
540
+ files: [],
541
+ suggestions: [],
542
+ quality: createQualityScore(0),
543
+ stats: {
544
+ sourceCount: 0,
545
+ totalFindingsInput: 0,
546
+ findingsOutput: 0,
547
+ duplicatesRemoved: 0,
548
+ findingsMerged: 0,
549
+ lowQualityFiltered: 0,
550
+ qualityImprovement: 0,
551
+ },
552
+ summary: '',
553
+ error,
554
+ };
555
+ }
556
+ //# sourceMappingURL=aggregator.js.map