code-graph-context 2.4.5 → 2.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +466 -620
- package/dist/mcp/constants.js +274 -0
- package/dist/mcp/handlers/swarm-worker.handler.js +251 -0
- package/dist/mcp/handlers/task-decomposition.handler.js +294 -0
- package/dist/mcp/tools/index.js +13 -1
- package/dist/mcp/tools/swarm-claim-task.tool.js +331 -0
- package/dist/mcp/tools/swarm-complete-task.tool.js +421 -0
- package/dist/mcp/tools/swarm-constants.js +113 -0
- package/dist/mcp/tools/swarm-get-tasks.tool.js +419 -0
- package/dist/mcp/tools/swarm-orchestrate.tool.js +389 -0
- package/dist/mcp/tools/swarm-post-task.tool.js +220 -0
- package/package.json +1 -1
|
@@ -0,0 +1,389 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Swarm Orchestrate Tool
|
|
3
|
+
* Orchestrates multiple agents to tackle complex, multi-file code tasks in parallel
|
|
4
|
+
*
|
|
5
|
+
* This is the main entry point for swarm-based task execution. It:
|
|
6
|
+
* 1. Analyzes the task using semantic search and impact analysis
|
|
7
|
+
* 2. Decomposes the task into atomic, dependency-ordered SwarmTasks
|
|
8
|
+
* 3. Creates tasks on the blackboard for worker agents
|
|
9
|
+
* 4. Returns execution plan for agents to claim and execute
|
|
10
|
+
*/
|
|
11
|
+
import { z } from 'zod';
|
|
12
|
+
import { EmbeddingsService } from '../../core/embeddings/embeddings.service.js';
|
|
13
|
+
import { Neo4jService } from '../../storage/neo4j/neo4j.service.js';
|
|
14
|
+
import { TOOL_NAMES, TOOL_METADATA } from '../constants.js';
|
|
15
|
+
import { TaskDecompositionHandler, } from '../handlers/task-decomposition.handler.js';
|
|
16
|
+
import { createErrorResponse, createSuccessResponse, resolveProjectIdOrError, debugLog } from '../utils.js';
|
|
17
|
+
import { TASK_PRIORITIES, generateSwarmId, ORCHESTRATOR_CONFIG, } from './swarm-constants.js';
|
|
18
|
+
/**
 * Query to search for nodes matching the task description.
 *
 * Performs a vector-index lookup against the 'code_embeddings' index with the
 * task's embedding, then filters hits to the current project and a minimum
 * similarity score. Returns identity/type/location fields plus a 500-character
 * preview of each node's source, ordered best-match first.
 *
 * NOTE(review): the projectId/score WHERE filter is applied after the index
 * has already yielded its top-$limit hits, so in a multi-project database
 * fewer than $limit rows may survive — confirm this is acceptable.
 */
const SEMANTIC_SEARCH_QUERY = `
CALL db.index.vector.queryNodes('code_embeddings', toInteger($limit), $embedding)
YIELD node, score
WHERE node.projectId = $projectId
  AND score >= $minSimilarity
RETURN node.id AS id,
       node.name AS name,
       node.coreType AS coreType,
       node.semanticType AS semanticType,
       node.filePath AS filePath,
       substring(node.sourceCode, 0, 500) AS sourceCode,
       node.startLine AS startLine,
       node.endLine AS endLine,
       score
ORDER BY score DESC
LIMIT toInteger($limit)
`;
|
|
38
|
+
/**
 * Query to get impact analysis for a node.
 *
 * Collects all same-project nodes with a relationship *into* the target
 * (excluding the swarm's own Pheromone and SwarmTask bookkeeping nodes),
 * then returns the dependent count, the affected file paths, and a risk
 * bucket derived from the count: CRITICAL (>=20), HIGH (>=10),
 * MEDIUM (>=5), otherwise LOW.
 */
const IMPACT_QUERY = `
MATCH (target)
WHERE target.id = $nodeId AND target.projectId = $projectId
OPTIONAL MATCH (dependent)-[r]->(target)
WHERE dependent.projectId = $projectId
  AND NOT dependent:Pheromone
  AND NOT dependent:SwarmTask
WITH target, collect(DISTINCT {
  nodeId: dependent.id,
  filePath: dependent.filePath,
  relType: type(r)
}) AS dependents
RETURN target.id AS nodeId,
       size(dependents) AS dependentCount,
       [d IN dependents | d.filePath] AS affectedFiles,
       CASE
         WHEN size(dependents) >= 20 THEN 'CRITICAL'
         WHEN size(dependents) >= 10 THEN 'HIGH'
         WHEN size(dependents) >= 5 THEN 'MEDIUM'
         ELSE 'LOW'
       END AS riskLevel
`;
|
|
63
|
+
/**
 * Query to create a pheromone marker on a node.
 *
 * MERGE is keyed on (nodeId, agentId, type, projectId), so a repeated call
 * from the same agent refreshes intensity/updatedAt/data on the existing
 * marker instead of creating a duplicate; swarmId/createdAt are only set on
 * first creation. The marker is attached to its target code node via a
 * MARKS relationship.
 */
const CREATE_PHEROMONE_QUERY = `
MATCH (target)
WHERE target.id = $nodeId AND target.projectId = $projectId
MERGE (p:Pheromone {
  nodeId: $nodeId,
  agentId: $agentId,
  type: $type,
  projectId: $projectId
})
ON CREATE SET
  p.swarmId = $swarmId,
  p.intensity = $intensity,
  p.createdAt = timestamp(),
  p.updatedAt = timestamp(),
  p.data = $data
ON MATCH SET
  p.intensity = $intensity,
  p.updatedAt = timestamp(),
  p.data = $data
MERGE (p)-[:MARKS]->(target)
RETURN p.nodeId AS nodeId
`;
|
|
88
|
+
/**
 * Query to create a SwarmTask node (orchestrator variant).
 *
 * Creates the blackboard task only — no TARGETS/DEPENDS_ON relationships are
 * created here. The status is supplied by the caller ('blocked' when the task
 * has dependencies, otherwise 'available'); createdAt/updatedAt are stamped
 * server-side via timestamp(), and metadata arrives pre-serialized as a JSON
 * string.
 */
const CREATE_TASK_QUERY = `
CREATE (t:SwarmTask {
  id: $taskId,
  projectId: $projectId,
  swarmId: $swarmId,
  title: $title,
  description: $description,
  type: $type,
  priority: $priority,
  priorityScore: $priorityScore,
  status: $status,
  targetNodeIds: $targetNodeIds,
  targetFilePaths: $targetFilePaths,
  dependencies: $dependencies,
  createdBy: $createdBy,
  createdAt: timestamp(),
  updatedAt: timestamp(),
  metadata: $metadata
})
RETURN t.id AS id
`;
|
|
112
|
+
/**
 * Register the swarm_orchestrate MCP tool on the given server.
 *
 * The handler is the swarm's entry point: it resolves the project, finds the
 * code affected by the natural-language task (semantic search), runs a
 * per-node impact analysis, decomposes the work into atomic SwarmTasks, and
 * (unless dryRun) writes those tasks plus 'proposal' pheromones to the
 * Neo4j blackboard. It returns the execution plan and copy-pasteable worker
 * instructions as a JSON success response.
 *
 * @param {object} server - MCP server exposing registerTool().
 */
export const createSwarmOrchestrateTool = (server) => {
    // Shared across invocations; a fresh Neo4jService is created per call instead.
    const embeddingsService = new EmbeddingsService();
    const taskDecomposer = new TaskDecompositionHandler();
    server.registerTool(TOOL_NAMES.swarmOrchestrate, {
        title: TOOL_METADATA[TOOL_NAMES.swarmOrchestrate].title,
        description: TOOL_METADATA[TOOL_NAMES.swarmOrchestrate].description,
        inputSchema: {
            projectId: z.string().describe('Project ID, name, or path'),
            task: z.string().min(10).describe('Natural language description of the task to execute'),
            maxAgents: z
                .number()
                .int()
                .min(1)
                .max(ORCHESTRATOR_CONFIG.maxAgentsLimit)
                .optional()
                .default(ORCHESTRATOR_CONFIG.defaultMaxAgents)
                .describe(`Maximum concurrent worker agents (default: ${ORCHESTRATOR_CONFIG.defaultMaxAgents})`),
            dryRun: z
                .boolean()
                .optional()
                .default(false)
                .describe('If true, only plan without creating tasks (default: false)'),
            priority: z
                .enum(Object.keys(TASK_PRIORITIES))
                .optional()
                .default('normal')
                .describe('Overall priority level for tasks'),
            minSimilarity: z
                .number()
                .min(0.5)
                .max(1.0)
                .optional()
                .default(0.65)
                .describe('Minimum similarity score for semantic search (default: 0.65)'),
            maxNodes: z
                .number()
                .int()
                .min(1)
                .max(100)
                .optional()
                .default(50)
                .describe('Maximum nodes to consider from search (default: 50)'),
        },
    }, async ({ projectId, task, maxAgents = ORCHESTRATOR_CONFIG.defaultMaxAgents, dryRun = false, priority = 'normal', minSimilarity = 0.65, maxNodes = 50, }) => {
        // One DB connection per invocation; closed unconditionally in `finally`.
        const neo4jService = new Neo4jService();
        const swarmId = generateSwarmId();
        try {
            // Step 1: Resolve project ID
            const projectResult = await resolveProjectIdOrError(projectId, neo4jService);
            if (!projectResult.success) {
                // NOTE(review): `finally` below closes again on this path —
                // assumes Neo4jService.close() is idempotent; confirm.
                await neo4jService.close();
                return projectResult.error;
            }
            const resolvedProjectId = projectResult.projectId;
            await debugLog('Swarm orchestration started', {
                swarmId,
                projectId: resolvedProjectId,
                task,
                maxAgents,
                dryRun,
            });
            // Step 2: Semantic search to find affected nodes
            await debugLog('Searching for affected nodes', { task });
            let embedding;
            try {
                embedding = await embeddingsService.embedText(task);
            }
            catch (error) {
                // Embedding failure is reported as a tool error, not rethrown.
                return createErrorResponse(`Failed to generate embedding for task description: ${error}`);
            }
            const searchResults = await neo4jService.run(SEMANTIC_SEARCH_QUERY, {
                projectId: resolvedProjectId,
                embedding,
                minSimilarity,
                limit: Math.floor(maxNodes),
            });
            if (searchResults.length === 0) {
                return createErrorResponse(`No code found matching task: "${task}". Try rephrasing or use search_codebase to explore the codebase first.`);
            }
            // Normalize search rows; Neo4j integers may arrive as objects with
            // toNumber(), so line numbers are coerced to plain JS numbers.
            const affectedNodes = searchResults.map((r) => ({
                id: r.id,
                name: r.name,
                coreType: r.coreType,
                semanticType: r.semanticType,
                filePath: r.filePath,
                sourceCode: r.sourceCode,
                startLine: typeof r.startLine === 'object' ? r.startLine.toNumber() : r.startLine,
                endLine: typeof r.endLine === 'object' ? r.endLine.toNumber() : r.endLine,
            }));
            await debugLog('Found affected nodes', {
                count: affectedNodes.length,
                files: [...new Set(affectedNodes.map((n) => n.filePath))].length,
            });
            // Step 3: Run impact analysis on each node
            // (sequential awaits; results keyed by node id so order is irrelevant)
            await debugLog('Running impact analysis', { nodeCount: affectedNodes.length });
            const impactMap = new Map();
            for (const node of affectedNodes) {
                const impactResult = await neo4jService.run(IMPACT_QUERY, {
                    nodeId: node.id,
                    projectId: resolvedProjectId,
                });
                if (impactResult.length > 0) {
                    const impact = impactResult[0];
                    impactMap.set(node.id, {
                        nodeId: node.id,
                        riskLevel: impact.riskLevel,
                        directDependents: {
                            count: typeof impact.dependentCount === 'object'
                                ? impact.dependentCount.toNumber()
                                : impact.dependentCount,
                            // byType / transitiveDependents are placeholders here;
                            // the query only reports direct dependents.
                            byType: {},
                        },
                        transitiveDependents: { count: 0 },
                        affectedFiles: impact.affectedFiles ?? [],
                    });
                }
            }
            // Step 4: Decompose task into atomic tasks
            await debugLog('Decomposing task', { nodeCount: affectedNodes.length });
            const decomposition = await taskDecomposer.decomposeTask(task, affectedNodes, impactMap, priority);
            if (decomposition.tasks.length === 0) {
                return createErrorResponse('Task decomposition produced no actionable tasks');
            }
            await debugLog('Task decomposition complete', {
                totalTasks: decomposition.tasks.length,
                parallelizable: decomposition.summary.parallelizable,
            });
            // Step 5: Create SwarmTasks on the blackboard (unless dry run)
            if (!dryRun) {
                await debugLog('Creating SwarmTasks', { count: decomposition.tasks.length });
                for (const atomicTask of decomposition.tasks) {
                    // Determine initial status based on dependencies
                    const hasUnmetDeps = atomicTask.dependencies.length > 0;
                    const initialStatus = hasUnmetDeps ? 'blocked' : 'available';
                    await neo4jService.run(CREATE_TASK_QUERY, {
                        taskId: atomicTask.id,
                        projectId: resolvedProjectId,
                        swarmId,
                        title: atomicTask.title,
                        description: atomicTask.description,
                        type: atomicTask.type,
                        priority: atomicTask.priority,
                        priorityScore: atomicTask.priorityScore,
                        status: initialStatus,
                        targetNodeIds: atomicTask.nodeIds,
                        targetFilePaths: [atomicTask.filePath],
                        dependencies: atomicTask.dependencies,
                        createdBy: 'orchestrator',
                        // Stored as a JSON string property on the node.
                        metadata: JSON.stringify(atomicTask.metadata ?? {}),
                    });
                }
                await debugLog('SwarmTasks created', { swarmId, count: decomposition.tasks.length });
                // Step 5b: Leave "proposal" pheromones on all target nodes
                // This signals to other agents that work is planned for these nodes
                const uniqueNodeIds = new Set();
                for (const atomicTask of decomposition.tasks) {
                    for (const nodeId of atomicTask.nodeIds) {
                        uniqueNodeIds.add(nodeId);
                    }
                }
                await debugLog('Creating proposal pheromones', { nodeCount: uniqueNodeIds.size });
                for (const nodeId of uniqueNodeIds) {
                    await neo4jService.run(CREATE_PHEROMONE_QUERY, {
                        nodeId,
                        projectId: resolvedProjectId,
                        agentId: 'orchestrator',
                        swarmId,
                        type: 'proposal',
                        intensity: 1.0,
                        data: JSON.stringify({ task, swarmId }),
                    });
                }
                await debugLog('Proposal pheromones created', { swarmId, count: uniqueNodeIds.size });
            }
            // Step 6: Generate worker instructions
            const workerInstructions = generateWorkerInstructions(swarmId, resolvedProjectId, maxAgents, decomposition.tasks.length);
            // Step 7: Build result
            const result = {
                swarmId,
                status: dryRun ? 'planning' : 'ready',
                plan: {
                    totalTasks: decomposition.tasks.length,
                    parallelizable: decomposition.summary.parallelizable,
                    sequential: decomposition.summary.sequential,
                    estimatedComplexity: decomposition.summary.estimatedComplexity,
                    tasks: decomposition.tasks.map((t) => ({
                        id: t.id,
                        title: t.title,
                        type: t.type,
                        priority: t.priority,
                        // Recomputed from dependencies; mirrors initialStatus above.
                        status: t.dependencies.length > 0 ? 'blocked' : 'available',
                        dependencyCount: t.dependencies.length,
                        targetFiles: [t.filePath],
                    })),
                    dependencyGraph: buildDependencyGraph(decomposition),
                },
                workerInstructions,
                message: dryRun
                    ? `Dry run complete. ${decomposition.tasks.length} tasks planned but not created.`
                    : `Swarm ready! ${decomposition.tasks.length} tasks created. ${decomposition.summary.parallelizable} can run in parallel.`,
            };
            await debugLog('Swarm orchestration complete', {
                swarmId,
                status: result.status,
                totalTasks: result.plan.totalTasks,
            });
            return createSuccessResponse(JSON.stringify(result, null, 2));
        }
        catch (error) {
            await debugLog('Swarm orchestration error', { swarmId, error: String(error) });
            return createErrorResponse(error instanceof Error ? error : String(error));
        }
        finally {
            await neo4jService.close();
        }
    });
};
|
|
329
|
+
/**
 * Generate instructions for spawning worker agents.
 *
 * Returns a markdown snippet the calling agent can follow verbatim: a Task()
 * invocation template for spawning workers, a monitoring call
 * (swarm_get_tasks), and a cleanup call (swarm_cleanup), all pre-filled with
 * the given swarm and project IDs.
 *
 * @param {string} swarmId - Swarm these workers belong to.
 * @param {string} projectId - Resolved project ID.
 * @param {number} maxAgents - Caller's cap on concurrent workers.
 * @param {number} taskCount - Number of tasks created for the swarm.
 * @returns {string} Markdown instructions.
 */
function generateWorkerInstructions(swarmId, projectId, maxAgents, taskCount) {
    // Cap at maxAgents, at half the task count (rounded up), and at the task
    // count itself — never recommend more agents than there are tasks.
    const recommendedAgents = Math.min(maxAgents, Math.ceil(taskCount / 2), taskCount);
    return `
## Worker Agent Instructions

To execute this swarm, spawn ${recommendedAgents} worker agent(s) using the Task tool:

\`\`\`
Task({
  subagent_type: "general-purpose",
  prompt: "You are a swarm worker for swarm ${swarmId}. Project: ${projectId}.

Your workflow:
1. swarm_sense({ projectId: '${projectId}', swarmId: '${swarmId}', types: ['modifying', 'claiming'] })
2. swarm_claim_task({ projectId: '${projectId}', swarmId: '${swarmId}', agentId: '<your-id>' })
3. If task claimed: execute it, then swarm_complete_task with action: 'complete'
4. Loop until no tasks remain

Always leave pheromones when working. Exit when swarm_get_tasks shows no available/in_progress tasks.",
  run_in_background: true
})
\`\`\`

Launch ${recommendedAgents} agents in parallel for optimal execution.

## Monitoring Progress

Check swarm progress:
\`\`\`
swarm_get_tasks({
  projectId: "${projectId}",
  swarmId: "${swarmId}",
  includeStats: true
})
\`\`\`

## Cleanup (after completion)

\`\`\`
swarm_cleanup({
  projectId: "${projectId}",
  swarmId: "${swarmId}"
})
\`\`\`
`;
}
|
|
378
|
+
/**
 * Build dependency graph edges for visualization.
 *
 * Flattens each task's dependency list into directed edges pointing from the
 * prerequisite task to the dependent task.
 *
 * @param {{ tasks: Array<{ id: string, dependencies: string[] }> }} decomposition
 * @returns {Array<{ from: string, to: string }>} One edge per (dependency, task) pair.
 */
function buildDependencyGraph(decomposition) {
    return decomposition.tasks.flatMap((atomicTask) =>
        atomicTask.dependencies.map((prerequisiteId) => ({
            from: prerequisiteId,
            to: atomicTask.id,
        })));
}
|
|
@@ -0,0 +1,220 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Swarm Post Task Tool
|
|
3
|
+
* Post a task to the blackboard for agents to claim and work on
|
|
4
|
+
*/
|
|
5
|
+
import { z } from 'zod';
|
|
6
|
+
import { Neo4jService } from '../../storage/neo4j/neo4j.service.js';
|
|
7
|
+
import { TOOL_NAMES, TOOL_METADATA } from '../constants.js';
|
|
8
|
+
import { createErrorResponse, createSuccessResponse, resolveProjectIdOrError, debugLog } from '../utils.js';
|
|
9
|
+
import { TASK_PRIORITIES, TASK_TYPES, generateTaskId, } from './swarm-constants.js';
|
|
10
|
+
/**
 * Neo4j query to create a new SwarmTask node (post-task variant).
 *
 * Creates the task (always with status 'available'), then best-effort links
 * it to any existing target code nodes (TARGETS) and existing dependency
 * tasks (DEPENDS_ON) via OPTIONAL MATCH + FOREACH — missing targets or deps
 * are silently skipped. Returns the created task's fields.
 *
 * NOTE(review): the stored status remains 'available' even when dependencies
 * are incomplete; only the handler's response reports 'blocked'. Confirm that
 * claim logic re-checks DEPENDS_ON before handing the task to a worker.
 */
const CREATE_TASK_QUERY = `
// Create the task node
CREATE (t:SwarmTask {
  id: $taskId,
  projectId: $projectId,
  swarmId: $swarmId,
  title: $title,
  description: $description,
  type: $type,
  priority: $priority,
  priorityScore: $priorityScore,
  status: 'available',
  targetNodeIds: $targetNodeIds,
  targetFilePaths: $targetFilePaths,
  dependencies: $dependencies,
  createdBy: $createdBy,
  createdAt: timestamp(),
  updatedAt: timestamp(),
  metadata: $metadata
})

// Link to target code nodes if they exist
WITH t
OPTIONAL MATCH (target)
WHERE target.id IN $targetNodeIds
  AND target.projectId = $projectId
  AND NOT target:SwarmTask
  AND NOT target:Pheromone
WITH t, collect(DISTINCT target) as targets
FOREACH (target IN targets | MERGE (t)-[:TARGETS]->(target))

// Link to dependency tasks if they exist
WITH t
OPTIONAL MATCH (dep:SwarmTask)
WHERE dep.id IN $dependencies
  AND dep.projectId = $projectId
WITH t, collect(DISTINCT dep) as deps
FOREACH (dep IN deps | MERGE (t)-[:DEPENDS_ON]->(dep))

// Return the created task
RETURN t.id as id,
       t.projectId as projectId,
       t.swarmId as swarmId,
       t.title as title,
       t.description as description,
       t.type as type,
       t.priority as priority,
       t.priorityScore as priorityScore,
       t.status as status,
       t.targetNodeIds as targetNodeIds,
       t.targetFilePaths as targetFilePaths,
       t.dependencies as dependencies,
       t.createdBy as createdBy,
       t.createdAt as createdAt
`;
|
|
68
|
+
/**
 * Query to check if dependencies are met (all completed or no dependencies).
 *
 * Returns the total dependency count, how many are not yet 'completed', and
 * id/title/status details for each incomplete dependency (blockedBy).
 *
 * NOTE(review): the RETURN clause aliases size(incompleteDeps) under the same
 * name as the list bound in the preceding WITH; the blockedBy comprehension
 * appears to read the WITH-scoped list (projection aliases are not visible to
 * sibling expressions in Cypher) — verify against the Cypher manual.
 */
const CHECK_DEPENDENCIES_QUERY = `
MATCH (t:SwarmTask {id: $taskId, projectId: $projectId})
OPTIONAL MATCH (t)-[:DEPENDS_ON]->(dep:SwarmTask)
WITH t, collect(dep) as deps,
     [d IN collect(dep) WHERE d.status <> 'completed'] as incompleteDeps
RETURN size(deps) as totalDeps,
       size(incompleteDeps) as incompleteDeps,
       [d IN incompleteDeps | {id: d.id, title: d.title, status: d.status}] as blockedBy
`;
|
|
80
|
+
/**
 * Register the swarm_post_task MCP tool on the given server.
 *
 * Lets an agent post a task to the blackboard (a SwarmTask node in Neo4j) for
 * other agents to claim. The created task is linked to any existing target
 * code nodes and dependency tasks, and the response reports whether the task
 * is immediately claimable or blocked by incomplete dependencies.
 *
 * Fix vs. previous revision: project resolution now runs inside the
 * try/finally, so a throw from resolveProjectIdOrError can no longer leak the
 * Neo4j connection (it was previously awaited before the try block, outside
 * the finally's reach). This also matches the orchestrate tool's structure.
 *
 * @param {object} server - MCP server exposing registerTool().
 */
export const createSwarmPostTaskTool = (server) => {
    server.registerTool(TOOL_NAMES.swarmPostTask, {
        title: TOOL_METADATA[TOOL_NAMES.swarmPostTask].title,
        description: TOOL_METADATA[TOOL_NAMES.swarmPostTask].description,
        inputSchema: {
            projectId: z.string().describe('Project ID, name, or path'),
            swarmId: z.string().describe('Swarm ID for grouping related tasks'),
            title: z.string().min(1).max(200).describe('Short title for the task'),
            description: z.string().describe('Detailed description of what needs to be done'),
            type: z
                .enum(TASK_TYPES)
                .optional()
                .default('implement')
                .describe('Task type: implement, refactor, fix, test, review, document, investigate, plan'),
            priority: z
                .enum(Object.keys(TASK_PRIORITIES))
                .optional()
                .default('normal')
                .describe('Priority level: critical, high, normal, low, backlog'),
            targetNodeIds: z
                .array(z.string())
                .optional()
                .default([])
                .describe('Code node IDs this task targets (from search_codebase)'),
            targetFilePaths: z
                .array(z.string())
                .optional()
                .default([])
                .describe('File paths this task affects (alternative to nodeIds)'),
            dependencies: z
                .array(z.string())
                .optional()
                .default([])
                .describe('Task IDs that must be completed before this task can start'),
            createdBy: z.string().describe('Agent ID or identifier of who created this task'),
            metadata: z
                .record(z.unknown())
                .optional()
                .describe('Additional metadata (context, acceptance criteria, etc.)'),
        },
    }, async ({ projectId, swarmId, title, description, type = 'implement', priority = 'normal', targetNodeIds = [], targetFilePaths = [], dependencies = [], createdBy, metadata, }) => {
        // One DB connection per invocation; closed unconditionally in `finally`.
        const neo4jService = new Neo4jService();
        try {
            // Resolve project ID inside the try so the connection is always
            // released, even if resolution itself throws.
            const projectResult = await resolveProjectIdOrError(projectId, neo4jService);
            if (!projectResult.success) {
                return projectResult.error;
            }
            const resolvedProjectId = projectResult.projectId;
            const taskId = generateTaskId();
            // Numeric score used for claim ordering, derived from the named level.
            const priorityScore = TASK_PRIORITIES[priority];
            // Metadata is persisted as a JSON string property.
            const metadataJson = metadata ? JSON.stringify(metadata) : null;
            await debugLog('Creating swarm task', {
                taskId,
                projectId: resolvedProjectId,
                swarmId,
                title,
                type,
                priority,
                targetNodeIds: targetNodeIds.length,
                dependencies: dependencies.length,
            });
            // Create the task
            const result = await neo4jService.run(CREATE_TASK_QUERY, {
                taskId,
                projectId: resolvedProjectId,
                swarmId,
                title,
                description,
                type,
                priority,
                priorityScore,
                targetNodeIds,
                targetFilePaths,
                dependencies,
                createdBy,
                metadata: metadataJson,
            });
            if (result.length === 0) {
                return createErrorResponse('Failed to create task');
            }
            const task = result[0];
            // Check dependency status
            let dependencyStatus = { totalDeps: 0, incompleteDeps: 0, blockedBy: [] };
            if (dependencies.length > 0) {
                const depCheck = await neo4jService.run(CHECK_DEPENDENCIES_QUERY, {
                    taskId,
                    projectId: resolvedProjectId,
                });
                if (depCheck.length > 0) {
                    // Neo4j integers may arrive as objects with toNumber();
                    // coerce to plain JS numbers.
                    dependencyStatus = {
                        totalDeps: typeof depCheck[0].totalDeps === 'object'
                            ? depCheck[0].totalDeps.toNumber()
                            : depCheck[0].totalDeps,
                        incompleteDeps: typeof depCheck[0].incompleteDeps === 'object'
                            ? depCheck[0].incompleteDeps.toNumber()
                            : depCheck[0].incompleteDeps,
                        blockedBy: depCheck[0].blockedBy || [],
                    };
                }
            }
            const isBlocked = dependencyStatus.incompleteDeps > 0;
            // NOTE(review): the stored node keeps status 'available' even when
            // blocked; only this response reports 'blocked'. Confirm claim logic
            // re-checks DEPENDS_ON before assigning the task.
            return createSuccessResponse(JSON.stringify({
                success: true,
                task: {
                    id: task.id,
                    projectId: task.projectId,
                    swarmId: task.swarmId,
                    title: task.title,
                    description: task.description,
                    type: task.type,
                    priority: task.priority,
                    priorityScore: task.priorityScore,
                    status: isBlocked ? 'blocked' : 'available',
                    targetNodeIds: task.targetNodeIds,
                    targetFilePaths: task.targetFilePaths,
                    dependencies: task.dependencies,
                    createdBy: task.createdBy,
                    createdAt: typeof task.createdAt === 'object'
                        ? task.createdAt.toNumber()
                        : task.createdAt,
                },
                dependencyStatus: {
                    isBlocked,
                    ...dependencyStatus,
                },
                message: isBlocked
                    ? `Task created but blocked by ${dependencyStatus.incompleteDeps} incomplete dependencies`
                    : 'Task created and available for claiming',
            }));
        }
        catch (error) {
            await debugLog('Swarm post task error', { error: String(error) });
            return createErrorResponse(error instanceof Error ? error : String(error));
        }
        finally {
            await neo4jService.close();
        }
    });
};
|
package/package.json
CHANGED