opencode-swarm-plugin 0.37.0 → 0.39.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.env +2 -0
- package/.hive/eval-results.json +26 -0
- package/.hive/issues.jsonl +20 -5
- package/.hive/memories.jsonl +35 -1
- package/.opencode/eval-history.jsonl +12 -0
- package/.turbo/turbo-build.log +4 -4
- package/.turbo/turbo-test.log +319 -319
- package/CHANGELOG.md +258 -0
- package/README.md +50 -0
- package/bin/swarm.test.ts +475 -0
- package/bin/swarm.ts +385 -208
- package/dist/compaction-hook.d.ts +1 -1
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/compaction-prompt-scoring.d.ts +124 -0
- package/dist/compaction-prompt-scoring.d.ts.map +1 -0
- package/dist/eval-capture.d.ts +81 -1
- package/dist/eval-capture.d.ts.map +1 -1
- package/dist/eval-gates.d.ts +84 -0
- package/dist/eval-gates.d.ts.map +1 -0
- package/dist/eval-history.d.ts +117 -0
- package/dist/eval-history.d.ts.map +1 -0
- package/dist/eval-learning.d.ts +216 -0
- package/dist/eval-learning.d.ts.map +1 -0
- package/dist/hive.d.ts +59 -0
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.d.ts +87 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +823 -131
- package/dist/plugin.js +655 -131
- package/dist/post-compaction-tracker.d.ts +133 -0
- package/dist/post-compaction-tracker.d.ts.map +1 -0
- package/dist/swarm-decompose.d.ts +30 -0
- package/dist/swarm-decompose.d.ts.map +1 -1
- package/dist/swarm-orchestrate.d.ts +23 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +25 -1
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm.d.ts +19 -0
- package/dist/swarm.d.ts.map +1 -1
- package/evals/README.md +595 -94
- package/evals/compaction-prompt.eval.ts +149 -0
- package/evals/coordinator-behavior.eval.ts +8 -8
- package/evals/fixtures/compaction-prompt-cases.ts +305 -0
- package/evals/lib/compaction-loader.test.ts +248 -0
- package/evals/lib/compaction-loader.ts +320 -0
- package/evals/lib/data-loader.test.ts +345 -0
- package/evals/lib/data-loader.ts +107 -6
- package/evals/scorers/compaction-prompt-scorers.ts +145 -0
- package/evals/scorers/compaction-scorers.ts +13 -13
- package/evals/scorers/coordinator-discipline.evalite-test.ts +3 -2
- package/evals/scorers/coordinator-discipline.ts +13 -13
- package/examples/plugin-wrapper-template.ts +177 -8
- package/package.json +7 -2
- package/scripts/migrate-unknown-sessions.ts +349 -0
- package/src/compaction-capture.integration.test.ts +257 -0
- package/src/compaction-hook.test.ts +139 -2
- package/src/compaction-hook.ts +113 -2
- package/src/compaction-prompt-scorers.test.ts +299 -0
- package/src/compaction-prompt-scoring.ts +298 -0
- package/src/eval-capture.test.ts +422 -0
- package/src/eval-capture.ts +94 -2
- package/src/eval-gates.test.ts +306 -0
- package/src/eval-gates.ts +218 -0
- package/src/eval-history.test.ts +508 -0
- package/src/eval-history.ts +214 -0
- package/src/eval-learning.test.ts +378 -0
- package/src/eval-learning.ts +360 -0
- package/src/index.ts +61 -1
- package/src/post-compaction-tracker.test.ts +251 -0
- package/src/post-compaction-tracker.ts +237 -0
- package/src/swarm-decompose.test.ts +40 -47
- package/src/swarm-decompose.ts +2 -2
- package/src/swarm-orchestrate.test.ts +270 -7
- package/src/swarm-orchestrate.ts +100 -13
- package/src/swarm-prompts.test.ts +121 -0
- package/src/swarm-prompts.ts +297 -4
- package/src/swarm-research.integration.test.ts +157 -0
- package/src/swarm-review.ts +3 -3
- package/evals/{evalite.config.ts → evalite.config.ts.bak} +0 -0
package/dist/post-compaction-tracker.d.ts
ADDED
@@ -0,0 +1,133 @@
+/**
+ * Post-Compaction Tool Call Tracker
+ *
+ * Tracks tool calls after compaction resumption to detect coordinator violations
+ * and provide learning signals for eval-driven development.
+ *
+ * ## Purpose
+ *
+ * When context is compacted, the continuation agent needs observation to learn
+ * if it's following coordinator discipline. This tracker:
+ *
+ * 1. Emits resumption_started on first tool call (marks compaction exit)
+ * 2. Tracks up to N tool calls (default 20) with violation detection
+ * 3. Stops tracking after limit to avoid noise in long sessions
+ *
+ * ## Coordinator Violations Detected
+ *
+ * - **Edit/Write**: Coordinators NEVER edit files - spawn worker instead
+ * - **swarmmail_reserve/agentmail_reserve**: Workers reserve, not coordinators
+ *
+ * ## Integration
+ *
+ * Used by compaction hook to wire tool.call events → eval capture.
+ *
+ * @example
+ * ```typescript
+ * const tracker = createPostCompactionTracker({
+ *   sessionId: "session-123",
+ *   epicId: "bd-epic-456",
+ *   onEvent: captureCompactionEvent,
+ * });
+ *
+ * // Wire to OpenCode hook
+ * hooks["tool.call"] = (input) => {
+ *   tracker.trackToolCall({
+ *     tool: input.tool,
+ *     args: input.args,
+ *     timestamp: Date.now(),
+ *   });
+ * };
+ * ```
+ */
+/**
+ * Tool call event structure
+ */
+export interface ToolCallEvent {
+    tool: string;
+    args: Record<string, unknown>;
+    timestamp: number;
+}
+/**
+ * Compaction event payload (matches eval-capture.ts structure)
+ */
+export interface CompactionEvent {
+    session_id: string;
+    epic_id: string;
+    compaction_type: "detection_complete" | "prompt_generated" | "context_injected" | "resumption_started" | "tool_call_tracked";
+    payload: {
+        session_id?: string;
+        epic_id?: string;
+        tool?: string;
+        args?: Record<string, unknown>;
+        call_number?: number;
+        is_coordinator_violation?: boolean;
+        violation_reason?: string;
+        timestamp?: number;
+    };
+}
+/**
+ * Tracker configuration
+ */
+export interface PostCompactionTrackerConfig {
+    sessionId: string;
+    epicId: string;
+    onEvent: (event: CompactionEvent) => void;
+    maxCalls?: number;
+}
+/**
+ * Post-compaction tracker instance
+ */
+export interface PostCompactionTracker {
+    trackToolCall(event: ToolCallEvent): void;
+    isTracking(): boolean;
+}
+/**
+ * Default maximum number of tool calls to track
+ *
+ * Chosen to balance:
+ * - Enough data for pattern detection (20 calls is ~2-3 minutes of coordinator work)
+ * - Avoiding noise pollution in long sessions
+ */
+export declare const DEFAULT_MAX_TRACKED_CALLS = 20;
+/**
+ * Check if tool call is a coordinator violation
+ *
+ * @param tool - Tool name from OpenCode tool.call hook
+ * @returns Violation status with reason if forbidden
+ *
+ * @example
+ * ```typescript
+ * const result = isCoordinatorViolation("edit");
+ * // { isViolation: true, reason: "Coordinators NEVER edit..." }
+ *
+ * const result = isCoordinatorViolation("read");
+ * // { isViolation: false }
+ * ```
+ */
+export declare function isCoordinatorViolation(tool: string): {
+    isViolation: boolean;
+    reason?: string;
+};
+/**
+ * Create a post-compaction tool call tracker
+ *
+ * @example
+ * ```typescript
+ * const tracker = createPostCompactionTracker({
+ *   sessionId: "session-123",
+ *   epicId: "bd-epic-456",
+ *   onEvent: (event) => captureCompactionEvent(event),
+ *   maxCalls: 20
+ * });
+ *
+ * // Track tool calls
+ * tracker.trackToolCall({
+ *   tool: "read",
+ *   args: { filePath: "/test.ts" },
+ *   timestamp: Date.now()
+ * });
+ * ```
+ */
+export declare function createPostCompactionTracker(config: PostCompactionTrackerConfig): PostCompactionTracker;
+//# sourceMappingURL=post-compaction-tracker.d.ts.map
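Taken together, the new declarations describe a small observer API: create a tracker, feed it tool calls, and let it emit CompactionEvent records until it hits its call limit. A minimal wiring sketch, assuming the runtime module matches the declarations above; the import path, the `captureCompactionEvent` sink, and the `onToolCall` hook shape are illustrative rather than documented API:

```typescript
import {
  createPostCompactionTracker,
  isCoordinatorViolation,
  type CompactionEvent,
} from "opencode-swarm-plugin/dist/post-compaction-tracker"; // hypothetical import path

// Stand-in event sink; in the plugin this role is played by eval capture.
function captureCompactionEvent(event: CompactionEvent): void {
  console.log(event.compaction_type, event.payload.tool ?? "", event.payload.is_coordinator_violation ?? false);
}

const tracker = createPostCompactionTracker({
  sessionId: "session-123",
  epicId: "bd-epic-456",
  onEvent: captureCompactionEvent,
  maxCalls: 20, // mirrors DEFAULT_MAX_TRACKED_CALLS
});

// Assumed hook payload shape; wire this wherever the host surfaces tool.call events.
function onToolCall(input: { tool: string; args: Record<string, unknown> }): void {
  if (!tracker.isTracking()) return; // the tracker stops itself after maxCalls
  const { isViolation, reason } = isCoordinatorViolation(input.tool);
  if (isViolation) console.warn(`coordinator violation: ${reason}`);
  tracker.trackToolCall({ tool: input.tool, args: input.args, timestamp: Date.now() });
}
```

The explicit `isCoordinatorViolation` call is only for local logging; per the payload shape, each tracked call already carries `is_coordinator_violation` and `violation_reason` in the event it emits.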
package/dist/post-compaction-tracker.d.ts.map
ADDED
@@ -0,0 +1 @@
+
{"version":3,"file":"post-compaction-tracker.d.ts","sourceRoot":"","sources":["../src/post-compaction-tracker.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAyCG;AAEH;;GAEG;AACH,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IAC9B,SAAS,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,MAAM,CAAC;IAChB,eAAe,EACX,oBAAoB,GACpB,kBAAkB,GAClB,kBAAkB,GAClB,oBAAoB,GACpB,mBAAmB,CAAC;IACxB,OAAO,EAAE;QACP,UAAU,CAAC,EAAE,MAAM,CAAC;QACpB,OAAO,CAAC,EAAE,MAAM,CAAC;QACjB,IAAI,CAAC,EAAE,MAAM,CAAC;QACd,IAAI,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;QAC/B,WAAW,CAAC,EAAE,MAAM,CAAC;QACrB,wBAAwB,CAAC,EAAE,OAAO,CAAC;QACnC,gBAAgB,CAAC,EAAE,MAAM,CAAC;QAC1B,SAAS,CAAC,EAAE,MAAM,CAAC;KACpB,CAAC;CACH;AAED;;GAEG;AACH,MAAM,WAAW,2BAA2B;IAC1C,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,MAAM,CAAC;IACf,OAAO,EAAE,CAAC,KAAK,EAAE,eAAe,KAAK,IAAI,CAAC;IAC1C,QAAQ,CAAC,EAAE,MAAM,CAAC;CACnB;AAED;;GAEG;AACH,MAAM,WAAW,qBAAqB;IACpC,aAAa,CAAC,KAAK,EAAE,aAAa,GAAG,IAAI,CAAC;IAC1C,UAAU,IAAI,OAAO,CAAC;CACvB;AAMD;;;;;;GAMG;AACH,eAAO,MAAM,yBAAyB,KAAK,CAAC;AAoB5C;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,sBAAsB,CAAC,IAAI,EAAE,MAAM,GAAG;IACpD,WAAW,EAAE,OAAO,CAAC;IACrB,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB,CAMA;AAMD;;;;;;;;;;;;;;;;;;;GAmBG;AACH,wBAAgB,2BAA2B,CACzC,MAAM,EAAE,2BAA2B,GAClC,qBAAqB,CA2DvB"}
package/dist/swarm-decompose.d.ts
CHANGED
@@ -78,9 +78,24 @@ export declare const swarm_validate_decomposition: {
     description: string;
     args: {
         response: z.ZodString;
+        project_path: z.ZodOptional<z.ZodString>;
+        task: z.ZodOptional<z.ZodString>;
+        context: z.ZodOptional<z.ZodString>;
+        strategy: z.ZodOptional<z.ZodEnum<{
+            "file-based": "file-based";
+            "feature-based": "feature-based";
+            "risk-based": "risk-based";
+            auto: "auto";
+        }>>;
+        epic_id: z.ZodOptional<z.ZodString>;
     };
     execute(args: {
         response: string;
+        project_path?: string | undefined;
+        task?: string | undefined;
+        context?: string | undefined;
+        strategy?: "file-based" | "feature-based" | "risk-based" | "auto" | undefined;
+        epic_id?: string | undefined;
     }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
 };
 /**
@@ -211,9 +226,24 @@ export declare const decomposeTools: {
     description: string;
     args: {
         response: z.ZodString;
+        project_path: z.ZodOptional<z.ZodString>;
+        task: z.ZodOptional<z.ZodString>;
+        context: z.ZodOptional<z.ZodString>;
+        strategy: z.ZodOptional<z.ZodEnum<{
+            "file-based": "file-based";
+            "feature-based": "feature-based";
+            "risk-based": "risk-based";
+            auto: "auto";
+        }>>;
+        epic_id: z.ZodOptional<z.ZodString>;
     };
     execute(args: {
         response: string;
+        project_path?: string | undefined;
+        task?: string | undefined;
+        context?: string | undefined;
+        strategy?: "file-based" | "feature-based" | "risk-based" | "auto" | undefined;
+        epic_id?: string | undefined;
     }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
 };
 swarm_delegate_planning: {
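In practice the widened signature lets the coordinator pass planning context straight into validation instead of validating the raw CellTree JSON in isolation. A rough sketch of the new call shape, assuming the exported tool object is invoked directly; the import path, `planJson`, and `toolContext` are placeholders for what the OpenCode host provides:

```typescript
import { swarm_validate_decomposition } from "opencode-swarm-plugin/dist/swarm-decompose"; // hypothetical path
import type { ToolContext } from "@opencode-ai/plugin";

declare const toolContext: ToolContext; // supplied by the host at runtime
declare const planJson: string;         // CellTree JSON produced by the planning step

// `response` is still the only required argument; the rest are new optional hints.
const verdict = await swarm_validate_decomposition.execute(
  {
    response: planJson,
    project_path: "/abs/path/to/project",
    task: "Add OAuth login",
    strategy: "feature-based",
    epic_id: "bd-epic-456",
  },
  toolContext,
);
```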
package/dist/swarm-decompose.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"swarm-decompose.d.ts","sourceRoot":"","sources":["../src/swarm-decompose.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAGH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAyJxB;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,MAAM,CAAC;IACpB,aAAa,EAAE,mBAAmB,GAAG,eAAe,CAAC;IACrD,WAAW,EAAE,MAAM,CAAC;CACrB;AA8CD;;;;;;;;;GASG;AACH,wBAAgB,0BAA0B,CACxC,QAAQ,EAAE,KAAK,CAAC;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,WAAW,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC,GACvD,mBAAmB,EAAE,CAmDvB;AAED;;;;;GAKG;AACH,wBAAgB,mBAAmB,CACjC,QAAQ,EAAE,KAAK,CAAC;IAAE,KAAK,EAAE,MAAM,EAAE,CAAA;CAAE,CAAC,GACnC,MAAM,EAAE,CAgBV;AA+GD;;;;;;;GAOG;AACH,eAAO,MAAM,eAAe;;;;;;;;;;;;;;CAiG1B,CAAC;AAEH;;;;GAIG;AACH,eAAO,MAAM,4BAA4B
+
{"version":3,"file":"swarm-decompose.d.ts","sourceRoot":"","sources":["../src/swarm-decompose.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAGH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAyJxB;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,MAAM,CAAC;IACpB,aAAa,EAAE,mBAAmB,GAAG,eAAe,CAAC;IACrD,WAAW,EAAE,MAAM,CAAC;CACrB;AA8CD;;;;;;;;;GASG;AACH,wBAAgB,0BAA0B,CACxC,QAAQ,EAAE,KAAK,CAAC;IAAE,KAAK,EAAE,MAAM,CAAC;IAAC,WAAW,CAAC,EAAE,MAAM,CAAA;CAAE,CAAC,GACvD,mBAAmB,EAAE,CAmDvB;AAED;;;;;GAKG;AACH,wBAAgB,mBAAmB,CACjC,QAAQ,EAAE,KAAK,CAAC;IAAE,KAAK,EAAE,MAAM,EAAE,CAAA;CAAE,CAAC,GACnC,MAAM,EAAE,CAgBV;AA+GD;;;;;;;GAOG;AACH,eAAO,MAAM,eAAe;;;;;;;;;;;;;;CAiG1B,CAAC;AAEH;;;;GAIG;AACH,eAAO,MAAM,4BAA4B;;;;;;;;;;;;;;;;;;;;;;;CAqKvC,CAAC;AAEH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA+BG;AACH,eAAO,MAAM,uBAAuB;;;;;;;;;;;;;;;;;;;CAuMlC,CAAC;AAMH,qBAAa,UAAW,SAAQ,KAAK;aAGjB,SAAS,EAAE,MAAM;aACjB,OAAO,CAAC,EAAE,OAAO;gBAFjC,OAAO,EAAE,MAAM,EACC,SAAS,EAAE,MAAM,EACjB,OAAO,CAAC,EAAE,OAAO,YAAA;CAKpC;AAED,qBAAa,kBAAmB,SAAQ,UAAU;aAG9B,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ;gBADrC,OAAO,EAAE,MAAM,EACC,QAAQ,CAAC,EAAE,CAAC,CAAC,QAAQ,YAAA;CAIxC;AAkCD;;;;;;;;;;;;;;;;;;;GAmBG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;;;;;;;;;;;CA0RjC,CAAC;AAEH,eAAO,MAAM,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAK1B,CAAC"}
package/dist/swarm-orchestrate.d.ts
CHANGED
@@ -283,6 +283,8 @@ export declare const swarm_record_outcome: {
         user_cancelled: "user_cancelled";
     }>>;
     failure_details: z.ZodOptional<z.ZodString>;
+    project_path: z.ZodOptional<z.ZodString>;
+    epic_id: z.ZodOptional<z.ZodString>;
 };
 execute(args: {
     bead_id: string;
@@ -295,6 +297,8 @@ export declare const swarm_record_outcome: {
     strategy?: "file-based" | "feature-based" | "risk-based" | "research-based" | undefined;
     failure_mode?: "timeout" | "unknown" | "conflict" | "validation" | "tool_failure" | "context_overflow" | "dependency_blocked" | "user_cancelled" | undefined;
     failure_details?: string | undefined;
+    project_path?: string | undefined;
+    epic_id?: string | undefined;
 }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
 };
 /**
@@ -313,12 +317,27 @@ export declare const swarm_record_outcome: {
  * ```
  */
 export declare function extractTechStack(task: string): string[];
+/**
+ * Spawn instruction for a researcher worker
+ */
+export interface ResearchSpawnInstruction {
+    /** Unique ID for this research task */
+    research_id: string;
+    /** Technology being researched */
+    tech: string;
+    /** Full prompt for the researcher agent */
+    prompt: string;
+    /** Agent type for the Task tool */
+    subagent_type: "swarm/researcher";
+}
 /**
  * Research result from documentation discovery phase
  */
 export interface ResearchResult {
     /** Technologies identified and researched */
     tech_stack: string[];
+    /** Spawn instructions for researcher workers */
+    spawn_instructions: ResearchSpawnInstruction[];
     /** Summaries keyed by technology name */
     summaries: Record<string, string>;
     /** Semantic-memory IDs where research is stored */
@@ -731,6 +750,8 @@ export declare const orchestrateTools: {
         user_cancelled: "user_cancelled";
     }>>;
     failure_details: z.ZodOptional<z.ZodString>;
+    project_path: z.ZodOptional<z.ZodString>;
+    epic_id: z.ZodOptional<z.ZodString>;
 };
 execute(args: {
     bead_id: string;
@@ -743,6 +764,8 @@ export declare const orchestrateTools: {
     strategy?: "file-based" | "feature-based" | "risk-based" | "research-based" | undefined;
     failure_mode?: "timeout" | "unknown" | "conflict" | "validation" | "tool_failure" | "context_overflow" | "dependency_blocked" | "user_cancelled" | undefined;
     failure_details?: string | undefined;
+    project_path?: string | undefined;
+    epic_id?: string | undefined;
 }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
 };
 swarm_research_phase: {
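The notable addition is `spawn_instructions` on `ResearchResult`: the research phase now returns ready-to-spawn researcher payloads instead of leaving prompt assembly to the coordinator. A sketch of how a coordinator loop might consume them, assuming a `ResearchResult` value is already in hand; `spawnSubagent` stands in for the host's Task tool and its signature is an assumption:

```typescript
import type { ResearchResult } from "opencode-swarm-plugin/dist/swarm-orchestrate"; // hypothetical path

// Placeholder for the host's Task tool; signature assumed for illustration.
declare function spawnSubagent(opts: { subagent_type: string; prompt: string }): Promise<string>;

async function runResearchPhase(research: ResearchResult): Promise<Record<string, string>> {
  const findings: Record<string, string> = { ...research.summaries };
  for (const instruction of research.spawn_instructions) {
    // Each instruction carries the full researcher prompt and the agent type to spawn.
    const summary = await spawnSubagent({
      subagent_type: instruction.subagent_type, // "swarm/researcher"
      prompt: instruction.prompt,
    });
    findings[instruction.tech] = summary;
  }
  return findings;
}
```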
package/dist/swarm-orchestrate.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"swarm-orchestrate.d.ts","sourceRoot":"","sources":["../src/swarm-orchestrate.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;GAmBG;AAGH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAaxB,OAAO,EACL,KAAK,aAAa,EAEnB,MAAM,0BAA0B,CAAC;
+
{"version":3,"file":"swarm-orchestrate.d.ts","sourceRoot":"","sources":["../src/swarm-orchestrate.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;;;;;;;;GAmBG;AAGH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAaxB,OAAO,EACL,KAAK,aAAa,EAEnB,MAAM,0BAA0B,CAAC;AAsDlC;;;;;;;;GAQG;AACH,wBAAgB,qBAAqB,CAAC,MAAM,EAAE;IAC5C,OAAO,EAAE,MAAM,CAAC;IAChB,WAAW,EAAE,MAAM,EAAE,CAAC;IACtB,cAAc,CAAC,EAAE,MAAM,EAAE,CAAC;IAC1B,sBAAsB,CAAC,EAAE,MAAM,EAAE,CAAC;IAClC,gBAAgB,CAAC,EAAE,MAAM,EAAE,CAAC;IAC5B,YAAY,EAAE,MAAM,CAAC;IACrB,SAAS,EAAE,MAAM,CAAC;IAClB,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,eAAe,CAAC,EAAE,MAAM,CAAC;CAC1B,GAAG,aAAa,CA4BhB;AAED;;;;;;;;;;;;;;;;;;;;;;;;GAwBG;AACH,wBAAgB,gBAAgB,CAC9B,aAAa,EAAE,MAAM,EAAE,EACvB,WAAW,EAAE,MAAM,EAAE,GACpB;IAAE,KAAK,EAAE,OAAO,CAAC;IAAC,UAAU,EAAE,MAAM,EAAE,CAAA;CAAE,CAqC1C;AAkaD;;;;;;;;;;GAUG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;CA8JrB,CAAC;AAEH;;;;GAIG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;CAoFvB,CAAC;AAEH;;;;GAIG;AACH,eAAO,MAAM,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;CAkHzB,CAAC;AAEH;;;;;;;;GAQG;AACH,eAAO,MAAM,eAAe;;;;;;;;;;;;;;;;;;;;;;CA6E1B,CAAC;AAEH;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAuCG;AACH,eAAO,MAAM,cAAc;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAiwBzB,CAAC;AAEH;;;;;;;;;;;GAWG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CA0K/B,CAAC;AAwBH;;;;;;;;;;;;;;GAcG;AACH,wBAAgB,gBAAgB,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,CAUvD;AAED;;GAEG;AACH,MAAM,WAAW,wBAAwB;IACvC,uCAAuC;IACvC,WAAW,EAAE,MAAM,CAAC;IACpB,kCAAkC;IAClC,IAAI,EAAE,MAAM,CAAC;IACb,2CAA2C;IAC3C,MAAM,EAAE,MAAM,CAAC;IACf,mCAAmC;IACnC,aAAa,EAAE,kBAAkB,CAAC;CACnC;AAED;;GAEG;AACH,MAAM,WAAW,cAAc;IAC7B,6CAA6C;IAC7C,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,gDAAgD;IAChD,kBAAkB,EAAE,wBAAwB,EAAE,CAAC;IAC/C,yCAAyC;IACzC,SAAS,EAAE,MAAM,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC;IAClC,mDAAmD;IACnD,UAAU,EAAE,MAAM,EAAE,CAAC;CACtB;AAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA0CG;AACH,wBAAsB,gBAAgB,CACpC,IAAI,EAAE,MAAM,EACZ,WAAW,EAAE,MAAM,EACnB,OAAO,CAAC,EAAE;IAAE,aAAa,CAAC,EAAE,OAAO,CAAA;CAAE,GACpC,OAAO,CAAC,cAAc,CAAC,CAgDzB;AAED;;;;;GAKG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;;;;;CAqC/B,CAAC;AAEH;;;;;;;;GAQG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;;;;;;;;;CA6CjC,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,uBAAuB;;;;;;;;;;CAmClC,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,mBAAmB;;;;;;;;CAmB9B,CAAC;AAEH;;;;;;;;;;;;;;;;;GAiBG;AACH,eAAO,MAAM,mBAAmB;;;;;;;;;;;;;;;;;;;CAoJ9B,CAAC;AA4BH;;;;;;;;;;;;GAYG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAqG3B,CAAC;AAEH;;;;;;;;GAQG;AACH,eAAO,MAAM,aAAa;;;;;;;;;;CAuGxB,CAAC;AAEH;;;;;;;;GAQG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgMtB,CAAC;AAMH,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAe5B,CAAC"}
package/dist/swarm-prompts.d.ts
CHANGED
@@ -39,7 +39,24 @@ export declare const SUBTASK_PROMPT = "You are a swarm agent working on a subtas
  */
export declare const SUBTASK_PROMPT_V2 = "You are a swarm agent working on: **{subtask_title}**\n\n## [IDENTITY]\nAgent: (assigned at spawn)\nCell: {bead_id}\nEpic: {epic_id}\n\n## [TASK]\n{subtask_description}\n\n## [FILES]\nReserved (exclusive):\n{file_list}\n\nOnly modify these files. Need others? Message the coordinator.\n\n## [CONTEXT]\n{shared_context}\n\n{compressed_context}\n\n{error_context}\n\n## [MANDATORY SURVIVAL CHECKLIST]\n\n**CRITICAL: Follow this checklist IN ORDER. Each step builds on the previous.**\n\n### Step 1: Initialize Coordination (REQUIRED - DO THIS FIRST)\n```\nswarmmail_init(project_path=\"{project_path}\", task_description=\"{bead_id}: {subtask_title}\")\n```\n\n**This registers you with the coordination system and enables:**\n- File reservation tracking\n- Inter-agent communication\n- Progress monitoring\n- Conflict detection\n\n**If you skip this step, your work will not be tracked and swarm_complete will fail.**\n\n### Step 2: \uD83E\uDDE0 Query Past Learnings (MANDATORY - BEFORE starting work)\n\n**\u26A0\uFE0F CRITICAL: ALWAYS query semantic memory BEFORE writing ANY code.**\n\n```\nsemantic-memory_find(query=\"<keywords from your task>\", limit=5, expand=true)\n```\n\n**Why this is MANDATORY:**\n- Past agents may have already solved your exact problem\n- Avoids repeating mistakes that wasted 30+ minutes before\n- Discovers project-specific patterns and gotchas\n- Finds known workarounds for tool/library quirks\n\n**Search Query Examples by Task Type:**\n\n- **Bug fix**: Use exact error message or \"<symptom> <component>\"\n- **New feature**: Search \"<domain concept> implementation pattern\"\n- **Refactor**: Query \"<pattern name> migration approach\"\n- **Integration**: Look for \"<library name> gotchas configuration\"\n- **Testing**: Find \"testing <component type> characterization tests\"\n- **Performance**: Search \"<technology> performance optimization\"\n\n**BEFORE you start coding:**\n1. Run semantic-memory_find with keywords from your task\n2. Read the results with expand=true for full content\n3. Check if any memory solves your problem or warns of pitfalls\n4. Adjust your approach based on past learnings\n\n**If you skip this step, you WILL waste time solving already-solved problems.**\n\n### Step 3: Load Relevant Skills (if available)\n```\nskills_list() # See what skills exist\nskills_use(name=\"<relevant-skill>\", context=\"<your task>\") # Load skill\n```\n\n**Common skill triggers:**\n- Writing tests? \u2192 `skills_use(name=\"testing-patterns\")`\n- Breaking dependencies? \u2192 `skills_use(name=\"testing-patterns\")`\n- Multi-agent coordination? \u2192 `skills_use(name=\"swarm-coordination\")`\n- Building a CLI? \u2192 `skills_use(name=\"cli-builder\")`\n\n### Step 4: Reserve Your Files (YOU reserve, not coordinator)\n```\nswarmmail_reserve(\n paths=[{file_list}],\n reason=\"{bead_id}: {subtask_title}\",\n exclusive=true\n)\n```\n\n**Workers reserve their own files.** This prevents edit conflicts with other agents.\n\n### Step 5: Do the Work (TDD MANDATORY)\n\n**Follow RED \u2192 GREEN \u2192 REFACTOR. No exceptions.**\n\n1. **RED**: Write a failing test that describes the expected behavior\n - Test MUST fail before you write implementation\n - If test passes immediately, your test is wrong\n \n2. **GREEN**: Write minimal code to make the test pass\n - Don't over-engineer - just make it green\n - Hardcode if needed, refactor later\n \n3. 
**REFACTOR**: Clean up while tests stay green\n - Run tests after every change\n - If tests break, undo and try again\n\n```bash\n# Run tests continuously\nbun test <your-test-file> --watch\n```\n\n**Why TDD?**\n- Catches bugs before they exist\n- Documents expected behavior\n- Enables fearless refactoring\n- Proves your code works\n\n### Step 6: Report Progress at Milestones\n```\nswarm_progress(\n project_key=\"{project_path}\",\n agent_name=\"<your-agent-name>\",\n bead_id=\"{bead_id}\",\n status=\"in_progress\",\n progress_percent=25, # or 50, 75\n message=\"<what you just completed>\"\n)\n```\n\n**Report at 25%, 50%, 75% completion.** This:\n- Triggers auto-checkpoint (saves context)\n- Keeps coordinator informed\n- Prevents silent failures\n\n### Step 7: Manual Checkpoint BEFORE Risky Operations\n```\nswarm_checkpoint(\n project_key=\"{project_path}\",\n agent_name=\"<your-agent-name>\",\n bead_id=\"{bead_id}\"\n)\n```\n\n**Call BEFORE:**\n- Large refactors\n- File deletions\n- Breaking API changes\n- Anything that might fail catastrophically\n\n**Checkpoints preserve context so you can recover if things go wrong.**\n\n### Step 8: \uD83D\uDCBE STORE YOUR LEARNINGS (if you discovered something)\n\n**If you learned it the hard way, STORE IT so the next agent doesn't have to.**\n\n```\nsemantic-memory_store(\n information=\"<what you learned, WHY it matters, how to apply it>\",\n tags=\"<domain, tech-stack, pattern-type>\"\n)\n```\n\n**MANDATORY Storage Triggers - Store when you:**\n- \uD83D\uDC1B **Solved a tricky bug** (>15min debugging) - include root cause + solution\n- \uD83D\uDCA1 **Discovered a project-specific pattern** - domain rules, business logic quirks\n- \u26A0\uFE0F **Found a tool/library gotcha** - API quirks, version-specific bugs, workarounds\n- \uD83D\uDEAB **Tried an approach that failed** - anti-patterns to avoid, why it didn't work\n- \uD83C\uDFD7\uFE0F **Made an architectural decision** - reasoning, alternatives considered, tradeoffs\n\n**What Makes a GOOD Memory:**\n\n\u2705 **GOOD** (actionable, explains WHY):\n```\n\"OAuth refresh tokens need 5min buffer before expiry to avoid race conditions.\nWithout buffer, token refresh can fail mid-request if expiry happens between\ncheck and use. Implemented with: if (expiresAt - Date.now() < 300000) refresh()\"\n```\n\n\u274C **BAD** (generic, no context):\n```\n\"Fixed the auth bug by adding a null check\"\n```\n\n**What NOT to Store:**\n- Generic knowledge that's in official documentation\n- Implementation details that change frequently\n- Vague descriptions without context (\"fixed the thing\")\n\n**The WHY matters more than the WHAT.** Future agents need context to apply your learning.\n\n### Step 9: Complete (REQUIRED - releases reservations)\n```\nswarm_complete(\n project_key=\"{project_path}\",\n agent_name=\"<your-agent-name>\",\n bead_id=\"{bead_id}\",\n summary=\"<what you accomplished>\",\n files_touched=[\"list\", \"of\", \"files\"]\n)\n```\n\n**This automatically:**\n- Runs UBS bug scan\n- Releases file reservations\n- Records learning signals\n- Notifies coordinator\n\n**DO NOT manually close the cell with hive_close.** Use swarm_complete.\n\n## [ON-DEMAND RESEARCH]\n\nIf you encounter unknown API behavior or version-specific issues:\n\n1. **Check semantic-memory first:**\n `semantic-memory_find(query=\"<library> <version> <topic>\", limit=3, expand=true)`\n\n2. 
**If not found, spawn researcher:**\n `swarm_spawn_researcher(research_id=\"{bead_id}-research\", epic_id=\"{epic_id}\", tech_stack=[\"<library>\"], project_path=\"{project_path}\")`\n Then spawn with Task tool: `Task(subagent_type=\"swarm/researcher\", prompt=\"<from above>\")`\n\n3. **Wait for research, then continue**\n\n**Research triggers:**\n- \"I'm not sure how this API works in version X\"\n- \"This might have breaking changes\"\n- \"The docs I remember might be outdated\"\n\n**Don't research:**\n- Standard patterns you're confident about\n- Well-documented, stable APIs\n- Obvious implementations\n\n## [SWARM MAIL COMMUNICATION]\n\n### Check Inbox Regularly\n```\nswarmmail_inbox() # Check for coordinator messages\nswarmmail_read_message(message_id=N) # Read specific message\n```\n\n### When Blocked\n```\nswarmmail_send(\n to=[\"coordinator\"],\n subject=\"BLOCKED: {bead_id}\",\n body=\"<blocker description, what you need>\",\n importance=\"high\",\n thread_id=\"{epic_id}\"\n)\nhive_update(id=\"{bead_id}\", status=\"blocked\")\n```\n\n### Report Issues to Other Agents\n```\nswarmmail_send(\n to=[\"OtherAgent\", \"coordinator\"],\n subject=\"Issue in {bead_id}\",\n body=\"<describe problem, don't fix their code>\",\n thread_id=\"{epic_id}\"\n)\n```\n\n### Manual Release (if needed)\n```\nswarmmail_release() # Manually release reservations\n```\n\n**Note:** `swarm_complete` automatically releases reservations. Only use manual release if aborting work.\n\n## [OTHER TOOLS]\n### Hive - You Have Autonomy to File Issues\nYou can create new cells against this epic when you discover:\n- **Bugs**: Found a bug while working? File it.\n- **Tech debt**: Spotted something that needs cleanup? File it.\n- **Follow-up work**: Task needs more work than scoped? File a follow-up.\n- **Dependencies**: Need something from another agent? File and link it.\n\n```\nhive_create(\n title=\"<descriptive title>\",\n type=\"bug\", # or \"task\", \"chore\"\n priority=2,\n parent_id=\"{epic_id}\", # Links to this epic\n description=\"Found while working on {bead_id}: <details>\"\n)\n```\n\n**Don't silently ignore issues.** File them so they get tracked and addressed.\n\nOther cell operations:\n- hive_update(id, status) - Mark blocked if stuck\n- hive_query(status=\"open\") - See what else needs work\n\n### Skills\n- skills_list() - Discover available skills\n- skills_use(name) - Activate skill for specialized guidance\n- skills_create(name) - Create new skill (if you found a reusable pattern)\n\n## [CRITICAL REQUIREMENTS]\n\n**NON-NEGOTIABLE:**\n1. Step 1 (swarmmail_init) MUST be first - do it before anything else\n2. \uD83E\uDDE0 Step 2 (semantic-memory_find) MUST happen BEFORE starting work - query first, code second\n3. Step 4 (swarmmail_reserve) - YOU reserve files, not coordinator\n4. Step 6 (swarm_progress) - Report at milestones, don't work silently\n5. \uD83D\uDCBE Step 8 (semantic-memory_store) - If you learned something hard, STORE IT\n6. Step 9 (swarm_complete) - Use this to close, NOT hive_close\n\n**If you skip these steps:**\n- Your work won't be tracked (swarm_complete will fail)\n- \uD83D\uDD04 You'll waste time repeating already-solved problems (no semantic memory query)\n- Edit conflicts with other agents (no file reservation)\n- Lost work if you crash (no checkpoints)\n- \uD83D\uDD04 Future agents repeat YOUR mistakes (no learnings stored)\n\n**Memory is the swarm's collective intelligence. Query it. Feed it.**\n\nBegin now.";
 /**
- *
+ * Coordinator Agent Prompt Template
+ *
+ * Used by the /swarm command to instruct coordinators on their role.
+ * Coordinators NEVER execute work directly - they clarify, decompose, spawn workers, and review.
+ *
+ * Key sections:
+ * - Role boundaries (what coordinators NEVER do)
+ * - Phase 1.5: Research Phase (spawn researchers, DON'T fetch docs directly)
+ * - Forbidden tools (repo-crawl, webfetch, context7, pdf-brain_search)
+ * - MANDATORY review loop after each worker completes
+ *
+ * Placeholders:
+ * - {task} - The task description from user
+ * - {project_path} - Absolute path to project root
+ */
export declare const COORDINATOR_PROMPT = "You are a swarm coordinator. Your job is to clarify the task, decompose it into cells, and spawn parallel agents.\n\n## Task\n\n{task}\n\n## CRITICAL: Coordinator Role Boundaries\n\n**\u26A0\uFE0F COORDINATORS NEVER EXECUTE WORK DIRECTLY**\n\nYour role is **ONLY** to:\n1. **Clarify** - Ask questions to understand scope\n2. **Decompose** - Break into subtasks with clear boundaries \n3. **Spawn** - Create worker agents for ALL subtasks\n4. **Monitor** - Check progress, unblock, mediate conflicts\n5. **Verify** - Confirm completion, run final checks\n\n**YOU DO NOT:**\n- Read implementation files (only metadata/structure for planning)\n- Edit code directly\n- Run tests yourself (workers run tests)\n- Implement features\n- Fix bugs inline\n- Make \"quick fixes\" yourself\n\n**ALWAYS spawn workers, even for sequential tasks.** Sequential just means spawn them in order and wait for each to complete before spawning the next.\n\n### Why This Matters\n\n| Coordinator Work | Worker Work | Consequence of Mixing |\n|-----------------|-------------|----------------------|\n| Sonnet context ($$$) | Disposable context | Expensive context waste |\n| Long-lived state | Task-scoped state | Context exhaustion |\n| Orchestration concerns | Implementation concerns | Mixed concerns |\n| No checkpoints | Checkpoints enabled | No recovery |\n| No learning signals | Outcomes tracked | No improvement |\n\n## CRITICAL: NEVER Fetch Documentation Directly\n\n**\u26A0\uFE0F COORDINATORS DO NOT CALL RESEARCH TOOLS DIRECTLY**\n\nThe following tools are **FORBIDDEN** for coordinators to call:\n\n- `repo-crawl_file`, `repo-crawl_readme`, `repo-crawl_search`, `repo-crawl_structure`, `repo-crawl_tree`\n- `repo-autopsy_*` (all variants)\n- `webfetch`, `fetch_fetch`\n- `context7_resolve-library-id`, `context7_get-library-docs`\n- `pdf-brain_search`, `pdf-brain_read`\n\n**WHY?** These tools dump massive context that exhausts your expensive Sonnet context. Your job is orchestration, not research.\n\n**INSTEAD:** Use `swarm_spawn_researcher` (see Phase 1.5 below) to spawn a researcher worker who:\n- Fetches documentation in disposable context\n- Stores full details in semantic-memory\n- Returns a condensed summary for shared_context\n\n## Workflow\n\n### Phase 0: Socratic Planning (INTERACTIVE - unless --fast)\n\n**Before decomposing, clarify the task with the user.**\n\nCheck for flags in the task:\n- `--fast` \u2192 Skip questions, use reasonable defaults\n- `--auto` \u2192 Zero interaction, heuristic decisions\n- `--confirm-only` \u2192 Show plan, get yes/no only\n\n**Default (no flags): Full Socratic Mode**\n\n1. **Analyze task for ambiguity:**\n - Scope unclear? (what's included/excluded)\n - Strategy unclear? (file-based vs feature-based)\n - Dependencies unclear? (what needs to exist first)\n - Success criteria unclear? (how do we know it's done)\n\n2. **If clarification needed, ask ONE question at a time:**\n ```\n The task \"<task>\" needs clarification before I can decompose it.\n\n **Question:** <specific question>\n\n Options:\n a) <option 1> - <tradeoff>\n b) <option 2> - <tradeoff>\n c) <option 3> - <tradeoff>\n\n I'd recommend (b) because <reason>. Which approach?\n ```\n\n3. **Wait for user response before proceeding**\n\n4. 
**Iterate if needed** (max 2-3 questions)\n\n**Rules:**\n- ONE question at a time - don't overwhelm\n- Offer concrete options - not open-ended\n- Lead with recommendation - save cognitive load\n- Wait for answer - don't assume\n\n### Phase 1: Initialize\n`swarmmail_init(project_path=\"{project_path}\", task_description=\"Swarm: {task}\")`\n\n### Phase 1.5: Research Phase (FOR COMPLEX TASKS)\n\n**\u26A0\uFE0F If the task requires understanding unfamiliar technologies, APIs, or libraries, spawn a researcher FIRST.**\n\n**DO NOT call documentation tools directly.** Instead:\n\n```\n// 1. Spawn researcher with explicit tech stack\nswarm_spawn_researcher(\n research_id=\"research-nextjs-cache-components\",\n epic_id=\"<epic-id>\",\n tech_stack=[\"Next.js 16 Cache Components\", \"React Server Components\"],\n project_path=\"{project_path}\"\n)\n\n// 2. Spawn researcher as Task subagent\nconst researchFindings = await Task(subagent_type=\"swarm/researcher\", prompt=\"<from above>\")\n\n// 3. Researcher returns condensed summary\n// Use this summary in shared_context for workers\n```\n\n**When to spawn a researcher:**\n- Task involves unfamiliar framework versions (e.g., Next.js 16 vs 14)\n- Need to compare installed vs latest library APIs\n- Working with experimental/preview features\n- Need architectural guidance from documentation\n\n**When NOT to spawn a researcher:**\n- Using well-known stable APIs (React hooks, Express middleware)\n- Task is purely refactoring existing code\n- You already have relevant findings from semantic-memory or CASS\n\n**Researcher output:**\n- Full findings stored in semantic-memory (searchable by future agents)\n- Condensed 3-5 bullet summary returned for shared_context\n\n### Phase 2: Knowledge Gathering (MANDATORY)\n\n**Before decomposing, query ALL knowledge sources:**\n\n```\nsemantic-memory_find(query=\"<task keywords>\", limit=5) # Past learnings\ncass_search(query=\"<task description>\", limit=5) # Similar past tasks \nskills_list() # Available skills\n```\n\nSynthesize findings into shared_context for workers.\n\n### Phase 3: Decompose\n```\nswarm_select_strategy(task=\"<task>\")\nswarm_plan_prompt(task=\"<task>\", context=\"<synthesized knowledge>\")\nswarm_validate_decomposition(response=\"<CellTree JSON>\")\n```\n\n### Phase 4: Create Cells\n`hive_create_epic(epic_title=\"<task>\", subtasks=[...])`\n\n### Phase 5: DO NOT Reserve Files\n\n> **\u26A0\uFE0F Coordinator NEVER reserves files.** Workers reserve their own files.\n> If coordinator reserves, workers get blocked and swarm stalls.\n\n### Phase 6: Spawn Workers for ALL Subtasks (MANDATORY)\n\n> **\u26A0\uFE0F ALWAYS spawn workers, even for sequential tasks.**\n> - Parallel tasks: Spawn ALL in a single message\n> - Sequential tasks: Spawn one, wait for completion, spawn next\n\n**For parallel work:**\n```\n// Single message with multiple Task calls\nswarm_spawn_subtask(bead_id_1, epic_id, title_1, files_1, shared_context, project_path=\"{project_path}\")\nTask(subagent_type=\"swarm/worker\", prompt=\"<from above>\")\nswarm_spawn_subtask(bead_id_2, epic_id, title_2, files_2, shared_context, project_path=\"{project_path}\")\nTask(subagent_type=\"swarm/worker\", prompt=\"<from above>\")\n```\n\n**For sequential work:**\n```\n// Spawn worker 1, wait for completion\nswarm_spawn_subtask(bead_id_1, ...)\nconst result1 = await Task(subagent_type=\"swarm/worker\", prompt=\"<from above>\")\n\n// THEN spawn worker 2 with context from worker 1\nswarm_spawn_subtask(bead_id_2, ..., shared_context=\"Worker 1 
completed: \" + result1)\nconst result2 = await Task(subagent_type=\"swarm/worker\", prompt=\"<from above>\")\n```\n\n**NEVER do the work yourself.** Even if it seems faster, spawn a worker.\n\n**IMPORTANT:** Pass `project_path` to `swarm_spawn_subtask` so workers can call `swarmmail_init`.\n\n### Phase 7: MANDATORY Review Loop (NON-NEGOTIABLE)\n\n**\u26A0\uFE0F AFTER EVERY Task() RETURNS, YOU MUST:**\n\n1. **CHECK INBOX** - Worker may have sent messages\n `swarmmail_inbox()`\n `swarmmail_read_message(message_id=N)`\n\n2. **REVIEW WORK** - Generate review with diff\n `swarm_review(project_key, epic_id, task_id, files_touched)`\n\n3. **EVALUATE** - Does it meet epic goals?\n - Fulfills subtask requirements?\n - Serves overall epic goal?\n - Enables downstream tasks?\n - Type safety, no obvious bugs?\n\n4. **SEND FEEDBACK** - Approve or request changes\n `swarm_review_feedback(project_key, task_id, worker_id, status, issues)`\n \n **If approved:**\n - Close cell, spawn next worker\n \n **If needs_changes:**\n - `swarm_review_feedback` returns `retry_context` (NOT sends message - worker is dead)\n - Generate retry prompt: `swarm_spawn_retry(retry_context)`\n - Spawn NEW worker with Task() using retry prompt\n - Max 3 attempts before marking task blocked\n \n **If 3 failures:**\n - Mark task blocked, escalate to human\n\n5. **ONLY THEN** - Spawn next worker or complete\n\n**DO NOT skip this. DO NOT batch reviews. Review EACH worker IMMEDIATELY after return.**\n\n**Intervene if:**\n- Worker blocked >5min \u2192 unblock or reassign\n- File conflicts \u2192 mediate between workers\n- Scope creep \u2192 approve or reject expansion\n- Review fails 3x \u2192 mark task blocked, escalate to human\n\n### Phase 8: Complete\n```\n# After all workers complete and reviews pass:\nhive_sync() # Sync all cells to git\n# Coordinator does NOT call swarm_complete - workers do that\n```\n\n## Strategy Reference\n\n| Strategy | Best For | Keywords |\n| -------------- | ------------------------ | -------------------------------------- |\n| file-based | Refactoring, migrations | refactor, migrate, rename, update all |\n| feature-based | New features | add, implement, build, create, feature |\n| risk-based | Bug fixes, security | fix, bug, security, critical, urgent |\n| research-based | Investigation, discovery | research, investigate, explore, learn |\n\n## Flag Reference\n\n| Flag | Effect |\n|------|--------|\n| `--fast` | Skip Socratic questions, use defaults |\n| `--auto` | Zero interaction, heuristic decisions |\n| `--confirm-only` | Show plan, get yes/no only |\n\nBegin with Phase 0 (Socratic Planning) unless `--fast` or `--auto` flag is present.\n";
+/**
+ * Researcher Agent Prompt Template
  *
  * Spawned BEFORE decomposition to gather technology documentation.
  * Researchers receive an EXPLICIT list of technologies to research from the coordinator.
@@ -70,6 +87,13 @@ export declare function formatResearcherPrompt(params: {
     project_path: string;
     check_upgrades: boolean;
 }): string;
+/**
+ * Format the coordinator prompt with task and project path substitution
+ */
+export declare function formatCoordinatorPrompt(params: {
+    task: string;
+    projectPath: string;
+}): string;
 /**
  * Format the V2 subtask prompt for a specific agent
  */
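`formatCoordinatorPrompt` is the programmatic companion to the new `COORDINATOR_PROMPT` template above, which carries `{task}` and `{project_path}` placeholders. A small usage sketch, assuming the helper performs exactly that substitution; the import path is illustrative:

```typescript
import { formatCoordinatorPrompt } from "opencode-swarm-plugin/dist/swarm-prompts"; // hypothetical path

// Fill the {task} and {project_path} placeholders of COORDINATOR_PROMPT.
const coordinatorPrompt = formatCoordinatorPrompt({
  task: "Add OAuth login --fast",
  projectPath: "/abs/path/to/project",
});

// The rendered prompt then seeds the coordinator agent (e.g. via the /swarm command).
console.log(coordinatorPrompt.slice(0, 120));
```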
package/dist/swarm-prompts.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"swarm-prompts.d.ts","sourceRoot":"","sources":["../src/swarm-prompts.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAUH;;;;;GAKG;AACH,eAAO,MAAM,oBAAoB,s6EAkET,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,6BAA6B,mxDAyDlB,CAAC;AAEzB;;;;;GAKG;AACH,eAAO,MAAM,cAAc,mkFAgFK,CAAC;AAEjC;;;;;;;GAOG;AACH,eAAO,MAAM,iBAAiB,goUAiUnB,CAAC;AAEZ;;;;;;;GAOG;AACH,eAAO,MAAM,iBAAiB,4pHA4GV,CAAC;AAErB;;;;;GAKG;AACH,eAAO,MAAM,iCAAiC,u+DAyE7C,CAAC;AAEF;;;;GAIG;AACH,eAAO,MAAM,iBAAiB,8jCAmCU,CAAC;AAMzC;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE;IAC7C,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,cAAc,EAAE,OAAO,CAAC;CACzB,GAAG,MAAM,CAaT;AAED;;GAEG;AACH,wBAAgB,qBAAqB,CAAC,MAAM,EAAE;IAC5C,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;IAC5B,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,gBAAgB,CAAC,EAAE;QACjB,cAAc,CAAC,EAAE,MAAM,CAAC;QACxB,cAAc,CAAC,EAAE,MAAM,EAAE,CAAC;QAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,CAAC;CACH,GAAG,MAAM,CA2ET;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE;IAC1C,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;IAC5B,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB,GAAG,MAAM,CAUT;AAED;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE;IAC7C,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,EAAE,CAAC;CACzB,GAAG,MAAM,CAMT;AAMD;;GAEG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;;;;;;;;;;CAoC/B,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,mBAAmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkH9B,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;CAsDjC,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;CA+I5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,uBAAuB;;;;;;;;;;;;CAoClC,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;CAiI5B,CAAC;AAEH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAOvB,CAAC"}
+
{"version":3,"file":"swarm-prompts.d.ts","sourceRoot":"","sources":["../src/swarm-prompts.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;;GAYG;AAUH;;;;;GAKG;AACH,eAAO,MAAM,oBAAoB,s6EAkET,CAAC;AAEzB;;GAEG;AACH,eAAO,MAAM,6BAA6B,mxDAyDlB,CAAC;AAEzB;;;;;GAKG;AACH,eAAO,MAAM,cAAc,mkFAgFK,CAAC;AAEjC;;;;;;;GAOG;AACH,eAAO,MAAM,iBAAiB,goUAiUnB,CAAC;AAEZ;;;;;;;;;;;;;;;GAeG;AACH,eAAO,MAAM,kBAAkB,mgTAuQ9B,CAAC;AAEF;;;;;;;GAOG;AACH,eAAO,MAAM,iBAAiB,4pHA4GV,CAAC;AAErB;;;;;GAKG;AACH,eAAO,MAAM,iCAAiC,u+DAyE7C,CAAC;AAEF;;;;GAIG;AACH,eAAO,MAAM,iBAAiB,8jCAmCU,CAAC;AAMzC;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE;IAC7C,WAAW,EAAE,MAAM,CAAC;IACpB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,EAAE,MAAM,EAAE,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,cAAc,EAAE,OAAO,CAAC;CACzB,GAAG,MAAM,CAaT;AAED;;GAEG;AACH,wBAAgB,uBAAuB,CAAC,MAAM,EAAE;IAC9C,IAAI,EAAE,MAAM,CAAC;IACb,WAAW,EAAE,MAAM,CAAC;CACrB,GAAG,MAAM,CAIT;AAED;;GAEG;AACH,wBAAgB,qBAAqB,CAAC,MAAM,EAAE;IAC5C,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;IAC5B,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB,kBAAkB,CAAC,EAAE,MAAM,CAAC;IAC5B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,gBAAgB,CAAC,EAAE;QACjB,cAAc,CAAC,EAAE,MAAM,CAAC;QACxB,cAAc,CAAC,EAAE,MAAM,EAAE,CAAC;QAC1B,iBAAiB,CAAC,EAAE,MAAM,CAAC;KAC5B,CAAC;CACH,GAAG,MAAM,CA2ET;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CAAC,MAAM,EAAE;IAC1C,UAAU,EAAE,MAAM,CAAC;IACnB,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,EAAE,MAAM,CAAC;IACtB,mBAAmB,EAAE,MAAM,CAAC;IAC5B,KAAK,EAAE,MAAM,EAAE,CAAC;IAChB,cAAc,CAAC,EAAE,MAAM,CAAC;CACzB,GAAG,MAAM,CAUT;AAED;;GAEG;AACH,wBAAgB,sBAAsB,CAAC,MAAM,EAAE;IAC7C,OAAO,EAAE,MAAM,CAAC;IAChB,aAAa,EAAE,MAAM,CAAC;IACtB,aAAa,EAAE,MAAM,EAAE,CAAC;CACzB,GAAG,MAAM,CAMT;AAMD;;GAEG;AACH,eAAO,MAAM,oBAAoB;;;;;;;;;;;;;;;;;;;;;;CAoC/B,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,mBAAmB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkH9B,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;CAsDjC,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;CA+I5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,uBAAuB;;;;;;;;;;;;CAoClC,CAAC;AAEH;;;;;GAKG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;CAiI5B,CAAC;AAEH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAOvB,CAAC"}
package/dist/swarm.d.ts
CHANGED
@@ -170,6 +170,8 @@ export declare const swarmTools: {
         user_cancelled: "user_cancelled";
     }>>;
     failure_details: import("zod").ZodOptional<import("zod").ZodString>;
+    project_path: import("zod").ZodOptional<import("zod").ZodString>;
+    epic_id: import("zod").ZodOptional<import("zod").ZodString>;
 };
 execute(args: {
     bead_id: string;
@@ -182,6 +184,8 @@ export declare const swarmTools: {
     strategy?: "file-based" | "feature-based" | "risk-based" | "research-based" | undefined;
     failure_mode?: "timeout" | "unknown" | "conflict" | "validation" | "tool_failure" | "context_overflow" | "dependency_blocked" | "user_cancelled" | undefined;
     failure_details?: string | undefined;
+    project_path?: string | undefined;
+    epic_id?: string | undefined;
 }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
 };
 swarm_research_phase: {
@@ -488,9 +492,24 @@ export declare const swarmTools: {
     description: string;
     args: {
         response: import("zod").ZodString;
+        project_path: import("zod").ZodOptional<import("zod").ZodString>;
+        task: import("zod").ZodOptional<import("zod").ZodString>;
+        context: import("zod").ZodOptional<import("zod").ZodString>;
+        strategy: import("zod").ZodOptional<import("zod").ZodEnum<{
+            "file-based": "file-based";
+            "feature-based": "feature-based";
+            "risk-based": "risk-based";
+            auto: "auto";
+        }>>;
+        epic_id: import("zod").ZodOptional<import("zod").ZodString>;
     };
     execute(args: {
         response: string;
+        project_path?: string | undefined;
+        task?: string | undefined;
+        context?: string | undefined;
+        strategy?: "file-based" | "feature-based" | "risk-based" | "auto" | undefined;
+        epic_id?: string | undefined;
     }, context: import("@opencode-ai/plugin").ToolContext): Promise<string>;
 };
 swarm_delegate_planning: {
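The same two optional fields surface on `swarm_record_outcome` through the re-exported `swarmTools` object, so outcome records can be pinned to a project and epic. A type-level sketch of the widened argument record; the import path is illustrative and the existing outcome fields (strategy, failure_mode, failure_details, ...) are unchanged and elided here:

```typescript
import { swarmTools } from "opencode-swarm-plugin/dist/swarm"; // hypothetical path

// The argument record accepted by swarm_record_outcome.execute(...)
type RecordOutcomeArgs = Parameters<typeof swarmTools.swarm_record_outcome.execute>[0];

// The 0.37.0 → 0.39.1 range adds two optional scoping fields alongside the existing outcome data:
const scope: Pick<RecordOutcomeArgs, "project_path" | "epic_id"> = {
  project_path: "/abs/path/to/project", // pin the outcome to a project
  epic_id: "bd-epic-456",               // link the outcome to its epic
};

// Spread `scope` into the existing execute() arguments when recording an outcome.
```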
package/dist/swarm.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"swarm.d.ts","sourceRoot":"","sources":["../src/swarm.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAGH,cAAc,oBAAoB,CAAC;AACnC,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,kBAAkB,CAAC;AASjC;;;GAGG;AACH,eAAO,MAAM,UAAU
+
{"version":3,"file":"swarm.d.ts","sourceRoot":"","sources":["../src/swarm.ts"],"names":[],"mappings":"AAAA;;;;;;;;;;;GAWG;AAGH,cAAc,oBAAoB,CAAC;AACnC,cAAc,mBAAmB,CAAC;AAClC,cAAc,iBAAiB,CAAC;AAChC,cAAc,qBAAqB,CAAC;AACpC,cAAc,kBAAkB,CAAC;AASjC;;;GAGG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAMtB,CAAC"}