opencode-swarm-plugin 0.36.0 → 0.37.0
This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/.hive/issues.jsonl +16 -4
- package/.hive/memories.jsonl +274 -1
- package/.turbo/turbo-build.log +4 -4
- package/.turbo/turbo-test.log +318 -318
- package/CHANGELOG.md +113 -0
- package/bin/swarm.test.ts +106 -0
- package/bin/swarm.ts +413 -179
- package/dist/compaction-hook.d.ts +54 -4
- package/dist/compaction-hook.d.ts.map +1 -1
- package/dist/eval-capture.d.ts +122 -17
- package/dist/eval-capture.d.ts.map +1 -1
- package/dist/index.d.ts +1 -7
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +1278 -619
- package/dist/planning-guardrails.d.ts +121 -0
- package/dist/planning-guardrails.d.ts.map +1 -1
- package/dist/plugin.d.ts +9 -9
- package/dist/plugin.d.ts.map +1 -1
- package/dist/plugin.js +1283 -329
- package/dist/schemas/task.d.ts +0 -1
- package/dist/schemas/task.d.ts.map +1 -1
- package/dist/swarm-decompose.d.ts +0 -8
- package/dist/swarm-decompose.d.ts.map +1 -1
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +0 -4
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm-review.d.ts.map +1 -1
- package/dist/swarm.d.ts +0 -6
- package/dist/swarm.d.ts.map +1 -1
- package/evals/README.md +38 -0
- package/evals/coordinator-session.eval.ts +154 -0
- package/evals/fixtures/coordinator-sessions.ts +328 -0
- package/evals/lib/data-loader.ts +69 -0
- package/evals/scorers/coordinator-discipline.evalite-test.ts +536 -0
- package/evals/scorers/coordinator-discipline.ts +315 -0
- package/evals/scorers/index.ts +12 -0
- package/examples/plugin-wrapper-template.ts +303 -4
- package/package.json +2 -2
- package/src/compaction-hook.test.ts +8 -1
- package/src/compaction-hook.ts +31 -21
- package/src/eval-capture.test.ts +390 -0
- package/src/eval-capture.ts +163 -4
- package/src/hive.integration.test.ts +148 -0
- package/src/hive.ts +89 -0
- package/src/index.ts +68 -1
- package/src/planning-guardrails.test.ts +387 -2
- package/src/planning-guardrails.ts +289 -0
- package/src/plugin.ts +10 -10
- package/src/swarm-decompose.test.ts +195 -0
- package/src/swarm-decompose.ts +72 -1
- package/src/swarm-orchestrate.ts +44 -0
- package/src/swarm-prompts.ts +20 -0
- package/src/swarm-review.integration.test.ts +24 -29
- package/src/swarm-review.ts +41 -0
package/src/eval-capture.ts
CHANGED

```diff
@@ -9,12 +9,15 @@
  * 2. swarm_complete captures: outcome signals per subtask
  * 3. swarm_record_outcome captures: learning signals
  * 4. Human feedback (optional): accept/reject/modify
+ * 5. Coordinator events: decisions, violations, outcomes
+ * 6. Session capture: full coordinator session to ~/.config/swarm-tools/sessions/
  *
  * @module eval-capture
  */
+import * as fs from "node:fs";
+import * as os from "node:os";
+import * as path from "node:path";
 import { z } from "zod";
-import * as fs from "fs";
-import * as path from "path";
 
 // ============================================================================
 // Schemas
@@ -119,6 +122,67 @@ export type PartialEvalRecord = Partial<EvalRecord> & {
   task: string;
 };
 
+/**
+ * Coordinator Event - captures coordinator decisions, violations, and outcomes
+ */
+export const CoordinatorEventSchema = z.discriminatedUnion("event_type", [
+  // DECISION events
+  z.object({
+    session_id: z.string(),
+    epic_id: z.string(),
+    timestamp: z.string(),
+    event_type: z.literal("DECISION"),
+    decision_type: z.enum([
+      "strategy_selected",
+      "worker_spawned",
+      "review_completed",
+      "decomposition_complete",
+    ]),
+    payload: z.any(),
+  }),
+  // VIOLATION events
+  z.object({
+    session_id: z.string(),
+    epic_id: z.string(),
+    timestamp: z.string(),
+    event_type: z.literal("VIOLATION"),
+    violation_type: z.enum([
+      "coordinator_edited_file",
+      "coordinator_ran_tests",
+      "coordinator_reserved_files",
+      "no_worker_spawned",
+    ]),
+    payload: z.any(),
+  }),
+  // OUTCOME events
+  z.object({
+    session_id: z.string(),
+    epic_id: z.string(),
+    timestamp: z.string(),
+    event_type: z.literal("OUTCOME"),
+    outcome_type: z.enum([
+      "subtask_success",
+      "subtask_retry",
+      "subtask_failed",
+      "epic_complete",
+    ]),
+    payload: z.any(),
+  }),
+]);
+export type CoordinatorEvent = z.infer<typeof CoordinatorEventSchema>;
+
+/**
+ * Coordinator Session - wraps a full coordinator session
+ */
+export const CoordinatorSessionSchema = z.object({
+  session_id: z.string(),
+  epic_id: z.string(),
+  start_time: z.string(),
+  end_time: z.string().optional(),
+  events: z.array(CoordinatorEventSchema),
+});
+export type CoordinatorSession = z.infer<typeof CoordinatorSessionSchema>;
+
 // ============================================================================
 // Storage
 // ============================================================================
@@ -155,7 +219,7 @@ export function appendEvalRecord(
 ): void {
   ensureEvalDataDir(projectPath);
   const evalPath = getEvalDataPath(projectPath);
-  const line = JSON.stringify(record)
+  const line = `${JSON.stringify(record)}\n`;
   fs.appendFileSync(evalPath, line, "utf-8");
 }
 
@@ -211,7 +275,7 @@ export function updateEvalRecord(
 
   // Rewrite the file
   const evalPath = getEvalDataPath(projectPath);
-  const content = records.map((r) => JSON.stringify(r)).join("\n")
+  const content = `${records.map((r) => JSON.stringify(r)).join("\n")}\n`;
   fs.writeFileSync(evalPath, content, "utf-8");
 
   return true;
@@ -484,3 +548,98 @@ export function getEvalDataStats(projectPath: string): {
     avgTimeBalance,
   };
 }
+
+// ============================================================================
+// Coordinator Session Capture
+// ============================================================================
+
+/**
+ * Get the session directory path
+ */
+export function getSessionDir(): string {
+  return path.join(os.homedir(), ".config", "swarm-tools", "sessions");
+}
+
+/**
+ * Get the session file path for a session ID
+ */
+export function getSessionPath(sessionId: string): string {
+  return path.join(getSessionDir(), `${sessionId}.jsonl`);
+}
+
+/**
+ * Ensure the session directory exists
+ */
+export function ensureSessionDir(): void {
+  const sessionDir = getSessionDir();
+  if (!fs.existsSync(sessionDir)) {
+    fs.mkdirSync(sessionDir, { recursive: true });
+  }
+}
+
+/**
+ * Capture a coordinator event to the session file
+ *
+ * Appends the event as a JSONL line to ~/.config/swarm-tools/sessions/{session_id}.jsonl
+ */
+export function captureCoordinatorEvent(event: CoordinatorEvent): void {
+  // Validate event
+  CoordinatorEventSchema.parse(event);
+
+  // Ensure directory exists
+  ensureSessionDir();
+
+  // Append to session file
+  const sessionPath = getSessionPath(event.session_id);
+  const line = `${JSON.stringify(event)}\n`;
+  fs.appendFileSync(sessionPath, line, "utf-8");
+}
+
+/**
+ * Read all events from a session file
+ */
+export function readSessionEvents(sessionId: string): CoordinatorEvent[] {
+  const sessionPath = getSessionPath(sessionId);
+  if (!fs.existsSync(sessionPath)) {
+    return [];
+  }
+
+  const content = fs.readFileSync(sessionPath, "utf-8");
+  const lines = content.trim().split("\n").filter(Boolean);
+
+  return lines.map((line) => {
+    const parsed = JSON.parse(line);
+    return CoordinatorEventSchema.parse(parsed);
+  });
+}
+
+/**
+ * Save a session - wraps all events in a CoordinatorSession structure
+ *
+ * Reads all events from the session file and wraps them in a session object.
+ * Returns null if the session file doesn't exist.
+ */
+export function saveSession(params: {
+  session_id: string;
+  epic_id: string;
+}): CoordinatorSession | null {
+  const events = readSessionEvents(params.session_id);
+  if (events.length === 0) {
+    return null;
+  }
+
+  // Get timestamps from events
+  const timestamps = events.map((e) => new Date(e.timestamp).getTime());
+  const startTime = new Date(Math.min(...timestamps)).toISOString();
+  const endTime = new Date(Math.max(...timestamps)).toISOString();
+
+  const session: CoordinatorSession = {
+    session_id: params.session_id,
+    epic_id: params.epic_id,
+    start_time: startTime,
+    end_time: endTime,
+    events,
+  };
+
+  return session;
+}
```
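Taken together, these additions give the coordinator an append-only JSONL event log per session, plus a way to wrap the log into a `CoordinatorSession`. A minimal usage sketch, assuming the exports shown in the diff above; the IDs and payload are hypothetical:

```ts
import { captureCoordinatorEvent, readSessionEvents, saveSession } from "./eval-capture";

// Hypothetical identifiers for illustration
const session_id = "sess-001";
const epic_id = "epic-001";

// Appends one JSONL line to ~/.config/swarm-tools/sessions/sess-001.jsonl,
// after validating against CoordinatorEventSchema
captureCoordinatorEvent({
  session_id,
  epic_id,
  timestamp: new Date().toISOString(),
  event_type: "DECISION",
  decision_type: "worker_spawned",
  payload: { subtask: "add hive_cells tool" },
});

// Events round-trip through schema validation on read
const events = readSessionEvents(session_id);

// start_time/end_time are derived from the min/max event timestamps
const session = saveSession({ session_id, epic_id });
console.log(events.length, session?.end_time);
```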
package/src/hive.integration.test.ts
CHANGED

```diff
@@ -1895,6 +1895,154 @@ describe("beads integration", () => {
   });
 });
 
+describe("hive_cells", () => {
+  let testCellId: string;
+
+  beforeEach(async () => {
+    // Create a test cell for hive_cells tests
+    const result = await hive_create.execute(
+      { title: "Cells tool test", type: "task" },
+      mockContext,
+    );
+    const cell = parseResponse<Cell>(result);
+    testCellId = cell.id;
+    createdBeadIds.push(testCellId);
+  });
+
+  it("lists all cells with no filters", async () => {
+    const { hive_cells } = await import("./hive");
+
+    const result = await hive_cells.execute({}, mockContext);
+    const cells = parseResponse<Cell[]>(result);
+
+    expect(Array.isArray(cells)).toBe(true);
+    expect(cells.length).toBeGreaterThan(0);
+  });
+
+  it("filters by status", async () => {
+    const { hive_cells } = await import("./hive");
+
+    const result = await hive_cells.execute({ status: "open" }, mockContext);
+    const cells = parseResponse<Cell[]>(result);
+
+    expect(Array.isArray(cells)).toBe(true);
+    expect(cells.every((c) => c.status === "open")).toBe(true);
+  });
+
+  it("filters by type", async () => {
+    const { hive_cells } = await import("./hive");
+
+    // Create a bug cell
+    const bugResult = await hive_create.execute(
+      { title: "Bug for cells test", type: "bug" },
+      mockContext,
+    );
+    const bug = parseResponse<Cell>(bugResult);
+    createdBeadIds.push(bug.id);
+
+    const result = await hive_cells.execute({ type: "bug" }, mockContext);
+    const cells = parseResponse<Cell[]>(result);
+
+    expect(Array.isArray(cells)).toBe(true);
+    expect(cells.every((c) => c.issue_type === "bug")).toBe(true);
+  });
+
+  it("returns next ready cell when ready=true", async () => {
+    const { hive_cells } = await import("./hive");
+
+    const result = await hive_cells.execute({ ready: true }, mockContext);
+    const cells = parseResponse<Cell[]>(result);
+
+    expect(Array.isArray(cells)).toBe(true);
+    // Should return 0 or 1 cells (the next ready one)
+    expect(cells.length).toBeLessThanOrEqual(1);
+    if (cells.length === 1) {
+      expect(["open", "in_progress"]).toContain(cells[0].status);
+    }
+  });
+
+  it("looks up cell by partial ID", async () => {
+    const { hive_cells } = await import("./hive");
+
+    // Extract hash from full ID (6-char segment before the last hyphen)
+    const lastHyphenIndex = testCellId.lastIndexOf("-");
+    const beforeLast = testCellId.substring(0, lastHyphenIndex);
+    const secondLastHyphenIndex = beforeLast.lastIndexOf("-");
+    const hash = testCellId.substring(secondLastHyphenIndex + 1, lastHyphenIndex);
+
+    // Use last 6 chars of hash (or full hash if short)
+    const shortHash = hash.substring(Math.max(0, hash.length - 6));
+
+    try {
+      const result = await hive_cells.execute({ id: shortHash }, mockContext);
+      const cells = parseResponse<Cell[]>(result);
+
+      // Should return exactly one cell matching the ID
+      expect(cells).toHaveLength(1);
+      expect(cells[0].id).toBe(testCellId);
+    } catch (error) {
+      // If ambiguous, verify error message is helpful
+      if (error instanceof Error && error.message.includes("Ambiguous")) {
+        expect(error.message).toMatch(/ambiguous.*multiple/i);
+        expect(error.message).toContain(shortHash);
+      } else {
+        throw error;
+      }
+    }
+  });
+
+  it("looks up cell by full ID", async () => {
+    const { hive_cells } = await import("./hive");
+
+    const result = await hive_cells.execute({ id: testCellId }, mockContext);
+    const cells = parseResponse<Cell[]>(result);
+
+    expect(cells).toHaveLength(1);
+    expect(cells[0].id).toBe(testCellId);
+    expect(cells[0].title).toBe("Cells tool test");
+  });
+
+  it("throws error for non-existent ID", async () => {
+    const { hive_cells } = await import("./hive");
+
+    await expect(
+      hive_cells.execute({ id: "nonexistent999" }, mockContext),
+    ).rejects.toThrow(/not found|no cell|nonexistent999/i);
+  });
+
+  it("respects limit parameter", async () => {
+    const { hive_cells } = await import("./hive");
+
+    const result = await hive_cells.execute({ limit: 2 }, mockContext);
+    const cells = parseResponse<Cell[]>(result);
+
+    expect(cells.length).toBeLessThanOrEqual(2);
+  });
+
+  it("combines filters (status + type + limit)", async () => {
+    const { hive_cells } = await import("./hive");
+
+    // Create some task cells
+    for (let i = 0; i < 3; i++) {
+      const r = await hive_create.execute(
+        { title: `Task ${i}`, type: "task" },
+        mockContext,
+      );
+      const c = parseResponse<Cell>(r);
+      createdBeadIds.push(c.id);
+    }
+
+    const result = await hive_cells.execute(
+      { status: "open", type: "task", limit: 2 },
+      mockContext,
+    );
+    const cells = parseResponse<Cell[]>(result);
+
+    expect(cells.length).toBeLessThanOrEqual(2);
+    expect(cells.every((c) => c.status === "open" && c.issue_type === "task")).toBe(true);
+  });
+});
+
 describe("bigint to Date conversion", () => {
   it("should handle PGLite bigint timestamps correctly in hive_query", async () => {
     const { mkdirSync, rmSync } = await import("node:fs");
```
package/src/hive.ts
CHANGED

```diff
@@ -1092,6 +1092,94 @@ export const hive_ready = tool({
   },
 });
 
+/**
+ * Query cells from the hive database with flexible filtering
+ */
+export const hive_cells = tool({
+  description: `Query cells from the hive database with flexible filtering.
+
+USE THIS TOOL TO:
+- List all open cells: hive_cells()
+- Find cells by status: hive_cells({ status: "in_progress" })
+- Find cells by type: hive_cells({ type: "bug" })
+- Get a specific cell by partial ID: hive_cells({ id: "mjkmd" })
+- Get the next ready (unblocked) cell: hive_cells({ ready: true })
+- Combine filters: hive_cells({ status: "open", type: "task" })
+
+RETURNS: Array of cells with id, title, status, priority, type, parent_id, created_at, updated_at
+
+PREFER THIS OVER hive_query when you need to:
+- See what work is available
+- Check status of multiple cells
+- Find cells matching criteria
+- Look up a cell by partial ID`,
+  args: {
+    id: tool.schema.string().optional().describe("Partial or full cell ID to look up"),
+    status: tool.schema.enum(["open", "in_progress", "blocked", "closed"]).optional().describe("Filter by status"),
+    type: tool.schema.enum(["task", "bug", "feature", "epic", "chore"]).optional().describe("Filter by type"),
+    ready: tool.schema.boolean().optional().describe("If true, return only the next unblocked cell"),
+    limit: tool.schema.number().optional().describe("Max cells to return (default 20)"),
+  },
+  async execute(args, ctx) {
+    const projectKey = getHiveWorkingDirectory();
+    const adapter = await getHiveAdapter(projectKey);
+
+    try {
+      // If specific ID requested, resolve and return single cell
+      if (args.id) {
+        const fullId = await resolvePartialId(adapter, projectKey, args.id) || args.id;
+        const cell = await adapter.getCell(projectKey, fullId);
+        if (!cell) {
+          throw new HiveError(`No cell found matching ID '${args.id}'`, "hive_cells");
+        }
+        const formatted = formatCellForOutput(cell);
+        return JSON.stringify([formatted], null, 2);
+      }
+
+      // If ready flag, return next unblocked cell
+      if (args.ready) {
+        const ready = await adapter.getNextReadyCell(projectKey);
+        if (!ready) {
+          return JSON.stringify([], null, 2);
+        }
+        const formatted = formatCellForOutput(ready);
+        return JSON.stringify([formatted], null, 2);
+      }
+
+      // Query with filters
+      const cells = await adapter.queryCells(projectKey, {
+        status: args.status,
+        type: args.type,
+        limit: args.limit || 20,
+      });
+
+      const formatted = cells.map(c => formatCellForOutput(c));
+      return JSON.stringify(formatted, null, 2);
+    } catch (error) {
+      const message = error instanceof Error ? error.message : String(error);
+
+      // Provide helpful error messages
+      if (message.includes("Ambiguous hash")) {
+        throw new HiveError(
+          `Ambiguous ID '${args.id}': multiple cells match. Please provide more characters.`,
+          "hive_cells",
+        );
+      }
+      if (message.includes("Bead not found") || message.includes("Cell not found")) {
+        throw new HiveError(
+          `No cell found matching ID '${args.id || "unknown"}'`,
+          "hive_cells",
+        );
+      }
+
+      throw new HiveError(
+        `Failed to query cells: ${message}`,
+        "hive_cells",
+      );
+    }
+  },
+});
+
 /**
  * Sync hive to git and push
  */
@@ -1345,6 +1433,7 @@ export const hiveTools = {
   hive_close,
   hive_start,
   hive_ready,
+  hive_cells,
   hive_sync,
   hive_link_thread,
 };
```
package/src/index.ts
CHANGED

```diff
@@ -57,6 +57,11 @@ import {
 import {
   analyzeTodoWrite,
   shouldAnalyzeTool,
+  detectCoordinatorViolation,
+  isInCoordinatorContext,
+  getCoordinatorContext,
+  setCoordinatorContext,
+  clearCoordinatorContext,
 } from "./planning-guardrails";
 import { createCompactionHook } from "./compaction-hook";
 
@@ -78,7 +83,7 @@ import { createCompactionHook } from "./compaction-hook";
  * @param input - Plugin context from OpenCode
  * @returns Plugin hooks including tools, events, and tool execution hooks
  */
-export const SwarmPlugin: Plugin = async (
+const SwarmPlugin: Plugin = async (
   input: PluginInput,
 ): Promise<Hooks> => {
   const { $, directory, client } = input;
@@ -190,6 +195,8 @@ export const SwarmPlugin: Plugin = async (
    *
    * Warns when agents are about to make planning mistakes:
    * - Using todowrite for multi-file implementation (should use swarm)
+   * - Coordinator editing files directly (should spawn workers)
+   * - Coordinator running tests (workers should run tests)
    */
   "tool.execute.before": async (input, output) => {
     const toolName = input.tool;
@@ -201,6 +208,36 @@ export const SwarmPlugin: Plugin = async (
         console.warn(`[swarm-plugin] ${analysis.warning}`);
       }
     }
+
+    // Check for coordinator violations when in coordinator context
+    if (isInCoordinatorContext()) {
+      const ctx = getCoordinatorContext();
+      const violation = detectCoordinatorViolation({
+        sessionId: ctx.sessionId || "unknown",
+        epicId: ctx.epicId || "unknown",
+        toolName,
+        toolArgs: output.args as Record<string, unknown>,
+        agentContext: "coordinator",
+      });
+
+      if (violation.isViolation) {
+        console.warn(`[swarm-plugin] ${violation.message}`);
+      }
+    }
+
+    // Activate coordinator context when swarm tools are used
+    if (toolName === "hive_create_epic" || toolName === "swarm_decompose") {
+      setCoordinatorContext({
+        isCoordinator: true,
+        sessionId: input.sessionID,
+      });
+    }
+
+    // Capture epic ID when epic is created
+    if (toolName === "hive_create_epic" && output.args) {
+      const args = output.args as { epic_title?: string };
+      // Epic ID will be set after execution in tool.execute.after
+    }
   },
 
   /**
@@ -258,6 +295,36 @@ export const SwarmPlugin: Plugin = async (
       await releaseReservations();
     }
 
+    // Capture epic ID when epic is created (for coordinator context)
+    if (toolName === "hive_create_epic" && output.output) {
+      try {
+        const result = JSON.parse(output.output);
+        if (result.epic?.id) {
+          setCoordinatorContext({
+            isCoordinator: true,
+            epicId: result.epic.id,
+            sessionId: input.sessionID,
+          });
+        }
+      } catch {
+        // Parsing failed - ignore
+      }
+    }
+
+    // Clear coordinator context when epic is closed
+    if (toolName === "hive_close" && output.output && isInCoordinatorContext()) {
+      const ctx = getCoordinatorContext();
+      try {
+        // Check if the closed cell is the active epic
+        const result = JSON.parse(output.output);
+        if (result.id === ctx.epicId) {
+          clearCoordinatorContext();
+        }
+      } catch {
+        // Parsing failed - ignore
+      }
+    }
+
     // Note: hive_sync should be called explicitly at session end
     // Auto-sync was removed because bd CLI is deprecated
     // The hive_sync tool handles flushing to JSONL and git commit/push
```