opencode-swarm-plugin 0.30.7 → 0.31.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.hive/memories.jsonl +10 -0
- package/.turbo/turbo-build.log +3 -3
- package/.turbo/turbo-test.log +319 -319
- package/CHANGELOG.md +96 -0
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.d.ts +6 -6
- package/dist/index.js +107 -20
- package/dist/plugin.js +107 -20
- package/dist/swarm-decompose.d.ts +8 -8
- package/dist/swarm-decompose.d.ts.map +1 -1
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts +6 -6
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm.d.ts +6 -6
- package/opencode-swarm-plugin-0.30.7.tgz +0 -0
- package/package.json +2 -2
- package/src/hive.integration.test.ts +332 -3
- package/src/hive.ts +155 -11
- package/src/swarm-decompose.ts +7 -11
- package/src/swarm-orchestrate.ts +27 -1
- package/src/swarm-prompts.ts +5 -7
- package/src/swarm.integration.test.ts +70 -0
package/src/hive.integration.test.ts
CHANGED

@@ -406,7 +406,7 @@ describe("beads integration", () => {
     });

   describe("hive_create_epic", () => {
-    it("creates an epic with subtasks", async () => {
+    it("creates an epic with subtasks and syncs to JSONL", async () => {
      const result = await hive_create_epic.execute(
        {
          epic_title: "Integration test epic",
@@ -438,6 +438,28 @@ describe("beads integration", () => {
        expect(subtaskBead).toBeDefined();
        expect(subtaskBead!.parent_id).toBe(epicResult.epic.id);
      }
+
+      // NEW TEST: Verify cells are synced to JSONL immediately
+      const { readFileSync, existsSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const jsonlPath = join(TEST_PROJECT_KEY, ".hive", "issues.jsonl");
+
+      expect(existsSync(jsonlPath)).toBe(true);
+
+      const jsonlContent = readFileSync(jsonlPath, "utf-8");
+      const lines = jsonlContent.trim().split("\n").filter(l => l);
+      const cells = lines.map(line => JSON.parse(line));
+
+      // Epic and all subtasks should be in JSONL
+      const epicInJsonl = cells.find(c => c.id === epicResult.epic.id);
+      expect(epicInJsonl).toBeDefined();
+      expect(epicInJsonl!.title).toBe("Integration test epic");
+
+      for (const subtask of epicResult.subtasks) {
+        const subtaskInJsonl = cells.find(c => c.id === subtask.id);
+        expect(subtaskInJsonl).toBeDefined();
+        expect(subtaskInJsonl!.parent_id).toBe(epicResult.epic.id);
+      }
    });

    it("creates an epic with files metadata in subtasks", async () => {
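The new assertions read .hive/issues.jsonl as one JSON object per line. The same parse logic, factored into a standalone sketch (the helper name `readCellsFromJsonl` is ours, not a package export):

```ts
import { readFileSync } from "node:fs";

// Hypothetical helper mirroring the test's parse logic: one JSON cell per
// line, blank lines ignored. Illustrative only - not part of the package API.
function readCellsFromJsonl(jsonlPath: string): Array<{ id: string; title?: string; parent_id?: string }> {
  return readFileSync(jsonlPath, "utf-8")
    .trim()
    .split("\n")
    .filter((line) => line.length > 0)
    .map((line) => JSON.parse(line));
}
```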
@@ -601,6 +623,181 @@ describe("beads integration", () => {
    });
  });

+  describe("partial ID resolution", () => {
+    let fullId: string;
+    let hash: string;
+
+    beforeEach(async () => {
+      // Create a test cell to resolve
+      const result = await hive_create.execute(
+        { title: "Partial ID test cell" },
+        mockContext,
+      );
+      const cell = parseResponse<Cell>(result);
+      fullId = cell.id;
+      createdBeadIds.push(fullId);
+
+      // Extract hash from ID (format: {prefix}-{hash}-{timestamp}{random})
+      // The last segment is always timestamp+random (11 chars)
+      // The hash is the 6-char segment before that
+      // Examples:
+      //   "opencode-swarm-monorepo-lf2p4u-mjd2h5v4wdt" -> hash is "lf2p4u"
+      //   "cell--gcel4-mjd2h5v4wdt" -> hash is "-gcel4" (negative hash creates consecutive hyphens)
+
+      // Find the last hyphen, then work backwards to find the second-to-last hyphen
+      const lastHyphenIndex = fullId.lastIndexOf("-");
+      if (lastHyphenIndex === -1) {
+        hash = "";
+      } else {
+        // Get everything before the last hyphen
+        const beforeLast = fullId.substring(0, lastHyphenIndex);
+        // Find the second-to-last hyphen
+        const secondLastHyphenIndex = beforeLast.lastIndexOf("-");
+        if (secondLastHyphenIndex === -1) {
+          hash = "";
+        } else {
+          // Hash is between second-to-last and last hyphen
+          hash = fullId.substring(secondLastHyphenIndex + 1, lastHyphenIndex);
+        }
+      }
+    });
+
+    describe("hive_update", () => {
+      it("accepts full cell ID (no resolution needed)", async () => {
+        const result = await hive_update.execute(
+          { id: fullId, description: "Updated via full ID" },
+          mockContext,
+        );
+
+        const updated = parseResponse<Cell>(result);
+        expect(updated.id).toBe(fullId);
+        expect(updated.description).toContain("Updated via full ID");
+      });
+
+      it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
+        try {
+          const result = await hive_update.execute(
+            { id: hash, priority: 1 },
+            mockContext,
+          );
+
+          const updated = parseResponse<Cell>(result);
+          expect(updated.id).toBe(fullId);
+          expect(updated.priority).toBe(1);
+        } catch (error) {
+          // In test environment with many cells, hash may be ambiguous
+          // Verify we get a helpful error message
+          if (error instanceof Error && error.message.includes("Ambiguous")) {
+            expect(error.message).toMatch(/ambiguous.*multiple/i);
+            expect(error.message).toContain(hash);
+          } else {
+            throw error; // Re-throw if not ambiguity error
+          }
+        }
+      });
+
+      it("throws helpful error for non-existent hash", async () => {
+        await expect(
+          hive_update.execute({ id: "zzzzzz", status: "closed" }, mockContext),
+        ).rejects.toThrow(/not found|no cell|zzzzzz/i);
+      });
+
+      it("throws helpful error for ambiguous hash", async () => {
+        // Create another cell with potentially similar hash
+        // (in practice, hashes are unique, but we simulate ambiguity by using a short partial)
+        // This test verifies the error message is helpful
+        try {
+          // Use a single char which might match multiple cells in larger datasets
+          await hive_update.execute({ id: "a", status: "closed" }, mockContext);
+          // If it succeeds, it means only one cell matched - that's fine
+        } catch (error) {
+          const message = error instanceof Error ? error.message : String(error);
+          // Error should mention ambiguity if multiple matches
+          if (message.includes("ambiguous") || message.includes("multiple")) {
+            expect(message).toMatch(/ambiguous|multiple/i);
+          }
+        }
+      });
+    });
+
+    describe("hive_close", () => {
+      it("accepts full cell ID", async () => {
+        const result = await hive_close.execute(
+          { id: fullId, reason: "Closed via full ID" },
+          mockContext,
+        );
+
+        expect(result).toContain("Closed");
+        expect(result).toContain(fullId);
+
+        const closed = await adapter.getCell(TEST_PROJECT_KEY, fullId);
+        expect(closed?.status).toBe("closed");
+      });
+
+      it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
+        try {
+          const result = await hive_close.execute(
+            { id: hash, reason: "Close via hash" },
+            mockContext,
+          );
+
+          expect(result).toContain("Closed");
+          expect(result).toContain(fullId);
+        } catch (error) {
+          if (error instanceof Error && error.message.includes("Ambiguous")) {
+            expect(error.message).toMatch(/ambiguous.*multiple/i);
+            expect(error.message).toContain(hash);
+          } else {
+            throw error;
+          }
+        }
+      });
+
+      it("throws helpful error for non-existent hash", async () => {
+        await expect(
+          hive_close.execute({ id: "nonono", reason: "Test" }, mockContext),
+        ).rejects.toThrow(/not found|no cell|nonono/i);
+      });
+    });
+
+    describe("hive_start", () => {
+      it("accepts full cell ID", async () => {
+        const result = await hive_start.execute({ id: fullId }, mockContext);
+
+        expect(result).toContain("Started");
+        expect(result).toContain(fullId);
+
+        const started = await adapter.getCell(TEST_PROJECT_KEY, fullId);
+        expect(started?.status).toBe("in_progress");
+      });
+
+      it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
+        try {
+          const result = await hive_start.execute(
+            { id: hash },
+            mockContext,
+          );
+
+          expect(result).toContain("Started");
+          expect(result).toContain(fullId);
+        } catch (error) {
+          if (error instanceof Error && error.message.includes("Ambiguous")) {
+            expect(error.message).toMatch(/ambiguous.*multiple/i);
+            expect(error.message).toContain(hash);
+          } else {
+            throw error;
+          }
+        }
+      });
+
+      it("throws helpful error for non-existent hash", async () => {
+        await expect(
+          hive_start.execute({ id: "nope99" }, mockContext),
+        ).rejects.toThrow(/not found|no cell|nope99/i);
+      });
+    });
+  });
+
  describe("workflow integration", () => {
    it("complete bead lifecycle: create -> start -> update -> close", async () => {
      // 1. Create
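The beforeEach above derives `hash` inline; the same logic reads more clearly factored into a standalone function (a sketch that mirrors the test exactly, with a function name of our choosing):

```ts
// Extract the hash segment from a cell ID of the form
// {prefix}-{hash}-{timestamp}{random}. Mirrors the inline logic in the
// beforeEach above; the name extractHashSegment is illustrative only.
function extractHashSegment(fullId: string): string {
  const lastHyphenIndex = fullId.lastIndexOf("-");
  if (lastHyphenIndex === -1) return "";
  const beforeLast = fullId.substring(0, lastHyphenIndex);
  const secondLastHyphenIndex = beforeLast.lastIndexOf("-");
  if (secondLastHyphenIndex === -1) return "";
  return fullId.substring(secondLastHyphenIndex + 1, lastHyphenIndex);
}

// extractHashSegment("opencode-swarm-monorepo-lf2p4u-mjd2h5v4wdt") === "lf2p4u"
```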
@@ -1225,8 +1422,8 @@ describe("beads integration", () => {
        mockContext,
      );

-      // Should return "No cells to sync" since no dirty cells
-      expect(result).toContain("No cells to sync");
+      // Should return "No cells or memories to sync" since no dirty cells
+      expect(result).toContain("No cells or memories to sync");
    } finally {
      setHiveWorkingDirectory(originalDir);
      rmSync(tempProject, { recursive: true, force: true });
@@ -1450,4 +1647,136 @@ describe("beads integration", () => {
      rmSync(tempProject, { recursive: true, force: true });
    });
  });
+
+  describe("process exit hook", () => {
+    it("registers beforeExit hook that syncs dirty cells", async () => {
+      const { mkdirSync, rmSync, writeFileSync, readFileSync, existsSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+      const { execSync } = await import("node:child_process");
+
+      // Create temp project
+      const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+
+      // Initialize git repo
+      execSync("git init", { cwd: tempProject });
+      execSync('git config user.email "test@example.com"', { cwd: tempProject });
+      execSync('git config user.name "Test User"', { cwd: tempProject });
+
+      // Initial commit with empty issues.jsonl
+      writeFileSync(join(hiveDir, "issues.jsonl"), "");
+      execSync("git add .", { cwd: tempProject });
+      execSync('git commit -m "initial"', { cwd: tempProject });
+
+      // Set working directory
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Create a cell (marks it dirty but don't sync)
+        await hive_create.execute(
+          { title: "Exit hook test cell", type: "task" },
+          mockContext,
+        );
+
+        // Verify cell is NOT in JSONL yet (only in PGLite)
+        const beforeContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        expect(beforeContent.trim()).toBe("");
+
+        // Simulate process exit by triggering beforeExit event
+        process.emit("beforeExit", 0);
+
+        // Wait for async flush to complete
+        await new Promise(resolve => setTimeout(resolve, 100));
+
+        // Verify cell was synced to JSONL by the exit hook
+        const afterContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        expect(afterContent.trim()).not.toBe("");
+
+        const cells = afterContent.trim().split("\n").map(line => JSON.parse(line));
+        expect(cells).toHaveLength(1);
+        expect(cells[0].title).toBe("Exit hook test cell");
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+
+    it("exit hook is idempotent - safe to call multiple times", async () => {
+      const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+
+      // Create temp project
+      const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+      writeFileSync(join(hiveDir, "issues.jsonl"), "");
+
+      // Set working directory
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Create a cell
+        await hive_create.execute(
+          { title: "Idempotent test cell", type: "task" },
+          mockContext,
+        );
+
+        // Trigger exit hook multiple times
+        process.emit("beforeExit", 0);
+        await new Promise(resolve => setTimeout(resolve, 50));
+
+        process.emit("beforeExit", 0);
+        await new Promise(resolve => setTimeout(resolve, 50));
+
+        // Verify cell is written only once (no duplication)
+        const content = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        const lines = content.trim().split("\n").filter(l => l);
+
+        // Should have exactly one cell (even though we triggered hook twice)
+        expect(lines.length).toBeGreaterThanOrEqual(1);
+
+        // All cells should have unique IDs
+        const cells = lines.map(line => JSON.parse(line));
+        const uniqueIds = new Set(cells.map(c => c.id));
+        expect(uniqueIds.size).toBe(cells.length);
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+
+    it("exit hook handles case with no dirty cells gracefully", async () => {
+      const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+
+      // Create temp project with empty JSONL
+      const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+      writeFileSync(join(hiveDir, "issues.jsonl"), "");
+
+      // Set working directory
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Trigger exit hook with no dirty cells (should not throw)
+        process.emit("beforeExit", 0);
+        await new Promise(resolve => setTimeout(resolve, 50));
+
+        // JSONL should still be empty (no error thrown)
+        const content = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        expect(content.trim()).toBe("");
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+  });
 });
package/src/hive.ts
CHANGED

@@ -23,6 +23,7 @@ import {
  type HiveAdapter,
  type Cell as AdapterCell,
  getSwarmMail,
+  resolvePartialId,
 } from "swarm-mail";
 import { existsSync, readFileSync } from "node:fs";
 import { join } from "node:path";
@@ -423,6 +424,78 @@ export async function importJsonlToPGLite(projectPath: string): Promise<{
 */
const adapterCache = new Map<string, HiveAdapter>();

+// ============================================================================
+// Process Exit Hook - Safety Net for Dirty Cells
+// ============================================================================
+
+/**
+ * Track if exit hook is already registered (prevent duplicate registrations)
+ */
+let exitHookRegistered = false;
+
+/**
+ * Track if exit hook is currently running (prevent re-entry)
+ */
+let exitHookRunning = false;
+
+/**
+ * Register process.on('beforeExit') handler to flush dirty cells
+ * This is a safety net - catches any dirty cells that weren't explicitly synced
+ *
+ * Idempotent - safe to call multiple times (only registers once)
+ */
+function registerExitHook(): void {
+  if (exitHookRegistered) {
+    return; // Already registered
+  }
+
+  exitHookRegistered = true;
+
+  process.on('beforeExit', async (code) => {
+    // Prevent re-entry if already flushing
+    if (exitHookRunning) {
+      return;
+    }
+
+    exitHookRunning = true;
+
+    try {
+      // Flush all projects that have adapters (and potentially dirty cells)
+      const flushPromises: Promise<void>[] = [];
+
+      for (const [projectKey, adapter] of adapterCache.entries()) {
+        const flushPromise = (async () => {
+          try {
+            ensureHiveDirectory(projectKey);
+            const flushManager = new FlushManager({
+              adapter,
+              projectKey,
+              outputPath: `${projectKey}/.hive/issues.jsonl`,
+            });
+            await flushManager.flush();
+          } catch (error) {
+            // Non-fatal - log and continue
+            console.warn(
+              `[hive exit hook] Failed to flush ${projectKey}:`,
+              error instanceof Error ? error.message : String(error)
+            );
+          }
+        })();
+
+        flushPromises.push(flushPromise);
+      }
+
+      // Wait for all flushes to complete
+      await Promise.all(flushPromises);
+    } finally {
+      exitHookRunning = false;
+    }
+  });
+}
+
+// Register exit hook immediately when module is imported
+registerExitHook();
+
 /**
  * Get or create a HiveAdapter instance for a project
  * Exported for testing - allows tests to verify state directly
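Two Node.js details matter for this hook. First, 'beforeExit' is not emitted on an explicit process.exit() or on fatal signals, so this really is only a safety net for normal event-loop exhaustion, as the comment says. Second, an async listener schedules more work, so the loop can drain again and re-emit 'beforeExit'; that repeat emission is what the exitHookRunning guard absorbs. A standalone sketch of the re-fire behavior (one-shot guard, analogous to exitHookRunning):

```ts
// Run with `node sketch.mjs`: the awaited timer keeps the event loop alive,
// so Node emits 'beforeExit' again once that work drains. Without the guard
// the flush would run repeatedly; with it, the second emission is a no-op.
let flushed = false;
process.on("beforeExit", async () => {
  if (flushed) return; // one-shot guard, analogous to exitHookRunning
  flushed = true;
  await new Promise((resolve) => setTimeout(resolve, 10)); // stand-in for flush()
  console.log("flushed once; process may now exit");
});
```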
@@ -694,6 +767,23 @@ export const hive_create_epic = tool({
        }
      }

+      // Sync cells to JSONL so spawned workers can see them immediately
+      try {
+        ensureHiveDirectory(projectKey);
+        const flushManager = new FlushManager({
+          adapter,
+          projectKey,
+          outputPath: `${projectKey}/.hive/issues.jsonl`,
+        });
+        await flushManager.flush();
+      } catch (error) {
+        // Non-fatal - log and continue
+        console.warn(
+          "[hive_create_epic] Failed to sync to JSONL:",
+          error,
+        );
+      }
+
      return JSON.stringify(result, null, 2);
    } catch (error) {
      // Partial failure - rollback via deleteCell
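This sync block is the same as the per-project flush inside the exit hook above. If more call sites appear, it could be hoisted into a shared helper along these lines (a sketch under the FlushManager API shown in this diff; `flushProjectToJsonl` is our name, not a package export):

```ts
// Hypothetical consolidation of the duplicated flush logic in
// registerExitHook() and hive_create_epic. Assumes the FlushManager API
// visible in this diff: constructor({ adapter, projectKey, outputPath })
// and an async flush() method.
async function flushProjectToJsonl(
  adapter: HiveAdapter,
  projectKey: string,
  caller: string,
): Promise<void> {
  try {
    ensureHiveDirectory(projectKey);
    const flushManager = new FlushManager({
      adapter,
      projectKey,
      outputPath: `${projectKey}/.hive/issues.jsonl`,
    });
    await flushManager.flush();
  } catch (error) {
    // Non-fatal - log and continue, matching the existing call sites
    console.warn(
      `[${caller}] Failed to sync to JSONL:`,
      error instanceof Error ? error.message : String(error),
    );
  }
}
```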
@@ -790,7 +880,7 @@ export const hive_query = tool({
export const hive_update = tool({
  description: "Update cell status/description",
  args: {
-    id: tool.schema.string().describe("Cell ID"),
+    id: tool.schema.string().describe("Cell ID or partial hash"),
    status: tool.schema
      .enum(["open", "in_progress", "blocked", "closed"])
      .optional()
@@ -809,26 +899,29 @@ export const hive_update = tool({
    const adapter = await getHiveAdapter(projectKey);

    try {
+      // Resolve partial ID to full ID
+      const cellId = await resolvePartialId(adapter, projectKey, validated.id) || validated.id;
+
      let cell: AdapterCell;

      // Status changes use changeCellStatus, other fields use updateCell
      if (validated.status) {
        cell = await adapter.changeCellStatus(
          projectKey,
-          validated.id,
+          cellId,
          validated.status,
        );
      }

      // Update other fields if provided
      if (validated.description !== undefined || validated.priority !== undefined) {
-        cell = await adapter.updateCell(projectKey, validated.id, {
+        cell = await adapter.updateCell(projectKey, cellId, {
          description: validated.description,
          priority: validated.priority,
        });
      } else if (!validated.status) {
        // No changes requested
-        const existingCell = await adapter.getCell(projectKey, validated.id);
+        const existingCell = await adapter.getCell(projectKey, cellId);
        if (!existingCell) {
          throw new HiveError(
            `Cell not found: ${validated.id}`,
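The call sites pin down a contract for resolvePartialId without showing its body: it resolves a unique match to the full ID, returns a falsy value when nothing matches (the `|| validated.id` fallback then lets the adapter raise its own not-found error), and throws an "Ambiguous hash" error when several cells match. A sketch of a resolver with that shape; the `listCells` query capability is our assumption, not a confirmed swarm-mail method:

```ts
// Illustrative only - the real resolvePartialId ships in swarm-mail and may
// be implemented differently. Contract inferred from the call sites above.
type CellRecord = { id: string };

async function resolvePartialIdSketch(
  listCells: (projectKey: string) => Promise<CellRecord[]>, // assumed query
  projectKey: string,
  partial: string,
): Promise<string | null> {
  const cells = await listCells(projectKey);
  const matches = cells.filter((c) => c.id.includes(partial));
  if (matches.length === 0) return null; // caller falls back to the raw input
  if (matches.length > 1) {
    throw new Error(`Ambiguous hash '${partial}': ${matches.length} cells match`);
  }
  return matches[0].id;
}
```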
@@ -838,12 +931,27 @@ export const hive_update = tool({
        cell = existingCell;
      }

-      await adapter.markDirty(projectKey, validated.id);
+      await adapter.markDirty(projectKey, cellId);

      const formatted = formatCellForOutput(cell!);
      return JSON.stringify(formatted, null, 2);
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
+
+      // Provide helpful error messages
+      if (message.includes("Ambiguous hash")) {
+        throw new HiveError(
+          `Ambiguous ID '${validated.id}': multiple cells match. Please provide more characters.`,
+          "hive_update",
+        );
+      }
+      if (message.includes("Bead not found") || message.includes("Cell not found")) {
+        throw new HiveError(
+          `No cell found matching ID '${validated.id}'`,
+          "hive_update",
+        );
+      }
+
      throw new HiveError(
        `Failed to update cell: ${message}`,
        "hive_update",
@@ -858,7 +966,7 @@ export const hive_update = tool({
export const hive_close = tool({
  description: "Close a cell with reason",
  args: {
-    id: tool.schema.string().describe("Cell ID"),
+    id: tool.schema.string().describe("Cell ID or partial hash"),
    reason: tool.schema.string().describe("Completion reason"),
  },
  async execute(args, ctx) {
@@ -867,17 +975,35 @@ export const hive_close = tool({
    const adapter = await getHiveAdapter(projectKey);

    try {
+      // Resolve partial ID to full ID
+      const cellId = await resolvePartialId(adapter, projectKey, validated.id) || validated.id;
+
      const cell = await adapter.closeCell(
        projectKey,
-        validated.id,
+        cellId,
        validated.reason,
      );

-      await adapter.markDirty(projectKey, validated.id);
+      await adapter.markDirty(projectKey, cellId);

      return `Closed ${cell.id}: ${validated.reason}`;
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
+
+      // Provide helpful error messages
+      if (message.includes("Ambiguous hash")) {
+        throw new HiveError(
+          `Ambiguous ID '${validated.id}': multiple cells match. Please provide more characters.`,
+          "hive_close",
+        );
+      }
+      if (message.includes("Bead not found") || message.includes("Cell not found")) {
+        throw new HiveError(
+          `No cell found matching ID '${validated.id}'`,
+          "hive_close",
+        );
+      }
+
      throw new HiveError(
        `Failed to close cell: ${message}`,
        "hive_close",
@@ -893,24 +1019,42 @@ export const hive_start = tool({
  description:
    "Mark a cell as in-progress (shortcut for update --status in_progress)",
  args: {
-    id: tool.schema.string().describe("Cell ID"),
+    id: tool.schema.string().describe("Cell ID or partial hash"),
  },
  async execute(args, ctx) {
    const projectKey = getHiveWorkingDirectory();
    const adapter = await getHiveAdapter(projectKey);

    try {
+      // Resolve partial ID to full ID
+      const cellId = await resolvePartialId(adapter, projectKey, args.id) || args.id;
+
      const cell = await adapter.changeCellStatus(
        projectKey,
-        args.id,
+        cellId,
        "in_progress",
      );

-      await adapter.markDirty(projectKey, args.id);
+      await adapter.markDirty(projectKey, cellId);

      return `Started: ${cell.id}`;
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
+
+      // Provide helpful error messages
+      if (message.includes("Ambiguous hash")) {
+        throw new HiveError(
+          `Ambiguous ID '${args.id}': multiple cells match. Please provide more characters.`,
+          "hive_start",
+        );
+      }
+      if (message.includes("Bead not found") || message.includes("Cell not found")) {
+        throw new HiveError(
+          `No cell found matching ID '${args.id}'`,
+          "hive_start",
+        );
+      }
+
      throw new HiveError(
        `Failed to start cell: ${message}`,
        "hive_start",
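The Ambiguous/not-found translation now appears verbatim in hive_update, hive_close, and hive_start, differing only in the tool name. A shared helper could collapse the three copies (a sketch; the name `translateResolveError` is ours, while the HiveError shape and error substrings follow this diff):

```ts
// Hypothetical consolidation of the repeated error-translation blocks.
// Uses only the error substrings visible in this diff ("Ambiguous hash",
// "Bead not found", "Cell not found"); call it before the generic rethrow.
function translateResolveError(message: string, id: string, toolName: string): void {
  if (message.includes("Ambiguous hash")) {
    throw new HiveError(
      `Ambiguous ID '${id}': multiple cells match. Please provide more characters.`,
      toolName,
    );
  }
  if (message.includes("Bead not found") || message.includes("Cell not found")) {
    throw new HiveError(`No cell found matching ID '${id}'`, toolName);
  }
}
```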
package/src/swarm-decompose.ts
CHANGED

@@ -52,7 +52,7 @@ Agents MUST update their bead status as they work. No silent progress.

 ## Requirements

-1. **Break into
+1. **Break into independent subtasks** that can run in parallel (as many as needed)
 2. **Assign files** - each subtask must specify which files it will modify
 3. **No file overlap** - files cannot appear in multiple subtasks (they get exclusive locks)
 4. **Order by dependency** - if subtask B needs subtask A's output, A must come first in the array
@@ -129,7 +129,7 @@ Agents MUST update their bead status as they work. No silent progress.

 ## Requirements

-1. **Break into
+1. **Break into independent subtasks** that can run in parallel (as many as needed)
 2. **Assign files** - each subtask must specify which files it will modify
 3. **No file overlap** - files cannot appear in multiple subtasks (they get exclusive locks)
 4. **Order by dependency** - if subtask B needs subtask A's output, A must come first in the array
@@ -437,10 +437,9 @@ export const swarm_decompose = tool({
    max_subtasks: tool.schema
      .number()
      .int()
-      .min(
-      .
-      .
-      .describe("Maximum number of subtasks (default: 5)"),
+      .min(1)
+      .optional()
+      .describe("Suggested max subtasks (optional - LLM decides if not specified)"),
    context: tool.schema
      .string()
      .optional()
@@ -453,7 +452,6 @@ export const swarm_decompose = tool({
      .number()
      .int()
      .min(1)
-      .max(10)
      .optional()
      .describe("Max CASS results to include (default: 3)"),
  },
@@ -702,11 +700,9 @@ export const swarm_delegate_planning = tool({
    max_subtasks: tool.schema
      .number()
      .int()
-      .min(
-      .max(10)
+      .min(1)
      .optional()
-      .
-      .describe("Maximum number of subtasks (default: 5)"),
+      .describe("Suggested max subtasks (optional - LLM decides if not specified)"),
    strategy: tool.schema
      .enum(["auto", "file-based", "feature-based", "risk-based"])
      .optional()