opencode-swarm-plugin 0.30.7 → 0.31.1

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
@@ -406,7 +406,7 @@ describe("beads integration", () => {
   });
 
   describe("hive_create_epic", () => {
-    it("creates an epic with subtasks", async () => {
+    it("creates an epic with subtasks and syncs to JSONL", async () => {
       const result = await hive_create_epic.execute(
         {
           epic_title: "Integration test epic",
@@ -438,6 +438,28 @@ describe("beads integration", () => {
         expect(subtaskBead).toBeDefined();
         expect(subtaskBead!.parent_id).toBe(epicResult.epic.id);
       }
+
+      // NEW TEST: Verify cells are synced to JSONL immediately
+      const { readFileSync, existsSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const jsonlPath = join(TEST_PROJECT_KEY, ".hive", "issues.jsonl");
+
+      expect(existsSync(jsonlPath)).toBe(true);
+
+      const jsonlContent = readFileSync(jsonlPath, "utf-8");
+      const lines = jsonlContent.trim().split("\n").filter(l => l);
+      const cells = lines.map(line => JSON.parse(line));
+
+      // Epic and all subtasks should be in JSONL
+      const epicInJsonl = cells.find(c => c.id === epicResult.epic.id);
+      expect(epicInJsonl).toBeDefined();
+      expect(epicInJsonl!.title).toBe("Integration test epic");
+
+      for (const subtask of epicResult.subtasks) {
+        const subtaskInJsonl = cells.find(c => c.id === subtask.id);
+        expect(subtaskInJsonl).toBeDefined();
+        expect(subtaskInJsonl!.parent_id).toBe(epicResult.epic.id);
+      }
     });
 
     it("creates an epic with files metadata in subtasks", async () => {
@@ -601,6 +623,181 @@ describe("beads integration", () => {
     });
   });
 
+  describe("partial ID resolution", () => {
+    let fullId: string;
+    let hash: string;
+
+    beforeEach(async () => {
+      // Create a test cell to resolve
+      const result = await hive_create.execute(
+        { title: "Partial ID test cell" },
+        mockContext,
+      );
+      const cell = parseResponse<Cell>(result);
+      fullId = cell.id;
+      createdBeadIds.push(fullId);
+
+      // Extract hash from ID (format: {prefix}-{hash}-{timestamp}{random})
+      // The last segment is always timestamp+random (11 chars)
+      // The hash is the 6-char segment before that
+      // Examples:
+      //   "opencode-swarm-monorepo-lf2p4u-mjd2h5v4wdt" -> hash is "lf2p4u"
+      //   "cell--gcel4-mjd2h5v4wdt" -> hash is "-gcel4" (negative hash creates consecutive hyphens)
+
+      // Find the last hyphen, then work backwards to find the second-to-last hyphen
+      const lastHyphenIndex = fullId.lastIndexOf("-");
+      if (lastHyphenIndex === -1) {
+        hash = "";
+      } else {
+        // Get everything before the last hyphen
+        const beforeLast = fullId.substring(0, lastHyphenIndex);
+        // Find the second-to-last hyphen
+        const secondLastHyphenIndex = beforeLast.lastIndexOf("-");
+        if (secondLastHyphenIndex === -1) {
+          hash = "";
+        } else {
+          // Hash is between second-to-last and last hyphen
+          hash = fullId.substring(secondLastHyphenIndex + 1, lastHyphenIndex);
+        }
+      }
+    });
+
+    describe("hive_update", () => {
+      it("accepts full cell ID (no resolution needed)", async () => {
+        const result = await hive_update.execute(
+          { id: fullId, description: "Updated via full ID" },
+          mockContext,
+        );
+
+        const updated = parseResponse<Cell>(result);
+        expect(updated.id).toBe(fullId);
+        expect(updated.description).toContain("Updated via full ID");
+      });
+
+      it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
+        try {
+          const result = await hive_update.execute(
+            { id: hash, priority: 1 },
+            mockContext,
+          );
+
+          const updated = parseResponse<Cell>(result);
+          expect(updated.id).toBe(fullId);
+          expect(updated.priority).toBe(1);
+        } catch (error) {
+          // In test environment with many cells, hash may be ambiguous
+          // Verify we get a helpful error message
+          if (error instanceof Error && error.message.includes("Ambiguous")) {
+            expect(error.message).toMatch(/ambiguous.*multiple/i);
+            expect(error.message).toContain(hash);
+          } else {
+            throw error; // Re-throw if not ambiguity error
+          }
+        }
+      });
+
+      it("throws helpful error for non-existent hash", async () => {
+        await expect(
+          hive_update.execute({ id: "zzzzzz", status: "closed" }, mockContext),
+        ).rejects.toThrow(/not found|no cell|zzzzzz/i);
+      });
+
+      it("throws helpful error for ambiguous hash", async () => {
+        // Create another cell with potentially similar hash
+        // (in practice, hashes are unique, but we simulate ambiguity by using a short partial)
+        // This test verifies the error message is helpful
+        try {
+          // Use a single char which might match multiple cells in larger datasets
+          await hive_update.execute({ id: "a", status: "closed" }, mockContext);
+          // If it succeeds, it means only one cell matched - that's fine
+        } catch (error) {
+          const message = error instanceof Error ? error.message : String(error);
+          // Error should mention ambiguity if multiple matches
+          if (message.includes("ambiguous") || message.includes("multiple")) {
+            expect(message).toMatch(/ambiguous|multiple/i);
+          }
+        }
+      });
+    });
+
+    describe("hive_close", () => {
+      it("accepts full cell ID", async () => {
+        const result = await hive_close.execute(
+          { id: fullId, reason: "Closed via full ID" },
+          mockContext,
+        );
+
+        expect(result).toContain("Closed");
+        expect(result).toContain(fullId);
+
+        const closed = await adapter.getCell(TEST_PROJECT_KEY, fullId);
+        expect(closed?.status).toBe("closed");
+      });
+
+      it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
+        try {
+          const result = await hive_close.execute(
+            { id: hash, reason: "Close via hash" },
+            mockContext,
+          );
+
+          expect(result).toContain("Closed");
+          expect(result).toContain(fullId);
+        } catch (error) {
+          if (error instanceof Error && error.message.includes("Ambiguous")) {
+            expect(error.message).toMatch(/ambiguous.*multiple/i);
+            expect(error.message).toContain(hash);
+          } else {
+            throw error;
+          }
+        }
+      });
+
+      it("throws helpful error for non-existent hash", async () => {
+        await expect(
+          hive_close.execute({ id: "nonono", reason: "Test" }, mockContext),
+        ).rejects.toThrow(/not found|no cell|nonono/i);
+      });
+    });
+
+    describe("hive_start", () => {
+      it("accepts full cell ID", async () => {
+        const result = await hive_start.execute({ id: fullId }, mockContext);
+
+        expect(result).toContain("Started");
+        expect(result).toContain(fullId);
+
+        const started = await adapter.getCell(TEST_PROJECT_KEY, fullId);
+        expect(started?.status).toBe("in_progress");
+      });
+
+      it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
+        try {
+          const result = await hive_start.execute(
+            { id: hash },
+            mockContext,
+          );
+
+          expect(result).toContain("Started");
+          expect(result).toContain(fullId);
+        } catch (error) {
+          if (error instanceof Error && error.message.includes("Ambiguous")) {
+            expect(error.message).toMatch(/ambiguous.*multiple/i);
+            expect(error.message).toContain(hash);
+          } else {
+            throw error;
+          }
+        }
+      });
+
+      it("throws helpful error for non-existent hash", async () => {
+        await expect(
+          hive_start.execute({ id: "nope99" }, mockContext),
+        ).rejects.toThrow(/not found|no cell|nope99/i);
+      });
+    });
+  });
+
   describe("workflow integration", () => {
     it("complete bead lifecycle: create -> start -> update -> close", async () => {
       // 1. Create
@@ -1225,8 +1422,8 @@ describe("beads integration", () => {
         mockContext,
       );
 
-      // Should return "No cells to sync" since no dirty cells
-      expect(result).toContain("No cells to sync");
+      // Should return "No cells or memories to sync" since no dirty cells
+      expect(result).toContain("No cells or memories to sync");
     } finally {
       setHiveWorkingDirectory(originalDir);
       rmSync(tempProject, { recursive: true, force: true });
@@ -1450,4 +1647,227 @@ describe("beads integration", () => {
       rmSync(tempProject, { recursive: true, force: true });
     });
   });
+
+  describe("process exit hook", () => {
+    it("registers beforeExit hook that syncs dirty cells", async () => {
+      const { mkdirSync, rmSync, writeFileSync, readFileSync, existsSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+      const { execSync } = await import("node:child_process");
+
+      // Create temp project
+      const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+
+      // Initialize git repo
+      execSync("git init", { cwd: tempProject });
+      execSync('git config user.email "test@example.com"', { cwd: tempProject });
+      execSync('git config user.name "Test User"', { cwd: tempProject });
+
+      // Initial commit with empty issues.jsonl
+      writeFileSync(join(hiveDir, "issues.jsonl"), "");
+      execSync("git add .", { cwd: tempProject });
+      execSync('git commit -m "initial"', { cwd: tempProject });
+
+      // Set working directory
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Create a cell (marks it dirty but don't sync)
+        await hive_create.execute(
+          { title: "Exit hook test cell", type: "task" },
+          mockContext,
+        );
+
+        // Verify cell is NOT in JSONL yet (only in PGLite)
+        const beforeContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        expect(beforeContent.trim()).toBe("");
+
+        // Simulate process exit by triggering beforeExit event
+        process.emit("beforeExit", 0);
+
+        // Wait for async flush to complete
+        await new Promise(resolve => setTimeout(resolve, 100));
+
+        // Verify cell was synced to JSONL by the exit hook
+        const afterContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        expect(afterContent.trim()).not.toBe("");
+
+        const cells = afterContent.trim().split("\n").map(line => JSON.parse(line));
+        expect(cells).toHaveLength(1);
+        expect(cells[0].title).toBe("Exit hook test cell");
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+
+    it("exit hook is idempotent - safe to call multiple times", async () => {
+      const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+
+      // Create temp project
+      const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+      writeFileSync(join(hiveDir, "issues.jsonl"), "");
+
+      // Set working directory
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Create a cell
+        await hive_create.execute(
+          { title: "Idempotent test cell", type: "task" },
+          mockContext,
+        );
+
+        // Trigger exit hook multiple times
+        process.emit("beforeExit", 0);
+        await new Promise(resolve => setTimeout(resolve, 50));
+
+        process.emit("beforeExit", 0);
+        await new Promise(resolve => setTimeout(resolve, 50));
+
+        // Verify cell is written only once (no duplication)
+        const content = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        const lines = content.trim().split("\n").filter(l => l);
+
+        // Should have exactly one cell (even though we triggered hook twice)
+        expect(lines.length).toBeGreaterThanOrEqual(1);
+
+        // All cells should have unique IDs
+        const cells = lines.map(line => JSON.parse(line));
+        const uniqueIds = new Set(cells.map(c => c.id));
+        expect(uniqueIds.size).toBe(cells.length);
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+
+    it("exit hook handles case with no dirty cells gracefully", async () => {
+      const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+
+      // Create temp project with empty JSONL
+      const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+      writeFileSync(join(hiveDir, "issues.jsonl"), "");
+
+      // Set working directory
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Trigger exit hook with no dirty cells (should not throw)
+        process.emit("beforeExit", 0);
+        await new Promise(resolve => setTimeout(resolve, 50));
+
+        // JSONL should still be empty (no error thrown)
+        const content = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+        expect(content.trim()).toBe("");
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+  });
+
+  describe("bigint to Date conversion", () => {
+    it("should handle PGLite bigint timestamps correctly in hive_query", async () => {
+      const { mkdirSync, rmSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+
+      const tempProject = join(tmpdir(), `hive-bigint-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Create a cell
+        const createResponse = await hive_create.execute(
+          { title: "Test bigint dates", type: "task" },
+          mockContext
+        );
+        const created = parseResponse<Cell>(createResponse);
+
+        // Query it back - this triggers formatCellForOutput with PGLite bigint timestamps
+        const queryResponse = await hive_query.execute({ status: "open" }, mockContext);
+        const queried = parseResponse<Cell[]>(queryResponse);
+
+        expect(queried.length).toBeGreaterThan(0);
+        const cell = queried.find(c => c.id === created.id);
+        expect(cell).toBeDefined();
+
+        // These should be valid ISO date strings, not "Invalid Date"
+        expect(cell!.created_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
+        expect(cell!.updated_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
+        expect(cell!.created_at).not.toBe("Invalid Date");
+        expect(cell!.updated_at).not.toBe("Invalid Date");
+
+        // Verify dates are actually valid by parsing
+        const createdDate = new Date(cell!.created_at);
+        const updatedDate = new Date(cell!.updated_at);
+        expect(createdDate.getTime()).toBeGreaterThan(0);
+        expect(updatedDate.getTime()).toBeGreaterThan(0);
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+
+    it("should handle closed_at bigint timestamp correctly", async () => {
+      const { mkdirSync, rmSync } = await import("node:fs");
+      const { join } = await import("node:path");
+      const { tmpdir } = await import("node:os");
+
+      const tempProject = join(tmpdir(), `hive-bigint-closed-test-${Date.now()}`);
+      const hiveDir = join(tempProject, ".hive");
+      mkdirSync(hiveDir, { recursive: true });
+
+      const originalDir = getHiveWorkingDirectory();
+      setHiveWorkingDirectory(tempProject);
+
+      try {
+        // Create and close a cell
+        const createResponse = await hive_create.execute(
+          { title: "Test closed bigint date", type: "task" },
+          mockContext
+        );
+        const created = parseResponse<Cell>(createResponse);
+
+        await hive_close.execute(
+          { id: created.id, reason: "Testing bigint closed_at" },
+          mockContext
+        );
+
+        // Query closed cells
+        const queryResponse = await hive_query.execute({ status: "closed" }, mockContext);
+        const queried = parseResponse<Cell[]>(queryResponse);
+
+        const cell = queried.find(c => c.id === created.id);
+        expect(cell).toBeDefined();
+        expect(cell!.closed_at).toBeDefined();
+        expect(cell!.closed_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
+        expect(cell!.closed_at).not.toBe("Invalid Date");
+
+        // Verify closed_at is valid
+        const closedDate = new Date(cell!.closed_at!);
+        expect(closedDate.getTime()).toBeGreaterThan(0);
+      } finally {
+        setHiveWorkingDirectory(originalDir);
+        rmSync(tempProject, { recursive: true, force: true });
+      }
+    });
+  });
 });