opencode-swarm-plugin 0.27.3 → 0.28.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,9 +1,9 @@
  $ bun build ./src/index.ts --outdir ./dist --target node --external @electric-sql/pglite --external swarm-mail && bun build ./src/plugin.ts --outfile ./dist/plugin.js --target node --external @electric-sql/pglite --external swarm-mail && tsc
- Bundled 200 modules in 41ms
+ Bundled 200 modules in 32ms
 
- index.js 1.19 MB (entry point)
+ index.js 1.20 MB (entry point)
 
- Bundled 201 modules in 39ms
+ Bundled 201 modules in 31ms
 
  plugin.js 1.16 MB (entry point)
 
package/CHANGELOG.md CHANGED
@@ -1,5 +1,29 @@
  # opencode-swarm-plugin
 
+ ## 0.28.0
+
+ ### Minor Changes
+
+ - [`de2fa62`](https://github.com/joelhooks/swarm-tools/commit/de2fa628524b88511e06164104ff7b5fb93d39e5) Thanks [@joelhooks](https://github.com/joelhooks)! - Add full beads→hive migration pipeline with JSONL import to PGLite
+
+   - Add `mergeHistoricBeads()` to merge beads.base.jsonl into issues.jsonl
+   - Add `importJsonlToPGLite()` to import JSONL records into the PGLite database
+   - Wire both functions into the `swarm setup` migration flow
+   - Fix closed_at constraint issue when importing closed cells
+   - TDD: 12 new integration tests for the migration functions
+
+ ## 0.27.4
+
+ ### Patch Changes
+
+ - [`f23f774`](https://github.com/joelhooks/swarm-tools/commit/f23f774e4b83a3422d8266b6b1ad083daaec03e2) Thanks [@joelhooks](https://github.com/joelhooks)! - Enforce that coordinators always spawn workers and never execute work directly
+
+   - Added "Coordinator Role Boundaries" section to the /swarm command
+   - Coordinators are now explicitly forbidden from editing code, running tests, or making "quick fixes"
+   - Updated Phase 5 to clarify that coordinators NEVER reserve files (workers do)
+   - Updated Phase 6 with patterns for both parallel and sequential worker spawning
+   - Worker agent template now confirms the worker was spawned correctly and instructs it to report coordinator violations
+
  ## 0.27.3
 
  ### Patch Changes
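
Taken together, the 0.28.0 entries describe a two-step pipeline: merge historic JSONL, then import it into PGLite. A minimal sketch of driving it directly, assuming the signatures declared in the `dist/hive.d.ts` diff below and the exports added to `dist/index.js` (the real wiring inside `swarm setup` is shown in the `bin/swarm.ts` diff):

```ts
import { mergeHistoricBeads, importJsonlToPGLite } from "opencode-swarm-plugin";

// Both functions take an absolute project root containing a .hive/ directory.
const projectPath = process.cwd();

// Step 1: fold .hive/beads.base.jsonl into .hive/issues.jsonl.
// Duplicate IDs are skipped; the issues.jsonl version wins.
const { merged, skipped } = await mergeHistoricBeads(projectPath);

// Step 2: upsert every issues.jsonl record into the PGLite cells table.
// Invalid JSON lines are counted in `errors` rather than thrown.
const { imported, updated, errors } = await importJsonlToPGLite(projectPath);

console.log({ merged, skipped, imported, updated, errors });
```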
package/bin/swarm.ts CHANGED
@@ -30,6 +30,8 @@ import { fileURLToPath } from "url";
  import {
    checkBeadsMigrationNeeded,
    migrateBeadsToHive,
+   mergeHistoricBeads,
+   importJsonlToPGLite,
  } from "../src/hive";
 
  const __dirname = dirname(fileURLToPath(import.meta.url));
@@ -855,6 +857,37 @@ You are a swarm coordinator. Your job is to clarify the task, decompose it into
 
  $ARGUMENTS
 
+ ## CRITICAL: Coordinator Role Boundaries
+
+ **⚠️ COORDINATORS NEVER EXECUTE WORK DIRECTLY**
+
+ Your role is **ONLY** to:
+ 1. **Clarify** - Ask questions to understand scope
+ 2. **Decompose** - Break into subtasks with clear boundaries
+ 3. **Spawn** - Create worker agents for ALL subtasks
+ 4. **Monitor** - Check progress, unblock, mediate conflicts
+ 5. **Verify** - Confirm completion, run final checks
+
+ **YOU DO NOT:**
+ - Read implementation files (only metadata/structure for planning)
+ - Edit code directly
+ - Run tests yourself (workers run tests)
+ - Implement features
+ - Fix bugs inline
+ - Make "quick fixes" yourself
+
+ **ALWAYS spawn workers, even for sequential tasks.** Sequential just means spawn them in order and wait for each to complete before spawning the next.
+
+ ### Why This Matters
+
+ | Coordinator Work | Worker Work | Consequence of Mixing |
+ |-----------------|-------------|----------------------|
+ | Sonnet context ($$$) | Disposable context | Expensive context waste |
+ | Long-lived state | Task-scoped state | Context exhaustion |
+ | Orchestration concerns | Implementation concerns | Mixed concerns |
+ | No checkpoints | Checkpoints enabled | No recovery |
+ | No learning signals | Outcomes tracked | No improvement |
+
  ## Workflow
 
  ### Phase 0: Socratic Planning (INTERACTIVE - unless --fast)
@@ -923,15 +956,39 @@ swarm_validate_decomposition(response="<CellTree JSON>")
  ### Phase 4: Create Beads
  \`hive_create_epic(epic_title="<task>", subtasks=[...])\`
 
- ### Phase 5: Reserve Files
- \`swarmmail_reserve(paths=[...], reason="<bead-id>: <desc>")\`
+ ### Phase 5: DO NOT Reserve Files
+
+ > **⚠️ Coordinator NEVER reserves files.** Workers reserve their own files.
+ > If coordinator reserves, workers get blocked and swarm stalls.
+
+ ### Phase 6: Spawn Workers for ALL Subtasks (MANDATORY)
 
- ### Phase 6: Spawn Agents (ALL in single message)
+ > **⚠️ ALWAYS spawn workers, even for sequential tasks.**
+ > - Parallel tasks: Spawn ALL in a single message
+ > - Sequential tasks: Spawn one, wait for completion, spawn next
+
+ **For parallel work:**
  \`\`\`
- swarm_spawn_subtask(bead_id, epic_id, subtask_title, files, shared_context, project_path="$PWD")
+ // Single message with multiple Task calls
+ swarm_spawn_subtask(bead_id_1, epic_id, title_1, files_1, shared_context, project_path="$PWD")
+ Task(subagent_type="swarm/worker", prompt="<from above>")
+ swarm_spawn_subtask(bead_id_2, epic_id, title_2, files_2, shared_context, project_path="$PWD")
  Task(subagent_type="swarm/worker", prompt="<from above>")
  \`\`\`
 
+ **For sequential work:**
+ \`\`\`
+ // Spawn worker 1, wait for completion
+ swarm_spawn_subtask(bead_id_1, ...)
+ const result1 = await Task(subagent_type="swarm/worker", prompt="<from above>")
+
+ // THEN spawn worker 2 with context from worker 1
+ swarm_spawn_subtask(bead_id_2, ..., shared_context="Worker 1 completed: " + result1)
+ const result2 = await Task(subagent_type="swarm/worker", prompt="<from above>")
+ \`\`\`
+
+ **NEVER do the work yourself.** Even if it seems faster, spawn a worker.
+
  **IMPORTANT:** Pass \`project_path\` to \`swarm_spawn_subtask\` so workers can call \`swarmmail_init\`.
 
  ### Phase 7: Monitor
@@ -1035,6 +1092,12 @@ model: ${model}
 
  You are a swarm worker agent. Your prompt contains a **MANDATORY SURVIVAL CHECKLIST** - follow it IN ORDER.
 
+ ## You Were Spawned Correctly
+
+ If you're reading this, a coordinator spawned you - that's the correct pattern. Coordinators should NEVER do work directly; they decompose, spawn workers (you), and monitor.
+
+ **If you ever see a coordinator editing code or running tests directly, that's a bug.** Report it.
+
  ## CRITICAL: Read Your Prompt Carefully
 
  Your Task prompt contains detailed instructions including:
@@ -1520,6 +1583,18 @@ async function setup() {
  if (result.migrated) {
    migrateSpinner.stop("Migration complete");
    p.log.success("Renamed .beads/ → .hive/");
+
+   // Merge historic beads into issues.jsonl
+   const mergeResult = await mergeHistoricBeads(cwd);
+   if (mergeResult.merged > 0) {
+     p.log.success(`Merged ${mergeResult.merged} historic beads (${mergeResult.skipped} already present)`);
+   }
+
+   // Import JSONL into PGLite database
+   const importResult = await importJsonlToPGLite(cwd);
+   if (importResult.imported > 0 || importResult.updated > 0) {
+     p.log.success(`Database: ${importResult.imported} imported, ${importResult.updated} updated`);
+   }
  } else {
    migrateSpinner.stop("Migration skipped");
    p.log.warn(result.reason || "Unknown reason");
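
Note that this wiring reports `imported` and `updated` but never surfaces `importResult.errors`, so malformed JSONL lines fail silently during setup. A possible extension, sketched against the same logger already used above (hypothetical: whether to warn, fail, or stay quiet here is a design choice this diff leaves open):

```ts
// Hypothetical follow-up to the import above; `p` is the prompts/logger
// instance already imported in bin/swarm.ts.
if (importResult.errors > 0) {
  p.log.warn(`${importResult.errors} JSONL line(s) could not be imported`);
}
```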
package/dist/hive.d.ts CHANGED
@@ -80,6 +80,40 @@ export declare function migrateBeadsToHive(projectPath: string): Promise<Migrati
   * @param projectPath - Absolute path to the project root
   */
  export declare function ensureHiveDirectory(projectPath: string): void;
+ /**
+  * Merge historic beads from beads.base.jsonl into issues.jsonl
+  *
+  * This function reads beads.base.jsonl (historic data) and issues.jsonl (current data),
+  * merges them by ID (issues.jsonl version wins for duplicates), and writes the result
+  * back to issues.jsonl.
+  *
+  * Use case: After migrating from .beads to .hive, you may have a beads.base.jsonl file
+  * containing old beads that should be merged into the current issues.jsonl.
+  *
+  * @param projectPath - Absolute path to the project root
+  * @returns Object with merged and skipped counts
+  */
+ export declare function mergeHistoricBeads(projectPath: string): Promise<{
+   merged: number;
+   skipped: number;
+ }>;
+ /**
+  * Import cells from .hive/issues.jsonl into PGLite database
+  *
+  * Reads the JSONL file and upserts each record into the cells table
+  * using the HiveAdapter. Provides granular error reporting for invalid lines.
+  *
+  * This function manually parses JSONL line-by-line to gracefully handle
+  * invalid JSON without throwing. Each valid line is imported via the adapter.
+  *
+  * @param projectPath - Absolute path to the project root
+  * @returns Object with imported, updated, and error counts
+  */
+ export declare function importJsonlToPGLite(projectPath: string): Promise<{
+   imported: number;
+   updated: number;
+   errors: number;
+ }>;
  /**
   * Get or create a HiveAdapter instance for a project
   * Exported for testing - allows tests to verify state directly
@@ -1 +1 @@
- {"version":3,"file":"hive.d.ts","sourceRoot":"","sources":["../src/hive.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAIL,KAAK,WAAW,EAGjB,MAAM,YAAY,CAAC;AAepB;;;;;GAKG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAE/D;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,IAAI,MAAM,CAEhD;AAGD,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAChE,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAuChE;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;aAGhB,OAAO,EAAE,MAAM;aACf,QAAQ,CAAC,EAAE,MAAM;aACjB,MAAM,CAAC,EAAE,MAAM;gBAH/B,OAAO,EAAE,MAAM,EACC,OAAO,EAAE,MAAM,EACf,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,MAAM,CAAC,EAAE,MAAM,YAAA;CAKlC;AAGD,eAAO,MAAM,SAAS,kBAAY,CAAC;AAEnC;;GAEG;AACH,qBAAa,mBAAoB,SAAQ,KAAK;aAG1B,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBADpC,OAAO,EAAE,MAAM,EACC,QAAQ,EAAE,CAAC,CAAC,QAAQ;CAKvC;AAGD,eAAO,MAAM,mBAAmB,4BAAsB,CAAC;AAMvD;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,kCAAkC;IAClC,MAAM,EAAE,OAAO,CAAC;IAChB,4CAA4C;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,sCAAsC;IACtC,QAAQ,EAAE,OAAO,CAAC;IAClB,sCAAsC;IACtC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;;;;;;;;GASG;AACH,wBAAgB,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,oBAAoB,CAgBnF;AAED;;;;;;;;GAQG;AACH,wBAAsB,kBAAkB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC,CAyBtF;AAED;;;;;;;GAOG;AACH,wBAAgB,mBAAmB,CAAC,WAAW,EAAE,MAAM,GAAG,IAAI,CAO7D;AAYD;;;;;;GAMG;AACH,wBAAsB,cAAc,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAiB7E;AAGD,eAAO,MAAM,eAAe,uBAAiB,CAAC;AA+E9C;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;CA+CtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgJ3B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;CAiDrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;CA+DtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;CA6BrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CA4BrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;CAwBrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,SAAS;;;;;;;;CAwIpB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;CA8C3B,CAAC;AAMH,eAAO,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUrB,CAAC;AAkCF;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;;;;CAMvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAM5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;CAMvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CAMrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;CAM5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUtB,CAAC"}
+ {"version":3,"file":"hive.d.ts","sourceRoot":"","sources":["../src/hive.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAIL,KAAK,WAAW,EAGjB,MAAM,YAAY,CAAC;AAepB;;;;;GAKG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAE/D;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,IAAI,MAAM,CAEhD;AAGD,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAChE,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAuChE;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;aAGhB,OAAO,EAAE,MAAM;aACf,QAAQ,CAAC,EAAE,MAAM;aACjB,MAAM,CAAC,EAAE,MAAM;gBAH/B,OAAO,EAAE,MAAM,EACC,OAAO,EAAE,MAAM,EACf,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,MAAM,CAAC,EAAE,MAAM,YAAA;CAKlC;AAGD,eAAO,MAAM,SAAS,kBAAY,CAAC;AAEnC;;GAEG;AACH,qBAAa,mBAAoB,SAAQ,KAAK;aAG1B,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBADpC,OAAO,EAAE,MAAM,EACC,QAAQ,EAAE,CAAC,CAAC,QAAQ;CAKvC;AAGD,eAAO,MAAM,mBAAmB,4BAAsB,CAAC;AAMvD;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,kCAAkC;IAClC,MAAM,EAAE,OAAO,CAAC;IAChB,4CAA4C;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,sCAAsC;IACtC,QAAQ,EAAE,OAAO,CAAC;IAClB,sCAAsC;IACtC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;;;;;;;;GASG;AACH,wBAAgB,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,oBAAoB,CAgBnF;AAED;;;;;;;;GAQG;AACH,wBAAsB,kBAAkB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC,CAyBtF;AAED;;;;;;;GAOG;AACH,wBAAgB,mBAAmB,CAAC,WAAW,EAAE,MAAM,GAAG,IAAI,CAO7D;AAED;;;;;;;;;;;;GAYG;AACH,wBAAsB,kBAAkB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC;IAAC,MAAM,EAAE,MAAM,CAAC;IAAC,OAAO,EAAE,MAAM,CAAA;CAAC,CAAC,CA6CxG;AAED;;;;;;;;;;;GAWG;AACH,wBAAsB,mBAAmB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC;IACtE,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;CAChB,CAAC,CAmGD;AAYD;;;;;;GAMG;AACH,wBAAsB,cAAc,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAiB7E;AAGD,eAAO,MAAM,eAAe,uBAAiB,CAAC;AA+E9C;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;CA+CtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgJ3B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;CAiDrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;CA+DtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;CA6BrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CA4BrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;CAwBrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,SAAS;;;;;;;;CAwIpB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;CA8C3B,CAAC;AAMH,eAAO,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUrB,CAAC;AAkCF;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;;;;CAMvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAM5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;CAMvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CAMrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;CAM5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUtB,CAAC"}
package/dist/index.js CHANGED
@@ -27778,6 +27778,110 @@ function ensureHiveDirectory(projectPath) {
      mkdirSync(hiveDir, { recursive: true });
    }
  }
+ async function mergeHistoricBeads(projectPath) {
+   const { readFileSync: readFileSync2, writeFileSync, existsSync: existsSync2 } = await import("node:fs");
+   const hiveDir = join(projectPath, ".hive");
+   const basePath = join(hiveDir, "beads.base.jsonl");
+   const issuesPath = join(hiveDir, "issues.jsonl");
+   if (!existsSync2(basePath)) {
+     return { merged: 0, skipped: 0 };
+   }
+   const baseContent = readFileSync2(basePath, "utf-8");
+   const baseLines = baseContent.trim().split(`
+ `).filter((l) => l);
+   const baseBeads = baseLines.map((line) => JSON.parse(line));
+   let issuesBeads = [];
+   if (existsSync2(issuesPath)) {
+     const issuesContent = readFileSync2(issuesPath, "utf-8");
+     const issuesLines = issuesContent.trim().split(`
+ `).filter((l) => l);
+     issuesBeads = issuesLines.map((line) => JSON.parse(line));
+   }
+   const existingIds = new Set(issuesBeads.map((b) => b.id));
+   let merged = 0;
+   let skipped = 0;
+   for (const baseBead of baseBeads) {
+     if (existingIds.has(baseBead.id)) {
+       skipped++;
+     } else {
+       issuesBeads.push(baseBead);
+       merged++;
+     }
+   }
+   const mergedContent = issuesBeads.map((b) => JSON.stringify(b)).join(`
+ `) + `
+ `;
+   writeFileSync(issuesPath, mergedContent, "utf-8");
+   return { merged, skipped };
+ }
+ async function importJsonlToPGLite(projectPath) {
+   const jsonlPath = join(projectPath, ".hive", "issues.jsonl");
+   if (!existsSync(jsonlPath)) {
+     return { imported: 0, updated: 0, errors: 0 };
+   }
+   const jsonlContent = readFileSync(jsonlPath, "utf-8");
+   if (!jsonlContent || jsonlContent.trim() === "") {
+     return { imported: 0, updated: 0, errors: 0 };
+   }
+   const adapter = await getHiveAdapter(projectPath);
+   const lines = jsonlContent.split(`
+ `).filter((l) => l.trim());
+   let imported = 0;
+   let updated = 0;
+   let errors3 = 0;
+   for (const line of lines) {
+     try {
+       const cellData = JSON.parse(line);
+       const existing = await adapter.getCell(projectPath, cellData.id);
+       if (existing) {
+         try {
+           await adapter.updateCell(projectPath, cellData.id, {
+             title: cellData.title,
+             description: cellData.description,
+             priority: cellData.priority,
+             assignee: cellData.assignee
+           });
+           if (existing.status !== cellData.status) {
+             if (cellData.status === "closed") {
+               await adapter.closeCell(projectPath, cellData.id, "Imported from JSONL");
+             } else {
+               await adapter.changeCellStatus(projectPath, cellData.id, cellData.status);
+             }
+           }
+           updated++;
+         } catch (updateError) {
+           errors3++;
+         }
+       } else {
+         const db = await adapter.getDatabase();
+         const status = cellData.status === "tombstone" ? "closed" : cellData.status;
+         const isClosed = status === "closed";
+         const closedAt = isClosed ? cellData.closed_at ? new Date(cellData.closed_at).getTime() : new Date(cellData.updated_at).getTime() : null;
+         await db.query(`INSERT INTO cells (
+           id, project_key, type, status, title, description, priority,
+           parent_id, assignee, created_at, updated_at, closed_at
+         ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)`, [
+           cellData.id,
+           projectPath,
+           cellData.issue_type,
+           status,
+           cellData.title,
+           cellData.description || null,
+           cellData.priority,
+           cellData.parent_id || null,
+           cellData.assignee || null,
+           new Date(cellData.created_at).getTime(),
+           new Date(cellData.updated_at).getTime(),
+           closedAt
+         ]);
+         imported++;
+       }
+     } catch (error45) {
+       errors3++;
+     }
+   }
+   return { imported, updated, errors: errors3 };
+ }
  var adapterCache = new Map;
  async function getHiveAdapter(projectKey) {
    if (adapterCache.has(projectKey)) {
@@ -35998,6 +36102,7 @@ export {
    repoCrawlTools,
    parseFrontmatter,
    migrateBeadsToHive,
+   mergeHistoricBeads,
    mcpCallWithAutoInit,
    mandateTools,
    mandateSchemas,
@@ -36012,6 +36117,7 @@ export {
    isAgentNotFoundError,
    isAgentEvent,
    invalidateSkillsCache,
+   importJsonlToPGLite,
    ifToolAvailable,
    hive_update,
    hive_sync,
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "opencode-swarm-plugin",
-   "version": "0.27.3",
+   "version": "0.28.0",
    "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
    "type": "module",
    "main": "./dist/index.js",
@@ -830,4 +830,510 @@ describe("beads integration", () => {
        rmSync(tempProject, { recursive: true, force: true });
      });
    });
+
+   describe("importJsonlToPGLite", () => {
+     it("imports empty JSONL - no-op", async () => {
+       const { importJsonlToPGLite } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project with empty JSONL
+       const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+       writeFileSync(join(hiveDir, "issues.jsonl"), "");
+
+       const result = await importJsonlToPGLite(tempProject);
+
+       expect(result.imported).toBe(0);
+       expect(result.updated).toBe(0);
+       expect(result.errors).toBe(0);
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("imports new records - all inserted", async () => {
+       const { importJsonlToPGLite, getHiveAdapter } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync, unlinkSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project with new cells
+       const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       const cell1 = {
+         id: "bd-import-1",
+         title: "Import test 1",
+         status: "open" as const,
+         priority: 2,
+         issue_type: "task" as const,
+         created_at: new Date().toISOString(),
+         updated_at: new Date().toISOString(),
+         dependencies: [],
+         labels: [],
+         comments: [],
+       };
+
+       const cell2 = {
+         id: "bd-import-2",
+         title: "Import test 2",
+         status: "in_progress" as const,
+         priority: 1,
+         issue_type: "bug" as const,
+         created_at: new Date().toISOString(),
+         updated_at: new Date().toISOString(),
+         dependencies: [],
+         labels: [],
+         comments: [],
+       };
+
+       writeFileSync(
+         join(hiveDir, "issues.jsonl"),
+         JSON.stringify(cell1) + "\n" + JSON.stringify(cell2) + "\n"
+       );
+
+       // CRITICAL: importJsonlToPGLite() calls getHiveAdapter(), whose
+       // auto-migration may already import these cells on first use,
+       // in which case they are counted as updated rather than imported
+       const result = await importJsonlToPGLite(tempProject);
+
+       // Either way, both records must end up accounted for - the
+       // function is idempotent, so imported + updated covers both
+       expect(result.imported + result.updated).toBe(2);
+       expect(result.errors).toBe(0);
+
+       // Verify cells exist in database
+       const adapter = await getHiveAdapter(tempProject);
+       const importedCell1 = await adapter.getCell(tempProject, "bd-import-1");
+       const importedCell2 = await adapter.getCell(tempProject, "bd-import-2");
+
+       expect(importedCell1).toBeDefined();
+       expect(importedCell1!.title).toBe("Import test 1");
+       expect(importedCell2).toBeDefined();
+       expect(importedCell2!.title).toBe("Import test 2");
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("updates existing records", async () => {
+       const { importJsonlToPGLite, getHiveAdapter } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync, unlinkSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project
+       const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Write JSONL FIRST so getHiveAdapter's auto-migration picks it up
+       const originalCell = {
+         id: "bd-update-1",
+         title: "Original title",
+         status: "open",
+         priority: 2,
+         issue_type: "task",
+         created_at: new Date().toISOString(),
+         updated_at: new Date().toISOString(),
+         dependencies: [],
+         labels: [],
+         comments: [],
+       };
+
+       writeFileSync(
+         join(hiveDir, "issues.jsonl"),
+         JSON.stringify(originalCell) + "\n"
+       );
+
+       // Get adapter - this will auto-migrate the original cell
+       const adapter = await getHiveAdapter(tempProject);
+
+       // Now update the JSONL with new data
+       const updatedCell = {
+         ...originalCell,
+         title: "Updated title",
+         description: "New description",
+         status: "in_progress" as const,
+         priority: 0,
+         updated_at: new Date().toISOString(),
+       };
+
+       writeFileSync(
+         join(hiveDir, "issues.jsonl"),
+         JSON.stringify(updatedCell) + "\n"
+       );
+
+       const result = await importJsonlToPGLite(tempProject);
+
+       expect(result.imported).toBe(0);
+       expect(result.updated).toBe(1);
+       expect(result.errors).toBe(0);
+
+       // Verify update
+       const cell = await adapter.getCell(tempProject, "bd-update-1");
+       expect(cell).toBeDefined();
+       expect(cell!.title).toBe("Updated title");
+       expect(cell!.description).toContain("New description");
+       expect(cell!.status).toBe("in_progress");
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("handles mixed new and existing records", async () => {
+       const { importJsonlToPGLite, getHiveAdapter } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project with NO initial JSONL (avoid auto-migration)
+       const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Get adapter first (no auto-migration since no JSONL exists)
+       const adapter = await getHiveAdapter(tempProject);
+
+       // Create existing cell directly via adapter
+       await adapter.createCell(tempProject, {
+         title: "Existing",
+         type: "task",
+         priority: 2,
+       });
+
+       // Get the created cell to find its ID
+       const cells = await adapter.queryCells(tempProject, { limit: 1 });
+       const existingId = cells[0].id;
+
+       // Now write JSONL with updated existing + new cell
+       const existingUpdated = {
+         id: existingId,
+         title: "Existing updated",
+         status: "closed" as const,
+         priority: 2,
+         issue_type: "task" as const,
+         created_at: new Date().toISOString(),
+         updated_at: new Date().toISOString(),
+         closed_at: new Date().toISOString(),
+         dependencies: [],
+         labels: [],
+         comments: [],
+       };
+
+       const newCell = {
+         id: "bd-new",
+         title: "Brand new",
+         status: "open" as const,
+         priority: 1,
+         issue_type: "feature" as const,
+         created_at: new Date().toISOString(),
+         updated_at: new Date().toISOString(),
+         dependencies: [],
+         labels: [],
+         comments: [],
+       };
+
+       writeFileSync(
+         join(hiveDir, "issues.jsonl"),
+         JSON.stringify(existingUpdated) + "\n" + JSON.stringify(newCell) + "\n"
+       );
+
+       const result = await importJsonlToPGLite(tempProject);
+
+       // importJsonlToPGLite() finds:
+       // - existingId already exists (updated)
+       // - bd-new is new (imported)
+       expect(result.imported).toBe(1); // bd-new
+       expect(result.updated).toBe(1); // existing cell
+       expect(result.errors).toBe(0);
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("skips invalid JSON lines and counts errors", async () => {
+       const { importJsonlToPGLite } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project
+       const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       const validCell = {
+         id: "bd-valid",
+         title: "Valid",
+         status: "open",
+         priority: 2,
+         issue_type: "task",
+         created_at: new Date().toISOString(),
+         updated_at: new Date().toISOString(),
+         dependencies: [],
+         labels: [],
+         comments: [],
+       };
+
+       // Mix valid and invalid JSON
+       writeFileSync(
+         join(hiveDir, "issues.jsonl"),
+         JSON.stringify(validCell) + "\n" +
+           "{ invalid json \n" +
+           '{"id":"incomplete"\n'
+       );
+
+       const result = await importJsonlToPGLite(tempProject);
+
+       expect(result.imported).toBe(1); // Only the valid one
+       expect(result.errors).toBe(2); // Two invalid lines
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("handles missing JSONL file gracefully", async () => {
+       const { importJsonlToPGLite } = await import("./hive");
+       const { mkdirSync, rmSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project without issues.jsonl
+       const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       const result = await importJsonlToPGLite(tempProject);
+
+       expect(result.imported).toBe(0);
+       expect(result.updated).toBe(0);
+       expect(result.errors).toBe(0);
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+   });
+
+   describe("mergeHistoricBeads", () => {
+     it("merges empty base file - no changes", async () => {
+       const { mergeHistoricBeads } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project with .hive directory
+       const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Create empty base file
+       writeFileSync(join(hiveDir, "beads.base.jsonl"), "");
+
+       // Create issues.jsonl with one bead
+       const existingBead = { id: "bd-existing", title: "Existing bead" };
+       writeFileSync(join(hiveDir, "issues.jsonl"), JSON.stringify(existingBead) + "\n");
+
+       const result = await mergeHistoricBeads(tempProject);
+
+       expect(result.merged).toBe(0);
+       expect(result.skipped).toBe(0);
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("merges empty issues file - all base records imported", async () => {
+       const { mergeHistoricBeads } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project
+       const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Create base file with 2 beads
+       const baseBead1 = { id: "bd-base-1", title: "Historic bead 1" };
+       const baseBead2 = { id: "bd-base-2", title: "Historic bead 2" };
+       writeFileSync(
+         join(hiveDir, "beads.base.jsonl"),
+         JSON.stringify(baseBead1) + "\n" + JSON.stringify(baseBead2) + "\n"
+       );
+
+       // Empty issues file
+       writeFileSync(join(hiveDir, "issues.jsonl"), "");
+
+       const result = await mergeHistoricBeads(tempProject);
+
+       expect(result.merged).toBe(2);
+       expect(result.skipped).toBe(0);
+
+       // Verify issues.jsonl now has both beads
+       const issuesContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+       const lines = issuesContent.trim().split("\n").filter(l => l);
+       expect(lines).toHaveLength(2);
+
+       const beads = lines.map(line => JSON.parse(line));
+       expect(beads.find(b => b.id === "bd-base-1")).toBeDefined();
+       expect(beads.find(b => b.id === "bd-base-2")).toBeDefined();
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("overlapping IDs - issues.jsonl wins (more recent)", async () => {
+       const { mergeHistoricBeads } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project
+       const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Base has old version of bd-overlap
+       const baseOldVersion = { id: "bd-overlap", title: "Old title", status: "open" };
+       writeFileSync(
+         join(hiveDir, "beads.base.jsonl"),
+         JSON.stringify(baseOldVersion) + "\n"
+       );
+
+       // Issues has new version (updated)
+       const issuesNewVersion = { id: "bd-overlap", title: "New title", status: "closed" };
+       writeFileSync(
+         join(hiveDir, "issues.jsonl"),
+         JSON.stringify(issuesNewVersion) + "\n"
+       );
+
+       const result = await mergeHistoricBeads(tempProject);
+
+       expect(result.merged).toBe(0); // Nothing new to merge
+       expect(result.skipped).toBe(1); // Skipped the old version
+
+       // Verify issues.jsonl still has new version (unchanged)
+       const issuesContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+       const bead = JSON.parse(issuesContent.trim());
+       expect(bead.title).toBe("New title");
+       expect(bead.status).toBe("closed");
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("no overlap - all records combined", async () => {
+       const { mergeHistoricBeads } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project
+       const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Base has 2 beads
+       const baseBead1 = { id: "bd-base-1", title: "Historic 1" };
+       const baseBead2 = { id: "bd-base-2", title: "Historic 2" };
+       writeFileSync(
+         join(hiveDir, "beads.base.jsonl"),
+         JSON.stringify(baseBead1) + "\n" + JSON.stringify(baseBead2) + "\n"
+       );
+
+       // Issues has 2 different beads
+       const issuesBead1 = { id: "bd-current-1", title: "Current 1" };
+       const issuesBead2 = { id: "bd-current-2", title: "Current 2" };
+       writeFileSync(
+         join(hiveDir, "issues.jsonl"),
+         JSON.stringify(issuesBead1) + "\n" + JSON.stringify(issuesBead2) + "\n"
+       );
+
+       const result = await mergeHistoricBeads(tempProject);
+
+       expect(result.merged).toBe(2); // Added 2 from base
+       expect(result.skipped).toBe(0);
+
+       // Verify issues.jsonl now has all 4 beads
+       const issuesContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
+       const lines = issuesContent.trim().split("\n").filter(l => l);
+       expect(lines).toHaveLength(4);
+
+       const beads = lines.map(line => JSON.parse(line));
+       expect(beads.find(b => b.id === "bd-base-1")).toBeDefined();
+       expect(beads.find(b => b.id === "bd-base-2")).toBeDefined();
+       expect(beads.find(b => b.id === "bd-current-1")).toBeDefined();
+       expect(beads.find(b => b.id === "bd-current-2")).toBeDefined();
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("missing base file - graceful handling", async () => {
+       const { mergeHistoricBeads } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project with .hive but NO base file
+       const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Issues exists, base doesn't
+       const issuesBead = { id: "bd-current", title: "Current" };
+       writeFileSync(join(hiveDir, "issues.jsonl"), JSON.stringify(issuesBead) + "\n");
+
+       const result = await mergeHistoricBeads(tempProject);
+
+       // Should return zeros, not throw
+       expect(result.merged).toBe(0);
+       expect(result.skipped).toBe(0);
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+
+     it("missing issues file - creates it from base", async () => {
+       const { mergeHistoricBeads } = await import("./hive");
+       const { mkdirSync, rmSync, writeFileSync, readFileSync, existsSync } = await import("node:fs");
+       const { join } = await import("node:path");
+       const { tmpdir } = await import("node:os");
+
+       // Create temp project with base but NO issues file
+       const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
+       const hiveDir = join(tempProject, ".hive");
+       mkdirSync(hiveDir, { recursive: true });
+
+       // Base exists, issues doesn't
+       const baseBead = { id: "bd-base", title: "Historic" };
+       writeFileSync(
+         join(hiveDir, "beads.base.jsonl"),
+         JSON.stringify(baseBead) + "\n"
+       );
+
+       const issuesPath = join(hiveDir, "issues.jsonl");
+       expect(existsSync(issuesPath)).toBe(false);
+
+       const result = await mergeHistoricBeads(tempProject);
+
+       expect(result.merged).toBe(1);
+       expect(result.skipped).toBe(0);
+
+       // Verify issues.jsonl was created
+       expect(existsSync(issuesPath)).toBe(true);
+       const content = readFileSync(issuesPath, "utf-8");
+       const bead = JSON.parse(content.trim());
+       expect(bead.id).toBe("bd-base");
+
+       // Cleanup
+       rmSync(tempProject, { recursive: true, force: true });
+     });
+   });
  });
package/src/hive.ts CHANGED
@@ -235,6 +235,183 @@ export function ensureHiveDirectory(projectPath: string): void {
    }
  }
 
+ /**
+  * Merge historic beads from beads.base.jsonl into issues.jsonl
+  *
+  * This function reads beads.base.jsonl (historic data) and issues.jsonl (current data),
+  * merges them by ID (issues.jsonl version wins for duplicates), and writes the result
+  * back to issues.jsonl.
+  *
+  * Use case: After migrating from .beads to .hive, you may have a beads.base.jsonl file
+  * containing old beads that should be merged into the current issues.jsonl.
+  *
+  * @param projectPath - Absolute path to the project root
+  * @returns Object with merged and skipped counts
+  */
+ export async function mergeHistoricBeads(projectPath: string): Promise<{merged: number, skipped: number}> {
+   const { readFileSync, writeFileSync, existsSync } = await import("node:fs");
+   const hiveDir = join(projectPath, ".hive");
+   const basePath = join(hiveDir, "beads.base.jsonl");
+   const issuesPath = join(hiveDir, "issues.jsonl");
+
+   // If base file doesn't exist, nothing to merge
+   if (!existsSync(basePath)) {
+     return { merged: 0, skipped: 0 };
+   }
+
+   // Read base file
+   const baseContent = readFileSync(basePath, "utf-8");
+   const baseLines = baseContent.trim().split("\n").filter(l => l);
+   const baseBeads = baseLines.map(line => JSON.parse(line));
+
+   // Read issues file (or treat as empty if missing)
+   let issuesBeads: any[] = [];
+   if (existsSync(issuesPath)) {
+     const issuesContent = readFileSync(issuesPath, "utf-8");
+     const issuesLines = issuesContent.trim().split("\n").filter(l => l);
+     issuesBeads = issuesLines.map(line => JSON.parse(line));
+   }
+
+   // Build set of existing IDs in issues.jsonl
+   const existingIds = new Set(issuesBeads.map(b => b.id));
+
+   // Merge: add beads from base that aren't in issues
+   let merged = 0;
+   let skipped = 0;
+
+   for (const baseBead of baseBeads) {
+     if (existingIds.has(baseBead.id)) {
+       skipped++;
+     } else {
+       issuesBeads.push(baseBead);
+       merged++;
+     }
+   }
+
+   // Write merged result back to issues.jsonl
+   const mergedContent = issuesBeads.map(b => JSON.stringify(b)).join("\n") + "\n";
+   writeFileSync(issuesPath, mergedContent, "utf-8");
+
+   return { merged, skipped };
+ }
+
+ /**
+  * Import cells from .hive/issues.jsonl into PGLite database
+  *
+  * Reads the JSONL file and upserts each record into the cells table
+  * using the HiveAdapter. Provides granular error reporting for invalid lines.
+  *
+  * This function manually parses JSONL line-by-line to gracefully handle
+  * invalid JSON without throwing. Each valid line is imported via the adapter.
+  *
+  * @param projectPath - Absolute path to the project root
+  * @returns Object with imported, updated, and error counts
+  */
+ export async function importJsonlToPGLite(projectPath: string): Promise<{
+   imported: number;
+   updated: number;
+   errors: number;
+ }> {
+   const jsonlPath = join(projectPath, ".hive", "issues.jsonl");
+
+   // Handle missing file gracefully
+   if (!existsSync(jsonlPath)) {
+     return { imported: 0, updated: 0, errors: 0 };
+   }
+
+   // Read JSONL content
+   const jsonlContent = readFileSync(jsonlPath, "utf-8");
+
+   // Handle empty file
+   if (!jsonlContent || jsonlContent.trim() === "") {
+     return { imported: 0, updated: 0, errors: 0 };
+   }
+
+   // Get adapter. Its auto-migration only runs when the database is empty,
+   // which is why each record below is checked for existence before insert.
+   const adapter = await getHiveAdapter(projectPath);
+
+   // Parse JSONL line-by-line, tolerating invalid JSON
+   const lines = jsonlContent.split("\n").filter(l => l.trim());
+   let imported = 0;
+   let updated = 0;
+   let errors = 0;
+
+   for (const line of lines) {
+     try {
+       const cellData = JSON.parse(line);
+
+       // Check if cell exists
+       const existing = await adapter.getCell(projectPath, cellData.id);
+
+       if (existing) {
+         // Update existing cell
+         try {
+           await adapter.updateCell(projectPath, cellData.id, {
+             title: cellData.title,
+             description: cellData.description,
+             priority: cellData.priority,
+             assignee: cellData.assignee,
+           });
+
+           // Update status if needed - use closeCell for 'closed' status
+           if (existing.status !== cellData.status) {
+             if (cellData.status === "closed") {
+               await adapter.closeCell(projectPath, cellData.id, "Imported from JSONL");
+             } else {
+               await adapter.changeCellStatus(projectPath, cellData.id, cellData.status);
+             }
+           }
+
+           updated++;
+         } catch (updateError) {
+           // Update failed - count as error
+           errors++;
+         }
+       } else {
+         // Create new cell - use direct DB insert to preserve ID
+         const db = await adapter.getDatabase();
+
+         const status = cellData.status === "tombstone" ? "closed" : cellData.status;
+         const isClosed = status === "closed";
+         const closedAt = isClosed
+           ? (cellData.closed_at
+             ? new Date(cellData.closed_at).getTime()
+             : new Date(cellData.updated_at).getTime())
+           : null;
+
+         await db.query(
+           `INSERT INTO cells (
+             id, project_key, type, status, title, description, priority,
+             parent_id, assignee, created_at, updated_at, closed_at
+           ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)`,
+           [
+             cellData.id,
+             projectPath,
+             cellData.issue_type,
+             status,
+             cellData.title,
+             cellData.description || null,
+             cellData.priority,
+             cellData.parent_id || null,
+             cellData.assignee || null,
+             new Date(cellData.created_at).getTime(),
+             new Date(cellData.updated_at).getTime(),
+             closedAt,
+           ]
+         );
+
+         imported++;
+       }
+     } catch (error) {
+       // Invalid JSON or import error - count and continue
+       errors++;
+     }
+   }
+
+   return { imported, updated, errors };
+ }
+
  // ============================================================================
  // Adapter Singleton
  // ============================================================================
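
The closed_at backfill near the end of `importJsonlToPGLite` is the fix the changelog calls out: a record arriving as "closed" (or as a "tombstone", which is normalized to "closed") must carry a close timestamp, and records that lack one fall back to `updated_at`. A standalone illustration of that rule with hypothetical records (the `resolveClosedAt` helper is not part of the package; it just isolates the expression above):

```ts
// Mirrors the closedAt derivation in importJsonlToPGLite.
function resolveClosedAt(cell: {
  status: string;
  closed_at?: string;
  updated_at: string;
}): number | null {
  const status = cell.status === "tombstone" ? "closed" : cell.status;
  if (status !== "closed") return null;
  // Fall back to updated_at so closed cells always satisfy the constraint.
  return new Date(cell.closed_at ?? cell.updated_at).getTime();
}

// Hypothetical records:
resolveClosedAt({ status: "closed", closed_at: "2024-06-01T00:00:00Z", updated_at: "2024-06-02T00:00:00Z" });
// → epoch ms of 2024-06-01 (explicit closed_at wins)

resolveClosedAt({ status: "tombstone", updated_at: "2024-06-02T00:00:00Z" });
// → epoch ms of 2024-06-02 (fallback to updated_at)
```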