opencode-swarm-plugin 0.27.2 → 0.27.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +2 -2
- package/CHANGELOG.md +28 -0
- package/bin/swarm.ts +108 -4
- package/dist/hive.d.ts +48 -0
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.js +41 -0
- package/dist/plugin.js +8 -0
- package/package.json +2 -2
- package/src/hive.integration.test.ts +146 -7
- package/src/hive.ts +114 -6
package/.turbo/turbo-build.log
CHANGED
@@ -1,9 +1,9 @@
 $ bun build ./src/index.ts --outdir ./dist --target node --external @electric-sql/pglite --external swarm-mail && bun build ./src/plugin.ts --outfile ./dist/plugin.js --target node --external @electric-sql/pglite --external swarm-mail && tsc
-Bundled 200 modules in
+Bundled 200 modules in 35ms

 index.js 1.19 MB (entry point)

-Bundled 201 modules in
+Bundled 201 modules in 33ms

 plugin.js 1.16 MB (entry point)

package/CHANGELOG.md
CHANGED
@@ -1,5 +1,33 @@
 # opencode-swarm-plugin

+## 0.27.4
+
+### Patch Changes
+
+- [`f23f774`](https://github.com/joelhooks/swarm-tools/commit/f23f774e4b83a3422d8266b6b1ad083daaec03e2) Thanks [@joelhooks](https://github.com/joelhooks)! - Enforce coordinator always spawns workers, never executes work directly
+
+  - Added "Coordinator Role Boundaries" section to /swarm command
+  - Coordinators now explicitly forbidden from editing code, running tests, or making "quick fixes"
+  - Updated Phase 5 to clarify coordinators NEVER reserve files (workers do)
+  - Updated Phase 6 with patterns for both parallel and sequential worker spawning
+  - Worker agent template now confirms it was spawned correctly and to report coordinator violations
+
+## 0.27.3
+
+### Patch Changes
+
+- [`ec23d25`](https://github.com/joelhooks/swarm-tools/commit/ec23d25aeca667c0294a6255fecf11dd7d7fd6b3) Thanks [@joelhooks](https://github.com/joelhooks)! - Add .beads → .hive directory migration support
+
+  - Fix migration version collision: beadsMigration now v7, cellsViewMigration now v8 (was conflicting with streams v6)
+  - Add `checkBeadsMigrationNeeded()` to detect legacy .beads directories
+  - Add `migrateBeadsToHive()` to rename .beads to .hive
+  - Add `ensureHiveDirectory()` to create .hive if missing (called by hive_sync)
+  - Update hive_sync to ensure .hive directory exists before writing
+  - Add migration prompt to `swarm setup` CLI flow
+
+- Updated dependencies [[`ec23d25`](https://github.com/joelhooks/swarm-tools/commit/ec23d25aeca667c0294a6255fecf11dd7d7fd6b3)]:
+  - swarm-mail@0.3.3
+
 ## 0.27.2

 ### Patch Changes

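For orientation, the three helpers added in 0.27.3 are meant to compose in sequence: detect a legacy `.beads` directory, rename it after user confirmation, and guarantee `.hive` exists before writes. A minimal sketch follows; the `opencode-swarm-plugin` import path and the `migrateIfNeeded` wrapper are illustrative only (the helpers themselves do appear in the `dist/index.js` export list further down), not part of this release:

```ts
import {
  checkBeadsMigrationNeeded,
  ensureHiveDirectory,
  migrateBeadsToHive,
} from "opencode-swarm-plugin";

// Illustrative wrapper: detect, migrate, then guarantee .hive exists.
async function migrateIfNeeded(projectPath: string): Promise<void> {
  const check = checkBeadsMigrationNeeded(projectPath);
  if (check.needed) {
    // In the CLI, this rename only happens after the user confirms the prompt.
    const result = await migrateBeadsToHive(projectPath);
    if (!result.migrated) console.warn(result.reason ?? "Migration skipped");
  }
  // Idempotent: creates .hive only if it is missing (hive_sync calls this too).
  ensureHiveDirectory(projectPath);
}
```

This mirrors the `swarm setup` flow added to `bin/swarm.ts` below, which additionally prompts before renaming.
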
package/bin/swarm.ts
CHANGED
@@ -27,6 +27,10 @@ import {
 import { homedir } from "os";
 import { basename, dirname, join } from "path";
 import { fileURLToPath } from "url";
+import {
+  checkBeadsMigrationNeeded,
+  migrateBeadsToHive,
+} from "../src/hive";

 const __dirname = dirname(fileURLToPath(import.meta.url));
 const pkg = JSON.parse(
@@ -851,6 +855,37 @@ You are a swarm coordinator. Your job is to clarify the task, decompose it into

 $ARGUMENTS

+## CRITICAL: Coordinator Role Boundaries
+
+**⚠️ COORDINATORS NEVER EXECUTE WORK DIRECTLY**
+
+Your role is **ONLY** to:
+1. **Clarify** - Ask questions to understand scope
+2. **Decompose** - Break into subtasks with clear boundaries
+3. **Spawn** - Create worker agents for ALL subtasks
+4. **Monitor** - Check progress, unblock, mediate conflicts
+5. **Verify** - Confirm completion, run final checks
+
+**YOU DO NOT:**
+- Read implementation files (only metadata/structure for planning)
+- Edit code directly
+- Run tests yourself (workers run tests)
+- Implement features
+- Fix bugs inline
+- Make "quick fixes" yourself
+
+**ALWAYS spawn workers, even for sequential tasks.** Sequential just means spawn them in order and wait for each to complete before spawning the next.
+
+### Why This Matters
+
+| Coordinator Work | Worker Work | Consequence of Mixing |
+|-----------------|-------------|----------------------|
+| Sonnet context ($$$) | Disposable context | Expensive context waste |
+| Long-lived state | Task-scoped state | Context exhaustion |
+| Orchestration concerns | Implementation concerns | Mixed concerns |
+| No checkpoints | Checkpoints enabled | No recovery |
+| No learning signals | Outcomes tracked | No improvement |
+
 ## Workflow

 ### Phase 0: Socratic Planning (INTERACTIVE - unless --fast)
@@ -919,15 +954,39 @@ swarm_validate_decomposition(response="<CellTree JSON>")
 ### Phase 4: Create Beads
 \`hive_create_epic(epic_title="<task>", subtasks=[...])\`

-### Phase 5: Reserve Files
-
+### Phase 5: DO NOT Reserve Files
+
+> **⚠️ Coordinator NEVER reserves files.** Workers reserve their own files.
+> If coordinator reserves, workers get blocked and swarm stalls.

-### Phase 6: Spawn
+### Phase 6: Spawn Workers for ALL Subtasks (MANDATORY)
+
+> **⚠️ ALWAYS spawn workers, even for sequential tasks.**
+> - Parallel tasks: Spawn ALL in a single message
+> - Sequential tasks: Spawn one, wait for completion, spawn next
+
+**For parallel work:**
 \`\`\`
-
+// Single message with multiple Task calls
+swarm_spawn_subtask(bead_id_1, epic_id, title_1, files_1, shared_context, project_path="$PWD")
+Task(subagent_type="swarm/worker", prompt="<from above>")
+swarm_spawn_subtask(bead_id_2, epic_id, title_2, files_2, shared_context, project_path="$PWD")
 Task(subagent_type="swarm/worker", prompt="<from above>")
 \`\`\`

+**For sequential work:**
+\`\`\`
+// Spawn worker 1, wait for completion
+swarm_spawn_subtask(bead_id_1, ...)
+const result1 = await Task(subagent_type="swarm/worker", prompt="<from above>")
+
+// THEN spawn worker 2 with context from worker 1
+swarm_spawn_subtask(bead_id_2, ..., shared_context="Worker 1 completed: " + result1)
+const result2 = await Task(subagent_type="swarm/worker", prompt="<from above>")
+\`\`\`
+
+**NEVER do the work yourself.** Even if it seems faster, spawn a worker.
+
 **IMPORTANT:** Pass \`project_path\` to \`swarm_spawn_subtask\` so workers can call \`swarmmail_init\`.

 ### Phase 7: Monitor
@@ -1031,6 +1090,12 @@ model: ${model}

 You are a swarm worker agent. Your prompt contains a **MANDATORY SURVIVAL CHECKLIST** - follow it IN ORDER.

+## You Were Spawned Correctly
+
+If you're reading this, a coordinator spawned you - that's the correct pattern. Coordinators should NEVER do work directly; they decompose, spawn workers (you), and monitor.
+
+**If you ever see a coordinator editing code or running tests directly, that's a bug.** Report it.
+
 ## CRITICAL: Read Your Prompt Carefully

 Your Task prompt contains detailed instructions including:
@@ -1490,6 +1555,45 @@ async function setup() {
 }
 }

+  // Check for .beads → .hive migration
+  const cwd = process.cwd();
+  const migrationCheck = checkBeadsMigrationNeeded(cwd);
+  if (migrationCheck.needed) {
+    p.log.step("Legacy .beads directory detected");
+    p.log.message(dim(" Found: " + migrationCheck.beadsPath));
+
+    const shouldMigrate = await p.confirm({
+      message: "Migrate .beads to .hive? (recommended)",
+      initialValue: true,
+    });
+
+    if (p.isCancel(shouldMigrate)) {
+      p.cancel("Setup cancelled");
+      process.exit(0);
+    }
+
+    if (shouldMigrate) {
+      const migrateSpinner = p.spinner();
+      migrateSpinner.start("Migrating .beads to .hive...");
+
+      try {
+        const result = await migrateBeadsToHive(cwd);
+        if (result.migrated) {
+          migrateSpinner.stop("Migration complete");
+          p.log.success("Renamed .beads/ → .hive/");
+        } else {
+          migrateSpinner.stop("Migration skipped");
+          p.log.warn(result.reason || "Unknown reason");
+        }
+      } catch (error) {
+        migrateSpinner.stop("Migration failed");
+        p.log.error(error instanceof Error ? error.message : String(error));
+      }
+    } else {
+      p.log.warn("Skipping migration - .beads will continue to work but is deprecated");
+    }
+  }
+
 // Model selection
 p.log.step("Configure swarm agents...");

package/dist/hive.d.ts
CHANGED
@@ -32,6 +32,54 @@ export declare class HiveValidationError extends Error {
     constructor(message: string, zodError: z.ZodError);
 }
 export declare const BeadValidationError: typeof HiveValidationError;
+/**
+ * Result of checking if .beads → .hive migration is needed
+ */
+export interface MigrationCheckResult {
+    /** Whether migration is needed */
+    needed: boolean;
+    /** Path to .beads directory if it exists */
+    beadsPath?: string;
+}
+/**
+ * Result of migrating .beads → .hive
+ */
+export interface MigrationResult {
+    /** Whether migration was performed */
+    migrated: boolean;
+    /** Reason if migration was skipped */
+    reason?: string;
+}
+/**
+ * Check if .beads → .hive migration is needed
+ *
+ * Migration is needed when:
+ * - .beads directory exists
+ * - .hive directory does NOT exist
+ *
+ * @param projectPath - Absolute path to the project root
+ * @returns MigrationCheckResult indicating if migration is needed
+ */
+export declare function checkBeadsMigrationNeeded(projectPath: string): MigrationCheckResult;
+/**
+ * Migrate .beads directory to .hive
+ *
+ * This function renames .beads to .hive. It should only be called
+ * after user confirmation via CLI prompt.
+ *
+ * @param projectPath - Absolute path to the project root
+ * @returns MigrationResult indicating success or skip reason
+ */
+export declare function migrateBeadsToHive(projectPath: string): Promise<MigrationResult>;
+/**
+ * Ensure .hive directory exists
+ *
+ * Creates .hive directory if it doesn't exist. This is idempotent
+ * and safe to call multiple times.
+ *
+ * @param projectPath - Absolute path to the project root
+ */
+export declare function ensureHiveDirectory(projectPath: string): void;
 /**
  * Get or create a HiveAdapter instance for a project
  * Exported for testing - allows tests to verify state directly

package/dist/hive.d.ts.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"file":"hive.d.ts","sourceRoot":"","sources":["../src/hive.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAIL,KAAK,WAAW,EAGjB,MAAM,YAAY,CAAC;AAepB;;;;;GAKG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAE/D;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,IAAI,MAAM,CAEhD;AAGD,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAChE,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAuChE;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;aAGhB,OAAO,EAAE,MAAM;aACf,QAAQ,CAAC,EAAE,MAAM;aACjB,MAAM,CAAC,EAAE,MAAM;gBAH/B,OAAO,EAAE,MAAM,EACC,OAAO,EAAE,MAAM,EACf,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,MAAM,CAAC,EAAE,MAAM,YAAA;CAKlC;AAGD,eAAO,MAAM,SAAS,kBAAY,CAAC;AAEnC;;GAEG;AACH,qBAAa,mBAAoB,SAAQ,KAAK;aAG1B,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBADpC,OAAO,EAAE,MAAM,EACC,QAAQ,EAAE,CAAC,CAAC,QAAQ;CAKvC;AAGD,eAAO,MAAM,mBAAmB,4BAAsB,CAAC;
+
{"version":3,"file":"hive.d.ts","sourceRoot":"","sources":["../src/hive.ts"],"names":[],"mappings":"AAgBA,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AACxB,OAAO,EAIL,KAAK,WAAW,EAGjB,MAAM,YAAY,CAAC;AAepB;;;;;GAKG;AACH,wBAAgB,uBAAuB,CAAC,SAAS,EAAE,MAAM,GAAG,IAAI,CAE/D;AAED;;;GAGG;AACH,wBAAgB,uBAAuB,IAAI,MAAM,CAEhD;AAGD,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAChE,eAAO,MAAM,wBAAwB,gCAA0B,CAAC;AAuChE;;GAEG;AACH,qBAAa,SAAU,SAAQ,KAAK;aAGhB,OAAO,EAAE,MAAM;aACf,QAAQ,CAAC,EAAE,MAAM;aACjB,MAAM,CAAC,EAAE,MAAM;gBAH/B,OAAO,EAAE,MAAM,EACC,OAAO,EAAE,MAAM,EACf,QAAQ,CAAC,EAAE,MAAM,YAAA,EACjB,MAAM,CAAC,EAAE,MAAM,YAAA;CAKlC;AAGD,eAAO,MAAM,SAAS,kBAAY,CAAC;AAEnC;;GAEG;AACH,qBAAa,mBAAoB,SAAQ,KAAK;aAG1B,QAAQ,EAAE,CAAC,CAAC,QAAQ;gBADpC,OAAO,EAAE,MAAM,EACC,QAAQ,EAAE,CAAC,CAAC,QAAQ;CAKvC;AAGD,eAAO,MAAM,mBAAmB,4BAAsB,CAAC;AAMvD;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,kCAAkC;IAClC,MAAM,EAAE,OAAO,CAAC;IAChB,4CAA4C;IAC5C,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED;;GAEG;AACH,MAAM,WAAW,eAAe;IAC9B,sCAAsC;IACtC,QAAQ,EAAE,OAAO,CAAC;IAClB,sCAAsC;IACtC,MAAM,CAAC,EAAE,MAAM,CAAC;CACjB;AAED;;;;;;;;;GASG;AACH,wBAAgB,yBAAyB,CAAC,WAAW,EAAE,MAAM,GAAG,oBAAoB,CAgBnF;AAED;;;;;;;;GAQG;AACH,wBAAsB,kBAAkB,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,eAAe,CAAC,CAyBtF;AAED;;;;;;;GAOG;AACH,wBAAgB,mBAAmB,CAAC,WAAW,EAAE,MAAM,GAAG,IAAI,CAO7D;AAYD;;;;;;GAMG;AACH,wBAAsB,cAAc,CAAC,UAAU,EAAE,MAAM,GAAG,OAAO,CAAC,WAAW,CAAC,CAiB7E;AAGD,eAAO,MAAM,eAAe,uBAAiB,CAAC;AA+E9C;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;CA+CtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAgJ3B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;CAiDrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;CA+DtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;CA6BrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CA4BrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;CAwBrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,SAAS;;;;;;;;CAwIpB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,gBAAgB;;;;;;;;;;CA8C3B,CAAC;AAMH,eAAO,MAAM,SAAS;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUrB,CAAC;AAkCF;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;;;;CAMvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAM5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;;;;;;;;;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,YAAY;;;;;;;;;;;;;;;;;;;CAMvB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,WAAW;;;;CAMtB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;CAMrB,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,iBAAiB;;;;;;;;;;CAM5B,CAAC;AAEH;;GAEG;AACH,eAAO,MAAM,UAAU;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAUtB,CAAC"}
package/dist/index.js
CHANGED
@@ -27741,6 +27741,43 @@ class HiveValidationError extends Error {
 }
 }
 var BeadValidationError = HiveValidationError;
+function checkBeadsMigrationNeeded(projectPath) {
+  const beadsDir = join(projectPath, ".beads");
+  const hiveDir = join(projectPath, ".hive");
+  if (existsSync(hiveDir)) {
+    return { needed: false };
+  }
+  if (existsSync(beadsDir)) {
+    return { needed: true, beadsPath: beadsDir };
+  }
+  return { needed: false };
+}
+async function migrateBeadsToHive(projectPath) {
+  const beadsDir = join(projectPath, ".beads");
+  const hiveDir = join(projectPath, ".hive");
+  if (existsSync(hiveDir)) {
+    return {
+      migrated: false,
+      reason: ".hive directory already exists - skipping migration to avoid data loss"
+    };
+  }
+  if (!existsSync(beadsDir)) {
+    return {
+      migrated: false,
+      reason: ".beads directory not found - nothing to migrate"
+    };
+  }
+  const { renameSync } = await import("node:fs");
+  renameSync(beadsDir, hiveDir);
+  return { migrated: true };
+}
+function ensureHiveDirectory(projectPath) {
+  const hiveDir = join(projectPath, ".hive");
+  if (!existsSync(hiveDir)) {
+    const { mkdirSync } = __require("node:fs");
+    mkdirSync(hiveDir, { recursive: true });
+  }
+}
 var adapterCache = new Map;
 async function getHiveAdapter(projectKey) {
 if (adapterCache.has(projectKey)) {
@@ -28075,6 +28112,7 @@ var hive_sync = tool({
 }
 }
 };
+ensureHiveDirectory(projectKey);
 const flushManager = new FlushManager({
 adapter,
 projectKey,
@@ -35959,6 +35997,7 @@ export {
   requireTool,
   repoCrawlTools,
   parseFrontmatter,
+  migrateBeadsToHive,
   mcpCallWithAutoInit,
   mandateTools,
   mandateSchemas,
@@ -36012,6 +36051,7 @@ export {
   extractJsonFromText,
   evaluatePromotion,
   evaluateBatchPromotions,
+  ensureHiveDirectory,
   discoverSkills,
   src_default as default,
   createStorageWithFallback,
@@ -36024,6 +36064,7 @@ export {
   createAgentMailError,
   clearSessionState,
   checkTool,
+  checkBeadsMigrationNeeded,
   checkAllTools,
   beads_update,
   beads_sync,

package/dist/plugin.js
CHANGED
@@ -27633,6 +27633,13 @@ class HiveError extends Error {
 this.name = "HiveError";
 }
 }
+function ensureHiveDirectory(projectPath) {
+  const hiveDir = join(projectPath, ".hive");
+  if (!existsSync(hiveDir)) {
+    const { mkdirSync } = __require("node:fs");
+    mkdirSync(hiveDir, { recursive: true });
+  }
+}
 var adapterCache = new Map;
 async function getHiveAdapter(projectKey) {
 if (adapterCache.has(projectKey)) {
@@ -27966,6 +27973,7 @@ var hive_sync = tool({
 }
 }
 };
+ensureHiveDirectory(projectKey);
 const flushManager = new FlushManager({
 adapter,
 projectKey,

package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "opencode-swarm-plugin",
-  "version": "0.27.2",
+  "version": "0.27.4",
   "description": "Multi-agent swarm coordination for OpenCode with learning capabilities, beads integration, and Agent Mail",
   "type": "module",
   "main": "./dist/index.js",
@@ -33,7 +33,7 @@
     "@opencode-ai/plugin": "^1.0.134",
     "gray-matter": "^4.0.3",
     "ioredis": "^5.4.1",
-    "swarm-mail": "0.3.
+    "swarm-mail": "0.3.3",
     "zod": "4.1.8"
   },
   "devDependencies": {

package/src/hive.integration.test.ts
CHANGED
@@ -20,13 +20,6 @@ import {
   getHiveAdapter,
   setHiveWorkingDirectory,
   // Legacy aliases for backward compatibility tests
-  hive_create,
-  hive_create_epic,
-  hive_query,
-  hive_update,
-  hive_close,
-  hive_start,
-  hive_ready,
   beads_link_thread,
   BeadError,
   getBeadsAdapter,
@@ -691,4 +684,150 @@ describe("beads integration", () => {
 }
 });
 });
+
+describe("Directory Migration (.beads → .hive)", () => {
+  it("checkBeadsMigrationNeeded detects .beads without .hive", async () => {
+    const { checkBeadsMigrationNeeded } = await import("./hive");
+    const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+
+    // Create temp project with .beads directory only
+    const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
+    const beadsDir = join(tempProject, ".beads");
+
+    mkdirSync(beadsDir, { recursive: true });
+    writeFileSync(join(beadsDir, "issues.jsonl"), '{"id":"bd-test","title":"Test"}');
+
+    const result = checkBeadsMigrationNeeded(tempProject);
+
+    expect(result.needed).toBe(true);
+    expect(result.beadsPath).toBe(beadsDir);
+
+    // Cleanup
+    rmSync(tempProject, { recursive: true, force: true });
+  });
+
+  it("checkBeadsMigrationNeeded returns false if .hive exists", async () => {
+    const { checkBeadsMigrationNeeded } = await import("./hive");
+    const { mkdirSync, rmSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+
+    // Create temp project with .hive directory
+    const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
+    const hiveDir = join(tempProject, ".hive");
+
+    mkdirSync(hiveDir, { recursive: true });
+
+    const result = checkBeadsMigrationNeeded(tempProject);
+
+    expect(result.needed).toBe(false);
+
+    // Cleanup
+    rmSync(tempProject, { recursive: true, force: true });
+  });
+
+  it("migrateBeadsToHive renames .beads to .hive", async () => {
+    const { migrateBeadsToHive } = await import("./hive");
+    const { mkdirSync, existsSync, rmSync, writeFileSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+
+    // Create temp project with .beads directory
+    const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
+    const beadsDir = join(tempProject, ".beads");
+    const hiveDir = join(tempProject, ".hive");
+
+    mkdirSync(beadsDir, { recursive: true });
+    writeFileSync(join(beadsDir, "issues.jsonl"), '{"id":"bd-test","title":"Test"}');
+    writeFileSync(join(beadsDir, "config.yaml"), "version: 1");
+
+    // Run migration (called after user confirms in CLI)
+    const result = await migrateBeadsToHive(tempProject);
+
+    // Verify .beads renamed to .hive
+    expect(result.migrated).toBe(true);
+    expect(existsSync(hiveDir)).toBe(true);
+    expect(existsSync(beadsDir)).toBe(false);
+    expect(existsSync(join(hiveDir, "issues.jsonl"))).toBe(true);
+    expect(existsSync(join(hiveDir, "config.yaml"))).toBe(true);
+
+    // Cleanup
+    rmSync(tempProject, { recursive: true, force: true });
+  });
+
+  it("migrateBeadsToHive skips if .hive already exists", async () => {
+    const { migrateBeadsToHive } = await import("./hive");
+    const { mkdirSync, existsSync, rmSync, writeFileSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+
+    // Create temp project with BOTH .beads and .hive
+    const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
+    const beadsDir = join(tempProject, ".beads");
+    const hiveDir = join(tempProject, ".hive");
+
+    mkdirSync(beadsDir, { recursive: true });
+    mkdirSync(hiveDir, { recursive: true });
+    writeFileSync(join(beadsDir, "issues.jsonl"), '{"id":"bd-old"}');
+    writeFileSync(join(hiveDir, "issues.jsonl"), '{"id":"bd-new"}');
+
+    // Run migration - should skip
+    const result = await migrateBeadsToHive(tempProject);
+
+    // Verify both still exist (no migration)
+    expect(result.migrated).toBe(false);
+    expect(result.reason).toContain("already exists");
+    expect(existsSync(beadsDir)).toBe(true);
+    expect(existsSync(hiveDir)).toBe(true);
+
+    // Cleanup
+    rmSync(tempProject, { recursive: true, force: true });
+  });
+
+  it("ensureHiveDirectory creates .hive if missing", async () => {
+    const { ensureHiveDirectory } = await import("./hive");
+    const { mkdirSync, existsSync, rmSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+
+    // Create empty temp project
+    const tempProject = join(tmpdir(), `hive-ensure-test-${Date.now()}`);
+    mkdirSync(tempProject, { recursive: true });
+
+    const hiveDir = join(tempProject, ".hive");
+    expect(existsSync(hiveDir)).toBe(false);
+
+    // Ensure creates it
+    ensureHiveDirectory(tempProject);
+
+    expect(existsSync(hiveDir)).toBe(true);
+
+    // Cleanup
+    rmSync(tempProject, { recursive: true, force: true });
+  });
+
+  it("ensureHiveDirectory is idempotent", async () => {
+    const { ensureHiveDirectory } = await import("./hive");
+    const { mkdirSync, existsSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
+    const { join } = await import("node:path");
+    const { tmpdir } = await import("node:os");
+
+    // Create temp project with existing .hive
+    const tempProject = join(tmpdir(), `hive-ensure-test-${Date.now()}`);
+    const hiveDir = join(tempProject, ".hive");
+    mkdirSync(hiveDir, { recursive: true });
+    writeFileSync(join(hiveDir, "issues.jsonl"), '{"id":"existing"}');
+
+    // Ensure doesn't overwrite
+    ensureHiveDirectory(tempProject);
+
+    expect(existsSync(hiveDir)).toBe(true);
+    expect(readFileSync(join(hiveDir, "issues.jsonl"), "utf-8")).toBe('{"id":"existing"}');
+
+    // Cleanup
+    rmSync(tempProject, { recursive: true, force: true });
+  });
+});
 });

package/src/hive.ts
CHANGED
@@ -130,6 +130,111 @@ export class HiveValidationError extends Error {
 // Legacy alias for backward compatibility
 export const BeadValidationError = HiveValidationError;

+// ============================================================================
+// Directory Migration (.beads → .hive)
+// ============================================================================
+
+/**
+ * Result of checking if .beads → .hive migration is needed
+ */
+export interface MigrationCheckResult {
+  /** Whether migration is needed */
+  needed: boolean;
+  /** Path to .beads directory if it exists */
+  beadsPath?: string;
+}
+
+/**
+ * Result of migrating .beads → .hive
+ */
+export interface MigrationResult {
+  /** Whether migration was performed */
+  migrated: boolean;
+  /** Reason if migration was skipped */
+  reason?: string;
+}
+
+/**
+ * Check if .beads → .hive migration is needed
+ *
+ * Migration is needed when:
+ * - .beads directory exists
+ * - .hive directory does NOT exist
+ *
+ * @param projectPath - Absolute path to the project root
+ * @returns MigrationCheckResult indicating if migration is needed
+ */
+export function checkBeadsMigrationNeeded(projectPath: string): MigrationCheckResult {
+  const beadsDir = join(projectPath, ".beads");
+  const hiveDir = join(projectPath, ".hive");
+
+  // If .hive already exists, no migration needed
+  if (existsSync(hiveDir)) {
+    return { needed: false };
+  }
+
+  // If .beads exists but .hive doesn't, migration is needed
+  if (existsSync(beadsDir)) {
+    return { needed: true, beadsPath: beadsDir };
+  }
+
+  // Neither exists - no migration needed
+  return { needed: false };
+}
+
+/**
+ * Migrate .beads directory to .hive
+ *
+ * This function renames .beads to .hive. It should only be called
+ * after user confirmation via CLI prompt.
+ *
+ * @param projectPath - Absolute path to the project root
+ * @returns MigrationResult indicating success or skip reason
+ */
+export async function migrateBeadsToHive(projectPath: string): Promise<MigrationResult> {
+  const beadsDir = join(projectPath, ".beads");
+  const hiveDir = join(projectPath, ".hive");
+
+  // Check if .hive already exists - skip migration
+  if (existsSync(hiveDir)) {
+    return {
+      migrated: false,
+      reason: ".hive directory already exists - skipping migration to avoid data loss"
+    };
+  }
+
+  // Check if .beads exists
+  if (!existsSync(beadsDir)) {
+    return {
+      migrated: false,
+      reason: ".beads directory not found - nothing to migrate"
+    };
+  }
+
+  // Perform the rename
+  const { renameSync } = await import("node:fs");
+  renameSync(beadsDir, hiveDir);
+
+  return { migrated: true };
+}
+
+/**
+ * Ensure .hive directory exists
+ *
+ * Creates .hive directory if it doesn't exist. This is idempotent
+ * and safe to call multiple times.
+ *
+ * @param projectPath - Absolute path to the project root
+ */
+export function ensureHiveDirectory(projectPath: string): void {
+  const hiveDir = join(projectPath, ".hive");
+
+  if (!existsSync(hiveDir)) {
+    const { mkdirSync } = require("node:fs");
+    mkdirSync(hiveDir, { recursive: true });
+  }
+}
+
 // ============================================================================
 // Adapter Singleton
 // ============================================================================
@@ -714,7 +819,10 @@ export const hive_sync = tool({
 }
 };

-// 1.
+// 1. Ensure .hive directory exists before writing
+ensureHiveDirectory(projectKey);
+
+// 2. Flush cells to JSONL using FlushManager
 const flushManager = new FlushManager({
 adapter,
 projectKey,
@@ -731,7 +839,7 @@ export const hive_sync = tool({
 return "No cells to sync";
 }

-//
+// 3. Check if there are changes to commit
 const hiveStatusResult = await runGitCommand([
 "status",
 "--porcelain",
@@ -740,7 +848,7 @@ export const hive_sync = tool({
 const hasChanges = hiveStatusResult.stdout.trim() !== "";

 if (hasChanges) {
-//
+// 4. Stage .hive changes
 const addResult = await runGitCommand(["add", ".hive/"]);
 if (addResult.exitCode !== 0) {
 throw new HiveError(
@@ -750,7 +858,7 @@ export const hive_sync = tool({
 );
 }

-//
+// 5. Commit
 const commitResult = await withTimeout(
 runGitCommand(["commit", "-m", "chore: sync hive"]),
 TIMEOUT_MS,
@@ -768,7 +876,7 @@ export const hive_sync = tool({
 }
 }

-//
+// 6. Pull if requested
 if (autoPull) {
 const pullResult = await withTimeout(
 runGitCommand(["pull", "--rebase"]),
@@ -785,7 +893,7 @@ export const hive_sync = tool({
 }
 }

-//
+// 7. Push
 const pushResult = await withTimeout(
 runGitCommand(["push"]),
 TIMEOUT_MS,
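
Condensing the renumbered steps above, the hive_sync write path after this change is: ensure `.hive` exists, flush cells, then stage, commit, optionally pull with rebase, and push. A hypothetical sketch of that ordering (simplified stand-ins for `FlushManager` and `runGitCommand`; the `git status` path argument and the return strings are illustrative, not the tool's actual implementation):

```ts
import { execFile } from "node:child_process";
import { existsSync, mkdirSync } from "node:fs";
import { join } from "node:path";
import { promisify } from "node:util";

const run = promisify(execFile);

// Hypothetical stand-in for runGitCommand
const git = (cwd: string, ...args: string[]) => run("git", args, { cwd });

// Hypothetical condensation of the hive_sync ordering after 0.27.3
async function hiveSyncSketch(
  projectPath: string,
  flushCells: () => Promise<number>, // stand-in for FlushManager
  autoPull = false,
): Promise<string> {
  // 1. Ensure .hive exists before writing (the new ensureHiveDirectory step)
  const hiveDir = join(projectPath, ".hive");
  if (!existsSync(hiveDir)) mkdirSync(hiveDir, { recursive: true });

  // 2. Flush cells to JSONL
  const flushed = await flushCells();
  if (flushed === 0) return "No cells to sync";

  // 3.-5. Stage and commit only when .hive actually changed
  const { stdout } = await git(projectPath, "status", "--porcelain", ".hive/");
  if (stdout.trim() !== "") {
    await git(projectPath, "add", ".hive/");
    await git(projectPath, "commit", "-m", "chore: sync hive");
  }

  // 6.-7. Optionally rebase-pull, then push
  if (autoPull) await git(projectPath, "pull", "--rebase");
  await git(projectPath, "push");
  return "Synced";
}
```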