opencode-swarm-plugin 0.26.0 → 0.27.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. package/.turbo/turbo-build.log +4 -4
  2. package/CHANGELOG.md +37 -0
  3. package/README.md +43 -46
  4. package/bin/swarm.ts +8 -8
  5. package/dist/compaction-hook.d.ts +57 -0
  6. package/dist/compaction-hook.d.ts.map +1 -0
  7. package/dist/hive.d.ts +741 -0
  8. package/dist/hive.d.ts.map +1 -0
  9. package/dist/index.d.ts +139 -23
  10. package/dist/index.d.ts.map +1 -1
  11. package/dist/index.js +1418 -387
  12. package/dist/learning.d.ts +9 -9
  13. package/dist/plugin.js +1240 -386
  14. package/dist/schemas/cell-events.d.ts +1352 -0
  15. package/dist/schemas/{bead-events.d.ts.map → cell-events.d.ts.map} +1 -1
  16. package/dist/schemas/{bead.d.ts → cell.d.ts} +173 -29
  17. package/dist/schemas/cell.d.ts.map +1 -0
  18. package/dist/schemas/index.d.ts +11 -7
  19. package/dist/schemas/index.d.ts.map +1 -1
  20. package/dist/structured.d.ts +17 -7
  21. package/dist/structured.d.ts.map +1 -1
  22. package/dist/swarm-decompose.d.ts +5 -5
  23. package/dist/swarm-orchestrate.d.ts +16 -2
  24. package/dist/swarm-orchestrate.d.ts.map +1 -1
  25. package/dist/swarm-prompts.d.ts +9 -9
  26. package/dist/swarm-prompts.d.ts.map +1 -1
  27. package/dist/swarm-review.d.ts +210 -0
  28. package/dist/swarm-review.d.ts.map +1 -0
  29. package/dist/swarm-worktree.d.ts +185 -0
  30. package/dist/swarm-worktree.d.ts.map +1 -0
  31. package/dist/swarm.d.ts +7 -0
  32. package/dist/swarm.d.ts.map +1 -1
  33. package/dist/tool-availability.d.ts +3 -2
  34. package/dist/tool-availability.d.ts.map +1 -1
  35. package/docs/analysis-socratic-planner-pattern.md +1 -1
  36. package/docs/planning/ADR-007-swarm-enhancements-worktree-review.md +168 -0
  37. package/docs/testing/context-recovery-test.md +2 -2
  38. package/evals/README.md +2 -2
  39. package/evals/scorers/index.ts +7 -7
  40. package/examples/commands/swarm.md +21 -23
  41. package/examples/plugin-wrapper-template.ts +310 -44
  42. package/examples/skills/{beads-workflow → hive-workflow}/SKILL.md +40 -40
  43. package/examples/skills/swarm-coordination/SKILL.md +1 -1
  44. package/global-skills/swarm-coordination/SKILL.md +14 -14
  45. package/global-skills/swarm-coordination/references/coordinator-patterns.md +3 -3
  46. package/package.json +2 -2
  47. package/src/compaction-hook.ts +161 -0
  48. package/src/{beads.integration.test.ts → hive.integration.test.ts} +92 -80
  49. package/src/hive.ts +1017 -0
  50. package/src/index.ts +57 -20
  51. package/src/learning.ts +9 -9
  52. package/src/output-guardrails.test.ts +4 -4
  53. package/src/output-guardrails.ts +9 -9
  54. package/src/planning-guardrails.test.ts +1 -1
  55. package/src/planning-guardrails.ts +1 -1
  56. package/src/schemas/{bead-events.test.ts → cell-events.test.ts} +83 -77
  57. package/src/schemas/cell-events.ts +807 -0
  58. package/src/schemas/{bead.ts → cell.ts} +95 -41
  59. package/src/schemas/evaluation.ts +1 -1
  60. package/src/schemas/index.ts +90 -18
  61. package/src/schemas/swarm-context.ts +2 -2
  62. package/src/structured.test.ts +15 -15
  63. package/src/structured.ts +18 -11
  64. package/src/swarm-decompose.ts +23 -23
  65. package/src/swarm-orchestrate.ts +135 -21
  66. package/src/swarm-prompts.ts +43 -43
  67. package/src/swarm-review.test.ts +702 -0
  68. package/src/swarm-review.ts +696 -0
  69. package/src/swarm-worktree.test.ts +501 -0
  70. package/src/swarm-worktree.ts +575 -0
  71. package/src/swarm.integration.test.ts +12 -12
  72. package/src/tool-availability.ts +36 -3
  73. package/dist/beads.d.ts +0 -383
  74. package/dist/beads.d.ts.map +0 -1
  75. package/dist/schemas/bead-events.d.ts +0 -698
  76. package/dist/schemas/bead.d.ts.map +0 -1
  77. package/src/beads.ts +0 -800
  78. package/src/schemas/bead-events.ts +0 -583
package/src/hive.ts ADDED
@@ -0,0 +1,1017 @@
1
+ /**
2
+ * Hive Module - Type-safe wrappers using HiveAdapter
3
+ *
4
+ * This module provides validated, type-safe operations for the Hive
5
+ * issue tracker using the HiveAdapter from swarm-mail.
6
+ *
7
+ * Key principles:
8
+ * - Use HiveAdapter for all operations (no CLI commands)
9
+ * - Validate all inputs with Zod schemas
10
+ * - Throw typed errors on failure
11
+ * - Support atomic epic creation with rollback
12
+ *
13
+ * IMPORTANT: Call setHiveWorkingDirectory() before using tools to ensure
14
+ * operations run in the correct project directory.
15
+ */
16
+ import { tool } from "@opencode-ai/plugin";
17
+ import { z } from "zod";
18
+ import {
19
+ createHiveAdapter,
20
+ FlushManager,
21
+ importFromJSONL,
22
+ type HiveAdapter,
23
+ type Cell as AdapterCell,
24
+ getSwarmMail,
25
+ } from "swarm-mail";
26
+ import { existsSync, readFileSync } from "node:fs";
27
+ import { join } from "node:path";
28
+
29
+ // ============================================================================
30
+ // Working Directory Configuration
31
+ // ============================================================================
32
+
33
+ /**
34
+ * Module-level working directory for hive commands.
35
+ * Set this via setHiveWorkingDirectory() before using tools.
36
+ * If not set, commands run in process.cwd() which may be wrong for plugins.
37
+ */
38
+ let hiveWorkingDirectory: string | null = null;
39
+
40
+ /**
41
+ * Set the working directory for all hive commands.
42
+ * Call this from the plugin initialization with the project directory.
43
+ *
44
+ * @param directory - Absolute path to the project directory
45
+ */
46
+ export function setHiveWorkingDirectory(directory: string): void {
47
+ hiveWorkingDirectory = directory;
48
+ }
49
+
50
+ /**
51
+ * Get the current working directory for hive commands.
52
+ * Returns the configured directory or process.cwd() as fallback.
53
+ */
54
+ export function getHiveWorkingDirectory(): string {
55
+ return hiveWorkingDirectory || process.cwd();
56
+ }
57
+
58
+ // Legacy aliases for backward compatibility
59
+ export const setBeadsWorkingDirectory = setHiveWorkingDirectory;
60
+ export const getBeadsWorkingDirectory = getHiveWorkingDirectory;
61
+
62
+ /**
63
+ * Run a git command in the correct working directory.
64
+ */
65
+ async function runGitCommand(
66
+ args: string[],
67
+ ): Promise<{ exitCode: number; stdout: string; stderr: string }> {
68
+ const cwd = getHiveWorkingDirectory();
69
+ const proc = Bun.spawn(["git", ...args], {
70
+ cwd,
71
+ stdout: "pipe",
72
+ stderr: "pipe",
73
+ });
74
+
75
+ const [stdout, stderr] = await Promise.all([
76
+ new Response(proc.stdout).text(),
77
+ new Response(proc.stderr).text(),
78
+ ]);
79
+
80
+ const exitCode = await proc.exited;
81
+
82
+ return { exitCode, stdout, stderr };
83
+ }
84
+
85
+ import {
86
+ CellSchema,
87
+ CellCreateArgsSchema,
88
+ CellUpdateArgsSchema,
89
+ CellCloseArgsSchema,
90
+ CellQueryArgsSchema,
91
+ EpicCreateArgsSchema,
92
+ EpicCreateResultSchema,
93
+ type Cell,
94
+ type CellCreateArgs,
95
+ type EpicCreateResult,
96
+ } from "./schemas";
97
+ import { createEvent, appendEvent } from "swarm-mail";
98
+
99
+ /**
100
+ * Custom error for hive operations
101
+ */
102
+ export class HiveError extends Error {
103
+ constructor(
104
+ message: string,
105
+ public readonly command: string,
106
+ public readonly exitCode?: number,
107
+ public readonly stderr?: string,
108
+ ) {
109
+ super(message);
110
+ this.name = "HiveError";
111
+ }
112
+ }
113
+
114
+ // Legacy alias for backward compatibility
115
+ export const BeadError = HiveError;
116
+
117
+ /**
118
+ * Custom error for validation failures
119
+ */
120
+ export class HiveValidationError extends Error {
121
+ constructor(
122
+ message: string,
123
+ public readonly zodError: z.ZodError,
124
+ ) {
125
+ super(message);
126
+ this.name = "HiveValidationError";
127
+ }
128
+ }
129
+
130
+ // Legacy alias for backward compatibility
131
+ export const BeadValidationError = HiveValidationError;
132
+
133
+ // ============================================================================
134
+ // Adapter Singleton
135
+ // ============================================================================
136
+
137
+ /**
138
+ * Lazy singleton for HiveAdapter instances
139
+ * Maps projectKey -> HiveAdapter
140
+ */
141
+ const adapterCache = new Map<string, HiveAdapter>();
142
+
143
+ /**
144
+ * Get or create a HiveAdapter instance for a project
145
+ * Exported for testing - allows tests to verify state directly
146
+ *
147
+ * On first initialization, checks for .beads/issues.jsonl and imports
148
+ * historical beads if the database is empty.
149
+ */
150
+ export async function getHiveAdapter(projectKey: string): Promise<HiveAdapter> {
151
+ if (adapterCache.has(projectKey)) {
152
+ return adapterCache.get(projectKey)!;
153
+ }
154
+
155
+ const swarmMail = await getSwarmMail(projectKey);
156
+ const db = await swarmMail.getDatabase();
157
+ const adapter = createHiveAdapter(db, projectKey);
158
+
159
+ // Run migrations to ensure schema exists
160
+ await adapter.runMigrations();
161
+
162
+ // Auto-migrate from JSONL if database is empty and file exists
163
+ await autoMigrateFromJSONL(adapter, projectKey);
164
+
165
+ adapterCache.set(projectKey, adapter);
166
+ return adapter;
167
+ }
168
+
169
+ // Legacy alias for backward compatibility
170
+ export const getBeadsAdapter = getHiveAdapter;
171
+
172
+ /**
173
+ * Auto-migrate cells from .hive/issues.jsonl if:
174
+ * 1. The JSONL file exists
175
+ * 2. The database has no cells for this project
176
+ *
177
+ * This enables seamless migration from the old bd CLI to the new PGLite-based system.
178
+ */
179
+ async function autoMigrateFromJSONL(adapter: HiveAdapter, projectKey: string): Promise<void> {
180
+ const jsonlPath = join(projectKey, ".hive", "issues.jsonl");
181
+
182
+ // Check if JSONL file exists
183
+ if (!existsSync(jsonlPath)) {
184
+ return;
185
+ }
186
+
187
+ // Check if database already has cells
188
+ const existingCells = await adapter.queryCells(projectKey, { limit: 1 });
189
+ if (existingCells.length > 0) {
190
+ return; // Already have cells, skip migration
191
+ }
192
+
193
+ // Read and import JSONL
194
+ try {
195
+ const jsonlContent = readFileSync(jsonlPath, "utf-8");
196
+ const result = await importFromJSONL(adapter, projectKey, jsonlContent, {
197
+ skipExisting: true, // Safety: don't overwrite if somehow cells exist
198
+ });
199
+
200
+ if (result.created > 0 || result.updated > 0) {
201
+ console.log(
202
+ `[hive] Auto-migrated ${result.created} cells from ${jsonlPath} (${result.skipped} skipped, ${result.errors.length} errors)`
203
+ );
204
+ }
205
+
206
+ if (result.errors.length > 0) {
207
+ console.warn(
208
+ `[hive] Migration errors:`,
209
+ result.errors.slice(0, 5).map((e) => `${e.cellId}: ${e.error}`)
210
+ );
211
+ }
212
+ } catch (error) {
213
+ // Non-fatal - log and continue
214
+ console.warn(
215
+ `[hive] Failed to auto-migrate from ${jsonlPath}:`,
216
+ error instanceof Error ? error.message : String(error)
217
+ );
218
+ }
219
+ }
220
+
221
+ /**
222
+ * Format adapter cell for output (map field names)
223
+ * Adapter uses: type, created_at/updated_at (timestamps)
224
+ * Schema expects: issue_type, created_at/updated_at (ISO strings)
225
+ */
226
+ function formatCellForOutput(adapterCell: AdapterCell): Record<string, unknown> {
227
+ return {
228
+ id: adapterCell.id,
229
+ title: adapterCell.title,
230
+ description: adapterCell.description || "",
231
+ status: adapterCell.status,
232
+ priority: adapterCell.priority,
233
+ issue_type: adapterCell.type, // Adapter: type → Schema: issue_type
234
+ created_at: new Date(adapterCell.created_at).toISOString(),
235
+ updated_at: new Date(adapterCell.updated_at).toISOString(),
236
+ closed_at: adapterCell.closed_at
237
+ ? new Date(adapterCell.closed_at).toISOString()
238
+ : undefined,
239
+ parent_id: adapterCell.parent_id || undefined,
240
+ dependencies: [], // TODO: fetch from adapter if needed
241
+ metadata: {},
242
+ };
243
+ }
244
+
245
+ // ============================================================================
246
+ // Tool Definitions
247
+ // ============================================================================
248
+
249
+ /**
250
+ * Create a new cell with type-safe validation
251
+ */
252
+ export const hive_create = tool({
253
+ description: "Create a new cell in the hive with type-safe validation",
254
+ args: {
255
+ title: tool.schema.string().describe("Cell title"),
256
+ type: tool.schema
257
+ .enum(["bug", "feature", "task", "epic", "chore"])
258
+ .optional()
259
+ .describe("Issue type (default: task)"),
260
+ priority: tool.schema
261
+ .number()
262
+ .min(0)
263
+ .max(3)
264
+ .optional()
265
+ .describe("Priority 0-3 (default: 2)"),
266
+ description: tool.schema.string().optional().describe("Cell description"),
267
+ parent_id: tool.schema
268
+ .string()
269
+ .optional()
270
+ .describe("Parent cell ID for epic children"),
271
+ },
272
+ async execute(args, ctx) {
273
+ const validated = CellCreateArgsSchema.parse(args);
274
+ const projectKey = getHiveWorkingDirectory();
275
+ const adapter = await getHiveAdapter(projectKey);
276
+
277
+ try {
278
+ const cell = await adapter.createCell(projectKey, {
279
+ title: validated.title,
280
+ type: validated.type || "task",
281
+ priority: validated.priority ?? 2,
282
+ description: validated.description,
283
+ parent_id: validated.parent_id,
284
+ });
285
+
286
+ // Mark dirty for export
287
+ await adapter.markDirty(projectKey, cell.id);
288
+
289
+ const formatted = formatCellForOutput(cell);
290
+ return JSON.stringify(formatted, null, 2);
291
+ } catch (error) {
292
+ const message = error instanceof Error ? error.message : String(error);
293
+ throw new HiveError(
294
+ `Failed to create cell: ${message}`,
295
+ "hive_create",
296
+ );
297
+ }
298
+ },
299
+ });
300
+
301
+ /**
302
+ * Create an epic with subtasks in one atomic operation
303
+ */
304
+ export const hive_create_epic = tool({
305
+ description: "Create epic with subtasks in one atomic operation",
306
+ args: {
307
+ epic_title: tool.schema.string().describe("Epic title"),
308
+ epic_description: tool.schema
309
+ .string()
310
+ .optional()
311
+ .describe("Epic description"),
312
+ epic_id: tool.schema
313
+ .string()
314
+ .optional()
315
+ .describe("Custom ID for the epic (e.g., 'phase-0')"),
316
+ subtasks: tool.schema
317
+ .array(
318
+ tool.schema.object({
319
+ title: tool.schema.string(),
320
+ priority: tool.schema.number().min(0).max(3).optional(),
321
+ files: tool.schema.array(tool.schema.string()).optional(),
322
+ id_suffix: tool.schema
323
+ .string()
324
+ .optional()
325
+ .describe(
326
+ "Custom ID suffix (e.g., 'e2e-test' becomes 'phase-0.e2e-test')",
327
+ ),
328
+ }),
329
+ )
330
+ .describe("Subtasks to create under the epic"),
331
+ strategy: tool.schema
332
+ .enum(["file-based", "feature-based", "risk-based"])
333
+ .optional()
334
+ .describe("Decomposition strategy used (default: feature-based)"),
335
+ task: tool.schema
336
+ .string()
337
+ .optional()
338
+ .describe("Original task description that was decomposed"),
339
+ project_key: tool.schema
340
+ .string()
341
+ .optional()
342
+ .describe("Project path for event emission"),
343
+ recovery_context: tool.schema
344
+ .object({
345
+ shared_context: tool.schema.string().optional(),
346
+ skills_to_load: tool.schema.array(tool.schema.string()).optional(),
347
+ coordinator_notes: tool.schema.string().optional(),
348
+ })
349
+ .optional()
350
+ .describe("Recovery context from checkpoint compaction"),
351
+ },
352
+ async execute(args, ctx) {
353
+ const validated = EpicCreateArgsSchema.parse(args);
354
+ const projectKey = getHiveWorkingDirectory();
355
+ const adapter = await getHiveAdapter(projectKey);
356
+ const created: AdapterCell[] = [];
357
+
358
+ try {
359
+ // 1. Create epic
360
+ const epic = await adapter.createCell(projectKey, {
361
+ title: validated.epic_title,
362
+ type: "epic",
363
+ priority: 1,
364
+ description: validated.epic_description,
365
+ });
366
+ await adapter.markDirty(projectKey, epic.id);
367
+ created.push(epic);
368
+
369
+ // 2. Create subtasks
370
+ for (const subtask of validated.subtasks) {
371
+ const subtaskCell = await adapter.createCell(projectKey, {
372
+ title: subtask.title,
373
+ type: "task",
374
+ priority: subtask.priority ?? 2,
375
+ parent_id: epic.id,
376
+ });
377
+ await adapter.markDirty(projectKey, subtaskCell.id);
378
+ created.push(subtaskCell);
379
+ }
380
+
381
+ const result: EpicCreateResult = {
382
+ success: true,
383
+ epic: formatCellForOutput(epic) as Cell,
384
+ subtasks: created.slice(1).map((c) => formatCellForOutput(c) as Cell),
385
+ };
386
+
387
+ // Emit DecompositionGeneratedEvent for learning system
388
+ if (args.project_key) {
389
+ try {
390
+ const event = createEvent("decomposition_generated", {
391
+ project_key: args.project_key,
392
+ epic_id: epic.id,
393
+ task: args.task || validated.epic_title,
394
+ context: validated.epic_description,
395
+ strategy: args.strategy || "feature-based",
396
+ epic_title: validated.epic_title,
397
+ subtasks: validated.subtasks.map((st) => ({
398
+ title: st.title,
399
+ files: st.files || [],
400
+ priority: st.priority,
401
+ })),
402
+ recovery_context: args.recovery_context,
403
+ });
404
+ await appendEvent(event, args.project_key);
405
+ } catch (error) {
406
+ // Non-fatal - log and continue
407
+ console.warn(
408
+ "[hive_create_epic] Failed to emit DecompositionGeneratedEvent:",
409
+ error,
410
+ );
411
+ }
412
+ }
413
+
414
+ return JSON.stringify(result, null, 2);
415
+ } catch (error) {
416
+ // Partial failure - rollback via deleteCell
417
+ const rollbackErrors: string[] = [];
418
+
419
+ for (const cell of created) {
420
+ try {
421
+ await adapter.deleteCell(projectKey, cell.id, {
422
+ reason: "Rollback partial epic",
423
+ });
424
+ } catch (rollbackError) {
425
+ const errMsg =
426
+ rollbackError instanceof Error
427
+ ? rollbackError.message
428
+ : String(rollbackError);
429
+ console.error(`Failed to rollback cell ${cell.id}:`, rollbackError);
430
+ rollbackErrors.push(`${cell.id}: ${errMsg}`);
431
+ }
432
+ }
433
+
434
+ const errorMsg = error instanceof Error ? error.message : String(error);
435
+ let rollbackInfo = `\n\nRolled back ${created.length - rollbackErrors.length} cell(s)`;
436
+
437
+ if (rollbackErrors.length > 0) {
438
+ rollbackInfo += `\n\nRollback failures (${rollbackErrors.length}):\n${rollbackErrors.join("\n")}`;
439
+ }
440
+
441
+ throw new HiveError(
442
+ `Epic creation failed: ${errorMsg}${rollbackInfo}`,
443
+ "hive_create_epic",
444
+ 1,
445
+ );
446
+ }
447
+ },
448
+ });
449
+
450
+ /**
451
+ * Query cells with filters
452
+ */
453
+ export const hive_query = tool({
454
+ description: "Query hive cells with filters (replaces bd list, bd ready, bd wip)",
455
+ args: {
456
+ status: tool.schema
457
+ .enum(["open", "in_progress", "blocked", "closed"])
458
+ .optional()
459
+ .describe("Filter by status"),
460
+ type: tool.schema
461
+ .enum(["bug", "feature", "task", "epic", "chore"])
462
+ .optional()
463
+ .describe("Filter by type"),
464
+ ready: tool.schema
465
+ .boolean()
466
+ .optional()
467
+ .describe("Only show unblocked cells"),
468
+ limit: tool.schema
469
+ .number()
470
+ .optional()
471
+ .describe("Max results to return (default: 20)"),
472
+ },
473
+ async execute(args, ctx) {
474
+ const validated = CellQueryArgsSchema.parse(args);
475
+ const projectKey = getHiveWorkingDirectory();
476
+ const adapter = await getHiveAdapter(projectKey);
477
+
478
+ try {
479
+ let cells: AdapterCell[];
480
+
481
+ if (validated.ready) {
482
+ const readyCell = await adapter.getNextReadyCell(projectKey);
483
+ cells = readyCell ? [readyCell] : [];
484
+ } else {
485
+ cells = await adapter.queryCells(projectKey, {
486
+ status: validated.status,
487
+ type: validated.type,
488
+ limit: validated.limit || 20,
489
+ });
490
+ }
491
+
492
+ const formatted = cells.map((c) => formatCellForOutput(c));
493
+ return JSON.stringify(formatted, null, 2);
494
+ } catch (error) {
495
+ const message = error instanceof Error ? error.message : String(error);
496
+ throw new HiveError(
497
+ `Failed to query cells: ${message}`,
498
+ "hive_query",
499
+ );
500
+ }
501
+ },
502
+ });
503
+
504
+ /**
505
+ * Update a cell's status or description
506
+ */
507
+ export const hive_update = tool({
508
+ description: "Update cell status/description",
509
+ args: {
510
+ id: tool.schema.string().describe("Cell ID"),
511
+ status: tool.schema
512
+ .enum(["open", "in_progress", "blocked", "closed"])
513
+ .optional()
514
+ .describe("New status"),
515
+ description: tool.schema.string().optional().describe("New description"),
516
+ priority: tool.schema
517
+ .number()
518
+ .min(0)
519
+ .max(3)
520
+ .optional()
521
+ .describe("New priority"),
522
+ },
523
+ async execute(args, ctx) {
524
+ const validated = CellUpdateArgsSchema.parse(args);
525
+ const projectKey = getHiveWorkingDirectory();
526
+ const adapter = await getHiveAdapter(projectKey);
527
+
528
+ try {
529
+ let cell: AdapterCell;
530
+
531
+ // Status changes use changeCellStatus, other fields use updateCell
532
+ if (validated.status) {
533
+ cell = await adapter.changeCellStatus(
534
+ projectKey,
535
+ validated.id,
536
+ validated.status,
537
+ );
538
+ }
539
+
540
+ // Update other fields if provided
541
+ if (validated.description !== undefined || validated.priority !== undefined) {
542
+ cell = await adapter.updateCell(projectKey, validated.id, {
543
+ description: validated.description,
544
+ priority: validated.priority,
545
+ });
546
+ } else if (!validated.status) {
547
+ // No changes requested
548
+ const existingCell = await adapter.getCell(projectKey, validated.id);
549
+ if (!existingCell) {
550
+ throw new HiveError(
551
+ `Cell not found: ${validated.id}`,
552
+ "hive_update",
553
+ );
554
+ }
555
+ cell = existingCell;
556
+ }
557
+
558
+ await adapter.markDirty(projectKey, validated.id);
559
+
560
+ const formatted = formatCellForOutput(cell!);
561
+ return JSON.stringify(formatted, null, 2);
562
+ } catch (error) {
563
+ const message = error instanceof Error ? error.message : String(error);
564
+ throw new HiveError(
565
+ `Failed to update cell: ${message}`,
566
+ "hive_update",
567
+ );
568
+ }
569
+ },
570
+ });
571
+
572
+ /**
573
+ * Close a cell with reason
574
+ */
575
+ export const hive_close = tool({
576
+ description: "Close a cell with reason",
577
+ args: {
578
+ id: tool.schema.string().describe("Cell ID"),
579
+ reason: tool.schema.string().describe("Completion reason"),
580
+ },
581
+ async execute(args, ctx) {
582
+ const validated = CellCloseArgsSchema.parse(args);
583
+ const projectKey = getHiveWorkingDirectory();
584
+ const adapter = await getHiveAdapter(projectKey);
585
+
586
+ try {
587
+ const cell = await adapter.closeCell(
588
+ projectKey,
589
+ validated.id,
590
+ validated.reason,
591
+ );
592
+
593
+ await adapter.markDirty(projectKey, validated.id);
594
+
595
+ return `Closed ${cell.id}: ${validated.reason}`;
596
+ } catch (error) {
597
+ const message = error instanceof Error ? error.message : String(error);
598
+ throw new HiveError(
599
+ `Failed to close cell: ${message}`,
600
+ "hive_close",
601
+ );
602
+ }
603
+ },
604
+ });
605
+
606
+ /**
607
+ * Mark a cell as in-progress
608
+ */
609
+ export const hive_start = tool({
610
+ description:
611
+ "Mark a cell as in-progress (shortcut for update --status in_progress)",
612
+ args: {
613
+ id: tool.schema.string().describe("Cell ID"),
614
+ },
615
+ async execute(args, ctx) {
616
+ const projectKey = getHiveWorkingDirectory();
617
+ const adapter = await getHiveAdapter(projectKey);
618
+
619
+ try {
620
+ const cell = await adapter.changeCellStatus(
621
+ projectKey,
622
+ args.id,
623
+ "in_progress",
624
+ );
625
+
626
+ await adapter.markDirty(projectKey, args.id);
627
+
628
+ return `Started: ${cell.id}`;
629
+ } catch (error) {
630
+ const message = error instanceof Error ? error.message : String(error);
631
+ throw new HiveError(
632
+ `Failed to start cell: ${message}`,
633
+ "hive_start",
634
+ );
635
+ }
636
+ },
637
+ });
638
+
639
+ /**
640
+ * Get the next ready cell
641
+ */
642
+ export const hive_ready = tool({
643
+ description: "Get the next ready cell (unblocked, highest priority)",
644
+ args: {},
645
+ async execute(args, ctx) {
646
+ const projectKey = getHiveWorkingDirectory();
647
+ const adapter = await getHiveAdapter(projectKey);
648
+
649
+ try {
650
+ const cell = await adapter.getNextReadyCell(projectKey);
651
+
652
+ if (!cell) {
653
+ return "No ready cells";
654
+ }
655
+
656
+ const formatted = formatCellForOutput(cell);
657
+ return JSON.stringify(formatted, null, 2);
658
+ } catch (error) {
659
+ const message = error instanceof Error ? error.message : String(error);
660
+ throw new HiveError(
661
+ `Failed to get ready cells: ${message}`,
662
+ "hive_ready",
663
+ );
664
+ }
665
+ },
666
+ });
667
+
668
+ /**
669
+ * Sync hive to git and push
670
+ */
671
+ export const hive_sync = tool({
672
+ description: "Sync hive to git and push (MANDATORY at session end)",
673
+ args: {
674
+ auto_pull: tool.schema
675
+ .boolean()
676
+ .optional()
677
+ .describe("Pull before sync (default: true)"),
678
+ },
679
+ async execute(args, ctx) {
680
+ const autoPull = args.auto_pull ?? true;
681
+ const projectKey = getHiveWorkingDirectory();
682
+ const adapter = await getHiveAdapter(projectKey);
683
+ const TIMEOUT_MS = 30000; // 30 seconds
684
+
685
+ /**
686
+ * Helper to run a command with timeout
687
+ */
688
+ const withTimeout = async <T>(
689
+ promise: Promise<T>,
690
+ timeoutMs: number,
691
+ operation: string,
692
+ ): Promise<T> => {
693
+ let timeoutId: ReturnType<typeof setTimeout> | undefined;
694
+
695
+ const timeoutPromise = new Promise<never>((_, reject) => {
696
+ timeoutId = setTimeout(
697
+ () =>
698
+ reject(
699
+ new HiveError(
700
+ `Operation timed out after ${timeoutMs}ms`,
701
+ operation,
702
+ ),
703
+ ),
704
+ timeoutMs,
705
+ );
706
+ });
707
+
708
+ try {
709
+ return await Promise.race([promise, timeoutPromise]);
710
+ } finally {
711
+ if (timeoutId !== undefined) {
712
+ clearTimeout(timeoutId);
713
+ }
714
+ }
715
+ };
716
+
717
+ // 1. Flush cells to JSONL using FlushManager
718
+ const flushManager = new FlushManager({
719
+ adapter,
720
+ projectKey,
721
+ outputPath: `${projectKey}/.hive/issues.jsonl`,
722
+ });
723
+
724
+ const flushResult = await withTimeout(
725
+ flushManager.flush(),
726
+ TIMEOUT_MS,
727
+ "flush hive",
728
+ );
729
+
730
+ if (flushResult.cellsExported === 0) {
731
+ return "No cells to sync";
732
+ }
733
+
734
+ // 2. Check if there are changes to commit
735
+ const hiveStatusResult = await runGitCommand([
736
+ "status",
737
+ "--porcelain",
738
+ ".hive/",
739
+ ]);
740
+ const hasChanges = hiveStatusResult.stdout.trim() !== "";
741
+
742
+ if (hasChanges) {
743
+ // 3. Stage .hive changes
744
+ const addResult = await runGitCommand(["add", ".hive/"]);
745
+ if (addResult.exitCode !== 0) {
746
+ throw new HiveError(
747
+ `Failed to stage hive: ${addResult.stderr}`,
748
+ "git add .hive/",
749
+ addResult.exitCode,
750
+ );
751
+ }
752
+
753
+ // 4. Commit
754
+ const commitResult = await withTimeout(
755
+ runGitCommand(["commit", "-m", "chore: sync hive"]),
756
+ TIMEOUT_MS,
757
+ "git commit",
758
+ );
759
+ if (
760
+ commitResult.exitCode !== 0 &&
761
+ !commitResult.stdout.includes("nothing to commit")
762
+ ) {
763
+ throw new HiveError(
764
+ `Failed to commit hive: ${commitResult.stderr}`,
765
+ "git commit",
766
+ commitResult.exitCode,
767
+ );
768
+ }
769
+ }
770
+
771
+ // 5. Pull if requested
772
+ if (autoPull) {
773
+ const pullResult = await withTimeout(
774
+ runGitCommand(["pull", "--rebase"]),
775
+ TIMEOUT_MS,
776
+ "git pull --rebase",
777
+ );
778
+
779
+ if (pullResult.exitCode !== 0) {
780
+ throw new HiveError(
781
+ `Failed to pull: ${pullResult.stderr}`,
782
+ "git pull --rebase",
783
+ pullResult.exitCode,
784
+ );
785
+ }
786
+ }
787
+
788
+ // 6. Push
789
+ const pushResult = await withTimeout(
790
+ runGitCommand(["push"]),
791
+ TIMEOUT_MS,
792
+ "git push",
793
+ );
794
+ if (pushResult.exitCode !== 0) {
795
+ throw new HiveError(
796
+ `Failed to push: ${pushResult.stderr}`,
797
+ "git push",
798
+ pushResult.exitCode,
799
+ );
800
+ }
801
+
802
+ return "Hive synced and pushed successfully";
803
+ },
804
+ });
805
+
806
+ /**
807
+ * Link a cell to an Agent Mail thread
808
+ */
809
+ export const hive_link_thread = tool({
810
+ description: "Add metadata linking cell to Agent Mail thread",
811
+ args: {
812
+ cell_id: tool.schema.string().describe("Cell ID"),
813
+ thread_id: tool.schema.string().describe("Agent Mail thread ID"),
814
+ },
815
+ async execute(args, ctx) {
816
+ const projectKey = getHiveWorkingDirectory();
817
+ const adapter = await getHiveAdapter(projectKey);
818
+
819
+ try {
820
+ const cell = await adapter.getCell(projectKey, args.cell_id);
821
+
822
+ if (!cell) {
823
+ throw new HiveError(
824
+ `Cell not found: ${args.cell_id}`,
825
+ "hive_link_thread",
826
+ );
827
+ }
828
+
829
+ const existingDesc = cell.description || "";
830
+ const threadMarker = `[thread:${args.thread_id}]`;
831
+
832
+ if (existingDesc.includes(threadMarker)) {
833
+ return `Cell ${args.cell_id} already linked to thread ${args.thread_id}`;
834
+ }
835
+
836
+ const newDesc = existingDesc
837
+ ? `${existingDesc}\n\n${threadMarker}`
838
+ : threadMarker;
839
+
840
+ await adapter.updateCell(projectKey, args.cell_id, {
841
+ description: newDesc,
842
+ });
843
+
844
+ await adapter.markDirty(projectKey, args.cell_id);
845
+
846
+ return `Linked cell ${args.cell_id} to thread ${args.thread_id}`;
847
+ } catch (error) {
848
+ const message = error instanceof Error ? error.message : String(error);
849
+ throw new HiveError(
850
+ `Failed to link thread: ${message}`,
851
+ "hive_link_thread",
852
+ );
853
+ }
854
+ },
855
+ });
856
+
857
+ // ============================================================================
858
+ // Export all tools
859
+ // ============================================================================
860
+
861
+ export const hiveTools = {
862
+ hive_create,
863
+ hive_create_epic,
864
+ hive_query,
865
+ hive_update,
866
+ hive_close,
867
+ hive_start,
868
+ hive_ready,
869
+ hive_sync,
870
+ hive_link_thread,
871
+ };
872
+
873
+ // ============================================================================
874
+ // Deprecation Warning System
875
+ // ============================================================================
876
+
877
+ /**
878
+ * Track which deprecated tools have been warned about.
879
+ * Only warn once per tool name to avoid spam.
880
+ */
881
+ const warnedTools = new Set<string>();
882
+
883
+ /**
884
+ * Log a deprecation warning for a renamed tool.
885
+ * Only warns once per tool name per session.
886
+ *
887
+ * @param oldName - The deprecated tool name (e.g., "hive_create")
888
+ * @param newName - The new tool name to use instead (e.g., "hive_create")
889
+ */
890
+ function warnDeprecated(oldName: string, newName: string): void {
891
+ if (warnedTools.has(oldName)) {
892
+ return; // Already warned
893
+ }
894
+
895
+ warnedTools.add(oldName);
896
+ console.warn(
897
+ `[DEPRECATED] ${oldName} is deprecated, use ${newName} instead. Will be removed in v1.0`
898
+ );
899
+ }
900
+
901
+ // ============================================================================
902
+ // Legacy Aliases (DEPRECATED - use hive_* instead)
903
+ // ============================================================================
904
+
905
+ /**
906
+ * @deprecated Use hive_create instead. Will be removed in v1.0
907
+ */
908
+ export const beads_create = tool({
909
+ ...hive_create,
910
+ async execute(args, ctx) {
911
+ warnDeprecated('beads_create', 'hive_create');
912
+ return hive_create.execute(args, ctx);
913
+ }
914
+ });
915
+
916
+ /**
917
+ * @deprecated Use hive_create_epic instead. Will be removed in v1.0
918
+ */
919
+ export const beads_create_epic = tool({
920
+ ...hive_create_epic,
921
+ async execute(args, ctx) {
922
+ warnDeprecated('beads_create_epic', 'hive_create_epic');
923
+ return hive_create_epic.execute(args, ctx);
924
+ }
925
+ });
926
+
927
+ /**
928
+ * @deprecated Use hive_query instead. Will be removed in v1.0
929
+ */
930
+ export const beads_query = tool({
931
+ ...hive_query,
932
+ async execute(args, ctx) {
933
+ warnDeprecated('beads_query', 'hive_query');
934
+ return hive_query.execute(args, ctx);
935
+ }
936
+ });
937
+
938
+ /**
939
+ * @deprecated Use hive_update instead. Will be removed in v1.0
940
+ */
941
+ export const beads_update = tool({
942
+ ...hive_update,
943
+ async execute(args, ctx) {
944
+ warnDeprecated('beads_update', 'hive_update');
945
+ return hive_update.execute(args, ctx);
946
+ }
947
+ });
948
+
949
+ /**
950
+ * @deprecated Use hive_close instead. Will be removed in v1.0
951
+ */
952
+ export const beads_close = tool({
953
+ ...hive_close,
954
+ async execute(args, ctx) {
955
+ warnDeprecated('beads_close', 'hive_close');
956
+ return hive_close.execute(args, ctx);
957
+ }
958
+ });
959
+
960
+ /**
961
+ * @deprecated Use hive_start instead. Will be removed in v1.0
962
+ */
963
+ export const beads_start = tool({
964
+ ...hive_start,
965
+ async execute(args, ctx) {
966
+ warnDeprecated('beads_start', 'hive_start');
967
+ return hive_start.execute(args, ctx);
968
+ }
969
+ });
970
+
971
+ /**
972
+ * @deprecated Use hive_ready instead. Will be removed in v1.0
973
+ */
974
+ export const beads_ready = tool({
975
+ ...hive_ready,
976
+ async execute(args, ctx) {
977
+ warnDeprecated('beads_ready', 'hive_ready');
978
+ return hive_ready.execute(args, ctx);
979
+ }
980
+ });
981
+
982
+ /**
983
+ * @deprecated Use hive_sync instead. Will be removed in v1.0
984
+ */
985
+ export const beads_sync = tool({
986
+ ...hive_sync,
987
+ async execute(args, ctx) {
988
+ warnDeprecated('beads_sync', 'hive_sync');
989
+ return hive_sync.execute(args, ctx);
990
+ }
991
+ });
992
+
993
+ /**
994
+ * @deprecated Use hive_link_thread instead. Will be removed in v1.0
995
+ */
996
+ export const beads_link_thread = tool({
997
+ ...hive_link_thread,
998
+ async execute(args, ctx) {
999
+ warnDeprecated('beads_link_thread', 'hive_link_thread');
1000
+ return hive_link_thread.execute(args, ctx);
1001
+ }
1002
+ });
1003
+
1004
+ /**
1005
+ * @deprecated Use hiveTools instead. Will be removed in v1.0
1006
+ */
1007
+ export const beadsTools = {
1008
+ beads_create,
1009
+ beads_create_epic,
1010
+ beads_query,
1011
+ beads_update,
1012
+ beads_close,
1013
+ beads_start,
1014
+ beads_ready,
1015
+ beads_sync,
1016
+ beads_link_thread,
1017
+ };