opencode-swarm-plugin 0.44.0 → 0.44.1
This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
- package/bin/swarm.serve.test.ts +6 -4
- package/bin/swarm.ts +16 -10
- package/dist/compaction-prompt-scoring.js +139 -0
- package/dist/eval-capture.js +12811 -0
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.js +7644 -62599
- package/dist/plugin.js +23766 -78721
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm-review.d.ts.map +1 -1
- package/package.json +17 -5
- package/.changeset/swarm-insights-data-layer.md +0 -63
- package/.hive/analysis/eval-failure-analysis-2025-12-25.md +0 -331
- package/.hive/analysis/session-data-quality-audit.md +0 -320
- package/.hive/eval-results.json +0 -483
- package/.hive/issues.jsonl +0 -138
- package/.hive/memories.jsonl +0 -729
- package/.opencode/eval-history.jsonl +0 -327
- package/.turbo/turbo-build.log +0 -9
- package/CHANGELOG.md +0 -2286
- package/SCORER-ANALYSIS.md +0 -598
- package/docs/analysis/subagent-coordination-patterns.md +0 -902
- package/docs/analysis-socratic-planner-pattern.md +0 -504
- package/docs/planning/ADR-001-monorepo-structure.md +0 -171
- package/docs/planning/ADR-002-package-extraction.md +0 -393
- package/docs/planning/ADR-003-performance-improvements.md +0 -451
- package/docs/planning/ADR-004-message-queue-features.md +0 -187
- package/docs/planning/ADR-005-devtools-observability.md +0 -202
- package/docs/planning/ADR-007-swarm-enhancements-worktree-review.md +0 -168
- package/docs/planning/ADR-008-worker-handoff-protocol.md +0 -293
- package/docs/planning/ADR-009-oh-my-opencode-patterns.md +0 -353
- package/docs/planning/ADR-010-cass-inhousing.md +0 -1215
- package/docs/planning/ROADMAP.md +0 -368
- package/docs/semantic-memory-cli-syntax.md +0 -123
- package/docs/swarm-mail-architecture.md +0 -1147
- package/docs/testing/context-recovery-test.md +0 -470
- package/evals/ARCHITECTURE.md +0 -1189
- package/evals/README.md +0 -768
- package/evals/compaction-prompt.eval.ts +0 -149
- package/evals/compaction-resumption.eval.ts +0 -289
- package/evals/coordinator-behavior.eval.ts +0 -307
- package/evals/coordinator-session.eval.ts +0 -154
- package/evals/evalite.config.ts.bak +0 -15
- package/evals/example.eval.ts +0 -31
- package/evals/fixtures/cass-baseline.ts +0 -217
- package/evals/fixtures/compaction-cases.ts +0 -350
- package/evals/fixtures/compaction-prompt-cases.ts +0 -311
- package/evals/fixtures/coordinator-sessions.ts +0 -328
- package/evals/fixtures/decomposition-cases.ts +0 -105
- package/evals/lib/compaction-loader.test.ts +0 -248
- package/evals/lib/compaction-loader.ts +0 -320
- package/evals/lib/data-loader.evalite-test.ts +0 -289
- package/evals/lib/data-loader.test.ts +0 -345
- package/evals/lib/data-loader.ts +0 -281
- package/evals/lib/llm.ts +0 -115
- package/evals/scorers/compaction-prompt-scorers.ts +0 -145
- package/evals/scorers/compaction-scorers.ts +0 -305
- package/evals/scorers/coordinator-discipline.evalite-test.ts +0 -539
- package/evals/scorers/coordinator-discipline.ts +0 -325
- package/evals/scorers/index.test.ts +0 -146
- package/evals/scorers/index.ts +0 -328
- package/evals/scorers/outcome-scorers.evalite-test.ts +0 -27
- package/evals/scorers/outcome-scorers.ts +0 -349
- package/evals/swarm-decomposition.eval.ts +0 -121
- package/examples/commands/swarm.md +0 -745
- package/examples/plugin-wrapper-template.ts +0 -2515
- package/examples/skills/hive-workflow/SKILL.md +0 -212
- package/examples/skills/skill-creator/SKILL.md +0 -223
- package/examples/skills/swarm-coordination/SKILL.md +0 -292
- package/global-skills/cli-builder/SKILL.md +0 -344
- package/global-skills/cli-builder/references/advanced-patterns.md +0 -244
- package/global-skills/learning-systems/SKILL.md +0 -644
- package/global-skills/skill-creator/LICENSE.txt +0 -202
- package/global-skills/skill-creator/SKILL.md +0 -352
- package/global-skills/skill-creator/references/output-patterns.md +0 -82
- package/global-skills/skill-creator/references/workflows.md +0 -28
- package/global-skills/swarm-coordination/SKILL.md +0 -995
- package/global-skills/swarm-coordination/references/coordinator-patterns.md +0 -235
- package/global-skills/swarm-coordination/references/strategies.md +0 -138
- package/global-skills/system-design/SKILL.md +0 -213
- package/global-skills/testing-patterns/SKILL.md +0 -430
- package/global-skills/testing-patterns/references/dependency-breaking-catalog.md +0 -586
- package/opencode-swarm-plugin-0.30.7.tgz +0 -0
- package/opencode-swarm-plugin-0.31.0.tgz +0 -0
- package/scripts/cleanup-test-memories.ts +0 -346
- package/scripts/init-skill.ts +0 -222
- package/scripts/migrate-unknown-sessions.ts +0 -349
- package/scripts/validate-skill.ts +0 -204
- package/src/agent-mail.ts +0 -1724
- package/src/anti-patterns.test.ts +0 -1167
- package/src/anti-patterns.ts +0 -448
- package/src/compaction-capture.integration.test.ts +0 -257
- package/src/compaction-hook.test.ts +0 -838
- package/src/compaction-hook.ts +0 -1204
- package/src/compaction-observability.integration.test.ts +0 -139
- package/src/compaction-observability.test.ts +0 -187
- package/src/compaction-observability.ts +0 -324
- package/src/compaction-prompt-scorers.test.ts +0 -475
- package/src/compaction-prompt-scoring.ts +0 -300
- package/src/contributor-tools.test.ts +0 -133
- package/src/contributor-tools.ts +0 -201
- package/src/dashboard.test.ts +0 -611
- package/src/dashboard.ts +0 -462
- package/src/error-enrichment.test.ts +0 -403
- package/src/error-enrichment.ts +0 -219
- package/src/eval-capture.test.ts +0 -1015
- package/src/eval-capture.ts +0 -929
- package/src/eval-gates.test.ts +0 -306
- package/src/eval-gates.ts +0 -218
- package/src/eval-history.test.ts +0 -508
- package/src/eval-history.ts +0 -214
- package/src/eval-learning.test.ts +0 -378
- package/src/eval-learning.ts +0 -360
- package/src/eval-runner.test.ts +0 -223
- package/src/eval-runner.ts +0 -402
- package/src/export-tools.test.ts +0 -476
- package/src/export-tools.ts +0 -257
- package/src/hive.integration.test.ts +0 -2241
- package/src/hive.ts +0 -1628
- package/src/index.ts +0 -940
- package/src/learning.integration.test.ts +0 -1815
- package/src/learning.ts +0 -1079
- package/src/logger.test.ts +0 -189
- package/src/logger.ts +0 -135
- package/src/mandate-promotion.test.ts +0 -473
- package/src/mandate-promotion.ts +0 -239
- package/src/mandate-storage.integration.test.ts +0 -601
- package/src/mandate-storage.test.ts +0 -578
- package/src/mandate-storage.ts +0 -794
- package/src/mandates.ts +0 -540
- package/src/memory-tools.test.ts +0 -195
- package/src/memory-tools.ts +0 -344
- package/src/memory.integration.test.ts +0 -334
- package/src/memory.test.ts +0 -158
- package/src/memory.ts +0 -527
- package/src/model-selection.test.ts +0 -188
- package/src/model-selection.ts +0 -68
- package/src/observability-tools.test.ts +0 -359
- package/src/observability-tools.ts +0 -871
- package/src/output-guardrails.test.ts +0 -438
- package/src/output-guardrails.ts +0 -381
- package/src/pattern-maturity.test.ts +0 -1160
- package/src/pattern-maturity.ts +0 -525
- package/src/planning-guardrails.test.ts +0 -491
- package/src/planning-guardrails.ts +0 -438
- package/src/plugin.ts +0 -23
- package/src/post-compaction-tracker.test.ts +0 -251
- package/src/post-compaction-tracker.ts +0 -237
- package/src/query-tools.test.ts +0 -636
- package/src/query-tools.ts +0 -324
- package/src/rate-limiter.integration.test.ts +0 -466
- package/src/rate-limiter.ts +0 -774
- package/src/replay-tools.test.ts +0 -496
- package/src/replay-tools.ts +0 -240
- package/src/repo-crawl.integration.test.ts +0 -441
- package/src/repo-crawl.ts +0 -610
- package/src/schemas/cell-events.test.ts +0 -347
- package/src/schemas/cell-events.ts +0 -807
- package/src/schemas/cell.ts +0 -257
- package/src/schemas/evaluation.ts +0 -166
- package/src/schemas/index.test.ts +0 -199
- package/src/schemas/index.ts +0 -286
- package/src/schemas/mandate.ts +0 -232
- package/src/schemas/swarm-context.ts +0 -115
- package/src/schemas/task.ts +0 -161
- package/src/schemas/worker-handoff.test.ts +0 -302
- package/src/schemas/worker-handoff.ts +0 -131
- package/src/sessions/agent-discovery.test.ts +0 -137
- package/src/sessions/agent-discovery.ts +0 -112
- package/src/sessions/index.ts +0 -15
- package/src/skills.integration.test.ts +0 -1192
- package/src/skills.test.ts +0 -643
- package/src/skills.ts +0 -1549
- package/src/storage.integration.test.ts +0 -341
- package/src/storage.ts +0 -884
- package/src/structured.integration.test.ts +0 -817
- package/src/structured.test.ts +0 -1046
- package/src/structured.ts +0 -762
- package/src/swarm-decompose.test.ts +0 -188
- package/src/swarm-decompose.ts +0 -1302
- package/src/swarm-deferred.integration.test.ts +0 -157
- package/src/swarm-deferred.test.ts +0 -38
- package/src/swarm-insights.test.ts +0 -214
- package/src/swarm-insights.ts +0 -459
- package/src/swarm-mail.integration.test.ts +0 -970
- package/src/swarm-mail.ts +0 -739
- package/src/swarm-orchestrate.integration.test.ts +0 -282
- package/src/swarm-orchestrate.test.ts +0 -548
- package/src/swarm-orchestrate.ts +0 -3084
- package/src/swarm-prompts.test.ts +0 -1270
- package/src/swarm-prompts.ts +0 -2077
- package/src/swarm-research.integration.test.ts +0 -701
- package/src/swarm-research.test.ts +0 -698
- package/src/swarm-research.ts +0 -472
- package/src/swarm-review.integration.test.ts +0 -285
- package/src/swarm-review.test.ts +0 -879
- package/src/swarm-review.ts +0 -709
- package/src/swarm-strategies.ts +0 -407
- package/src/swarm-worktree.test.ts +0 -501
- package/src/swarm-worktree.ts +0 -575
- package/src/swarm.integration.test.ts +0 -2377
- package/src/swarm.ts +0 -38
- package/src/tool-adapter.integration.test.ts +0 -1221
- package/src/tool-availability.ts +0 -461
- package/tsconfig.json +0 -28
package/src/hive.ts
DELETED
@@ -1,1628 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Hive Module - Type-safe wrappers using HiveAdapter
|
|
3
|
-
*
|
|
4
|
-
* This module provides validated, type-safe operations for the Hive
|
|
5
|
-
* issue tracker using the HiveAdapter from swarm-mail.
|
|
6
|
-
*
|
|
7
|
-
* Key principles:
|
|
8
|
-
* - Use HiveAdapter for all operations (no CLI commands)
|
|
9
|
-
* - Validate all inputs with Zod schemas
|
|
10
|
-
* - Throw typed errors on failure
|
|
11
|
-
* - Support atomic epic creation with rollback
|
|
12
|
-
*
|
|
13
|
-
* IMPORTANT: Call setHiveWorkingDirectory() before using tools to ensure
|
|
14
|
-
* operations run in the correct project directory.
|
|
15
|
-
*/
|
|
16
|
-
import { tool } from "@opencode-ai/plugin";
|
|
17
|
-
import { z } from "zod";
|
|
18
|
-
import {
|
|
19
|
-
createHiveAdapter,
|
|
20
|
-
FlushManager,
|
|
21
|
-
importFromJSONL,
|
|
22
|
-
syncMemories,
|
|
23
|
-
type HiveAdapter,
|
|
24
|
-
type Cell as AdapterCell,
|
|
25
|
-
getSwarmMailLibSQL,
|
|
26
|
-
resolvePartialId,
|
|
27
|
-
} from "swarm-mail";
|
|
28
|
-
import { existsSync, readFileSync } from "node:fs";
|
|
29
|
-
import { join } from "node:path";
|
|
30
|
-
|
|
31
|
-
// ============================================================================
|
|
32
|
-
// Working Directory Configuration
|
|
33
|
-
// ============================================================================
|
|
34
|
-
|
|
35
|
-
/**
|
|
36
|
-
* Module-level working directory for hive commands.
|
|
37
|
-
* Set this via setHiveWorkingDirectory() before using tools.
|
|
38
|
-
* If not set, commands run in process.cwd() which may be wrong for plugins.
|
|
39
|
-
*/
|
|
40
|
-
let hiveWorkingDirectory: string | null = null;
|
|
41
|
-
|
|
42
|
-
/**
|
|
43
|
-
* Set the working directory for all hive commands.
|
|
44
|
-
* Call this from the plugin initialization with the project directory.
|
|
45
|
-
*
|
|
46
|
-
* @param directory - Absolute path to the project directory
|
|
47
|
-
*/
|
|
48
|
-
export function setHiveWorkingDirectory(directory: string): void {
|
|
49
|
-
hiveWorkingDirectory = directory;
|
|
50
|
-
}
|
|
51
|
-
|
|
52
|
-
/**
|
|
53
|
-
* Get the current working directory for hive commands.
|
|
54
|
-
* Returns the configured directory or process.cwd() as fallback.
|
|
55
|
-
*/
|
|
56
|
-
export function getHiveWorkingDirectory(): string {
|
|
57
|
-
return hiveWorkingDirectory || process.cwd();
|
|
58
|
-
}
|
|
59
|
-
|
|
60
|
-
// Legacy aliases for backward compatibility
|
|
61
|
-
export const setBeadsWorkingDirectory = setHiveWorkingDirectory;
|
|
62
|
-
export const getBeadsWorkingDirectory = getHiveWorkingDirectory;
|
|
63
|
-
|
|
64
|
-
/**
|
|
65
|
-
* Run a git command in the correct working directory.
|
|
66
|
-
*/
|
|
67
|
-
async function runGitCommand(
|
|
68
|
-
args: string[],
|
|
69
|
-
): Promise<{ exitCode: number; stdout: string; stderr: string }> {
|
|
70
|
-
const cwd = getHiveWorkingDirectory();
|
|
71
|
-
const proc = Bun.spawn(["git", ...args], {
|
|
72
|
-
cwd,
|
|
73
|
-
stdout: "pipe",
|
|
74
|
-
stderr: "pipe",
|
|
75
|
-
});
|
|
76
|
-
|
|
77
|
-
const [stdout, stderr] = await Promise.all([
|
|
78
|
-
new Response(proc.stdout).text(),
|
|
79
|
-
new Response(proc.stderr).text(),
|
|
80
|
-
]);
|
|
81
|
-
|
|
82
|
-
const exitCode = await proc.exited;
|
|
83
|
-
|
|
84
|
-
return { exitCode, stdout, stderr };
|
|
85
|
-
}
|
|
86
|
-
|
|
87
|
-
import {
|
|
88
|
-
CellSchema,
|
|
89
|
-
CellCreateArgsSchema,
|
|
90
|
-
CellUpdateArgsSchema,
|
|
91
|
-
CellCloseArgsSchema,
|
|
92
|
-
CellQueryArgsSchema,
|
|
93
|
-
EpicCreateArgsSchema,
|
|
94
|
-
EpicCreateResultSchema,
|
|
95
|
-
type Cell,
|
|
96
|
-
type CellCreateArgs,
|
|
97
|
-
type EpicCreateResult,
|
|
98
|
-
} from "./schemas";
|
|
99
|
-
import { createEvent, appendEvent } from "swarm-mail";
|
|
100
|
-
|
|
101
|
-
/**
|
|
102
|
-
* Custom error for hive operations
|
|
103
|
-
*/
|
|
104
|
-
export class HiveError extends Error {
|
|
105
|
-
constructor(
|
|
106
|
-
message: string,
|
|
107
|
-
public readonly command: string,
|
|
108
|
-
public readonly exitCode?: number,
|
|
109
|
-
public readonly stderr?: string,
|
|
110
|
-
) {
|
|
111
|
-
super(message);
|
|
112
|
-
this.name = "HiveError";
|
|
113
|
-
}
|
|
114
|
-
}
|
|
115
|
-
|
|
116
|
-
// Legacy alias for backward compatibility
|
|
117
|
-
export const BeadError = HiveError;
|
|
118
|
-
|
|
119
|
-
/**
|
|
120
|
-
* Custom error for validation failures
|
|
121
|
-
*/
|
|
122
|
-
export class HiveValidationError extends Error {
|
|
123
|
-
constructor(
|
|
124
|
-
message: string,
|
|
125
|
-
public readonly zodError: z.ZodError,
|
|
126
|
-
) {
|
|
127
|
-
super(message);
|
|
128
|
-
this.name = "HiveValidationError";
|
|
129
|
-
}
|
|
130
|
-
}
|
|
131
|
-
|
|
132
|
-
// Legacy alias for backward compatibility
|
|
133
|
-
export const BeadValidationError = HiveValidationError;
|
|
134
|
-
|
|
135
|
-
// ============================================================================
|
|
136
|
-
// Directory Migration (.beads → .hive)
|
|
137
|
-
// ============================================================================
|
|
138
|
-
|
|
139
|
-
/**
|
|
140
|
-
* Result of checking if .beads → .hive migration is needed
|
|
141
|
-
*/
|
|
142
|
-
export interface MigrationCheckResult {
|
|
143
|
-
/** Whether migration is needed */
|
|
144
|
-
needed: boolean;
|
|
145
|
-
/** Path to .beads directory if it exists */
|
|
146
|
-
beadsPath?: string;
|
|
147
|
-
}
|
|
148
|
-
|
|
149
|
-
/**
|
|
150
|
-
* Result of migrating .beads → .hive
|
|
151
|
-
*/
|
|
152
|
-
export interface MigrationResult {
|
|
153
|
-
/** Whether migration was performed */
|
|
154
|
-
migrated: boolean;
|
|
155
|
-
/** Reason if migration was skipped */
|
|
156
|
-
reason?: string;
|
|
157
|
-
}
|
|
158
|
-
|
|
159
|
-
/**
|
|
160
|
-
* Check if .beads → .hive migration is needed
|
|
161
|
-
*
|
|
162
|
-
* Migration is needed when:
|
|
163
|
-
* - .beads directory exists
|
|
164
|
-
* - .hive directory does NOT exist
|
|
165
|
-
*
|
|
166
|
-
* @param projectPath - Absolute path to the project root
|
|
167
|
-
* @returns MigrationCheckResult indicating if migration is needed
|
|
168
|
-
*/
|
|
169
|
-
export function checkBeadsMigrationNeeded(projectPath: string): MigrationCheckResult {
|
|
170
|
-
const beadsDir = join(projectPath, ".beads");
|
|
171
|
-
const hiveDir = join(projectPath, ".hive");
|
|
172
|
-
|
|
173
|
-
// If .hive already exists, no migration needed
|
|
174
|
-
if (existsSync(hiveDir)) {
|
|
175
|
-
return { needed: false };
|
|
176
|
-
}
|
|
177
|
-
|
|
178
|
-
// If .beads exists but .hive doesn't, migration is needed
|
|
179
|
-
if (existsSync(beadsDir)) {
|
|
180
|
-
return { needed: true, beadsPath: beadsDir };
|
|
181
|
-
}
|
|
182
|
-
|
|
183
|
-
// Neither exists - no migration needed
|
|
184
|
-
return { needed: false };
|
|
185
|
-
}
|
|
186
|
-
|
|
187
|
-
/**
|
|
188
|
-
* Migrate .beads directory to .hive
|
|
189
|
-
*
|
|
190
|
-
* This function renames .beads to .hive. It should only be called
|
|
191
|
-
* after user confirmation via CLI prompt.
|
|
192
|
-
*
|
|
193
|
-
* @param projectPath - Absolute path to the project root
|
|
194
|
-
* @returns MigrationResult indicating success or skip reason
|
|
195
|
-
*/
|
|
196
|
-
export async function migrateBeadsToHive(projectPath: string): Promise<MigrationResult> {
|
|
197
|
-
const beadsDir = join(projectPath, ".beads");
|
|
198
|
-
const hiveDir = join(projectPath, ".hive");
|
|
199
|
-
|
|
200
|
-
// Check if .hive already exists - skip migration
|
|
201
|
-
if (existsSync(hiveDir)) {
|
|
202
|
-
return {
|
|
203
|
-
migrated: false,
|
|
204
|
-
reason: ".hive directory already exists - skipping migration to avoid data loss"
|
|
205
|
-
};
|
|
206
|
-
}
|
|
207
|
-
|
|
208
|
-
// Check if .beads exists
|
|
209
|
-
if (!existsSync(beadsDir)) {
|
|
210
|
-
return {
|
|
211
|
-
migrated: false,
|
|
212
|
-
reason: ".beads directory not found - nothing to migrate"
|
|
213
|
-
};
|
|
214
|
-
}
|
|
215
|
-
|
|
216
|
-
// Perform the rename
|
|
217
|
-
const { renameSync } = await import("node:fs");
|
|
218
|
-
renameSync(beadsDir, hiveDir);
|
|
219
|
-
|
|
220
|
-
return { migrated: true };
|
|
221
|
-
}
|
|
222
|
-
|
|
223
|
-
/**
|
|
224
|
-
* Ensure .hive directory exists
|
|
225
|
-
*
|
|
226
|
-
* Creates .hive directory if it doesn't exist. This is idempotent
|
|
227
|
-
* and safe to call multiple times.
|
|
228
|
-
*
|
|
229
|
-
* @param projectPath - Absolute path to the project root
|
|
230
|
-
*/
|
|
231
|
-
export function ensureHiveDirectory(projectPath: string): void {
|
|
232
|
-
const hiveDir = join(projectPath, ".hive");
|
|
233
|
-
|
|
234
|
-
if (!existsSync(hiveDir)) {
|
|
235
|
-
const { mkdirSync } = require("node:fs");
|
|
236
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
237
|
-
}
|
|
238
|
-
}
|
|
239
|
-
|
|
240
|
-
/**
|
|
241
|
-
* Merge historic beads from beads.base.jsonl into issues.jsonl
|
|
242
|
-
*
|
|
243
|
-
* This function reads beads.base.jsonl (historic data) and issues.jsonl (current data),
|
|
244
|
-
* merges them by ID (issues.jsonl version wins for duplicates), and writes the result
|
|
245
|
-
* back to issues.jsonl.
|
|
246
|
-
*
|
|
247
|
-
* Use case: After migrating from .beads to .hive, you may have a beads.base.jsonl file
|
|
248
|
-
* containing old beads that should be merged into the current issues.jsonl.
|
|
249
|
-
*
|
|
250
|
-
* @param projectPath - Absolute path to the project root
|
|
251
|
-
* @returns Object with merged and skipped counts
|
|
252
|
-
*/
|
|
253
|
-
export async function mergeHistoricBeads(projectPath: string): Promise<{merged: number, skipped: number}> {
|
|
254
|
-
const { readFileSync, writeFileSync, existsSync } = await import("node:fs");
|
|
255
|
-
const hiveDir = join(projectPath, ".hive");
|
|
256
|
-
const basePath = join(hiveDir, "beads.base.jsonl");
|
|
257
|
-
const issuesPath = join(hiveDir, "issues.jsonl");
|
|
258
|
-
|
|
259
|
-
// If base file doesn't exist, nothing to merge
|
|
260
|
-
if (!existsSync(basePath)) {
|
|
261
|
-
return { merged: 0, skipped: 0 };
|
|
262
|
-
}
|
|
263
|
-
|
|
264
|
-
// Read base file
|
|
265
|
-
const baseContent = readFileSync(basePath, "utf-8");
|
|
266
|
-
const baseLines = baseContent.trim().split("\n").filter(l => l);
|
|
267
|
-
const baseBeads = baseLines.map(line => JSON.parse(line));
|
|
268
|
-
|
|
269
|
-
// Read issues file (or create empty if missing)
|
|
270
|
-
let issuesBeads: any[] = [];
|
|
271
|
-
if (existsSync(issuesPath)) {
|
|
272
|
-
const issuesContent = readFileSync(issuesPath, "utf-8");
|
|
273
|
-
const issuesLines = issuesContent.trim().split("\n").filter(l => l);
|
|
274
|
-
issuesBeads = issuesLines.map(line => JSON.parse(line));
|
|
275
|
-
}
|
|
276
|
-
|
|
277
|
-
// Build set of existing IDs in issues.jsonl
|
|
278
|
-
const existingIds = new Set(issuesBeads.map(b => b.id));
|
|
279
|
-
|
|
280
|
-
// Merge: add beads from base that aren't in issues
|
|
281
|
-
let merged = 0;
|
|
282
|
-
let skipped = 0;
|
|
283
|
-
|
|
284
|
-
for (const baseBead of baseBeads) {
|
|
285
|
-
if (existingIds.has(baseBead.id)) {
|
|
286
|
-
skipped++;
|
|
287
|
-
} else {
|
|
288
|
-
issuesBeads.push(baseBead);
|
|
289
|
-
merged++;
|
|
290
|
-
}
|
|
291
|
-
}
|
|
292
|
-
|
|
293
|
-
// Write merged result back to issues.jsonl
|
|
294
|
-
const mergedContent = issuesBeads.map(b => JSON.stringify(b)).join("\n") + "\n";
|
|
295
|
-
writeFileSync(issuesPath, mergedContent, "utf-8");
|
|
296
|
-
|
|
297
|
-
return { merged, skipped };
|
|
298
|
-
}
|
|
299
|
-
|
|
300
|
-
/**
|
|
301
|
-
* Import cells from .hive/issues.jsonl into PGLite database
|
|
302
|
-
*
|
|
303
|
-
* Reads the JSONL file and upserts each record into the cells table
|
|
304
|
-
* using the HiveAdapter. Provides granular error reporting for invalid lines.
|
|
305
|
-
*
|
|
306
|
-
* This function manually parses JSONL line-by-line to gracefully handle
|
|
307
|
-
* invalid JSON without throwing. Each valid line is imported via the adapter.
|
|
308
|
-
*
|
|
309
|
-
* @param projectPath - Absolute path to the project root
|
|
310
|
-
* @returns Object with imported, updated, and error counts
|
|
311
|
-
*/
|
|
312
|
-
export async function importJsonlToPGLite(projectPath: string): Promise<{
|
|
313
|
-
imported: number;
|
|
314
|
-
updated: number;
|
|
315
|
-
errors: number;
|
|
316
|
-
}> {
|
|
317
|
-
const jsonlPath = join(projectPath, ".hive", "issues.jsonl");
|
|
318
|
-
|
|
319
|
-
// Handle missing file gracefully
|
|
320
|
-
if (!existsSync(jsonlPath)) {
|
|
321
|
-
return { imported: 0, updated: 0, errors: 0 };
|
|
322
|
-
}
|
|
323
|
-
|
|
324
|
-
// Read JSONL content
|
|
325
|
-
const jsonlContent = readFileSync(jsonlPath, "utf-8");
|
|
326
|
-
|
|
327
|
-
// Handle empty file
|
|
328
|
-
if (!jsonlContent || jsonlContent.trim() === "") {
|
|
329
|
-
return { imported: 0, updated: 0, errors: 0 };
|
|
330
|
-
}
|
|
331
|
-
|
|
332
|
-
// Get adapter - but we need to prevent auto-migration from running
|
|
333
|
-
// Auto-migration only runs if DB is empty, so we check first
|
|
334
|
-
const adapter = await getHiveAdapter(projectPath);
|
|
335
|
-
|
|
336
|
-
// Parse JSONL line-by-line, tolerating invalid JSON
|
|
337
|
-
const lines = jsonlContent.split("\n").filter(l => l.trim());
|
|
338
|
-
let imported = 0;
|
|
339
|
-
let updated = 0;
|
|
340
|
-
let errors = 0;
|
|
341
|
-
|
|
342
|
-
for (const line of lines) {
|
|
343
|
-
try {
|
|
344
|
-
const cellData = JSON.parse(line);
|
|
345
|
-
|
|
346
|
-
// Check if cell exists
|
|
347
|
-
const existing = await adapter.getCell(projectPath, cellData.id);
|
|
348
|
-
|
|
349
|
-
if (existing) {
|
|
350
|
-
// Update existing cell
|
|
351
|
-
try {
|
|
352
|
-
await adapter.updateCell(projectPath, cellData.id, {
|
|
353
|
-
title: cellData.title,
|
|
354
|
-
description: cellData.description,
|
|
355
|
-
priority: cellData.priority,
|
|
356
|
-
assignee: cellData.assignee,
|
|
357
|
-
});
|
|
358
|
-
|
|
359
|
-
// Update status if needed - use closeCell for 'closed' status
|
|
360
|
-
if (existing.status !== cellData.status) {
|
|
361
|
-
if (cellData.status === "closed") {
|
|
362
|
-
await adapter.closeCell(projectPath, cellData.id, "Imported from JSONL");
|
|
363
|
-
} else {
|
|
364
|
-
await adapter.changeCellStatus(projectPath, cellData.id, cellData.status);
|
|
365
|
-
}
|
|
366
|
-
}
|
|
367
|
-
|
|
368
|
-
updated++;
|
|
369
|
-
} catch (updateError) {
|
|
370
|
-
// Update failed - count as error
|
|
371
|
-
errors++;
|
|
372
|
-
}
|
|
373
|
-
} else {
|
|
374
|
-
// Create new cell - use direct DB insert to preserve ID
|
|
375
|
-
const db = await adapter.getDatabase();
|
|
376
|
-
|
|
377
|
-
const status = cellData.status === "tombstone" ? "closed" : cellData.status;
|
|
378
|
-
const isClosed = status === "closed";
|
|
379
|
-
const closedAt = isClosed
|
|
380
|
-
? (cellData.closed_at
|
|
381
|
-
? new Date(cellData.closed_at).getTime()
|
|
382
|
-
: new Date(cellData.updated_at).getTime())
|
|
383
|
-
: null;
|
|
384
|
-
|
|
385
|
-
await db.query(
|
|
386
|
-
`INSERT INTO cells (
|
|
387
|
-
id, project_key, type, status, title, description, priority,
|
|
388
|
-
parent_id, assignee, created_at, updated_at, closed_at
|
|
389
|
-
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)`,
|
|
390
|
-
[
|
|
391
|
-
cellData.id,
|
|
392
|
-
projectPath,
|
|
393
|
-
cellData.issue_type,
|
|
394
|
-
status,
|
|
395
|
-
cellData.title,
|
|
396
|
-
cellData.description || null,
|
|
397
|
-
cellData.priority,
|
|
398
|
-
cellData.parent_id || null,
|
|
399
|
-
cellData.assignee || null,
|
|
400
|
-
new Date(cellData.created_at).getTime(),
|
|
401
|
-
new Date(cellData.updated_at).getTime(),
|
|
402
|
-
closedAt,
|
|
403
|
-
]
|
|
404
|
-
);
|
|
405
|
-
|
|
406
|
-
imported++;
|
|
407
|
-
}
|
|
408
|
-
} catch (error) {
|
|
409
|
-
// Invalid JSON or import error - count and continue
|
|
410
|
-
errors++;
|
|
411
|
-
}
|
|
412
|
-
}
|
|
413
|
-
|
|
414
|
-
return { imported, updated, errors };
|
|
415
|
-
}
|
|
416
|
-
|
|
417
|
-
// ============================================================================
|
|
418
|
-
// Adapter Singleton
|
|
419
|
-
// ============================================================================
|
|
420
|
-
|
|
421
|
-
/**
|
|
422
|
-
* Lazy singleton for HiveAdapter instances
|
|
423
|
-
* Maps projectKey -> HiveAdapter
|
|
424
|
-
*/
|
|
425
|
-
const adapterCache = new Map<string, HiveAdapter>();
|
|
426
|
-
|
|
427
|
-
// ============================================================================
|
|
428
|
-
// Process Exit Hook - Safety Net for Dirty Cells
|
|
429
|
-
// ============================================================================
|
|
430
|
-
|
|
431
|
-
/**
|
|
432
|
-
* Track if exit hook is already registered (prevent duplicate registrations)
|
|
433
|
-
*/
|
|
434
|
-
let exitHookRegistered = false;
|
|
435
|
-
|
|
436
|
-
/**
|
|
437
|
-
* Track if exit hook is currently running (prevent re-entry)
|
|
438
|
-
*/
|
|
439
|
-
let exitHookRunning = false;
|
|
440
|
-
|
|
441
|
-
/**
|
|
442
|
-
* Register process.on('beforeExit') handler to flush dirty cells
|
|
443
|
-
* This is a safety net - catches any dirty cells that weren't explicitly synced
|
|
444
|
-
*
|
|
445
|
-
* Idempotent - safe to call multiple times (only registers once)
|
|
446
|
-
*/
|
|
447
|
-
function registerExitHook(): void {
|
|
448
|
-
if (exitHookRegistered) {
|
|
449
|
-
return; // Already registered
|
|
450
|
-
}
|
|
451
|
-
|
|
452
|
-
exitHookRegistered = true;
|
|
453
|
-
|
|
454
|
-
process.on('beforeExit', async (code) => {
|
|
455
|
-
// Prevent re-entry if already flushing
|
|
456
|
-
if (exitHookRunning) {
|
|
457
|
-
return;
|
|
458
|
-
}
|
|
459
|
-
|
|
460
|
-
exitHookRunning = true;
|
|
461
|
-
|
|
462
|
-
try {
|
|
463
|
-
// Flush all projects that have adapters (and potentially dirty cells)
|
|
464
|
-
const flushPromises: Promise<void>[] = [];
|
|
465
|
-
|
|
466
|
-
for (const [projectKey, adapter] of adapterCache.entries()) {
|
|
467
|
-
const flushPromise = (async () => {
|
|
468
|
-
try {
|
|
469
|
-
ensureHiveDirectory(projectKey);
|
|
470
|
-
const flushManager = new FlushManager({
|
|
471
|
-
adapter,
|
|
472
|
-
projectKey,
|
|
473
|
-
outputPath: `${projectKey}/.hive/issues.jsonl`,
|
|
474
|
-
});
|
|
475
|
-
await flushManager.flush();
|
|
476
|
-
} catch (error) {
|
|
477
|
-
// Non-fatal - log and continue
|
|
478
|
-
console.warn(
|
|
479
|
-
`[hive exit hook] Failed to flush ${projectKey}:`,
|
|
480
|
-
error instanceof Error ? error.message : String(error)
|
|
481
|
-
);
|
|
482
|
-
}
|
|
483
|
-
})();
|
|
484
|
-
|
|
485
|
-
flushPromises.push(flushPromise);
|
|
486
|
-
}
|
|
487
|
-
|
|
488
|
-
// Wait for all flushes to complete
|
|
489
|
-
await Promise.all(flushPromises);
|
|
490
|
-
} finally {
|
|
491
|
-
exitHookRunning = false;
|
|
492
|
-
}
|
|
493
|
-
});
|
|
494
|
-
}
|
|
495
|
-
|
|
496
|
-
// Register exit hook immediately when module is imported
|
|
497
|
-
registerExitHook();
|
|
498
|
-
|
|
499
|
-
/**
|
|
500
|
-
* Get or create a HiveAdapter instance for a project
|
|
501
|
-
* Exported for testing - allows tests to verify state directly
|
|
502
|
-
*
|
|
503
|
-
* On first initialization, checks for .beads/issues.jsonl and imports
|
|
504
|
-
* historical beads if the database is empty.
|
|
505
|
-
*/
|
|
506
|
-
export async function getHiveAdapter(projectKey: string): Promise<HiveAdapter> {
|
|
507
|
-
if (adapterCache.has(projectKey)) {
|
|
508
|
-
return adapterCache.get(projectKey)!;
|
|
509
|
-
}
|
|
510
|
-
|
|
511
|
-
const swarmMail = await getSwarmMailLibSQL(projectKey);
|
|
512
|
-
const db = await swarmMail.getDatabase();
|
|
513
|
-
const adapter = createHiveAdapter(db, projectKey);
|
|
514
|
-
|
|
515
|
-
// Run migrations to ensure schema exists
|
|
516
|
-
await adapter.runMigrations();
|
|
517
|
-
|
|
518
|
-
// Auto-migrate from JSONL if database is empty and file exists
|
|
519
|
-
await autoMigrateFromJSONL(adapter, projectKey);
|
|
520
|
-
|
|
521
|
-
adapterCache.set(projectKey, adapter);
|
|
522
|
-
return adapter;
|
|
523
|
-
}
|
|
524
|
-
|
|
525
|
-
// Legacy alias for backward compatibility
|
|
526
|
-
export const getBeadsAdapter = getHiveAdapter;
|
|
527
|
-
|
|
528
|
-
/**
|
|
529
|
-
* Auto-migrate cells from .hive/issues.jsonl if:
|
|
530
|
-
* 1. The JSONL file exists
|
|
531
|
-
* 2. The database has no cells for this project
|
|
532
|
-
*
|
|
533
|
-
* This enables seamless migration from the old bd CLI to the new PGLite-based system.
|
|
534
|
-
*/
|
|
535
|
-
async function autoMigrateFromJSONL(adapter: HiveAdapter, projectKey: string): Promise<void> {
|
|
536
|
-
const jsonlPath = join(projectKey, ".hive", "issues.jsonl");
|
|
537
|
-
|
|
538
|
-
// Check if JSONL file exists
|
|
539
|
-
if (!existsSync(jsonlPath)) {
|
|
540
|
-
return;
|
|
541
|
-
}
|
|
542
|
-
|
|
543
|
-
// Check if database already has cells
|
|
544
|
-
const existingCells = await adapter.queryCells(projectKey, { limit: 1 });
|
|
545
|
-
if (existingCells.length > 0) {
|
|
546
|
-
return; // Already have cells, skip migration
|
|
547
|
-
}
|
|
548
|
-
|
|
549
|
-
// Read and import JSONL
|
|
550
|
-
try {
|
|
551
|
-
const jsonlContent = readFileSync(jsonlPath, "utf-8");
|
|
552
|
-
const result = await importFromJSONL(adapter, projectKey, jsonlContent, {
|
|
553
|
-
skipExisting: true, // Safety: don't overwrite if somehow cells exist
|
|
554
|
-
});
|
|
555
|
-
|
|
556
|
-
if (result.created > 0 || result.updated > 0) {
|
|
557
|
-
console.log(
|
|
558
|
-
`[hive] Auto-migrated ${result.created} cells from ${jsonlPath} (${result.skipped} skipped, ${result.errors.length} errors)`
|
|
559
|
-
);
|
|
560
|
-
}
|
|
561
|
-
|
|
562
|
-
if (result.errors.length > 0) {
|
|
563
|
-
console.warn(
|
|
564
|
-
`[hive] Migration errors:`,
|
|
565
|
-
result.errors.slice(0, 5).map((e) => `${e.cellId}: ${e.error}`)
|
|
566
|
-
);
|
|
567
|
-
}
|
|
568
|
-
} catch (error) {
|
|
569
|
-
// Non-fatal - log and continue
|
|
570
|
-
console.warn(
|
|
571
|
-
`[hive] Failed to auto-migrate from ${jsonlPath}:`,
|
|
572
|
-
error instanceof Error ? error.message : String(error)
|
|
573
|
-
);
|
|
574
|
-
}
|
|
575
|
-
}
|
|
576
|
-
|
|
577
|
-
/**
|
|
578
|
-
* Format adapter cell for output (map field names)
|
|
579
|
-
* Adapter uses: type, created_at/updated_at (timestamps)
|
|
580
|
-
* Schema expects: issue_type, created_at/updated_at (ISO strings)
|
|
581
|
-
*/
|
|
582
|
-
function formatCellForOutput(adapterCell: AdapterCell): Record<string, unknown> {
|
|
583
|
-
return {
|
|
584
|
-
id: adapterCell.id,
|
|
585
|
-
title: adapterCell.title,
|
|
586
|
-
description: adapterCell.description || "",
|
|
587
|
-
status: adapterCell.status,
|
|
588
|
-
priority: adapterCell.priority,
|
|
589
|
-
issue_type: adapterCell.type, // Adapter: type → Schema: issue_type
|
|
590
|
-
created_at: new Date(Number(adapterCell.created_at)).toISOString(),
|
|
591
|
-
updated_at: new Date(Number(adapterCell.updated_at)).toISOString(),
|
|
592
|
-
closed_at: adapterCell.closed_at
|
|
593
|
-
? new Date(Number(adapterCell.closed_at)).toISOString()
|
|
594
|
-
: undefined,
|
|
595
|
-
parent_id: adapterCell.parent_id || undefined,
|
|
596
|
-
dependencies: [], // TODO: fetch from adapter if needed
|
|
597
|
-
metadata: {},
|
|
598
|
-
};
|
|
599
|
-
}
|
|
600
|
-
|
|
601
|
-
// ============================================================================
|
|
602
|
-
// Tool Definitions
|
|
603
|
-
// ============================================================================
|
|
604
|
-
|
|
605
|
-
/**
|
|
606
|
-
* Create a new cell with type-safe validation
|
|
607
|
-
*/
|
|
608
|
-
export const hive_create = tool({
|
|
609
|
-
description: "Create a new cell in the hive with type-safe validation",
|
|
610
|
-
args: {
|
|
611
|
-
title: tool.schema.string().describe("Cell title"),
|
|
612
|
-
type: tool.schema
|
|
613
|
-
.enum(["bug", "feature", "task", "epic", "chore"])
|
|
614
|
-
.optional()
|
|
615
|
-
.describe("Issue type (default: task)"),
|
|
616
|
-
priority: tool.schema
|
|
617
|
-
.number()
|
|
618
|
-
.min(0)
|
|
619
|
-
.max(3)
|
|
620
|
-
.optional()
|
|
621
|
-
.describe("Priority 0-3 (default: 2)"),
|
|
622
|
-
description: tool.schema.string().optional().describe("Cell description"),
|
|
623
|
-
parent_id: tool.schema
|
|
624
|
-
.string()
|
|
625
|
-
.optional()
|
|
626
|
-
.describe("Parent cell ID for epic children"),
|
|
627
|
-
},
|
|
628
|
-
async execute(args, ctx) {
|
|
629
|
-
const validated = CellCreateArgsSchema.parse(args);
|
|
630
|
-
const projectKey = getHiveWorkingDirectory();
|
|
631
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
632
|
-
|
|
633
|
-
try {
|
|
634
|
-
const cell = await adapter.createCell(projectKey, {
|
|
635
|
-
title: validated.title,
|
|
636
|
-
type: validated.type || "task",
|
|
637
|
-
priority: validated.priority ?? 2,
|
|
638
|
-
description: validated.description,
|
|
639
|
-
parent_id: validated.parent_id,
|
|
640
|
-
});
|
|
641
|
-
|
|
642
|
-
// Mark dirty for export
|
|
643
|
-
await adapter.markDirty(projectKey, cell.id);
|
|
644
|
-
|
|
645
|
-
const formatted = formatCellForOutput(cell);
|
|
646
|
-
return JSON.stringify(formatted, null, 2);
|
|
647
|
-
} catch (error) {
|
|
648
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
649
|
-
throw new HiveError(
|
|
650
|
-
`Failed to create cell: ${message}`,
|
|
651
|
-
"hive_create",
|
|
652
|
-
);
|
|
653
|
-
}
|
|
654
|
-
},
|
|
655
|
-
});
|
|
656
|
-
|
|
657
|
-
/**
|
|
658
|
-
* Create an epic with subtasks in one atomic operation
|
|
659
|
-
*/
|
|
660
|
-
export const hive_create_epic = tool({
|
|
661
|
-
description: "Create epic with subtasks in one atomic operation",
|
|
662
|
-
args: {
|
|
663
|
-
epic_title: tool.schema.string().describe("Epic title"),
|
|
664
|
-
epic_description: tool.schema
|
|
665
|
-
.string()
|
|
666
|
-
.optional()
|
|
667
|
-
.describe("Epic description"),
|
|
668
|
-
epic_id: tool.schema
|
|
669
|
-
.string()
|
|
670
|
-
.optional()
|
|
671
|
-
.describe("Custom ID for the epic (e.g., 'phase-0')"),
|
|
672
|
-
subtasks: tool.schema
|
|
673
|
-
.array(
|
|
674
|
-
tool.schema.object({
|
|
675
|
-
title: tool.schema.string(),
|
|
676
|
-
priority: tool.schema.number().min(0).max(3).optional(),
|
|
677
|
-
files: tool.schema.array(tool.schema.string()).optional(),
|
|
678
|
-
id_suffix: tool.schema
|
|
679
|
-
.string()
|
|
680
|
-
.optional()
|
|
681
|
-
.describe(
|
|
682
|
-
"Custom ID suffix (e.g., 'e2e-test' becomes 'phase-0.e2e-test')",
|
|
683
|
-
),
|
|
684
|
-
}),
|
|
685
|
-
)
|
|
686
|
-
.describe("Subtasks to create under the epic"),
|
|
687
|
-
strategy: tool.schema
|
|
688
|
-
.enum(["file-based", "feature-based", "risk-based"])
|
|
689
|
-
.optional()
|
|
690
|
-
.describe("Decomposition strategy used (default: feature-based)"),
|
|
691
|
-
task: tool.schema
|
|
692
|
-
.string()
|
|
693
|
-
.optional()
|
|
694
|
-
.describe("Original task description that was decomposed"),
|
|
695
|
-
project_key: tool.schema
|
|
696
|
-
.string()
|
|
697
|
-
.optional()
|
|
698
|
-
.describe("Project path for event emission"),
|
|
699
|
-
recovery_context: tool.schema
|
|
700
|
-
.object({
|
|
701
|
-
shared_context: tool.schema.string().optional(),
|
|
702
|
-
skills_to_load: tool.schema.array(tool.schema.string()).optional(),
|
|
703
|
-
coordinator_notes: tool.schema.string().optional(),
|
|
704
|
-
})
|
|
705
|
-
.optional()
|
|
706
|
-
.describe("Recovery context from checkpoint compaction"),
|
|
707
|
-
},
|
|
708
|
-
async execute(args, ctx) {
|
|
709
|
-
const validated = EpicCreateArgsSchema.parse(args);
|
|
710
|
-
const projectKey = getHiveWorkingDirectory();
|
|
711
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
712
|
-
const created: AdapterCell[] = [];
|
|
713
|
-
|
|
714
|
-
try {
|
|
715
|
-
// 1. Create epic
|
|
716
|
-
const epic = await adapter.createCell(projectKey, {
|
|
717
|
-
title: validated.epic_title,
|
|
718
|
-
type: "epic",
|
|
719
|
-
priority: 1,
|
|
720
|
-
description: validated.epic_description,
|
|
721
|
-
});
|
|
722
|
-
await adapter.markDirty(projectKey, epic.id);
|
|
723
|
-
created.push(epic);
|
|
724
|
-
|
|
725
|
-
// 2. Create subtasks
|
|
726
|
-
for (const subtask of validated.subtasks) {
|
|
727
|
-
const subtaskCell = await adapter.createCell(projectKey, {
|
|
728
|
-
title: subtask.title,
|
|
729
|
-
type: "task",
|
|
730
|
-
priority: subtask.priority ?? 2,
|
|
731
|
-
parent_id: epic.id,
|
|
732
|
-
});
|
|
733
|
-
await adapter.markDirty(projectKey, subtaskCell.id);
|
|
734
|
-
created.push(subtaskCell);
|
|
735
|
-
}
|
|
736
|
-
|
|
737
|
-
const result: EpicCreateResult = {
|
|
738
|
-
success: true,
|
|
739
|
-
epic: formatCellForOutput(epic) as Cell,
|
|
740
|
-
subtasks: created.slice(1).map((c) => formatCellForOutput(c) as Cell),
|
|
741
|
-
};
|
|
742
|
-
|
|
743
|
-
// Emit DecompositionGeneratedEvent for learning system
|
|
744
|
-
// Always emit using projectKey (from getHiveWorkingDirectory), not args.project_key
|
|
745
|
-
// This fixes the bug where events weren't emitted when callers didn't pass project_key
|
|
746
|
-
const effectiveProjectKey = args.project_key || projectKey;
|
|
747
|
-
try {
|
|
748
|
-
const event = createEvent("decomposition_generated", {
|
|
749
|
-
project_key: effectiveProjectKey,
|
|
750
|
-
epic_id: epic.id,
|
|
751
|
-
task: args.task || validated.epic_title,
|
|
752
|
-
context: validated.epic_description,
|
|
753
|
-
strategy: args.strategy || "feature-based",
|
|
754
|
-
epic_title: validated.epic_title,
|
|
755
|
-
subtasks: validated.subtasks.map((st) => ({
|
|
756
|
-
title: st.title,
|
|
757
|
-
files: st.files || [],
|
|
758
|
-
priority: st.priority,
|
|
759
|
-
})),
|
|
760
|
-
recovery_context: args.recovery_context,
|
|
761
|
-
});
|
|
762
|
-
await appendEvent(event, effectiveProjectKey);
|
|
763
|
-
} catch (error) {
|
|
764
|
-
// Non-fatal - log and continue
|
|
765
|
-
console.warn(
|
|
766
|
-
"[hive_create_epic] Failed to emit DecompositionGeneratedEvent:",
|
|
767
|
-
error,
|
|
768
|
-
);
|
|
769
|
-
}
|
|
770
|
-
|
|
771
|
-
// Capture decomposition_complete event for eval scoring
|
|
772
|
-
try {
|
|
773
|
-
const { captureCoordinatorEvent } = await import("./eval-capture.js");
|
|
774
|
-
|
|
775
|
-
// Build files_per_subtask map (indexed by subtask index)
|
|
776
|
-
const filesPerSubtask: Record<number, string[]> = {};
|
|
777
|
-
validated.subtasks.forEach((subtask, index) => {
|
|
778
|
-
if (subtask.files && subtask.files.length > 0) {
|
|
779
|
-
filesPerSubtask[index] = subtask.files;
|
|
780
|
-
}
|
|
781
|
-
});
|
|
782
|
-
|
|
783
|
-
captureCoordinatorEvent({
|
|
784
|
-
session_id: ctx.sessionID || "unknown",
|
|
785
|
-
epic_id: epic.id,
|
|
786
|
-
timestamp: new Date().toISOString(),
|
|
787
|
-
event_type: "DECISION",
|
|
788
|
-
decision_type: "decomposition_complete",
|
|
789
|
-
payload: {
|
|
790
|
-
subtask_count: validated.subtasks.length,
|
|
791
|
-
strategy_used: args.strategy || "feature-based",
|
|
792
|
-
files_per_subtask: filesPerSubtask,
|
|
793
|
-
epic_title: validated.epic_title,
|
|
794
|
-
task: args.task,
|
|
795
|
-
},
|
|
796
|
-
});
|
|
797
|
-
} catch (error) {
|
|
798
|
-
// Non-fatal - log and continue
|
|
799
|
-
console.warn(
|
|
800
|
-
"[hive_create_epic] Failed to capture decomposition_complete event:",
|
|
801
|
-
error,
|
|
802
|
-
);
|
|
803
|
-
}
|
|
804
|
-
|
|
805
|
-
// Sync cells to JSONL so spawned workers can see them immediately
|
|
806
|
-
try {
|
|
807
|
-
ensureHiveDirectory(projectKey);
|
|
808
|
-
const flushManager = new FlushManager({
|
|
809
|
-
adapter,
|
|
810
|
-
projectKey,
|
|
811
|
-
outputPath: `${projectKey}/.hive/issues.jsonl`,
|
|
812
|
-
});
|
|
813
|
-
await flushManager.flush();
|
|
814
|
-
} catch (error) {
|
|
815
|
-
// Non-fatal - log and continue
|
|
816
|
-
console.warn(
|
|
817
|
-
"[hive_create_epic] Failed to sync to JSONL:",
|
|
818
|
-
error,
|
|
819
|
-
);
|
|
820
|
-
}
|
|
821
|
-
|
|
822
|
-
return JSON.stringify(result, null, 2);
|
|
823
|
-
} catch (error) {
|
|
824
|
-
// Partial failure - rollback via deleteCell
|
|
825
|
-
const rollbackErrors: string[] = [];
|
|
826
|
-
|
|
827
|
-
for (const cell of created) {
|
|
828
|
-
try {
|
|
829
|
-
await adapter.deleteCell(projectKey, cell.id, {
|
|
830
|
-
reason: "Rollback partial epic",
|
|
831
|
-
});
|
|
832
|
-
} catch (rollbackError) {
|
|
833
|
-
const errMsg =
|
|
834
|
-
rollbackError instanceof Error
|
|
835
|
-
? rollbackError.message
|
|
836
|
-
: String(rollbackError);
|
|
837
|
-
console.error(`Failed to rollback cell ${cell.id}:`, rollbackError);
|
|
838
|
-
rollbackErrors.push(`${cell.id}: ${errMsg}`);
|
|
839
|
-
}
|
|
840
|
-
}
|
|
841
|
-
|
|
842
|
-
const errorMsg = error instanceof Error ? error.message : String(error);
|
|
843
|
-
let rollbackInfo = `\n\nRolled back ${created.length - rollbackErrors.length} cell(s)`;
|
|
844
|
-
|
|
845
|
-
if (rollbackErrors.length > 0) {
|
|
846
|
-
rollbackInfo += `\n\nRollback failures (${rollbackErrors.length}):\n${rollbackErrors.join("\n")}`;
|
|
847
|
-
}
|
|
848
|
-
|
|
849
|
-
throw new HiveError(
|
|
850
|
-
`Epic creation failed: ${errorMsg}${rollbackInfo}`,
|
|
851
|
-
"hive_create_epic",
|
|
852
|
-
1,
|
|
853
|
-
);
|
|
854
|
-
}
|
|
855
|
-
},
|
|
856
|
-
});
|
|
857
|
-
|
|
858
|
-
/**
|
|
859
|
-
* Query cells with filters
|
|
860
|
-
*/
|
|
861
|
-
export const hive_query = tool({
|
|
862
|
-
description: "Query hive cells with filters (replaces bd list, bd ready, bd wip)",
|
|
863
|
-
args: {
|
|
864
|
-
status: tool.schema
|
|
865
|
-
.enum(["open", "in_progress", "blocked", "closed"])
|
|
866
|
-
.optional()
|
|
867
|
-
.describe("Filter by status"),
|
|
868
|
-
type: tool.schema
|
|
869
|
-
.enum(["bug", "feature", "task", "epic", "chore"])
|
|
870
|
-
.optional()
|
|
871
|
-
.describe("Filter by type"),
|
|
872
|
-
ready: tool.schema
|
|
873
|
-
.boolean()
|
|
874
|
-
.optional()
|
|
875
|
-
.describe("Only show unblocked cells"),
|
|
876
|
-
parent_id: tool.schema
|
|
877
|
-
.string()
|
|
878
|
-
.optional()
|
|
879
|
-
.describe("Filter by parent epic ID (returns children of an epic)"),
|
|
880
|
-
limit: tool.schema
|
|
881
|
-
.number()
|
|
882
|
-
.optional()
|
|
883
|
-
.describe("Max results to return (default: 20)"),
|
|
884
|
-
},
|
|
885
|
-
async execute(args, ctx) {
|
|
886
|
-
const validated = CellQueryArgsSchema.parse(args);
|
|
887
|
-
const projectKey = getHiveWorkingDirectory();
|
|
888
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
889
|
-
|
|
890
|
-
try {
|
|
891
|
-
let cells: AdapterCell[];
|
|
892
|
-
|
|
893
|
-
if (validated.ready) {
|
|
894
|
-
const readyCell = await adapter.getNextReadyCell(projectKey);
|
|
895
|
-
cells = readyCell ? [readyCell] : [];
|
|
896
|
-
} else {
|
|
897
|
-
cells = await adapter.queryCells(projectKey, {
|
|
898
|
-
status: validated.status,
|
|
899
|
-
type: validated.type,
|
|
900
|
-
parent_id: validated.parent_id,
|
|
901
|
-
limit: validated.limit || 20,
|
|
902
|
-
});
|
|
903
|
-
}
|
|
904
|
-
|
|
905
|
-
const formatted = cells.map((c) => formatCellForOutput(c));
|
|
906
|
-
return JSON.stringify(formatted, null, 2);
|
|
907
|
-
} catch (error) {
|
|
908
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
909
|
-
throw new HiveError(
|
|
910
|
-
`Failed to query cells: ${message}`,
|
|
911
|
-
"hive_query",
|
|
912
|
-
);
|
|
913
|
-
}
|
|
914
|
-
},
|
|
915
|
-
});
|
|
916
|
-
|
|
917
|
-
/**
|
|
918
|
-
* Update a cell's status or description
|
|
919
|
-
*/
|
|
920
|
-
export const hive_update = tool({
|
|
921
|
-
description: "Update cell status/description",
|
|
922
|
-
args: {
|
|
923
|
-
id: tool.schema.string().describe("Cell ID or partial hash"),
|
|
924
|
-
status: tool.schema
|
|
925
|
-
.enum(["open", "in_progress", "blocked", "closed"])
|
|
926
|
-
.optional()
|
|
927
|
-
.describe("New status"),
|
|
928
|
-
description: tool.schema.string().optional().describe("New description"),
|
|
929
|
-
priority: tool.schema
|
|
930
|
-
.number()
|
|
931
|
-
.min(0)
|
|
932
|
-
.max(3)
|
|
933
|
-
.optional()
|
|
934
|
-
.describe("New priority"),
|
|
935
|
-
},
|
|
936
|
-
async execute(args, ctx) {
|
|
937
|
-
const validated = CellUpdateArgsSchema.parse(args);
|
|
938
|
-
const projectKey = getHiveWorkingDirectory();
|
|
939
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
940
|
-
|
|
941
|
-
try {
|
|
942
|
-
// Resolve partial ID to full ID
|
|
943
|
-
const cellId = await resolvePartialId(adapter, projectKey, validated.id) || validated.id;
|
|
944
|
-
|
|
945
|
-
let cell: AdapterCell;
|
|
946
|
-
|
|
947
|
-
// Status changes use changeCellStatus, other fields use updateCell
|
|
948
|
-
if (validated.status) {
|
|
949
|
-
cell = await adapter.changeCellStatus(
|
|
950
|
-
projectKey,
|
|
951
|
-
cellId,
|
|
952
|
-
validated.status,
|
|
953
|
-
);
|
|
954
|
-
}
|
|
955
|
-
|
|
956
|
-
// Update other fields if provided
|
|
957
|
-
if (validated.description !== undefined || validated.priority !== undefined) {
|
|
958
|
-
cell = await adapter.updateCell(projectKey, cellId, {
|
|
959
|
-
description: validated.description,
|
|
960
|
-
priority: validated.priority,
|
|
961
|
-
});
|
|
962
|
-
} else if (!validated.status) {
|
|
963
|
-
// No changes requested
|
|
964
|
-
const existingCell = await adapter.getCell(projectKey, cellId);
|
|
965
|
-
if (!existingCell) {
|
|
966
|
-
throw new HiveError(
|
|
967
|
-
`Cell not found: ${validated.id}`,
|
|
968
|
-
"hive_update",
|
|
969
|
-
);
|
|
970
|
-
}
|
|
971
|
-
cell = existingCell;
|
|
972
|
-
}
|
|
973
|
-
|
|
974
|
-
await adapter.markDirty(projectKey, cellId);
|
|
975
|
-
|
|
976
|
-
const formatted = formatCellForOutput(cell!);
|
|
977
|
-
return JSON.stringify(formatted, null, 2);
|
|
978
|
-
} catch (error) {
|
|
979
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
980
|
-
|
|
981
|
-
// Provide helpful error messages
|
|
982
|
-
if (message.includes("Ambiguous hash")) {
|
|
983
|
-
throw new HiveError(
|
|
984
|
-
`Ambiguous ID '${validated.id}': multiple cells match. Please provide more characters.`,
|
|
985
|
-
"hive_update",
|
|
986
|
-
);
|
|
987
|
-
}
|
|
988
|
-
if (message.includes("Bead not found") || message.includes("Cell not found")) {
|
|
989
|
-
throw new HiveError(
|
|
990
|
-
`No cell found matching ID '${validated.id}'`,
|
|
991
|
-
"hive_update",
|
|
992
|
-
);
|
|
993
|
-
}
|
|
994
|
-
|
|
995
|
-
throw new HiveError(
|
|
996
|
-
`Failed to update cell: ${message}`,
|
|
997
|
-
"hive_update",
|
|
998
|
-
);
|
|
999
|
-
}
|
|
1000
|
-
},
|
|
1001
|
-
});
|
|
1002
|
-
|
|
1003
|
-
/**
|
|
1004
|
-
* Close a cell with reason
|
|
1005
|
-
*/
|
|
1006
|
-
export const hive_close = tool({
|
|
1007
|
-
description: "Close a cell with reason",
|
|
1008
|
-
args: {
|
|
1009
|
-
id: tool.schema.string().describe("Cell ID or partial hash"),
|
|
1010
|
-
reason: tool.schema.string().describe("Completion reason"),
|
|
1011
|
-
},
|
|
1012
|
-
async execute(args, ctx) {
|
|
1013
|
-
const validated = CellCloseArgsSchema.parse(args);
|
|
1014
|
-
const projectKey = getHiveWorkingDirectory();
|
|
1015
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
1016
|
-
|
|
1017
|
-
try {
|
|
1018
|
-
// Resolve partial ID to full ID
|
|
1019
|
-
const cellId = await resolvePartialId(adapter, projectKey, validated.id) || validated.id;
|
|
1020
|
-
|
|
1021
|
-
const cell = await adapter.closeCell(
|
|
1022
|
-
projectKey,
|
|
1023
|
-
cellId,
|
|
1024
|
-
validated.reason,
|
|
1025
|
-
);
|
|
1026
|
-
|
|
1027
|
-
await adapter.markDirty(projectKey, cellId);
|
|
1028
|
-
|
|
1029
|
-
return `Closed ${cell.id}: ${validated.reason}`;
|
|
1030
|
-
} catch (error) {
|
|
1031
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
1032
|
-
|
|
1033
|
-
// Provide helpful error messages
|
|
1034
|
-
if (message.includes("Ambiguous hash")) {
|
|
1035
|
-
throw new HiveError(
|
|
1036
|
-
`Ambiguous ID '${validated.id}': multiple cells match. Please provide more characters.`,
|
|
1037
|
-
"hive_close",
|
|
1038
|
-
);
|
|
1039
|
-
}
|
|
1040
|
-
if (message.includes("Bead not found") || message.includes("Cell not found")) {
|
|
1041
|
-
throw new HiveError(
|
|
1042
|
-
`No cell found matching ID '${validated.id}'`,
|
|
1043
|
-
"hive_close",
|
|
1044
|
-
);
|
|
1045
|
-
}
|
|
1046
|
-
|
|
1047
|
-
throw new HiveError(
|
|
1048
|
-
`Failed to close cell: ${message}`,
|
|
1049
|
-
"hive_close",
|
|
1050
|
-
);
|
|
1051
|
-
}
|
|
1052
|
-
},
|
|
1053
|
-
});
|
|
1054
|
-
|
|
1055
|
-
/**
|
|
1056
|
-
* Mark a cell as in-progress
|
|
1057
|
-
*/
|
|
1058
|
-
export const hive_start = tool({
|
|
1059
|
-
description:
|
|
1060
|
-
"Mark a cell as in-progress (shortcut for update --status in_progress)",
|
|
1061
|
-
args: {
|
|
1062
|
-
id: tool.schema.string().describe("Cell ID or partial hash"),
|
|
1063
|
-
},
|
|
1064
|
-
async execute(args, ctx) {
|
|
1065
|
-
const projectKey = getHiveWorkingDirectory();
|
|
1066
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
1067
|
-
|
|
1068
|
-
try {
|
|
1069
|
-
// Resolve partial ID to full ID
|
|
1070
|
-
const cellId = await resolvePartialId(adapter, projectKey, args.id) || args.id;
|
|
1071
|
-
|
|
1072
|
-
const cell = await adapter.changeCellStatus(
|
|
1073
|
-
projectKey,
|
|
1074
|
-
cellId,
|
|
1075
|
-
"in_progress",
|
|
1076
|
-
);
|
|
1077
|
-
|
|
1078
|
-
await adapter.markDirty(projectKey, cellId);
|
|
1079
|
-
|
|
1080
|
-
return `Started: ${cell.id}`;
|
|
1081
|
-
} catch (error) {
|
|
1082
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
1083
|
-
|
|
1084
|
-
// Provide helpful error messages
|
|
1085
|
-
if (message.includes("Ambiguous hash")) {
|
|
1086
|
-
throw new HiveError(
|
|
1087
|
-
`Ambiguous ID '${args.id}': multiple cells match. Please provide more characters.`,
|
|
1088
|
-
"hive_start",
|
|
1089
|
-
);
|
|
1090
|
-
}
|
|
1091
|
-
if (message.includes("Bead not found") || message.includes("Cell not found")) {
|
|
1092
|
-
throw new HiveError(
|
|
1093
|
-
`No cell found matching ID '${args.id}'`,
|
|
1094
|
-
"hive_start",
|
|
1095
|
-
);
|
|
1096
|
-
}
|
|
1097
|
-
|
|
1098
|
-
throw new HiveError(
|
|
1099
|
-
`Failed to start cell: ${message}`,
|
|
1100
|
-
"hive_start",
|
|
1101
|
-
);
|
|
1102
|
-
}
|
|
1103
|
-
},
|
|
1104
|
-
});
|
|
1105
|
-
|
|
1106
|
-
/**
|
|
1107
|
-
* Get the next ready cell
|
|
1108
|
-
*/
|
|
1109
|
-
export const hive_ready = tool({
|
|
1110
|
-
description: "Get the next ready cell (unblocked, highest priority)",
|
|
1111
|
-
args: {},
|
|
1112
|
-
async execute(args, ctx) {
|
|
1113
|
-
const projectKey = getHiveWorkingDirectory();
|
|
1114
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
1115
|
-
|
|
1116
|
-
try {
|
|
1117
|
-
const cell = await adapter.getNextReadyCell(projectKey);
|
|
1118
|
-
|
|
1119
|
-
if (!cell) {
|
|
1120
|
-
return "No ready cells";
|
|
1121
|
-
}
|
|
1122
|
-
|
|
1123
|
-
const formatted = formatCellForOutput(cell);
|
|
1124
|
-
return JSON.stringify(formatted, null, 2);
|
|
1125
|
-
} catch (error) {
|
|
1126
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
1127
|
-
throw new HiveError(
|
|
1128
|
-
`Failed to get ready cells: ${message}`,
|
|
1129
|
-
"hive_ready",
|
|
1130
|
-
);
|
|
1131
|
-
}
|
|
1132
|
-
},
|
|
1133
|
-
});
|
|
1134
|
-
|
|
1135
|
-
/**
|
|
1136
|
-
* Query cells from the hive database with flexible filtering
|
|
1137
|
-
*/
|
|
1138
|
-
export const hive_cells = tool({
|
|
1139
|
-
description: `Query cells from the hive database with flexible filtering.
|
|
1140
|
-
|
|
1141
|
-
USE THIS TOOL TO:
|
|
1142
|
-
- List all open cells: hive_cells()
|
|
1143
|
-
- Find cells by status: hive_cells({ status: "in_progress" })
|
|
1144
|
-
- Find cells by type: hive_cells({ type: "bug" })
|
|
1145
|
-
- Get a specific cell by partial ID: hive_cells({ id: "mjkmd" })
|
|
1146
|
-
- Get the next ready (unblocked) cell: hive_cells({ ready: true })
|
|
1147
|
-
- Get children of an epic: hive_cells({ parent_id: "epic-id" })
|
|
1148
|
-
- Combine filters: hive_cells({ status: "open", type: "task" })
|
|
1149
|
-
|
|
1150
|
-
RETURNS: Array of cells with id, title, status, priority, type, parent_id, created_at, updated_at
|
|
1151
|
-
|
|
1152
|
-
PREFER THIS OVER hive_query when you need to:
|
|
1153
|
-
- See what work is available
|
|
1154
|
-
- Check status of multiple cells
|
|
1155
|
-
- Find cells matching criteria
|
|
1156
|
-
- Look up a cell by partial ID`,
|
|
1157
|
-
args: {
|
|
1158
|
-
id: tool.schema.string().optional().describe("Partial or full cell ID to look up"),
|
|
1159
|
-
status: tool.schema.enum(["open", "in_progress", "blocked", "closed"]).optional().describe("Filter by status"),
|
|
1160
|
-
type: tool.schema.enum(["task", "bug", "feature", "epic", "chore"]).optional().describe("Filter by type"),
|
|
1161
|
-
parent_id: tool.schema.string().optional().describe("Filter by parent epic ID (returns children of an epic)"),
|
|
1162
|
-
ready: tool.schema.boolean().optional().describe("If true, return only the next unblocked cell"),
|
|
1163
|
-
limit: tool.schema.number().optional().describe("Max cells to return (default 20)"),
|
|
1164
|
-
},
|
|
1165
|
-
async execute(args, ctx) {
|
|
1166
|
-
const projectKey = getHiveWorkingDirectory();
|
|
1167
|
-
const adapter = await getHiveAdapter(projectKey);
|
|
1168
|
-
|
|
1169
|
-
try {
|
|
1170
|
-
// If specific ID requested, resolve and return single cell
|
|
1171
|
-
if (args.id) {
|
|
1172
|
-
const fullId = await resolvePartialId(adapter, projectKey, args.id) || args.id;
|
|
1173
|
-
const cell = await adapter.getCell(projectKey, fullId);
|
|
1174
|
-
if (!cell) {
|
|
1175
|
-
throw new HiveError(`No cell found matching ID '${args.id}'`, "hive_cells");
|
|
1176
|
-
}
|
|
1177
|
-
const formatted = formatCellForOutput(cell);
|
|
1178
|
-
return JSON.stringify([formatted], null, 2);
|
|
1179
|
-
}
|
|
1180
|
-
|
|
1181
|
-
// If ready flag, return next unblocked cell
|
|
1182
|
-
if (args.ready) {
|
|
1183
|
-
const ready = await adapter.getNextReadyCell(projectKey);
|
|
1184
|
-
if (!ready) {
|
|
1185
|
-
return JSON.stringify([], null, 2);
|
|
1186
|
-
}
|
|
1187
|
-
const formatted = formatCellForOutput(ready);
|
|
1188
|
-
return JSON.stringify([formatted], null, 2);
|
|
1189
|
-
}
|
|
1190
|
-
|
|
1191
|
-
// Query with filters
|
|
1192
|
-
const cells = await adapter.queryCells(projectKey, {
|
|
1193
|
-
status: args.status,
|
|
1194
|
-
type: args.type,
|
|
1195
|
-
parent_id: args.parent_id,
|
|
1196
|
-
limit: args.limit || 20,
|
|
1197
|
-
});
|
|
1198
|
-
|
|
1199
|
-
const formatted = cells.map(c => formatCellForOutput(c));
|
|
1200
|
-
return JSON.stringify(formatted, null, 2);
|
|
1201
|
-
} catch (error) {
|
|
1202
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
1203
|
-
|
|
1204
|
-
// Provide helpful error messages
|
|
1205
|
-
if (message.includes("Ambiguous hash")) {
|
|
1206
|
-
throw new HiveError(
|
|
1207
|
-
`Ambiguous ID '${args.id}': multiple cells match. Please provide more characters.`,
|
|
1208
|
-
"hive_cells",
|
|
1209
|
-
);
|
|
1210
|
-
}
|
|
1211
|
-
if (message.includes("Bead not found") || message.includes("Cell not found")) {
|
|
1212
|
-
throw new HiveError(
|
|
1213
|
-
`No cell found matching ID '${args.id || "unknown"}'`,
|
|
1214
|
-
"hive_cells",
|
|
1215
|
-
);
|
|
1216
|
-
}
|
|
1217
|
-
|
|
1218
|
-
throw new HiveError(
|
|
1219
|
-
`Failed to query cells: ${message}`,
|
|
1220
|
-
"hive_cells",
|
|
1221
|
-
);
|
|
1222
|
-
}
|
|
1223
|
-
},
|
|
1224
|
-
});
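The description above enumerates the filter combinations; the sketch below restates them as direct calls. This is illustrative only: the argument objects match the removed schema, but the `{} as any` context is a stand-in for whatever tool context the host normally supplies, and the parsed shape assumes the JSON array documented under RETURNS.

    // Minimal sketch of the advertised filter combinations (assumes an async context).
    const inProgress = JSON.parse(
      await hive_cells.execute({ status: "in_progress", limit: 5 }, {} as any),
    );
    const nextReady = JSON.parse(await hive_cells.execute({ ready: true }, {} as any));
    const children = JSON.parse(await hive_cells.execute({ parent_id: "epic-id" }, {} as any));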
-
-/**
- * Sync hive to git and push
- */
-export const hive_sync = tool({
-  description: "Sync hive to git and push (MANDATORY at session end)",
-  args: {
-    auto_pull: tool.schema
-      .boolean()
-      .optional()
-      .describe("Pull before sync (default: true)"),
-  },
-  async execute(args, ctx) {
-    const autoPull = args.auto_pull ?? true;
-    const projectKey = getHiveWorkingDirectory();
-    const adapter = await getHiveAdapter(projectKey);
-    const TIMEOUT_MS = 30000; // 30 seconds
-
-    /**
-     * Helper to run a command with timeout
-     */
-    const withTimeout = async <T>(
-      promise: Promise<T>,
-      timeoutMs: number,
-      operation: string,
-    ): Promise<T> => {
-      let timeoutId: ReturnType<typeof setTimeout> | undefined;
-
-      const timeoutPromise = new Promise<never>((_, reject) => {
-        timeoutId = setTimeout(
-          () =>
-            reject(
-              new HiveError(
-                `Operation timed out after ${timeoutMs}ms`,
-                operation,
-              ),
-            ),
-          timeoutMs,
-        );
-      });
-
-      try {
-        return await Promise.race([promise, timeoutPromise]);
-      } finally {
-        if (timeoutId !== undefined) {
-          clearTimeout(timeoutId);
-        }
-      }
-    };
-
-    // 1. Ensure .hive directory exists before writing
-    ensureHiveDirectory(projectKey);
-
-    // 2. Flush cells to JSONL using FlushManager
-    const flushManager = new FlushManager({
-      adapter,
-      projectKey,
-      outputPath: `${projectKey}/.hive/issues.jsonl`,
-    });
-
-    const flushResult = await withTimeout(
-      flushManager.flush(),
-      TIMEOUT_MS,
-      "flush hive",
-    );
-
-    // 2b. Sync memories to JSONL
-    const swarmMail = await getSwarmMailLibSQL(projectKey);
-    const db = await swarmMail.getDatabase();
-    const hivePath = join(projectKey, ".hive");
-    let memoriesSynced = 0;
-    try {
-      const memoryResult = await syncMemories(db, hivePath);
-      memoriesSynced = memoryResult.exported;
-    } catch (err) {
-      // Memory sync is optional - don't fail if it errors
-      console.warn("[hive_sync] Memory sync warning:", err);
-    }
-
-    if (flushResult.cellsExported === 0 && memoriesSynced === 0) {
-      return "No cells or memories to sync";
-    }
-
-    // 3. Check if there are changes to commit
-    const hiveStatusResult = await runGitCommand([
-      "status",
-      "--porcelain",
-      ".hive/",
-    ]);
-    const hasChanges = hiveStatusResult.stdout.trim() !== "";
-
-    if (hasChanges) {
-      // 4. Stage .hive changes
-      const addResult = await runGitCommand(["add", ".hive/"]);
-      if (addResult.exitCode !== 0) {
-        throw new HiveError(
-          `Failed to stage hive: ${addResult.stderr}`,
-          "git add .hive/",
-          addResult.exitCode,
-        );
-      }
-
-      // 5. Commit
-      const commitResult = await withTimeout(
-        runGitCommand(["commit", "-m", "chore: sync hive"]),
-        TIMEOUT_MS,
-        "git commit",
-      );
-      if (
-        commitResult.exitCode !== 0 &&
-        !commitResult.stdout.includes("nothing to commit")
-      ) {
-        throw new HiveError(
-          `Failed to commit hive: ${commitResult.stderr}`,
-          "git commit",
-          commitResult.exitCode,
-        );
-      }
-    }
-
-    // 6. Pull if requested (check if remote exists first)
-    if (autoPull) {
-      const remoteCheckResult = await runGitCommand(["remote"]);
-      const hasRemote = remoteCheckResult.stdout.trim() !== "";
-
-      if (hasRemote) {
-        // Check for unstaged changes that would block pull --rebase
-        const statusResult = await runGitCommand(["status", "--porcelain"]);
-        const hasUnstagedChanges = statusResult.stdout.trim() !== "";
-        let didStash = false;
-
-        if (hasUnstagedChanges) {
-          // Stash all changes (including untracked) before pull
-          const stashResult = await runGitCommand(["stash", "push", "-u", "-m", "hive_sync: auto-stash before pull"]);
-          if (stashResult.exitCode === 0) {
-            didStash = true;
-          }
-          // If stash fails (e.g., nothing to stash), continue anyway
-        }
-
-        try {
-          const pullResult = await withTimeout(
-            runGitCommand(["pull", "--rebase"]),
-            TIMEOUT_MS,
-            "git pull --rebase",
-          );
-
-          if (pullResult.exitCode !== 0) {
-            throw new HiveError(
-              `Failed to pull: ${pullResult.stderr}`,
-              "git pull --rebase",
-              pullResult.exitCode,
-            );
-          }
-        } finally {
-          // Pop stash if we stashed
-          if (didStash) {
-            const popResult = await runGitCommand(["stash", "pop"]);
-            if (popResult.exitCode !== 0) {
-              // Stash pop failed - likely a conflict. Log warning but don't fail sync.
-              console.warn(`[hive_sync] Warning: stash pop failed. Your changes are in 'git stash list'. Error: ${popResult.stderr}`);
-            }
-          }
-        }
-      }
-    }
-
-    // 7. Push (check if remote exists first)
-    const remoteCheckResult = await runGitCommand(["remote"]);
-    const hasRemote = remoteCheckResult.stdout.trim() !== "";
-
-    if (hasRemote) {
-      const pushResult = await withTimeout(
-        runGitCommand(["push"]),
-        TIMEOUT_MS,
-        "git push",
-      );
-      if (pushResult.exitCode !== 0) {
-        throw new HiveError(
-          `Failed to push: ${pushResult.stderr}`,
-          "git push",
-          pushResult.exitCode,
-        );
-      }
-      return "Hive synced and pushed successfully";
-    } else {
-      return "Hive synced successfully (no remote configured)";
-    }
-  },
-});
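Every slow step in hive_sync (flush, commit, pull, push) goes through the withTimeout helper, which races the real promise against a timer that rejects, then clears the timer in a finally block so a fast result does not leave a dangling timeout. A generic restatement of that pattern, minus HiveError, assuming nothing beyond standard setTimeout and Promise.race:

    // Race a promise against a deadline; always clear the timer, win or lose.
    async function withDeadline<T>(work: Promise<T>, ms: number, label: string): Promise<T> {
      let timer: ReturnType<typeof setTimeout> | undefined;
      // Rejecting side of the race; it never resolves.
      const deadline = new Promise<never>((_, reject) => {
        timer = setTimeout(() => reject(new Error(`${label} timed out after ${ms}ms`)), ms);
      });
      try {
        return await Promise.race([work, deadline]);
      } finally {
        // Without this, a won race would leave the timer pending until it fires.
        if (timer !== undefined) clearTimeout(timer);
      }
    }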
-
-/**
- * Link a cell to an Agent Mail thread
- */
-export const hive_link_thread = tool({
-  description: "Add metadata linking cell to Agent Mail thread",
-  args: {
-    bead_id: tool.schema.string().describe("Cell ID"),
-    thread_id: tool.schema.string().describe("Agent Mail thread ID"),
-  },
-  async execute(args, ctx) {
-    const projectKey = getHiveWorkingDirectory();
-    const adapter = await getHiveAdapter(projectKey);
-
-    try {
-      const cell = await adapter.getCell(projectKey, args.bead_id);
-
-      if (!cell) {
-        throw new HiveError(
-          `Cell not found: ${args.bead_id}`,
-          "hive_link_thread",
-        );
-      }
-
-      const existingDesc = cell.description || "";
-      const threadMarker = `[thread:${args.thread_id}]`;
-
-      if (existingDesc.includes(threadMarker)) {
-        return `Cell ${args.bead_id} already linked to thread ${args.thread_id}`;
-      }
-
-      const newDesc = existingDesc
-        ? `${existingDesc}\n\n${threadMarker}`
-        : threadMarker;
-
-      await adapter.updateCell(projectKey, args.bead_id, {
-        description: newDesc,
-      });
-
-      await adapter.markDirty(projectKey, args.bead_id);
-
-      return `Linked cell ${args.bead_id} to thread ${args.thread_id}`;
-    } catch (error) {
-      const message = error instanceof Error ? error.message : String(error);
-      throw new HiveError(
-        `Failed to link thread: ${message}`,
-        "hive_link_thread",
-      );
-    }
-  },
-});
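The thread link is stored inline as a [thread:...] marker appended to the cell description, and the includes() check makes repeat calls idempotent. A worked example of that composition; the thread ID tm_123 is made up for illustration:

    const existingDesc = "Implement retry logic";
    const threadMarker = `[thread:tm_123]`;
    // First call: marker is appended after a blank line.
    const newDesc = existingDesc ? `${existingDesc}\n\n${threadMarker}` : threadMarker;
    // newDesc === "Implement retry logic\n\n[thread:tm_123]"
    // A second call sees existingDesc.includes(threadMarker) === true and
    // returns "already linked" without rewriting the description.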
-
-// ============================================================================
-// Export all tools
-// ============================================================================
-
-export const hiveTools = {
-  hive_create,
-  hive_create_epic,
-  hive_query,
-  hive_update,
-  hive_close,
-  hive_start,
-  hive_ready,
-  hive_cells,
-  hive_sync,
-  hive_link_thread,
-};
-
-// ============================================================================
-// Deprecation Warning System
-// ============================================================================
-
-/**
- * Track which deprecated tools have been warned about.
- * Only warn once per tool name to avoid spam.
- */
-const warnedTools = new Set<string>();
-
-/**
- * Log a deprecation warning for a renamed tool.
- * Only warns once per tool name per session.
- *
- * @param oldName - The deprecated tool name (e.g., "beads_create")
- * @param newName - The new tool name to use instead (e.g., "hive_create")
- */
-function warnDeprecated(oldName: string, newName: string): void {
-  if (warnedTools.has(oldName)) {
-    return; // Already warned
-  }
-
-  warnedTools.add(oldName);
-  console.warn(
-    `[DEPRECATED] ${oldName} is deprecated, use ${newName} instead. Will be removed in v1.0`
-  );
-}
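Because warnedTools is module-level state, deduplication spans the whole session rather than a single call site. For example:

    warnDeprecated("beads_query", "hive_query"); // logs the [DEPRECATED] warning
    warnDeprecated("beads_query", "hive_query"); // silent: "beads_query" is already in warnedTools
    warnDeprecated("beads_sync", "hive_sync");   // logs again: a different old name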
-
-// ============================================================================
-// Legacy Aliases (DEPRECATED - use hive_* instead)
-// ============================================================================
-
-/**
- * @deprecated Use hive_create instead. Will be removed in v1.0
- */
-export const beads_create = tool({
-  ...hive_create,
-  async execute(args, ctx) {
-    warnDeprecated('beads_create', 'hive_create');
-    return hive_create.execute(args, ctx);
-  }
-});
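Each alias spreads the real tool's definition and overrides only execute, so the description and args schema stay in sync with the hive_* original while the wrapper adds the one-time warning. A hypothetical factory, not in the package, that would express the aliases in this section in one place; types are loosened to any because tool()'s generics are not visible in this diff:

    function deprecatedAlias(oldName: string, newName: string, target: any) {
      return tool({
        ...target, // reuse description and args from the real tool
        async execute(args: any, ctx: any) {
          warnDeprecated(oldName, newName);
          return target.execute(args, ctx);
        },
      });
    }
    // e.g. export const beads_query = deprecatedAlias("beads_query", "hive_query", hive_query);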
-
-/**
- * @deprecated Use hive_create_epic instead. Will be removed in v1.0
- */
-export const beads_create_epic = tool({
-  ...hive_create_epic,
-  async execute(args, ctx) {
-    warnDeprecated('beads_create_epic', 'hive_create_epic');
-    return hive_create_epic.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hive_query instead. Will be removed in v1.0
- */
-export const beads_query = tool({
-  ...hive_query,
-  async execute(args, ctx) {
-    warnDeprecated('beads_query', 'hive_query');
-    return hive_query.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hive_update instead. Will be removed in v1.0
- */
-export const beads_update = tool({
-  ...hive_update,
-  async execute(args, ctx) {
-    warnDeprecated('beads_update', 'hive_update');
-    return hive_update.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hive_close instead. Will be removed in v1.0
- */
-export const beads_close = tool({
-  ...hive_close,
-  async execute(args, ctx) {
-    warnDeprecated('beads_close', 'hive_close');
-    return hive_close.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hive_start instead. Will be removed in v1.0
- */
-export const beads_start = tool({
-  ...hive_start,
-  async execute(args, ctx) {
-    warnDeprecated('beads_start', 'hive_start');
-    return hive_start.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hive_ready instead. Will be removed in v1.0
- */
-export const beads_ready = tool({
-  ...hive_ready,
-  async execute(args, ctx) {
-    warnDeprecated('beads_ready', 'hive_ready');
-    return hive_ready.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hive_sync instead. Will be removed in v1.0
- */
-export const beads_sync = tool({
-  ...hive_sync,
-  async execute(args, ctx) {
-    warnDeprecated('beads_sync', 'hive_sync');
-    return hive_sync.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hive_link_thread instead. Will be removed in v1.0
- */
-export const beads_link_thread = tool({
-  ...hive_link_thread,
-  async execute(args, ctx) {
-    warnDeprecated('beads_link_thread', 'hive_link_thread');
-    return hive_link_thread.execute(args, ctx);
-  }
-});
-
-/**
- * @deprecated Use hiveTools instead. Will be removed in v1.0
- */
-export const beadsTools = {
-  beads_create,
-  beads_create_epic,
-  beads_query,
-  beads_update,
-  beads_close,
-  beads_start,
-  beads_ready,
-  beads_sync,
-  beads_link_thread,
-};