opencode-swarm-plugin 0.43.0 → 0.44.1
This diff shows the contents of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
- package/bin/cass.characterization.test.ts +422 -0
- package/bin/swarm.serve.test.ts +6 -4
- package/bin/swarm.test.ts +68 -0
- package/bin/swarm.ts +81 -8
- package/dist/compaction-prompt-scoring.js +139 -0
- package/dist/contributor-tools.d.ts +42 -0
- package/dist/contributor-tools.d.ts.map +1 -0
- package/dist/eval-capture.js +12811 -0
- package/dist/hive.d.ts.map +1 -1
- package/dist/index.d.ts +12 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +7728 -62590
- package/dist/plugin.js +23833 -78695
- package/dist/sessions/agent-discovery.d.ts +59 -0
- package/dist/sessions/agent-discovery.d.ts.map +1 -0
- package/dist/sessions/index.d.ts +10 -0
- package/dist/sessions/index.d.ts.map +1 -0
- package/dist/swarm-orchestrate.d.ts.map +1 -1
- package/dist/swarm-prompts.d.ts.map +1 -1
- package/dist/swarm-review.d.ts.map +1 -1
- package/package.json +17 -5
- package/.changeset/swarm-insights-data-layer.md +0 -63
- package/.hive/analysis/eval-failure-analysis-2025-12-25.md +0 -331
- package/.hive/analysis/session-data-quality-audit.md +0 -320
- package/.hive/eval-results.json +0 -483
- package/.hive/issues.jsonl +0 -138
- package/.hive/memories.jsonl +0 -729
- package/.opencode/eval-history.jsonl +0 -327
- package/.turbo/turbo-build.log +0 -9
- package/CHANGELOG.md +0 -2255
- package/SCORER-ANALYSIS.md +0 -598
- package/docs/analysis/subagent-coordination-patterns.md +0 -902
- package/docs/analysis-socratic-planner-pattern.md +0 -504
- package/docs/planning/ADR-001-monorepo-structure.md +0 -171
- package/docs/planning/ADR-002-package-extraction.md +0 -393
- package/docs/planning/ADR-003-performance-improvements.md +0 -451
- package/docs/planning/ADR-004-message-queue-features.md +0 -187
- package/docs/planning/ADR-005-devtools-observability.md +0 -202
- package/docs/planning/ADR-007-swarm-enhancements-worktree-review.md +0 -168
- package/docs/planning/ADR-008-worker-handoff-protocol.md +0 -293
- package/docs/planning/ADR-009-oh-my-opencode-patterns.md +0 -353
- package/docs/planning/ROADMAP.md +0 -368
- package/docs/semantic-memory-cli-syntax.md +0 -123
- package/docs/swarm-mail-architecture.md +0 -1147
- package/docs/testing/context-recovery-test.md +0 -470
- package/evals/ARCHITECTURE.md +0 -1189
- package/evals/README.md +0 -768
- package/evals/compaction-prompt.eval.ts +0 -149
- package/evals/compaction-resumption.eval.ts +0 -289
- package/evals/coordinator-behavior.eval.ts +0 -307
- package/evals/coordinator-session.eval.ts +0 -154
- package/evals/evalite.config.ts.bak +0 -15
- package/evals/example.eval.ts +0 -31
- package/evals/fixtures/compaction-cases.ts +0 -350
- package/evals/fixtures/compaction-prompt-cases.ts +0 -311
- package/evals/fixtures/coordinator-sessions.ts +0 -328
- package/evals/fixtures/decomposition-cases.ts +0 -105
- package/evals/lib/compaction-loader.test.ts +0 -248
- package/evals/lib/compaction-loader.ts +0 -320
- package/evals/lib/data-loader.evalite-test.ts +0 -289
- package/evals/lib/data-loader.test.ts +0 -345
- package/evals/lib/data-loader.ts +0 -281
- package/evals/lib/llm.ts +0 -115
- package/evals/scorers/compaction-prompt-scorers.ts +0 -145
- package/evals/scorers/compaction-scorers.ts +0 -305
- package/evals/scorers/coordinator-discipline.evalite-test.ts +0 -539
- package/evals/scorers/coordinator-discipline.ts +0 -325
- package/evals/scorers/index.test.ts +0 -146
- package/evals/scorers/index.ts +0 -328
- package/evals/scorers/outcome-scorers.evalite-test.ts +0 -27
- package/evals/scorers/outcome-scorers.ts +0 -349
- package/evals/swarm-decomposition.eval.ts +0 -121
- package/examples/commands/swarm.md +0 -745
- package/examples/plugin-wrapper-template.ts +0 -2426
- package/examples/skills/hive-workflow/SKILL.md +0 -212
- package/examples/skills/skill-creator/SKILL.md +0 -223
- package/examples/skills/swarm-coordination/SKILL.md +0 -292
- package/global-skills/cli-builder/SKILL.md +0 -344
- package/global-skills/cli-builder/references/advanced-patterns.md +0 -244
- package/global-skills/learning-systems/SKILL.md +0 -644
- package/global-skills/skill-creator/LICENSE.txt +0 -202
- package/global-skills/skill-creator/SKILL.md +0 -352
- package/global-skills/skill-creator/references/output-patterns.md +0 -82
- package/global-skills/skill-creator/references/workflows.md +0 -28
- package/global-skills/swarm-coordination/SKILL.md +0 -995
- package/global-skills/swarm-coordination/references/coordinator-patterns.md +0 -235
- package/global-skills/swarm-coordination/references/strategies.md +0 -138
- package/global-skills/system-design/SKILL.md +0 -213
- package/global-skills/testing-patterns/SKILL.md +0 -430
- package/global-skills/testing-patterns/references/dependency-breaking-catalog.md +0 -586
- package/opencode-swarm-plugin-0.30.7.tgz +0 -0
- package/opencode-swarm-plugin-0.31.0.tgz +0 -0
- package/scripts/cleanup-test-memories.ts +0 -346
- package/scripts/init-skill.ts +0 -222
- package/scripts/migrate-unknown-sessions.ts +0 -349
- package/scripts/validate-skill.ts +0 -204
- package/src/agent-mail.ts +0 -1724
- package/src/anti-patterns.test.ts +0 -1167
- package/src/anti-patterns.ts +0 -448
- package/src/compaction-capture.integration.test.ts +0 -257
- package/src/compaction-hook.test.ts +0 -838
- package/src/compaction-hook.ts +0 -1204
- package/src/compaction-observability.integration.test.ts +0 -139
- package/src/compaction-observability.test.ts +0 -187
- package/src/compaction-observability.ts +0 -324
- package/src/compaction-prompt-scorers.test.ts +0 -475
- package/src/compaction-prompt-scoring.ts +0 -300
- package/src/dashboard.test.ts +0 -611
- package/src/dashboard.ts +0 -462
- package/src/error-enrichment.test.ts +0 -403
- package/src/error-enrichment.ts +0 -219
- package/src/eval-capture.test.ts +0 -1015
- package/src/eval-capture.ts +0 -929
- package/src/eval-gates.test.ts +0 -306
- package/src/eval-gates.ts +0 -218
- package/src/eval-history.test.ts +0 -508
- package/src/eval-history.ts +0 -214
- package/src/eval-learning.test.ts +0 -378
- package/src/eval-learning.ts +0 -360
- package/src/eval-runner.test.ts +0 -223
- package/src/eval-runner.ts +0 -402
- package/src/export-tools.test.ts +0 -476
- package/src/export-tools.ts +0 -257
- package/src/hive.integration.test.ts +0 -2241
- package/src/hive.ts +0 -1628
- package/src/index.ts +0 -935
- package/src/learning.integration.test.ts +0 -1815
- package/src/learning.ts +0 -1079
- package/src/logger.test.ts +0 -189
- package/src/logger.ts +0 -135
- package/src/mandate-promotion.test.ts +0 -473
- package/src/mandate-promotion.ts +0 -239
- package/src/mandate-storage.integration.test.ts +0 -601
- package/src/mandate-storage.test.ts +0 -578
- package/src/mandate-storage.ts +0 -794
- package/src/mandates.ts +0 -540
- package/src/memory-tools.test.ts +0 -195
- package/src/memory-tools.ts +0 -344
- package/src/memory.integration.test.ts +0 -334
- package/src/memory.test.ts +0 -158
- package/src/memory.ts +0 -527
- package/src/model-selection.test.ts +0 -188
- package/src/model-selection.ts +0 -68
- package/src/observability-tools.test.ts +0 -359
- package/src/observability-tools.ts +0 -871
- package/src/output-guardrails.test.ts +0 -438
- package/src/output-guardrails.ts +0 -381
- package/src/pattern-maturity.test.ts +0 -1160
- package/src/pattern-maturity.ts +0 -525
- package/src/planning-guardrails.test.ts +0 -491
- package/src/planning-guardrails.ts +0 -438
- package/src/plugin.ts +0 -23
- package/src/post-compaction-tracker.test.ts +0 -251
- package/src/post-compaction-tracker.ts +0 -237
- package/src/query-tools.test.ts +0 -636
- package/src/query-tools.ts +0 -324
- package/src/rate-limiter.integration.test.ts +0 -466
- package/src/rate-limiter.ts +0 -774
- package/src/replay-tools.test.ts +0 -496
- package/src/replay-tools.ts +0 -240
- package/src/repo-crawl.integration.test.ts +0 -441
- package/src/repo-crawl.ts +0 -610
- package/src/schemas/cell-events.test.ts +0 -347
- package/src/schemas/cell-events.ts +0 -807
- package/src/schemas/cell.ts +0 -257
- package/src/schemas/evaluation.ts +0 -166
- package/src/schemas/index.test.ts +0 -199
- package/src/schemas/index.ts +0 -286
- package/src/schemas/mandate.ts +0 -232
- package/src/schemas/swarm-context.ts +0 -115
- package/src/schemas/task.ts +0 -161
- package/src/schemas/worker-handoff.test.ts +0 -302
- package/src/schemas/worker-handoff.ts +0 -131
- package/src/skills.integration.test.ts +0 -1192
- package/src/skills.test.ts +0 -643
- package/src/skills.ts +0 -1549
- package/src/storage.integration.test.ts +0 -341
- package/src/storage.ts +0 -884
- package/src/structured.integration.test.ts +0 -817
- package/src/structured.test.ts +0 -1046
- package/src/structured.ts +0 -762
- package/src/swarm-decompose.test.ts +0 -188
- package/src/swarm-decompose.ts +0 -1302
- package/src/swarm-deferred.integration.test.ts +0 -157
- package/src/swarm-deferred.test.ts +0 -38
- package/src/swarm-insights.test.ts +0 -214
- package/src/swarm-insights.ts +0 -459
- package/src/swarm-mail.integration.test.ts +0 -970
- package/src/swarm-mail.ts +0 -739
- package/src/swarm-orchestrate.integration.test.ts +0 -282
- package/src/swarm-orchestrate.test.ts +0 -548
- package/src/swarm-orchestrate.ts +0 -3084
- package/src/swarm-prompts.test.ts +0 -1270
- package/src/swarm-prompts.ts +0 -2077
- package/src/swarm-research.integration.test.ts +0 -701
- package/src/swarm-research.test.ts +0 -698
- package/src/swarm-research.ts +0 -472
- package/src/swarm-review.integration.test.ts +0 -285
- package/src/swarm-review.test.ts +0 -879
- package/src/swarm-review.ts +0 -709
- package/src/swarm-strategies.ts +0 -407
- package/src/swarm-worktree.test.ts +0 -501
- package/src/swarm-worktree.ts +0 -575
- package/src/swarm.integration.test.ts +0 -2377
- package/src/swarm.ts +0 -38
- package/src/tool-adapter.integration.test.ts +0 -1221
- package/src/tool-availability.ts +0 -461
- package/tsconfig.json +0 -28
|
@@ -1,2241 +0,0 @@
|
|
|
1
|
-
/**
|
|
2
|
-
* Hive Integration Tests
|
|
3
|
-
*
|
|
4
|
-
* These tests exercise the HiveAdapter-based tools directly.
|
|
5
|
-
* They validate the tool wrappers work correctly with actual hive operations.
|
|
6
|
-
*
|
|
7
|
-
* Run with: bun test src/hive.integration.test.ts
|
|
8
|
-
*/
|
|
9
|
-
import { describe, it, expect, beforeAll, beforeEach, afterAll } from "vitest";
|
|
10
|
-
import { tmpdir } from "node:os";
|
|
11
|
-
import { join } from "node:path";
|
|
12
|
-
import {
|
|
13
|
-
hive_create,
|
|
14
|
-
hive_create_epic,
|
|
15
|
-
hive_query,
|
|
16
|
-
hive_update,
|
|
17
|
-
hive_close,
|
|
18
|
-
hive_start,
|
|
19
|
-
hive_ready,
|
|
20
|
-
hive_cells,
|
|
21
|
-
hive_link_thread,
|
|
22
|
-
hive_sync,
|
|
23
|
-
HiveError,
|
|
24
|
-
getHiveAdapter,
|
|
25
|
-
setHiveWorkingDirectory,
|
|
26
|
-
getHiveWorkingDirectory,
|
|
27
|
-
// Legacy aliases for backward compatibility tests
|
|
28
|
-
beads_link_thread,
|
|
29
|
-
BeadError,
|
|
30
|
-
getBeadsAdapter,
|
|
31
|
-
setBeadsWorkingDirectory,
|
|
32
|
-
} from "./hive";
|
|
33
|
-
import type { Cell, Bead, EpicCreateResult } from "./schemas";
|
|
34
|
-
import type { HiveAdapter } from "swarm-mail";
|
|
35
|
-
|
|
36
|
-
/**
|
|
37
|
-
* Mock tool context for execute functions
|
|
38
|
-
* The real context is provided by OpenCode runtime
|
|
39
|
-
*/
|
|
40
|
-
const mockContext = {
|
|
41
|
-
sessionID: "test-session-" + Date.now(),
|
|
42
|
-
messageID: "test-message-" + Date.now(),
|
|
43
|
-
agent: "test-agent",
|
|
44
|
-
abort: new AbortController().signal,
|
|
45
|
-
};
|
|
46
|
-
|
|
47
|
-
/**
|
|
48
|
-
* Helper to parse JSON response from tool execute
|
|
49
|
-
*/
|
|
50
|
-
function parseResponse<T>(response: string): T {
|
|
51
|
-
return JSON.parse(response) as T;
|
|
52
|
-
}
|
|
53
|
-
|
|
54
|
-
/**
|
|
55
|
-
* Track created beads for cleanup
|
|
56
|
-
*/
|
|
57
|
-
const createdBeadIds: string[] = [];
|
|
58
|
-
|
|
59
|
-
/**
|
|
60
|
-
* Test project key - use temp directory to isolate tests
|
|
61
|
-
*/
|
|
62
|
-
const TEST_PROJECT_KEY = join(tmpdir(), `beads-integration-test-${Date.now()}`);
|
|
63
|
-
|
|
64
|
-
/**
|
|
65
|
-
* Adapter instance for verification
|
|
66
|
-
*/
|
|
67
|
-
let adapter: HiveAdapter;
|
|
68
|
-
|
|
69
|
-
/**
|
|
70
|
-
* Cleanup helper - close all created beads after tests
|
|
71
|
-
*/
|
|
72
|
-
async function cleanupBeads() {
|
|
73
|
-
for (const id of createdBeadIds) {
|
|
74
|
-
try {
|
|
75
|
-
await hive_close.execute({ id, reason: "Test cleanup" }, mockContext);
|
|
76
|
-
} catch {
|
|
77
|
-
// Ignore cleanup errors - bead may already be closed
|
|
78
|
-
}
|
|
79
|
-
}
|
|
80
|
-
createdBeadIds.length = 0;
|
|
81
|
-
}
|
|
82
|
-
|
|
83
|
-
describe("beads integration", () => {
|
|
84
|
-
// Initialize adapter before running tests
|
|
85
|
-
beforeAll(async () => {
|
|
86
|
-
// Set working directory for beads commands
|
|
87
|
-
setBeadsWorkingDirectory(TEST_PROJECT_KEY);
|
|
88
|
-
|
|
89
|
-
// Get adapter instance for verification
|
|
90
|
-
adapter = await getBeadsAdapter(TEST_PROJECT_KEY);
|
|
91
|
-
});
|
|
92
|
-
|
|
93
|
-
afterAll(async () => {
|
|
94
|
-
await cleanupBeads();
|
|
95
|
-
});
|
|
96
|
-
|
|
97
|
-
describe("hive_create", () => {
|
|
98
|
-
it("creates a bead with minimal args (title only)", async () => {
|
|
99
|
-
const result = await hive_create.execute(
|
|
100
|
-
{ title: "Test bead minimal" },
|
|
101
|
-
mockContext,
|
|
102
|
-
);
|
|
103
|
-
|
|
104
|
-
const bead = parseResponse<Bead>(result);
|
|
105
|
-
createdBeadIds.push(bead.id);
|
|
106
|
-
|
|
107
|
-
expect(bead.title).toBe("Test bead minimal");
|
|
108
|
-
expect(bead.status).toBe("open");
|
|
109
|
-
expect(bead.issue_type).toBe("task"); // default
|
|
110
|
-
expect(bead.priority).toBe(2); // default
|
|
111
|
-
expect(bead.id).toMatch(/^[a-z0-9-]+-[a-z0-9]+$/);
|
|
112
|
-
});
|
|
113
|
-
|
|
114
|
-
it("creates a bead with all options", async () => {
|
|
115
|
-
const result = await hive_create.execute(
|
|
116
|
-
{
|
|
117
|
-
title: "Test bug with priority",
|
|
118
|
-
type: "bug",
|
|
119
|
-
priority: 0, // P0 critical
|
|
120
|
-
description: "This is a critical bug",
|
|
121
|
-
},
|
|
122
|
-
mockContext,
|
|
123
|
-
);
|
|
124
|
-
|
|
125
|
-
const bead = parseResponse<Bead>(result);
|
|
126
|
-
createdBeadIds.push(bead.id);
|
|
127
|
-
|
|
128
|
-
expect(bead.title).toBe("Test bug with priority");
|
|
129
|
-
expect(bead.issue_type).toBe("bug");
|
|
130
|
-
expect(bead.priority).toBe(0);
|
|
131
|
-
expect(bead.description).toContain("critical bug");
|
|
132
|
-
});
|
|
133
|
-
|
|
134
|
-
it("creates a feature type bead", async () => {
|
|
135
|
-
const result = await hive_create.execute(
|
|
136
|
-
{ title: "New feature request", type: "feature", priority: 1 },
|
|
137
|
-
mockContext,
|
|
138
|
-
);
|
|
139
|
-
|
|
140
|
-
const bead = parseResponse<Bead>(result);
|
|
141
|
-
createdBeadIds.push(bead.id);
|
|
142
|
-
|
|
143
|
-
expect(bead.issue_type).toBe("feature");
|
|
144
|
-
expect(bead.priority).toBe(1);
|
|
145
|
-
});
|
|
146
|
-
|
|
147
|
-
it("creates a chore type bead", async () => {
|
|
148
|
-
const result = await hive_create.execute(
|
|
149
|
-
{ title: "Cleanup task", type: "chore", priority: 3 },
|
|
150
|
-
mockContext,
|
|
151
|
-
);
|
|
152
|
-
|
|
153
|
-
const bead = parseResponse<Bead>(result);
|
|
154
|
-
createdBeadIds.push(bead.id);
|
|
155
|
-
|
|
156
|
-
expect(bead.issue_type).toBe("chore");
|
|
157
|
-
expect(bead.priority).toBe(3);
|
|
158
|
-
});
|
|
159
|
-
});
|
|
160
|
-
|
|
161
|
-
describe("hive_query", () => {
|
|
162
|
-
let testBeadId: string;
|
|
163
|
-
|
|
164
|
-
beforeEach(async () => {
|
|
165
|
-
// Create a test bead for query tests
|
|
166
|
-
const result = await hive_create.execute(
|
|
167
|
-
{ title: "Query test bead", type: "task" },
|
|
168
|
-
mockContext,
|
|
169
|
-
);
|
|
170
|
-
const bead = parseResponse<Bead>(result);
|
|
171
|
-
testBeadId = bead.id;
|
|
172
|
-
createdBeadIds.push(testBeadId);
|
|
173
|
-
});
|
|
174
|
-
|
|
175
|
-
it("queries all open beads", async () => {
|
|
176
|
-
const result = await hive_query.execute({ status: "open" }, mockContext);
|
|
177
|
-
|
|
178
|
-
const beads = parseResponse<Bead[]>(result);
|
|
179
|
-
|
|
180
|
-
expect(Array.isArray(beads)).toBe(true);
|
|
181
|
-
expect(beads.length).toBeGreaterThan(0);
|
|
182
|
-
expect(beads.every((b) => b.status === "open")).toBe(true);
|
|
183
|
-
});
|
|
184
|
-
|
|
185
|
-
it("queries beads by type", async () => {
|
|
186
|
-
const result = await hive_query.execute({ type: "task" }, mockContext);
|
|
187
|
-
|
|
188
|
-
const beads = parseResponse<Bead[]>(result);
|
|
189
|
-
|
|
190
|
-
expect(Array.isArray(beads)).toBe(true);
|
|
191
|
-
expect(beads.every((b) => b.issue_type === "task")).toBe(true);
|
|
192
|
-
});
|
|
193
|
-
|
|
194
|
-
it("queries ready beads (unblocked)", async () => {
|
|
195
|
-
const result = await hive_query.execute({ ready: true }, mockContext);
|
|
196
|
-
|
|
197
|
-
const beads = parseResponse<Bead[]>(result);
|
|
198
|
-
|
|
199
|
-
expect(Array.isArray(beads)).toBe(true);
|
|
200
|
-
// Ready beads should be open (not closed, not blocked)
|
|
201
|
-
for (const bead of beads) {
|
|
202
|
-
expect(["open", "in_progress"]).toContain(bead.status);
|
|
203
|
-
}
|
|
204
|
-
});
|
|
205
|
-
|
|
206
|
-
it("limits results", async () => {
|
|
207
|
-
// Create multiple beads first
|
|
208
|
-
for (let i = 0; i < 5; i++) {
|
|
209
|
-
const result = await hive_create.execute(
|
|
210
|
-
{ title: `Limit test bead ${i}` },
|
|
211
|
-
mockContext,
|
|
212
|
-
);
|
|
213
|
-
const bead = parseResponse<Bead>(result);
|
|
214
|
-
createdBeadIds.push(bead.id);
|
|
215
|
-
}
|
|
216
|
-
|
|
217
|
-
const result = await hive_query.execute({ limit: 3 }, mockContext);
|
|
218
|
-
|
|
219
|
-
const beads = parseResponse<Bead[]>(result);
|
|
220
|
-
expect(beads.length).toBeLessThanOrEqual(3);
|
|
221
|
-
});
|
|
222
|
-
|
|
223
|
-
it("combines filters", async () => {
|
|
224
|
-
const result = await hive_query.execute(
|
|
225
|
-
{ status: "open", type: "task", limit: 5 },
|
|
226
|
-
mockContext,
|
|
227
|
-
);
|
|
228
|
-
|
|
229
|
-
const beads = parseResponse<Bead[]>(result);
|
|
230
|
-
|
|
231
|
-
expect(Array.isArray(beads)).toBe(true);
|
|
232
|
-
expect(beads.length).toBeLessThanOrEqual(5);
|
|
233
|
-
for (const bead of beads) {
|
|
234
|
-
expect(bead.status).toBe("open");
|
|
235
|
-
expect(bead.issue_type).toBe("task");
|
|
236
|
-
}
|
|
237
|
-
});
|
|
238
|
-
});
|
|
239
|
-
|
|
240
|
-
describe("hive_update", () => {
|
|
241
|
-
let testBeadId: string;
|
|
242
|
-
|
|
243
|
-
beforeEach(async () => {
|
|
244
|
-
const result = await hive_create.execute(
|
|
245
|
-
{ title: "Update test bead", description: "Original description" },
|
|
246
|
-
mockContext,
|
|
247
|
-
);
|
|
248
|
-
const bead = parseResponse<Bead>(result);
|
|
249
|
-
testBeadId = bead.id;
|
|
250
|
-
createdBeadIds.push(testBeadId);
|
|
251
|
-
});
|
|
252
|
-
|
|
253
|
-
it("updates bead status", async () => {
|
|
254
|
-
const result = await hive_update.execute(
|
|
255
|
-
{ id: testBeadId, status: "in_progress" },
|
|
256
|
-
mockContext,
|
|
257
|
-
);
|
|
258
|
-
|
|
259
|
-
const bead = parseResponse<Bead>(result);
|
|
260
|
-
expect(bead.status).toBe("in_progress");
|
|
261
|
-
});
|
|
262
|
-
|
|
263
|
-
it("updates bead description", async () => {
|
|
264
|
-
const result = await hive_update.execute(
|
|
265
|
-
{ id: testBeadId, description: "Updated description" },
|
|
266
|
-
mockContext,
|
|
267
|
-
);
|
|
268
|
-
|
|
269
|
-
const bead = parseResponse<Bead>(result);
|
|
270
|
-
expect(bead.description).toContain("Updated description");
|
|
271
|
-
});
|
|
272
|
-
|
|
273
|
-
it("updates bead priority", async () => {
|
|
274
|
-
const result = await hive_update.execute(
|
|
275
|
-
{ id: testBeadId, priority: 0 },
|
|
276
|
-
mockContext,
|
|
277
|
-
);
|
|
278
|
-
|
|
279
|
-
const bead = parseResponse<Bead>(result);
|
|
280
|
-
expect(bead.priority).toBe(0);
|
|
281
|
-
});
|
|
282
|
-
|
|
283
|
-
it("updates multiple fields at once", async () => {
|
|
284
|
-
const result = await hive_update.execute(
|
|
285
|
-
{
|
|
286
|
-
id: testBeadId,
|
|
287
|
-
status: "blocked",
|
|
288
|
-
description: "Blocked on dependency",
|
|
289
|
-
priority: 1,
|
|
290
|
-
},
|
|
291
|
-
mockContext,
|
|
292
|
-
);
|
|
293
|
-
|
|
294
|
-
const bead = parseResponse<Bead>(result);
|
|
295
|
-
expect(bead.status).toBe("blocked");
|
|
296
|
-
expect(bead.description).toContain("Blocked on dependency");
|
|
297
|
-
expect(bead.priority).toBe(1);
|
|
298
|
-
});
|
|
299
|
-
|
|
300
|
-
it("throws BeadError for invalid bead ID", async () => {
|
|
301
|
-
await expect(
|
|
302
|
-
hive_update.execute(
|
|
303
|
-
{ id: "nonexistent-bead-xyz", status: "closed" },
|
|
304
|
-
mockContext,
|
|
305
|
-
),
|
|
306
|
-
).rejects.toThrow(BeadError);
|
|
307
|
-
});
|
|
308
|
-
});
|
|
309
|
-
|
|
310
|
-
describe("hive_close", () => {
|
|
311
|
-
it("closes a bead with reason", async () => {
|
|
312
|
-
// Create a fresh bead to close
|
|
313
|
-
const createResult = await hive_create.execute(
|
|
314
|
-
{ title: "Bead to close" },
|
|
315
|
-
mockContext,
|
|
316
|
-
);
|
|
317
|
-
const created = parseResponse<Bead>(createResult);
|
|
318
|
-
// Don't add to cleanup since we're closing it
|
|
319
|
-
|
|
320
|
-
const result = await hive_close.execute(
|
|
321
|
-
{ id: created.id, reason: "Task completed successfully" },
|
|
322
|
-
mockContext,
|
|
323
|
-
);
|
|
324
|
-
|
|
325
|
-
expect(result).toContain("Closed");
|
|
326
|
-
expect(result).toContain(created.id);
|
|
327
|
-
|
|
328
|
-
// Verify it's actually closed using adapter
|
|
329
|
-
const closedBead = await adapter.getCell(TEST_PROJECT_KEY, created.id);
|
|
330
|
-
expect(closedBead).toBeDefined();
|
|
331
|
-
expect(closedBead!.status).toBe("closed");
|
|
332
|
-
});
|
|
333
|
-
|
|
334
|
-
it("throws BeadError for invalid bead ID", async () => {
|
|
335
|
-
await expect(
|
|
336
|
-
hive_close.execute(
|
|
337
|
-
{ id: "nonexistent-bead-xyz", reason: "Test" },
|
|
338
|
-
mockContext,
|
|
339
|
-
),
|
|
340
|
-
).rejects.toThrow(BeadError);
|
|
341
|
-
});
|
|
342
|
-
});
|
|
343
|
-
|
|
344
|
-
describe("hive_start", () => {
|
|
345
|
-
it("marks a bead as in_progress", async () => {
|
|
346
|
-
// Create a fresh bead
|
|
347
|
-
const createResult = await hive_create.execute(
|
|
348
|
-
{ title: "Bead to start" },
|
|
349
|
-
mockContext,
|
|
350
|
-
);
|
|
351
|
-
const created = parseResponse<Bead>(createResult);
|
|
352
|
-
createdBeadIds.push(created.id);
|
|
353
|
-
|
|
354
|
-
expect(created.status).toBe("open");
|
|
355
|
-
|
|
356
|
-
const result = await hive_start.execute({ id: created.id }, mockContext);
|
|
357
|
-
|
|
358
|
-
expect(result).toContain("Started");
|
|
359
|
-
expect(result).toContain(created.id);
|
|
360
|
-
|
|
361
|
-
// Verify status changed using adapter
|
|
362
|
-
const startedBead = await adapter.getCell(TEST_PROJECT_KEY, created.id);
|
|
363
|
-
expect(startedBead).toBeDefined();
|
|
364
|
-
expect(startedBead!.status).toBe("in_progress");
|
|
365
|
-
});
|
|
366
|
-
|
|
367
|
-
it("throws BeadError for invalid bead ID", async () => {
|
|
368
|
-
await expect(
|
|
369
|
-
hive_start.execute({ id: "nonexistent-bead-xyz" }, mockContext),
|
|
370
|
-
).rejects.toThrow(BeadError);
|
|
371
|
-
});
|
|
372
|
-
});
|
|
373
|
-
|
|
374
|
-
describe("hive_ready", () => {
|
|
375
|
-
it("returns the highest priority unblocked bead", async () => {
|
|
376
|
-
// Create a high priority bead
|
|
377
|
-
const createResult = await hive_create.execute(
|
|
378
|
-
{ title: "High priority ready bead", priority: 0 },
|
|
379
|
-
mockContext,
|
|
380
|
-
);
|
|
381
|
-
const created = parseResponse<Bead>(createResult);
|
|
382
|
-
createdBeadIds.push(created.id);
|
|
383
|
-
|
|
384
|
-
const result = await hive_ready.execute({}, mockContext);
|
|
385
|
-
|
|
386
|
-
// Should return a bead (or "No ready beads" message)
|
|
387
|
-
if (result !== "No ready beads") {
|
|
388
|
-
const bead = parseResponse<Bead>(result);
|
|
389
|
-
expect(bead.id).toBeDefined();
|
|
390
|
-
expect(bead.status).not.toBe("closed");
|
|
391
|
-
expect(bead.status).not.toBe("blocked");
|
|
392
|
-
}
|
|
393
|
-
});
|
|
394
|
-
|
|
395
|
-
it("returns no ready beads message when all are closed", async () => {
|
|
396
|
-
// This test depends on the state of the beads database
|
|
397
|
-
// It may return a bead if there are open ones
|
|
398
|
-
const result = await hive_ready.execute({}, mockContext);
|
|
399
|
-
|
|
400
|
-
expect(typeof result).toBe("string");
|
|
401
|
-
// Either a JSON bead or "No ready beads"
|
|
402
|
-
if (result === "No ready beads") {
|
|
403
|
-
expect(result).toBe("No ready beads");
|
|
404
|
-
} else {
|
|
405
|
-
const bead = parseResponse<Bead>(result);
|
|
406
|
-
expect(bead.id).toBeDefined();
|
|
407
|
-
}
|
|
408
|
-
});
|
|
409
|
-
});
|
|
410
|
-
|
|
411
|
-
describe("hive_create_epic", () => {
|
|
412
|
-
it("creates an epic with subtasks and syncs to JSONL", async () => {
|
|
413
|
-
const result = await hive_create_epic.execute(
|
|
414
|
-
{
|
|
415
|
-
epic_title: "Integration test epic",
|
|
416
|
-
epic_description: "Testing epic creation",
|
|
417
|
-
subtasks: [
|
|
418
|
-
{ title: "Subtask 1", priority: 2 },
|
|
419
|
-
{ title: "Subtask 2", priority: 3 },
|
|
420
|
-
{ title: "Subtask 3", priority: 1 },
|
|
421
|
-
],
|
|
422
|
-
},
|
|
423
|
-
mockContext,
|
|
424
|
-
);
|
|
425
|
-
|
|
426
|
-
const epicResult = parseResponse<EpicCreateResult>(result);
|
|
427
|
-
createdBeadIds.push(epicResult.epic.id);
|
|
428
|
-
for (const subtask of epicResult.subtasks) {
|
|
429
|
-
createdBeadIds.push(subtask.id);
|
|
430
|
-
}
|
|
431
|
-
|
|
432
|
-
expect(epicResult.success).toBe(true);
|
|
433
|
-
expect(epicResult.epic.title).toBe("Integration test epic");
|
|
434
|
-
expect(epicResult.epic.issue_type).toBe("epic");
|
|
435
|
-
expect(epicResult.subtasks).toHaveLength(3);
|
|
436
|
-
|
|
437
|
-
// Subtasks should have parent_id pointing to epic
|
|
438
|
-
// Verify via adapter since parent_id may not be in the output schema
|
|
439
|
-
for (const subtask of epicResult.subtasks) {
|
|
440
|
-
const subtaskBead = await adapter.getCell(TEST_PROJECT_KEY, subtask.id);
|
|
441
|
-
expect(subtaskBead).toBeDefined();
|
|
442
|
-
expect(subtaskBead!.parent_id).toBe(epicResult.epic.id);
|
|
443
|
-
}
|
|
444
|
-
|
|
445
|
-
// NEW TEST: Verify cells are synced to JSONL immediately
|
|
446
|
-
const { readFileSync, existsSync } = await import("node:fs");
|
|
447
|
-
const { join } = await import("node:path");
|
|
448
|
-
const jsonlPath = join(TEST_PROJECT_KEY, ".hive", "issues.jsonl");
|
|
449
|
-
|
|
450
|
-
expect(existsSync(jsonlPath)).toBe(true);
|
|
451
|
-
|
|
452
|
-
const jsonlContent = readFileSync(jsonlPath, "utf-8");
|
|
453
|
-
const lines = jsonlContent.trim().split("\n").filter(l => l);
|
|
454
|
-
const cells = lines.map(line => JSON.parse(line));
|
|
455
|
-
|
|
456
|
-
// Epic and all subtasks should be in JSONL
|
|
457
|
-
const epicInJsonl = cells.find(c => c.id === epicResult.epic.id);
|
|
458
|
-
expect(epicInJsonl).toBeDefined();
|
|
459
|
-
expect(epicInJsonl!.title).toBe("Integration test epic");
|
|
460
|
-
|
|
461
|
-
for (const subtask of epicResult.subtasks) {
|
|
462
|
-
const subtaskInJsonl = cells.find(c => c.id === subtask.id);
|
|
463
|
-
expect(subtaskInJsonl).toBeDefined();
|
|
464
|
-
expect(subtaskInJsonl!.parent_id).toBe(epicResult.epic.id);
|
|
465
|
-
}
|
|
466
|
-
});
|
|
467
|
-
|
|
468
|
-
it("creates an epic with files metadata in subtasks", async () => {
|
|
469
|
-
const result = await hive_create_epic.execute(
|
|
470
|
-
{
|
|
471
|
-
epic_title: "Epic with file references",
|
|
472
|
-
subtasks: [
|
|
473
|
-
{ title: "Edit src/a.ts", priority: 2, files: ["src/a.ts"] },
|
|
474
|
-
{
|
|
475
|
-
title: "Edit src/b.ts",
|
|
476
|
-
priority: 2,
|
|
477
|
-
files: ["src/b.ts", "src/c.ts"],
|
|
478
|
-
},
|
|
479
|
-
],
|
|
480
|
-
},
|
|
481
|
-
mockContext,
|
|
482
|
-
);
|
|
483
|
-
|
|
484
|
-
const epicResult = parseResponse<EpicCreateResult>(result);
|
|
485
|
-
createdBeadIds.push(epicResult.epic.id);
|
|
486
|
-
for (const subtask of epicResult.subtasks) {
|
|
487
|
-
createdBeadIds.push(subtask.id);
|
|
488
|
-
}
|
|
489
|
-
|
|
490
|
-
expect(epicResult.success).toBe(true);
|
|
491
|
-
expect(epicResult.subtasks).toHaveLength(2);
|
|
492
|
-
});
|
|
493
|
-
|
|
494
|
-
it("creates epic with single subtask", async () => {
|
|
495
|
-
const result = await hive_create_epic.execute(
|
|
496
|
-
{
|
|
497
|
-
epic_title: "Single subtask epic",
|
|
498
|
-
subtasks: [{ title: "Only task", priority: 1 }],
|
|
499
|
-
},
|
|
500
|
-
mockContext,
|
|
501
|
-
);
|
|
502
|
-
|
|
503
|
-
const epicResult = parseResponse<EpicCreateResult>(result);
|
|
504
|
-
createdBeadIds.push(epicResult.epic.id);
|
|
505
|
-
createdBeadIds.push(epicResult.subtasks[0].id);
|
|
506
|
-
|
|
507
|
-
expect(epicResult.success).toBe(true);
|
|
508
|
-
expect(epicResult.subtasks).toHaveLength(1);
|
|
509
|
-
});
|
|
510
|
-
|
|
511
|
-
it("preserves subtask order", async () => {
|
|
512
|
-
const titles = ["First", "Second", "Third", "Fourth"];
|
|
513
|
-
const result = await hive_create_epic.execute(
|
|
514
|
-
{
|
|
515
|
-
epic_title: "Ordered subtasks epic",
|
|
516
|
-
subtasks: titles.map((title, i) => ({ title, priority: 2 })),
|
|
517
|
-
},
|
|
518
|
-
mockContext,
|
|
519
|
-
);
|
|
520
|
-
|
|
521
|
-
const epicResult = parseResponse<EpicCreateResult>(result);
|
|
522
|
-
createdBeadIds.push(epicResult.epic.id);
|
|
523
|
-
for (const subtask of epicResult.subtasks) {
|
|
524
|
-
createdBeadIds.push(subtask.id);
|
|
525
|
-
}
|
|
526
|
-
|
|
527
|
-
expect(epicResult.success).toBe(true);
|
|
528
|
-
// Subtasks should be in creation order
|
|
529
|
-
for (let i = 0; i < titles.length; i++) {
|
|
530
|
-
expect(epicResult.subtasks[i].title).toBe(titles[i]);
|
|
531
|
-
}
|
|
532
|
-
});
|
|
533
|
-
});
|
|
534
|
-
|
|
535
|
-
describe("beads_link_thread", () => {
|
|
536
|
-
let testBeadId: string;
|
|
537
|
-
|
|
538
|
-
beforeEach(async () => {
|
|
539
|
-
const result = await hive_create.execute(
|
|
540
|
-
{ title: "Thread link test bead" },
|
|
541
|
-
mockContext,
|
|
542
|
-
);
|
|
543
|
-
const bead = parseResponse<Bead>(result);
|
|
544
|
-
testBeadId = bead.id;
|
|
545
|
-
createdBeadIds.push(testBeadId);
|
|
546
|
-
});
|
|
547
|
-
|
|
548
|
-
it("links a bead to an Agent Mail thread", async () => {
|
|
549
|
-
const threadId = "test-thread-123";
|
|
550
|
-
const result = await beads_link_thread.execute(
|
|
551
|
-
{ bead_id: testBeadId, thread_id: threadId },
|
|
552
|
-
mockContext,
|
|
553
|
-
);
|
|
554
|
-
|
|
555
|
-
expect(result).toContain("Linked");
|
|
556
|
-
expect(result).toContain(testBeadId);
|
|
557
|
-
expect(result).toContain(threadId);
|
|
558
|
-
|
|
559
|
-
// Verify the thread marker is in the description using adapter
|
|
560
|
-
const linkedBead = await adapter.getCell(TEST_PROJECT_KEY, testBeadId);
|
|
561
|
-
expect(linkedBead).toBeDefined();
|
|
562
|
-
expect(linkedBead!.description).toContain(`[thread:${threadId}]`);
|
|
563
|
-
});
|
|
564
|
-
|
|
565
|
-
it("returns message if thread already linked", async () => {
|
|
566
|
-
const threadId = "test-thread-456";
|
|
567
|
-
|
|
568
|
-
// Link once
|
|
569
|
-
await beads_link_thread.execute(
|
|
570
|
-
{ bead_id: testBeadId, thread_id: threadId },
|
|
571
|
-
mockContext,
|
|
572
|
-
);
|
|
573
|
-
|
|
574
|
-
// Try to link again
|
|
575
|
-
const result = await beads_link_thread.execute(
|
|
576
|
-
{ bead_id: testBeadId, thread_id: threadId },
|
|
577
|
-
mockContext,
|
|
578
|
-
);
|
|
579
|
-
|
|
580
|
-
expect(result).toContain("already linked");
|
|
581
|
-
});
|
|
582
|
-
|
|
583
|
-
it("preserves existing description when linking", async () => {
|
|
584
|
-
// Update bead with a description first
|
|
585
|
-
await hive_update.execute(
|
|
586
|
-
{ id: testBeadId, description: "Important context here" },
|
|
587
|
-
mockContext,
|
|
588
|
-
);
|
|
589
|
-
|
|
590
|
-
const threadId = "test-thread-789";
|
|
591
|
-
await beads_link_thread.execute(
|
|
592
|
-
{ bead_id: testBeadId, thread_id: threadId },
|
|
593
|
-
mockContext,
|
|
594
|
-
);
|
|
595
|
-
|
|
596
|
-
// Verify both original description and thread marker exist using adapter
|
|
597
|
-
const linkedBead = await adapter.getCell(TEST_PROJECT_KEY, testBeadId);
|
|
598
|
-
expect(linkedBead).toBeDefined();
|
|
599
|
-
expect(linkedBead!.description).toContain("Important context here");
|
|
600
|
-
expect(linkedBead!.description).toContain(`[thread:${threadId}]`);
|
|
601
|
-
});
|
|
602
|
-
|
|
603
|
-
it("throws BeadError for invalid bead ID", async () => {
|
|
604
|
-
await expect(
|
|
605
|
-
beads_link_thread.execute(
|
|
606
|
-
{ bead_id: "nonexistent-bead-xyz", thread_id: "thread-123" },
|
|
607
|
-
mockContext,
|
|
608
|
-
),
|
|
609
|
-
).rejects.toThrow(BeadError);
|
|
610
|
-
});
|
|
611
|
-
});
|
|
612
|
-
|
|
613
|
-
describe("error handling", () => {
|
|
614
|
-
it("throws BeadError with command info on adapter failure", async () => {
|
|
615
|
-
try {
|
|
616
|
-
await hive_update.execute(
|
|
617
|
-
{ id: "definitely-not-a-real-bead-id", status: "closed" },
|
|
618
|
-
mockContext,
|
|
619
|
-
);
|
|
620
|
-
expect.fail("Should have thrown");
|
|
621
|
-
} catch (error) {
|
|
622
|
-
expect(error).toBeInstanceOf(BeadError);
|
|
623
|
-
const beadError = error as InstanceType<typeof BeadError>;
|
|
624
|
-
expect(beadError.command).toBeDefined();
|
|
625
|
-
}
|
|
626
|
-
});
|
|
627
|
-
});
|
|
628
|
-
|
|
629
|
-
describe("partial ID resolution", () => {
|
|
630
|
-
let fullId: string;
|
|
631
|
-
let hash: string;
|
|
632
|
-
|
|
633
|
-
beforeEach(async () => {
|
|
634
|
-
// Create a test cell to resolve
|
|
635
|
-
const result = await hive_create.execute(
|
|
636
|
-
{ title: "Partial ID test cell" },
|
|
637
|
-
mockContext,
|
|
638
|
-
);
|
|
639
|
-
const cell = parseResponse<Cell>(result);
|
|
640
|
-
fullId = cell.id;
|
|
641
|
-
createdBeadIds.push(fullId);
|
|
642
|
-
|
|
643
|
-
// Extract hash from ID (format: {prefix}-{hash}-{timestamp}{random})
|
|
644
|
-
// The last segment is always timestamp+random (11 chars)
|
|
645
|
-
// The hash is the 6-char segment before that
|
|
646
|
-
// Examples:
|
|
647
|
-
// "opencode-swarm-monorepo-lf2p4u-mjd2h5v4wdt" -> hash is "lf2p4u"
|
|
648
|
-
// "cell--gcel4-mjd2h5v4wdt" -> hash is "-gcel4" (negative hash creates consecutive hyphens)
|
|
649
|
-
|
|
650
|
-
// Find the last hyphen, then work backwards to find the second-to-last hyphen
|
|
651
|
-
const lastHyphenIndex = fullId.lastIndexOf("-");
|
|
652
|
-
if (lastHyphenIndex === -1) {
|
|
653
|
-
hash = "";
|
|
654
|
-
} else {
|
|
655
|
-
// Get everything before the last hyphen
|
|
656
|
-
const beforeLast = fullId.substring(0, lastHyphenIndex);
|
|
657
|
-
// Find the second-to-last hyphen
|
|
658
|
-
const secondLastHyphenIndex = beforeLast.lastIndexOf("-");
|
|
659
|
-
if (secondLastHyphenIndex === -1) {
|
|
660
|
-
hash = "";
|
|
661
|
-
} else {
|
|
662
|
-
// Hash is between second-to-last and last hyphen
|
|
663
|
-
hash = fullId.substring(secondLastHyphenIndex + 1, lastHyphenIndex);
|
|
664
|
-
}
|
|
665
|
-
}
|
|
666
|
-
});
|
|
667
|
-
|
|
668
|
-
it("short hashes work with all ID-taking tools", async () => {
|
|
669
|
-
// Use last 6-8 chars of hash (or full hash if short)
|
|
670
|
-
const shortHash = hash.substring(Math.max(0, hash.length - 8));
|
|
671
|
-
|
|
672
|
-
try {
|
|
673
|
-
// Test hive_update
|
|
674
|
-
await hive_update.execute(
|
|
675
|
-
{ id: shortHash, description: "Updated via short hash" },
|
|
676
|
-
mockContext,
|
|
677
|
-
);
|
|
678
|
-
|
|
679
|
-
// Test hive_start
|
|
680
|
-
await hive_start.execute({ id: shortHash }, mockContext);
|
|
681
|
-
|
|
682
|
-
// Test hive_close
|
|
683
|
-
const result = await hive_close.execute(
|
|
684
|
-
{ id: shortHash, reason: "Closed via short hash" },
|
|
685
|
-
mockContext,
|
|
686
|
-
);
|
|
687
|
-
|
|
688
|
-
expect(result).toContain("Closed");
|
|
689
|
-
expect(result).toContain(fullId);
|
|
690
|
-
} catch (error) {
|
|
691
|
-
// If ambiguous, verify error message is helpful
|
|
692
|
-
if (error instanceof Error && error.message.includes("Ambiguous")) {
|
|
693
|
-
expect(error.message).toMatch(/ambiguous.*multiple/i);
|
|
694
|
-
expect(error.message).toContain(shortHash);
|
|
695
|
-
} else {
|
|
696
|
-
throw error;
|
|
697
|
-
}
|
|
698
|
-
}
|
|
699
|
-
});
|
|
700
|
-
|
|
701
|
-
describe("hive_update", () => {
|
|
702
|
-
it("accepts full cell ID (no resolution needed)", async () => {
|
|
703
|
-
const result = await hive_update.execute(
|
|
704
|
-
{ id: fullId, description: "Updated via full ID" },
|
|
705
|
-
mockContext,
|
|
706
|
-
);
|
|
707
|
-
|
|
708
|
-
const updated = parseResponse<Cell>(result);
|
|
709
|
-
expect(updated.id).toBe(fullId);
|
|
710
|
-
expect(updated.description).toContain("Updated via full ID");
|
|
711
|
-
});
|
|
712
|
-
|
|
713
|
-
it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
|
|
714
|
-
try {
|
|
715
|
-
const result = await hive_update.execute(
|
|
716
|
-
{ id: hash, priority: 1 },
|
|
717
|
-
mockContext,
|
|
718
|
-
);
|
|
719
|
-
|
|
720
|
-
const updated = parseResponse<Cell>(result);
|
|
721
|
-
expect(updated.id).toBe(fullId);
|
|
722
|
-
expect(updated.priority).toBe(1);
|
|
723
|
-
} catch (error) {
|
|
724
|
-
// In test environment with many cells, hash may be ambiguous
|
|
725
|
-
// Verify we get a helpful error message
|
|
726
|
-
if (error instanceof Error && error.message.includes("Ambiguous")) {
|
|
727
|
-
expect(error.message).toMatch(/ambiguous.*multiple/i);
|
|
728
|
-
expect(error.message).toContain(hash);
|
|
729
|
-
} else {
|
|
730
|
-
throw error; // Re-throw if not ambiguity error
|
|
731
|
-
}
|
|
732
|
-
}
|
|
733
|
-
});
|
|
734
|
-
|
|
735
|
-
it("throws helpful error for non-existent hash", async () => {
|
|
736
|
-
await expect(
|
|
737
|
-
hive_update.execute({ id: "zzzzzz", status: "closed" }, mockContext),
|
|
738
|
-
).rejects.toThrow(/not found|no cell|zzzzzz/i);
|
|
739
|
-
});
|
|
740
|
-
|
|
741
|
-
it("throws helpful error for ambiguous hash", async () => {
|
|
742
|
-
// Create another cell with potentially similar hash
|
|
743
|
-
// (in practice, hashes are unique, but we simulate ambiguity by using a short partial)
|
|
744
|
-
// This test verifies the error message is helpful
|
|
745
|
-
try {
|
|
746
|
-
// Use a single char which might match multiple cells in larger datasets
|
|
747
|
-
await hive_update.execute({ id: "a", status: "closed" }, mockContext);
|
|
748
|
-
// If it succeeds, it means only one cell matched - that's fine
|
|
749
|
-
} catch (error) {
|
|
750
|
-
const message = error instanceof Error ? error.message : String(error);
|
|
751
|
-
// Error should mention ambiguity if multiple matches
|
|
752
|
-
if (message.includes("ambiguous") || message.includes("multiple")) {
|
|
753
|
-
expect(message).toMatch(/ambiguous|multiple/i);
|
|
754
|
-
}
|
|
755
|
-
}
|
|
756
|
-
});
|
|
757
|
-
});
|
|
758
|
-
|
|
759
|
-
describe("hive_close", () => {
|
|
760
|
-
it("accepts full cell ID", async () => {
|
|
761
|
-
const result = await hive_close.execute(
|
|
762
|
-
{ id: fullId, reason: "Closed via full ID" },
|
|
763
|
-
mockContext,
|
|
764
|
-
);
|
|
765
|
-
|
|
766
|
-
expect(result).toContain("Closed");
|
|
767
|
-
expect(result).toContain(fullId);
|
|
768
|
-
|
|
769
|
-
const closed = await adapter.getCell(TEST_PROJECT_KEY, fullId);
|
|
770
|
-
expect(closed?.status).toBe("closed");
|
|
771
|
-
});
|
|
772
|
-
|
|
773
|
-
it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
|
|
774
|
-
try {
|
|
775
|
-
const result = await hive_close.execute(
|
|
776
|
-
{ id: hash, reason: "Close via hash" },
|
|
777
|
-
mockContext,
|
|
778
|
-
);
|
|
779
|
-
|
|
780
|
-
expect(result).toContain("Closed");
|
|
781
|
-
expect(result).toContain(fullId);
|
|
782
|
-
} catch (error) {
|
|
783
|
-
if (error instanceof Error && error.message.includes("Ambiguous")) {
|
|
784
|
-
expect(error.message).toMatch(/ambiguous.*multiple/i);
|
|
785
|
-
expect(error.message).toContain(hash);
|
|
786
|
-
} else {
|
|
787
|
-
throw error;
|
|
788
|
-
}
|
|
789
|
-
}
|
|
790
|
-
});
|
|
791
|
-
|
|
792
|
-
it("throws helpful error for non-existent hash", async () => {
|
|
793
|
-
await expect(
|
|
794
|
-
hive_close.execute({ id: "nonono", reason: "Test" }, mockContext),
|
|
795
|
-
).rejects.toThrow(/not found|no cell|nonono/i);
|
|
796
|
-
});
|
|
797
|
-
});
|
|
798
|
-
|
|
799
|
-
describe("hive_start", () => {
|
|
800
|
-
it("accepts full cell ID", async () => {
|
|
801
|
-
const result = await hive_start.execute({ id: fullId }, mockContext);
|
|
802
|
-
|
|
803
|
-
expect(result).toContain("Started");
|
|
804
|
-
expect(result).toContain(fullId);
|
|
805
|
-
|
|
806
|
-
const started = await adapter.getCell(TEST_PROJECT_KEY, fullId);
|
|
807
|
-
expect(started?.status).toBe("in_progress");
|
|
808
|
-
});
|
|
809
|
-
|
|
810
|
-
it("resolves hash to full ID (or shows helpful error if ambiguous)", async () => {
|
|
811
|
-
try {
|
|
812
|
-
const result = await hive_start.execute(
|
|
813
|
-
{ id: hash },
|
|
814
|
-
mockContext,
|
|
815
|
-
);
|
|
816
|
-
|
|
817
|
-
expect(result).toContain("Started");
|
|
818
|
-
expect(result).toContain(fullId);
|
|
819
|
-
} catch (error) {
|
|
820
|
-
if (error instanceof Error && error.message.includes("Ambiguous")) {
|
|
821
|
-
expect(error.message).toMatch(/ambiguous.*multiple/i);
|
|
822
|
-
expect(error.message).toContain(hash);
|
|
823
|
-
} else {
|
|
824
|
-
throw error;
|
|
825
|
-
}
|
|
826
|
-
}
|
|
827
|
-
});
|
|
828
|
-
|
|
829
|
-
it("throws helpful error for non-existent hash", async () => {
|
|
830
|
-
await expect(
|
|
831
|
-
hive_start.execute({ id: "nope99" }, mockContext),
|
|
832
|
-
).rejects.toThrow(/not found|no cell|nope99/i);
|
|
833
|
-
});
|
|
834
|
-
});
|
|
835
|
-
});
|
|
836
|
-
|
|
837
|
-
describe("workflow integration", () => {
|
|
838
|
-
it("complete bead lifecycle: create -> start -> update -> close", async () => {
|
|
839
|
-
// 1. Create
|
|
840
|
-
const createResult = await hive_create.execute(
|
|
841
|
-
{ title: "Lifecycle test bead", type: "task", priority: 2 },
|
|
842
|
-
mockContext,
|
|
843
|
-
);
|
|
844
|
-
const bead = parseResponse<Bead>(createResult);
|
|
845
|
-
expect(bead.status).toBe("open");
|
|
846
|
-
|
|
847
|
-
// 2. Start (in_progress)
|
|
848
|
-
const startResult = await hive_start.execute(
|
|
849
|
-
{ id: bead.id },
|
|
850
|
-
mockContext,
|
|
851
|
-
);
|
|
852
|
-
expect(startResult).toContain("Started");
|
|
853
|
-
|
|
854
|
-
// 3. Update (add progress note)
|
|
855
|
-
const updateResult = await hive_update.execute(
|
|
856
|
-
{ id: bead.id, description: "50% complete" },
|
|
857
|
-
mockContext,
|
|
858
|
-
);
|
|
859
|
-
const updated = parseResponse<Bead>(updateResult);
|
|
860
|
-
expect(updated.description).toContain("50%");
|
|
861
|
-
|
|
862
|
-
// 4. Close
|
|
863
|
-
const closeResult = await hive_close.execute(
|
|
864
|
-
{ id: bead.id, reason: "Completed successfully" },
|
|
865
|
-
mockContext,
|
|
866
|
-
);
|
|
867
|
-
expect(closeResult).toContain("Closed");
|
|
868
|
-
|
|
869
|
-
// Verify final state using adapter
|
|
870
|
-
const finalBead = await adapter.getCell(TEST_PROJECT_KEY, bead.id);
|
|
871
|
-
expect(finalBead).toBeDefined();
|
|
872
|
-
expect(finalBead!.status).toBe("closed");
|
|
873
|
-
});
|
|
874
|
-
|
|
875
|
-
it("epic workflow: create epic -> start subtasks -> close subtasks -> close epic", async () => {
|
|
876
|
-
// 1. Create epic with subtasks
|
|
877
|
-
const epicResult = await hive_create_epic.execute(
|
|
878
|
-
{
|
|
879
|
-
epic_title: "Workflow test epic",
|
|
880
|
-
subtasks: [
|
|
881
|
-
{ title: "Step 1", priority: 2 },
|
|
882
|
-
{ title: "Step 2", priority: 2 },
|
|
883
|
-
],
|
|
884
|
-
},
|
|
885
|
-
mockContext,
|
|
886
|
-
);
|
|
887
|
-
const epic = parseResponse<EpicCreateResult>(epicResult);
|
|
888
|
-
expect(epic.success).toBe(true);
|
|
889
|
-
|
|
890
|
-
// 2. Start and complete first subtask
|
|
891
|
-
await hive_start.execute({ id: epic.subtasks[0].id }, mockContext);
|
|
892
|
-
await hive_close.execute(
|
|
893
|
-
{ id: epic.subtasks[0].id, reason: "Step 1 done" },
|
|
894
|
-
mockContext,
|
|
895
|
-
);
|
|
896
|
-
|
|
897
|
-
// 3. Start and complete second subtask
|
|
898
|
-
await hive_start.execute({ id: epic.subtasks[1].id }, mockContext);
|
|
899
|
-
await hive_close.execute(
|
|
900
|
-
{ id: epic.subtasks[1].id, reason: "Step 2 done" },
|
|
901
|
-
mockContext,
|
|
902
|
-
);
|
|
903
|
-
|
|
904
|
-
// 4. Close the epic
|
|
905
|
-
await hive_close.execute(
|
|
906
|
-
{ id: epic.epic.id, reason: "All subtasks completed" },
|
|
907
|
-
mockContext,
|
|
908
|
-
);
|
|
909
|
-
|
|
910
|
-
// Verify all are closed using adapter
|
|
911
|
-
const epicClosed = await adapter.getCell(TEST_PROJECT_KEY, epic.epic.id);
|
|
912
|
-
expect(epicClosed).toBeDefined();
|
|
913
|
-
expect(epicClosed!.status).toBe("closed");
|
|
914
|
-
|
|
915
|
-
for (const subtask of epic.subtasks) {
|
|
916
|
-
const subtaskClosed = await adapter.getCell(TEST_PROJECT_KEY, subtask.id);
|
|
917
|
-
expect(subtaskClosed).toBeDefined();
|
|
918
|
-
expect(subtaskClosed!.status).toBe("closed");
|
|
919
|
-
}
|
|
920
|
-
});
|
|
921
|
-
});
|
|
922
|
-
|
|
923
|
-
describe("Directory Migration (.beads → .hive)", () => {
|
|
924
|
-
it("checkBeadsMigrationNeeded detects .beads without .hive", async () => {
|
|
925
|
-
const { checkBeadsMigrationNeeded } = await import("./hive");
|
|
926
|
-
const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
|
|
927
|
-
const { join } = await import("node:path");
|
|
928
|
-
const { tmpdir } = await import("node:os");
|
|
929
|
-
|
|
930
|
-
// Create temp project with .beads directory only
|
|
931
|
-
const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
|
|
932
|
-
const beadsDir = join(tempProject, ".beads");
|
|
933
|
-
|
|
934
|
-
mkdirSync(beadsDir, { recursive: true });
|
|
935
|
-
writeFileSync(join(beadsDir, "issues.jsonl"), '{"id":"bd-test","title":"Test"}');
|
|
936
|
-
|
|
937
|
-
const result = checkBeadsMigrationNeeded(tempProject);
|
|
938
|
-
|
|
939
|
-
expect(result.needed).toBe(true);
|
|
940
|
-
expect(result.beadsPath).toBe(beadsDir);
|
|
941
|
-
|
|
942
|
-
// Cleanup
|
|
943
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
944
|
-
});
|
|
945
|
-
|
|
946
|
-
it("checkBeadsMigrationNeeded returns false if .hive exists", async () => {
|
|
947
|
-
const { checkBeadsMigrationNeeded } = await import("./hive");
|
|
948
|
-
const { mkdirSync, rmSync } = await import("node:fs");
|
|
949
|
-
const { join } = await import("node:path");
|
|
950
|
-
const { tmpdir } = await import("node:os");
|
|
951
|
-
|
|
952
|
-
// Create temp project with .hive directory
|
|
953
|
-
const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
|
|
954
|
-
const hiveDir = join(tempProject, ".hive");
|
|
955
|
-
|
|
956
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
957
|
-
|
|
958
|
-
const result = checkBeadsMigrationNeeded(tempProject);
|
|
959
|
-
|
|
960
|
-
expect(result.needed).toBe(false);
|
|
961
|
-
|
|
962
|
-
// Cleanup
|
|
963
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
964
|
-
});
|
|
965
|
-
|
|
966
|
-
it("migrateBeadsToHive renames .beads to .hive", async () => {
|
|
967
|
-
const { migrateBeadsToHive } = await import("./hive");
|
|
968
|
-
const { mkdirSync, existsSync, rmSync, writeFileSync } = await import("node:fs");
|
|
969
|
-
const { join } = await import("node:path");
|
|
970
|
-
const { tmpdir } = await import("node:os");
|
|
971
|
-
|
|
972
|
-
// Create temp project with .beads directory
|
|
973
|
-
const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
|
|
974
|
-
const beadsDir = join(tempProject, ".beads");
|
|
975
|
-
const hiveDir = join(tempProject, ".hive");
|
|
976
|
-
|
|
977
|
-
mkdirSync(beadsDir, { recursive: true });
|
|
978
|
-
writeFileSync(join(beadsDir, "issues.jsonl"), '{"id":"bd-test","title":"Test"}');
|
|
979
|
-
writeFileSync(join(beadsDir, "config.yaml"), "version: 1");
|
|
980
|
-
|
|
981
|
-
// Run migration (called after user confirms in CLI)
|
|
982
|
-
const result = await migrateBeadsToHive(tempProject);
|
|
983
|
-
|
|
984
|
-
// Verify .beads renamed to .hive
|
|
985
|
-
expect(result.migrated).toBe(true);
|
|
986
|
-
expect(existsSync(hiveDir)).toBe(true);
|
|
987
|
-
expect(existsSync(beadsDir)).toBe(false);
|
|
988
|
-
expect(existsSync(join(hiveDir, "issues.jsonl"))).toBe(true);
|
|
989
|
-
expect(existsSync(join(hiveDir, "config.yaml"))).toBe(true);
|
|
990
|
-
|
|
991
|
-
// Cleanup
|
|
992
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
993
|
-
});
|
|
994
|
-
|
|
995
|
-
it("migrateBeadsToHive skips if .hive already exists", async () => {
|
|
996
|
-
const { migrateBeadsToHive } = await import("./hive");
|
|
997
|
-
const { mkdirSync, existsSync, rmSync, writeFileSync } = await import("node:fs");
|
|
998
|
-
const { join } = await import("node:path");
|
|
999
|
-
const { tmpdir } = await import("node:os");
|
|
1000
|
-
|
|
1001
|
-
// Create temp project with BOTH .beads and .hive
|
|
1002
|
-
const tempProject = join(tmpdir(), `hive-migration-test-${Date.now()}`);
|
|
1003
|
-
const beadsDir = join(tempProject, ".beads");
|
|
1004
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1005
|
-
|
|
1006
|
-
mkdirSync(beadsDir, { recursive: true });
|
|
1007
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1008
|
-
writeFileSync(join(beadsDir, "issues.jsonl"), '{"id":"bd-old"}');
|
|
1009
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), '{"id":"bd-new"}');
|
|
1010
|
-
|
|
1011
|
-
// Run migration - should skip
|
|
1012
|
-
const result = await migrateBeadsToHive(tempProject);
|
|
1013
|
-
|
|
1014
|
-
// Verify both still exist (no migration)
|
|
1015
|
-
expect(result.migrated).toBe(false);
|
|
1016
|
-
expect(result.reason).toContain("already exists");
|
|
1017
|
-
expect(existsSync(beadsDir)).toBe(true);
|
|
1018
|
-
expect(existsSync(hiveDir)).toBe(true);
|
|
1019
|
-
|
|
1020
|
-
// Cleanup
|
|
1021
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1022
|
-
});
|
|
1023
|
-
|
|
1024
|
-
it("ensureHiveDirectory creates .hive if missing", async () => {
|
|
1025
|
-
const { ensureHiveDirectory } = await import("./hive");
|
|
1026
|
-
const { mkdirSync, existsSync, rmSync } = await import("node:fs");
|
|
1027
|
-
const { join } = await import("node:path");
|
|
1028
|
-
const { tmpdir } = await import("node:os");
|
|
1029
|
-
|
|
1030
|
-
// Create empty temp project
|
|
1031
|
-
const tempProject = join(tmpdir(), `hive-ensure-test-${Date.now()}`);
|
|
1032
|
-
mkdirSync(tempProject, { recursive: true });
|
|
1033
|
-
|
|
1034
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1035
|
-
expect(existsSync(hiveDir)).toBe(false);
|
|
1036
|
-
|
|
1037
|
-
// Ensure creates it
|
|
1038
|
-
ensureHiveDirectory(tempProject);
|
|
1039
|
-
|
|
1040
|
-
expect(existsSync(hiveDir)).toBe(true);
|
|
1041
|
-
|
|
1042
|
-
// Cleanup
|
|
1043
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1044
|
-
});
|
|
1045
|
-
|
|
1046
|
-
it("ensureHiveDirectory is idempotent", async () => {
|
|
1047
|
-
const { ensureHiveDirectory } = await import("./hive");
|
|
1048
|
-
const { mkdirSync, existsSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
|
|
1049
|
-
const { join } = await import("node:path");
|
|
1050
|
-
const { tmpdir } = await import("node:os");
|
|
1051
|
-
|
|
1052
|
-
// Create temp project with existing .hive
|
|
1053
|
-
const tempProject = join(tmpdir(), `hive-ensure-test-${Date.now()}`);
|
|
1054
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1055
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1056
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), '{"id":"existing"}');
|
|
1057
|
-
|
|
1058
|
-
// Ensure doesn't overwrite
|
|
1059
|
-
ensureHiveDirectory(tempProject);
|
|
1060
|
-
|
|
1061
|
-
expect(existsSync(hiveDir)).toBe(true);
|
|
1062
|
-
expect(readFileSync(join(hiveDir, "issues.jsonl"), "utf-8")).toBe('{"id":"existing"}');
|
|
1063
|
-
|
|
1064
|
-
// Cleanup
|
|
1065
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1066
|
-
});
|
|
1067
|
-
});
|
|
1068
|
-
|
|
1069
|
-
describe("importJsonlToPGLite", () => {
|
|
1070
|
-
it("imports empty JSONL - no-op", async () => {
|
|
1071
|
-
const { importJsonlToPGLite } = await import("./hive");
|
|
1072
|
-
const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
|
|
1073
|
-
const { join } = await import("node:path");
|
|
1074
|
-
const { tmpdir } = await import("node:os");
|
|
1075
|
-
|
|
1076
|
-
// Create temp project with empty JSONL
|
|
1077
|
-
const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
|
|
1078
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1079
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1080
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), "");
|
|
1081
|
-
|
|
1082
|
-
const result = await importJsonlToPGLite(tempProject);
|
|
1083
|
-
|
|
1084
|
-
expect(result.imported).toBe(0);
|
|
1085
|
-
expect(result.updated).toBe(0);
|
|
1086
|
-
expect(result.errors).toBe(0);
|
|
1087
|
-
|
|
1088
|
-
// Cleanup
|
|
1089
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1090
|
-
});
|
|
1091
|
-
|
|
1092
|
-
it("imports new records - all inserted", async () => {
|
|
1093
|
-
const { importJsonlToPGLite, getHiveAdapter } = await import("./hive");
|
|
1094
|
-
const { mkdirSync, rmSync, writeFileSync, unlinkSync } = await import("node:fs");
|
|
1095
|
-
const { join } = await import("node:path");
|
|
1096
|
-
const { tmpdir } = await import("node:os");
|
|
1097
|
-
|
|
1098
|
-
// Create temp project with new cells
|
|
1099
|
-
const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
|
|
1100
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1101
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1102
|
-
|
|
1103
|
-
const cell1 = {
|
|
1104
|
-
id: "bd-import-1",
|
|
1105
|
-
title: "Import test 1",
|
|
1106
|
-
status: "open" as const,
|
|
1107
|
-
priority: 2,
|
|
1108
|
-
issue_type: "task" as const,
|
|
1109
|
-
created_at: new Date().toISOString(),
|
|
1110
|
-
updated_at: new Date().toISOString(),
|
|
1111
|
-
dependencies: [],
|
|
1112
|
-
labels: [],
|
|
1113
|
-
comments: [],
|
|
1114
|
-
};
|
|
1115
|
-
|
|
1116
|
-
const cell2 = {
|
|
1117
|
-
id: "bd-import-2",
|
|
1118
|
-
title: "Import test 2",
|
|
1119
|
-
status: "in_progress" as const,
|
|
1120
|
-
priority: 1,
|
|
1121
|
-
issue_type: "bug" as const,
|
|
1122
|
-
created_at: new Date().toISOString(),
|
|
1123
|
-
updated_at: new Date().toISOString(),
|
|
1124
|
-
dependencies: [],
|
|
1125
|
-
labels: [],
|
|
1126
|
-
comments: [],
|
|
1127
|
-
};
|
|
1128
|
-
|
|
1129
|
-
writeFileSync(
|
|
1130
|
-
join(hiveDir, "issues.jsonl"),
|
|
1131
|
-
JSON.stringify(cell1) + "\n" + JSON.stringify(cell2) + "\n"
|
|
1132
|
-
);
|
|
1133
|
-
|
|
1134
|
-
// CRITICAL: Call importJsonlToPGLite() which will call getHiveAdapter()
|
|
1135
|
-
// The auto-migration will import cells, so we expect 0 imported here
|
|
1136
|
-
// because auto-migration already did it
|
|
1137
|
-
const result = await importJsonlToPGLite(tempProject);
|
|
1138
|
-
|
|
1139
|
-
// Auto-migration runs on first getHiveAdapter() call and imports cells
|
|
1140
|
-
// So when importJsonlToPGLite() runs, cells are already there
|
|
1141
|
-
// This is expected behavior - the function is idempotent
|
|
1142
|
-
expect(result.imported + result.updated).toBe(2);
|
|
1143
|
-
expect(result.errors).toBe(0);
|
|
1144
|
-
|
|
1145
|
-
// Verify cells exist in database
|
|
1146
|
-
const adapter = await getHiveAdapter(tempProject);
|
|
1147
|
-
const importedCell1 = await adapter.getCell(tempProject, "bd-import-1");
|
|
1148
|
-
const importedCell2 = await adapter.getCell(tempProject, "bd-import-2");
|
|
1149
|
-
|
|
1150
|
-
expect(importedCell1).toBeDefined();
|
|
1151
|
-
expect(importedCell1!.title).toBe("Import test 1");
|
|
1152
|
-
expect(importedCell2).toBeDefined();
|
|
1153
|
-
expect(importedCell2!.title).toBe("Import test 2");
|
|
1154
|
-
|
|
1155
|
-
// Cleanup
|
|
1156
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1157
|
-
});
|
|
1158
|
-
|
|
1159
|
-
it("updates existing records", async () => {
|
|
1160
|
-
const { importJsonlToPGLite, getHiveAdapter } = await import("./hive");
|
|
1161
|
-
const { mkdirSync, rmSync, writeFileSync, unlinkSync } = await import("node:fs");
|
|
1162
|
-
const { join } = await import("node:path");
|
|
1163
|
-
const { tmpdir } = await import("node:os");
|
|
1164
|
-
|
|
1165
|
-
// Create temp project
|
|
1166
|
-
const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
|
|
1167
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1168
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1169
|
-
|
|
1170
|
-
// Write JSONL FIRST (before getHiveAdapter to avoid auto-migration)
|
|
1171
|
-
const originalCell = {
|
|
1172
|
-
id: "bd-update-1",
|
|
1173
|
-
title: "Original title",
|
|
1174
|
-
status: "open",
|
|
1175
|
-
priority: 2,
|
|
1176
|
-
issue_type: "task",
|
|
1177
|
-
created_at: new Date().toISOString(),
|
|
1178
|
-
updated_at: new Date().toISOString(),
|
|
1179
|
-
dependencies: [],
|
|
1180
|
-
labels: [],
|
|
1181
|
-
comments: [],
|
|
1182
|
-
};
|
|
1183
|
-
|
|
1184
|
-
writeFileSync(
|
|
1185
|
-
join(hiveDir, "issues.jsonl"),
|
|
1186
|
-
JSON.stringify(originalCell) + "\n"
|
|
1187
|
-
);
|
|
1188
|
-
|
|
1189
|
-
// Get adapter - this will auto-migrate the original cell
|
|
1190
|
-
const adapter = await getHiveAdapter(tempProject);
|
|
1191
|
-
|
|
1192
|
-
// Now update the JSONL with new data
|
|
1193
|
-
const updatedCell = {
|
|
1194
|
-
...originalCell,
|
|
1195
|
-
title: "Updated title",
|
|
1196
|
-
description: "New description",
|
|
1197
|
-
status: "in_progress" as const,
|
|
1198
|
-
priority: 0,
|
|
1199
|
-
updated_at: new Date().toISOString(),
|
|
1200
|
-
};
|
|
1201
|
-
|
|
1202
|
-
writeFileSync(
|
|
1203
|
-
join(hiveDir, "issues.jsonl"),
|
|
1204
|
-
JSON.stringify(updatedCell) + "\n"
|
|
1205
|
-
);
|
|
1206
|
-
|
|
1207
|
-
const result = await importJsonlToPGLite(tempProject);
|
|
1208
|
-
|
|
1209
|
-
expect(result.imported).toBe(0);
|
|
1210
|
-
expect(result.updated).toBe(1);
|
|
1211
|
-
expect(result.errors).toBe(0);
|
|
1212
|
-
|
|
1213
|
-
// Verify update
|
|
1214
|
-
const cell = await adapter.getCell(tempProject, "bd-update-1");
|
|
1215
|
-
expect(cell).toBeDefined();
|
|
1216
|
-
expect(cell!.title).toBe("Updated title");
|
|
1217
|
-
expect(cell!.description).toContain("New description");
|
|
1218
|
-
expect(cell!.status).toBe("in_progress");
|
|
1219
|
-
|
|
1220
|
-
// Cleanup
|
|
1221
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1222
|
-
});
|
|
1223
|
-
|
|
1224
|
-
it("handles mixed new and existing records", async () => {
|
|
1225
|
-
const { importJsonlToPGLite, getHiveAdapter } = await import("./hive");
|
|
1226
|
-
const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
|
|
1227
|
-
const { join } = await import("node:path");
|
|
1228
|
-
const { tmpdir } = await import("node:os");
|
|
1229
|
-
|
|
1230
|
-
// Create temp project with NO initial JSONL (avoid auto-migration)
|
|
1231
|
-
const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
|
|
1232
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1233
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1234
|
-
|
|
1235
|
-
// Get adapter first (no auto-migration since no JSONL exists)
|
|
1236
|
-
const adapter = await getHiveAdapter(tempProject);
|
|
1237
|
-
|
|
1238
|
-
// Create existing cell directly via adapter
|
|
1239
|
-
await adapter.createCell(tempProject, {
|
|
1240
|
-
title: "Existing",
|
|
1241
|
-
type: "task",
|
|
1242
|
-
priority: 2,
|
|
1243
|
-
});
|
|
1244
|
-
|
|
1245
|
-
// Get the created cell to find its ID
|
|
1246
|
-
const cells = await adapter.queryCells(tempProject, { limit: 1 });
|
|
1247
|
-
const existingId = cells[0].id;
|
|
1248
|
-
|
|
1249
|
-
// Now write JSONL with updated existing + new cell
|
|
1250
|
-
const existingUpdated = {
|
|
1251
|
-
id: existingId,
|
|
1252
|
-
title: "Existing updated",
|
|
1253
|
-
status: "closed" as const,
|
|
1254
|
-
priority: 2,
|
|
1255
|
-
issue_type: "task" as const,
|
|
1256
|
-
created_at: new Date().toISOString(),
|
|
1257
|
-
updated_at: new Date().toISOString(),
|
|
1258
|
-
closed_at: new Date().toISOString(),
|
|
1259
|
-
dependencies: [],
|
|
1260
|
-
labels: [],
|
|
1261
|
-
comments: [],
|
|
1262
|
-
};
|
|
1263
|
-
|
|
1264
|
-
const newCell = {
|
|
1265
|
-
id: "bd-new",
|
|
1266
|
-
title: "Brand new",
|
|
1267
|
-
status: "open" as const,
|
|
1268
|
-
priority: 1,
|
|
1269
|
-
issue_type: "feature" as const,
|
|
1270
|
-
created_at: new Date().toISOString(),
|
|
1271
|
-
updated_at: new Date().toISOString(),
|
|
1272
|
-
dependencies: [],
|
|
1273
|
-
labels: [],
|
|
1274
|
-
comments: [],
|
|
1275
|
-
};
|
|
1276
|
-
|
|
1277
|
-
writeFileSync(
|
|
1278
|
-
join(hiveDir, "issues.jsonl"),
|
|
1279
|
-
JSON.stringify(existingUpdated) + "\n" + JSON.stringify(newCell) + "\n"
|
|
1280
|
-
);
|
|
1281
|
-
|
|
1282
|
-
const result = await importJsonlToPGLite(tempProject);
|
|
1283
|
-
|
|
1284
|
-
// importJsonlToPGLite() finds:
|
|
1285
|
-
// - existingId already exists (updated)
|
|
1286
|
-
// - bd-new is new (imported)
|
|
1287
|
-
expect(result.imported).toBe(1); // bd-new
|
|
1288
|
-
expect(result.updated).toBe(1); // existing cell
|
|
1289
|
-
expect(result.errors).toBe(0);
|
|
1290
|
-
|
|
1291
|
-
// Cleanup
|
|
1292
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1293
|
-
});
|
|
1294
|
-
|
|
1295
|
-
it("skips invalid JSON lines and counts errors", async () => {
|
|
1296
|
-
const { importJsonlToPGLite } = await import("./hive");
|
|
1297
|
-
const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
|
|
1298
|
-
const { join } = await import("node:path");
|
|
1299
|
-
const { tmpdir } = await import("node:os");
|
|
1300
|
-
|
|
1301
|
-
// Create temp project
|
|
1302
|
-
const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
|
|
1303
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1304
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1305
|
-
|
|
1306
|
-
const validCell = {
|
|
1307
|
-
id: "bd-valid",
|
|
1308
|
-
title: "Valid",
|
|
1309
|
-
status: "open",
|
|
1310
|
-
priority: 2,
|
|
1311
|
-
issue_type: "task",
|
|
1312
|
-
created_at: new Date().toISOString(),
|
|
1313
|
-
updated_at: new Date().toISOString(),
|
|
1314
|
-
dependencies: [],
|
|
1315
|
-
labels: [],
|
|
1316
|
-
comments: [],
|
|
1317
|
-
};
|
|
1318
|
-
|
|
1319
|
-
// Mix valid and invalid JSON
|
|
1320
|
-
writeFileSync(
|
|
1321
|
-
join(hiveDir, "issues.jsonl"),
|
|
1322
|
-
JSON.stringify(validCell) + "\n" +
|
|
1323
|
-
"{ invalid json \n" +
|
|
1324
|
-
'{"id":"incomplete"\n'
|
|
1325
|
-
);
|
|
1326
|
-
|
|
1327
|
-
const result = await importJsonlToPGLite(tempProject);
|
|
1328
|
-
|
|
1329
|
-
expect(result.imported).toBe(1); // Only the valid one
|
|
1330
|
-
expect(result.errors).toBe(2); // Two invalid lines
|
|
1331
|
-
|
|
1332
|
-
// Cleanup
|
|
1333
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1334
|
-
});
|
|
1335
|
-
|
|
1336
|
-
it("handles missing JSONL file gracefully", async () => {
|
|
1337
|
-
const { importJsonlToPGLite } = await import("./hive");
|
|
1338
|
-
const { mkdirSync, rmSync } = await import("node:fs");
|
|
1339
|
-
const { join } = await import("node:path");
|
|
1340
|
-
const { tmpdir } = await import("node:os");
|
|
1341
|
-
|
|
1342
|
-
// Create temp project without issues.jsonl
|
|
1343
|
-
const tempProject = join(tmpdir(), `hive-import-test-${Date.now()}`);
|
|
1344
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1345
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1346
|
-
|
|
1347
|
-
const result = await importJsonlToPGLite(tempProject);
|
|
1348
|
-
|
|
1349
|
-
expect(result.imported).toBe(0);
|
|
1350
|
-
expect(result.updated).toBe(0);
|
|
1351
|
-
expect(result.errors).toBe(0);
|
|
1352
|
-
|
|
1353
|
-
// Cleanup
|
|
1354
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1355
|
-
});
|
|
1356
|
-
});
|
|
1357
|
-
|
|
1358
|
-
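  // --- Editorial sketch, not part of the package ------------------------
  // The contract the importJsonlToPGLite tests above pin down: read JSONL
  // line by line, upsert by id, count imported/updated/errors, and return
  // zeros for a missing file. The `db` shape below is a hypothetical
  // stand-in for the real PGLite adapter.
  async function sketchImportJsonl(
    path: string,
    db: { has(id: string): Promise<boolean>; upsert(cell: unknown): Promise<void> },
  ): Promise<{ imported: number; updated: number; errors: number }> {
    const { readFileSync, existsSync } = await import("node:fs");
    const counts = { imported: 0, updated: 0, errors: 0 };
    if (!existsSync(path)) return counts; // missing file: zeros, no throw
    for (const line of readFileSync(path, "utf-8").split("\n").filter(Boolean)) {
      try {
        const cell = JSON.parse(line) as { id: string };
        if (await db.has(cell.id)) counts.updated++;
        else counts.imported++;
        await db.upsert(cell);
      } catch {
        counts.errors++; // invalid JSON lines are counted, not fatal
      }
    }
    return counts;
  }
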
describe("hive_sync", () => {
|
|
1359
|
-
it("succeeds with unstaged changes outside .hive/ (stash-before-pull)", async () => {
|
|
1360
|
-
const { mkdirSync, rmSync, writeFileSync, existsSync } = await import("node:fs");
|
|
1361
|
-
const { join } = await import("node:path");
|
|
1362
|
-
const { tmpdir } = await import("node:os");
|
|
1363
|
-
const { execSync } = await import("node:child_process");
|
|
1364
|
-
|
|
1365
|
-
// Create a temp git repository with a remote (to trigger pull)
|
|
1366
|
-
const tempProject = join(tmpdir(), `hive-sync-stash-test-${Date.now()}`);
|
|
1367
|
-
const remoteProject = join(tmpdir(), `hive-sync-remote-${Date.now()}`);
|
|
1368
|
-
|
|
1369
|
-
// Create "remote" bare repo
|
|
1370
|
-
mkdirSync(remoteProject, { recursive: true });
|
|
1371
|
-
execSync("git init --bare", { cwd: remoteProject });
|
|
1372
|
-
|
|
1373
|
-
// Create local repo
|
|
1374
|
-
mkdirSync(tempProject, { recursive: true });
|
|
1375
|
-
execSync("git init", { cwd: tempProject });
|
|
1376
|
-
execSync('git config user.email "test@example.com"', { cwd: tempProject });
|
|
1377
|
-
execSync('git config user.name "Test User"', { cwd: tempProject });
|
|
1378
|
-
execSync(`git remote add origin ${remoteProject}`, { cwd: tempProject });
|
|
1379
|
-
|
|
1380
|
-
// Create .hive directory and a source file
|
|
1381
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1382
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1383
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), "");
|
|
1384
|
-
writeFileSync(join(tempProject, "src.ts"), "// initial");
|
|
1385
|
-
|
|
1386
|
-
// Initial commit and push
|
|
1387
|
-
execSync("git add .", { cwd: tempProject });
|
|
1388
|
-
execSync('git commit -m "initial commit"', { cwd: tempProject });
|
|
1389
|
-
execSync("git push -u origin main", { cwd: tempProject });
|
|
1390
|
-
|
|
1391
|
-
// Now create unstaged changes OUTSIDE .hive/
|
|
1392
|
-
writeFileSync(join(tempProject, "src.ts"), "// modified but not staged");
|
|
1393
|
-
|
|
1394
|
-
// Set working directory for hive commands
|
|
1395
|
-
const originalDir = getHiveWorkingDirectory();
|
|
1396
|
-
setHiveWorkingDirectory(tempProject);
|
|
1397
|
-
|
|
1398
|
-
try {
|
|
1399
|
-
// Create a cell (this will mark it dirty and flush will write to JSONL)
|
|
1400
|
-
await hive_create.execute(
|
|
1401
|
-
{ title: "Stash test cell", type: "task" },
|
|
1402
|
-
mockContext,
|
|
1403
|
-
);
|
|
1404
|
-
|
|
1405
|
-
// Sync WITH auto_pull=true (this is where the bug manifests)
|
|
1406
|
-
// Before fix: fails with "cannot pull with rebase: You have unstaged changes"
|
|
1407
|
-
// After fix: stashes, pulls, pops, succeeds
|
|
1408
|
-
const result = await hive_sync.execute(
|
|
1409
|
-
{ auto_pull: true },
|
|
1410
|
-
mockContext,
|
|
1411
|
-
);
|
|
1412
|
-
|
|
1413
|
-
// Should succeed
|
|
1414
|
-
expect(result).toContain("successfully");
|
|
1415
|
-
|
|
1416
|
-
// Verify .hive changes were committed
|
|
1417
|
-
const hiveStatus = execSync("git status --porcelain .hive/", {
|
|
1418
|
-
cwd: tempProject,
|
|
1419
|
-
encoding: "utf-8",
|
|
1420
|
-
});
|
|
1421
|
-
expect(hiveStatus.trim()).toBe("");
|
|
1422
|
-
|
|
1423
|
-
// Verify unstaged changes are still there (stash was popped)
|
|
1424
|
-
const srcStatus = execSync("git status --porcelain src.ts", {
|
|
1425
|
-
cwd: tempProject,
|
|
1426
|
-
encoding: "utf-8",
|
|
1427
|
-
});
|
|
1428
|
-
expect(srcStatus.trim()).toContain("M src.ts");
|
|
1429
|
-
} finally {
|
|
1430
|
-
// Restore original working directory
|
|
1431
|
-
setHiveWorkingDirectory(originalDir);
|
|
1432
|
-
|
|
1433
|
-
// Cleanup
|
|
1434
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1435
|
-
rmSync(remoteProject, { recursive: true, force: true });
|
|
1436
|
-
}
|
|
1437
|
-
});
|
|
1438
|
-
|
|
1439
|
-
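    // --- Editorial sketch (assumed shape of the fix, inferred only from
    // the test above): stash any dirty working tree, rebase-pull, then pop,
    // so unrelated unstaged edits (like src.ts above) survive the sync.
    async function sketchStashBeforePull(cwd: string): Promise<void> {
      const { execSync } = await import("node:child_process");
      const dirty =
        execSync("git status --porcelain", { cwd, encoding: "utf-8" }).trim() !== "";
      if (dirty) execSync("git stash push --include-untracked", { cwd });
      try {
        execSync("git pull --rebase", { cwd });
      } finally {
        if (dirty) execSync("git stash pop", { cwd }); // restore unstaged work
      }
    }
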
it("commits .hive changes before pulling (regression test for unstaged changes error)", async () => {
|
|
1440
|
-
const { mkdirSync, rmSync, writeFileSync, existsSync } = await import("node:fs");
|
|
1441
|
-
const { join } = await import("node:path");
|
|
1442
|
-
const { tmpdir } = await import("node:os");
|
|
1443
|
-
const { execSync } = await import("node:child_process");
|
|
1444
|
-
|
|
1445
|
-
// Create a temp git repository
|
|
1446
|
-
const tempProject = join(tmpdir(), `hive-sync-test-${Date.now()}`);
|
|
1447
|
-
mkdirSync(tempProject, { recursive: true });
|
|
1448
|
-
|
|
1449
|
-
// Initialize git repo
|
|
1450
|
-
execSync("git init", { cwd: tempProject });
|
|
1451
|
-
execSync('git config user.email "test@example.com"', { cwd: tempProject });
|
|
1452
|
-
execSync('git config user.name "Test User"', { cwd: tempProject });
|
|
1453
|
-
|
|
1454
|
-
// Create .hive directory and issues.jsonl
|
|
1455
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1456
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1457
|
-
const issuesPath = join(hiveDir, "issues.jsonl");
|
|
1458
|
-
writeFileSync(issuesPath, "");
|
|
1459
|
-
|
|
1460
|
-
// Initial commit
|
|
1461
|
-
execSync("git add .", { cwd: tempProject });
|
|
1462
|
-
execSync('git commit -m "initial commit"', { cwd: tempProject });
|
|
1463
|
-
|
|
1464
|
-
// Set working directory for hive commands
|
|
1465
|
-
const originalDir = getHiveWorkingDirectory();
|
|
1466
|
-
setHiveWorkingDirectory(tempProject);
|
|
1467
|
-
|
|
1468
|
-
try {
|
|
1469
|
-
// Create a cell (this will mark it dirty and flush will write to JSONL)
|
|
1470
|
-
await hive_create.execute(
|
|
1471
|
-
{ title: "Sync test cell", type: "task" },
|
|
1472
|
-
mockContext,
|
|
1473
|
-
);
|
|
1474
|
-
|
|
1475
|
-
// Sync with auto_pull=false (skip pull since no remote configured)
|
|
1476
|
-
const result = await hive_sync.execute(
|
|
1477
|
-
{ auto_pull: false },
|
|
1478
|
-
mockContext,
|
|
1479
|
-
);
|
|
1480
|
-
|
|
1481
|
-
// Should succeed
|
|
1482
|
-
expect(result).toContain("successfully");
|
|
1483
|
-
|
|
1484
|
-
// Verify .hive changes were committed (working tree should be clean)
|
|
1485
|
-
const status = execSync("git status --porcelain", {
|
|
1486
|
-
cwd: tempProject,
|
|
1487
|
-
encoding: "utf-8",
|
|
1488
|
-
});
|
|
1489
|
-
expect(status.trim()).toBe("");
|
|
1490
|
-
|
|
1491
|
-
// Verify commit exists
|
|
1492
|
-
const log = execSync("git log --oneline", {
|
|
1493
|
-
cwd: tempProject,
|
|
1494
|
-
encoding: "utf-8",
|
|
1495
|
-
});
|
|
1496
|
-
expect(log).toContain("chore: sync hive");
|
|
1497
|
-
} finally {
|
|
1498
|
-
// Restore original working directory
|
|
1499
|
-
setHiveWorkingDirectory(originalDir);
|
|
1500
|
-
|
|
1501
|
-
// Cleanup
|
|
1502
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1503
|
-
}
|
|
1504
|
-
});
|
|
1505
|
-
|
|
1506
|
-
it("handles case with no changes to commit", async () => {
|
|
1507
|
-
const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
|
|
1508
|
-
const { join } = await import("node:path");
|
|
1509
|
-
const { tmpdir } = await import("node:os");
|
|
1510
|
-
const { execSync } = await import("node:child_process");
|
|
1511
|
-
|
|
1512
|
-
// Create temp git repo
|
|
1513
|
-
const tempProject = join(tmpdir(), `hive-sync-test-${Date.now()}`);
|
|
1514
|
-
mkdirSync(tempProject, { recursive: true });
|
|
1515
|
-
|
|
1516
|
-
// Initialize git
|
|
1517
|
-
execSync("git init", { cwd: tempProject });
|
|
1518
|
-
execSync('git config user.email "test@example.com"', { cwd: tempProject });
|
|
1519
|
-
execSync('git config user.name "Test User"', { cwd: tempProject });
|
|
1520
|
-
|
|
1521
|
-
// Create .hive directory with committed issues.jsonl
|
|
1522
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1523
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1524
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), "");
|
|
1525
|
-
|
|
1526
|
-
// Commit everything
|
|
1527
|
-
execSync("git add .", { cwd: tempProject });
|
|
1528
|
-
execSync('git commit -m "initial"', { cwd: tempProject });
|
|
1529
|
-
|
|
1530
|
-
// Set working directory
|
|
1531
|
-
const originalDir = getHiveWorkingDirectory();
|
|
1532
|
-
setHiveWorkingDirectory(tempProject);
|
|
1533
|
-
|
|
1534
|
-
try {
|
|
1535
|
-
// Sync with no changes (should handle gracefully)
|
|
1536
|
-
const result = await hive_sync.execute(
|
|
1537
|
-
{ auto_pull: false },
|
|
1538
|
-
mockContext,
|
|
1539
|
-
);
|
|
1540
|
-
|
|
1541
|
-
// Should return "No cells or memories to sync" since no dirty cells
|
|
1542
|
-
expect(result).toContain("No cells or memories to sync");
|
|
1543
|
-
} finally {
|
|
1544
|
-
setHiveWorkingDirectory(originalDir);
|
|
1545
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1546
|
-
}
|
|
1547
|
-
});
|
|
1548
|
-
});
|
|
1549
|
-
|
|
1550
|
-
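  // --- Editorial sketch, not part of the package ------------------------
  // The commit step the hive_sync tests above assert on: stage only .hive/,
  // commit with the "chore: sync hive" message, and short-circuit when there
  // is nothing to sync. Return strings mirror the assertions.
  async function sketchCommitHive(cwd: string): Promise<string> {
    const { execSync } = await import("node:child_process");
    const status = execSync("git status --porcelain .hive/", { cwd, encoding: "utf-8" });
    if (status.trim() === "") return "No cells or memories to sync";
    execSync("git add .hive/", { cwd });
    execSync('git commit -m "chore: sync hive"', { cwd });
    return "Hive synced successfully";
  }
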
describe("mergeHistoricBeads", () => {
|
|
1551
|
-
it("merges empty base file - no changes", async () => {
|
|
1552
|
-
const { mergeHistoricBeads } = await import("./hive");
|
|
1553
|
-
const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
|
|
1554
|
-
const { join } = await import("node:path");
|
|
1555
|
-
const { tmpdir } = await import("node:os");
|
|
1556
|
-
|
|
1557
|
-
// Create temp project with .hive directory
|
|
1558
|
-
const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
|
|
1559
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1560
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1561
|
-
|
|
1562
|
-
// Create empty base file
|
|
1563
|
-
writeFileSync(join(hiveDir, "beads.base.jsonl"), "");
|
|
1564
|
-
|
|
1565
|
-
// Create issues.jsonl with one bead
|
|
1566
|
-
const existingBead = { id: "bd-existing", title: "Existing bead" };
|
|
1567
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), JSON.stringify(existingBead) + "\n");
|
|
1568
|
-
|
|
1569
|
-
const result = await mergeHistoricBeads(tempProject);
|
|
1570
|
-
|
|
1571
|
-
expect(result.merged).toBe(0);
|
|
1572
|
-
expect(result.skipped).toBe(0);
|
|
1573
|
-
|
|
1574
|
-
// Cleanup
|
|
1575
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1576
|
-
});
|
|
1577
|
-
|
|
1578
|
-
it("merges empty issues file - all base records imported", async () => {
|
|
1579
|
-
const { mergeHistoricBeads } = await import("./hive");
|
|
1580
|
-
const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
|
|
1581
|
-
const { join } = await import("node:path");
|
|
1582
|
-
const { tmpdir } = await import("node:os");
|
|
1583
|
-
|
|
1584
|
-
// Create temp project
|
|
1585
|
-
const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
|
|
1586
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1587
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1588
|
-
|
|
1589
|
-
// Create base file with 2 beads
|
|
1590
|
-
const baseBead1 = { id: "bd-base-1", title: "Historic bead 1" };
|
|
1591
|
-
const baseBead2 = { id: "bd-base-2", title: "Historic bead 2" };
|
|
1592
|
-
writeFileSync(
|
|
1593
|
-
join(hiveDir, "beads.base.jsonl"),
|
|
1594
|
-
JSON.stringify(baseBead1) + "\n" + JSON.stringify(baseBead2) + "\n"
|
|
1595
|
-
);
|
|
1596
|
-
|
|
1597
|
-
// Empty issues file
|
|
1598
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), "");
|
|
1599
|
-
|
|
1600
|
-
const result = await mergeHistoricBeads(tempProject);
|
|
1601
|
-
|
|
1602
|
-
expect(result.merged).toBe(2);
|
|
1603
|
-
expect(result.skipped).toBe(0);
|
|
1604
|
-
|
|
1605
|
-
// Verify issues.jsonl now has both beads
|
|
1606
|
-
const issuesContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
|
|
1607
|
-
const lines = issuesContent.trim().split("\n").filter(l => l);
|
|
1608
|
-
expect(lines).toHaveLength(2);
|
|
1609
|
-
|
|
1610
|
-
const beads = lines.map(line => JSON.parse(line));
|
|
1611
|
-
expect(beads.find(b => b.id === "bd-base-1")).toBeDefined();
|
|
1612
|
-
expect(beads.find(b => b.id === "bd-base-2")).toBeDefined();
|
|
1613
|
-
|
|
1614
|
-
// Cleanup
|
|
1615
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1616
|
-
});
|
|
1617
|
-
|
|
1618
|
-
it("overlapping IDs - issues.jsonl wins (more recent)", async () => {
|
|
1619
|
-
const { mergeHistoricBeads } = await import("./hive");
|
|
1620
|
-
const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
|
|
1621
|
-
const { join } = await import("node:path");
|
|
1622
|
-
const { tmpdir } = await import("node:os");
|
|
1623
|
-
|
|
1624
|
-
// Create temp project
|
|
1625
|
-
const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
|
|
1626
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1627
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1628
|
-
|
|
1629
|
-
// Base has old version of bd-overlap
|
|
1630
|
-
const baseOldVersion = { id: "bd-overlap", title: "Old title", status: "open" };
|
|
1631
|
-
writeFileSync(
|
|
1632
|
-
join(hiveDir, "beads.base.jsonl"),
|
|
1633
|
-
JSON.stringify(baseOldVersion) + "\n"
|
|
1634
|
-
);
|
|
1635
|
-
|
|
1636
|
-
// Issues has new version (updated)
|
|
1637
|
-
const issuesNewVersion = { id: "bd-overlap", title: "New title", status: "closed" };
|
|
1638
|
-
writeFileSync(
|
|
1639
|
-
join(hiveDir, "issues.jsonl"),
|
|
1640
|
-
JSON.stringify(issuesNewVersion) + "\n"
|
|
1641
|
-
);
|
|
1642
|
-
|
|
1643
|
-
const result = await mergeHistoricBeads(tempProject);
|
|
1644
|
-
|
|
1645
|
-
expect(result.merged).toBe(0); // Nothing new to merge
|
|
1646
|
-
expect(result.skipped).toBe(1); // Skipped the old version
|
|
1647
|
-
|
|
1648
|
-
// Verify issues.jsonl still has new version (unchanged)
|
|
1649
|
-
const issuesContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
|
|
1650
|
-
const bead = JSON.parse(issuesContent.trim());
|
|
1651
|
-
expect(bead.title).toBe("New title");
|
|
1652
|
-
expect(bead.status).toBe("closed");
|
|
1653
|
-
|
|
1654
|
-
// Cleanup
|
|
1655
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1656
|
-
});
|
|
1657
|
-
|
|
1658
|
-
it("no overlap - all records combined", async () => {
|
|
1659
|
-
const { mergeHistoricBeads } = await import("./hive");
|
|
1660
|
-
const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
|
|
1661
|
-
const { join } = await import("node:path");
|
|
1662
|
-
const { tmpdir } = await import("node:os");
|
|
1663
|
-
|
|
1664
|
-
// Create temp project
|
|
1665
|
-
const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
|
|
1666
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1667
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1668
|
-
|
|
1669
|
-
// Base has 2 beads
|
|
1670
|
-
const baseBead1 = { id: "bd-base-1", title: "Historic 1" };
|
|
1671
|
-
const baseBead2 = { id: "bd-base-2", title: "Historic 2" };
|
|
1672
|
-
writeFileSync(
|
|
1673
|
-
join(hiveDir, "beads.base.jsonl"),
|
|
1674
|
-
JSON.stringify(baseBead1) + "\n" + JSON.stringify(baseBead2) + "\n"
|
|
1675
|
-
);
|
|
1676
|
-
|
|
1677
|
-
// Issues has 2 different beads
|
|
1678
|
-
const issuesBead1 = { id: "bd-current-1", title: "Current 1" };
|
|
1679
|
-
const issuesBead2 = { id: "bd-current-2", title: "Current 2" };
|
|
1680
|
-
writeFileSync(
|
|
1681
|
-
join(hiveDir, "issues.jsonl"),
|
|
1682
|
-
JSON.stringify(issuesBead1) + "\n" + JSON.stringify(issuesBead2) + "\n"
|
|
1683
|
-
);
|
|
1684
|
-
|
|
1685
|
-
const result = await mergeHistoricBeads(tempProject);
|
|
1686
|
-
|
|
1687
|
-
expect(result.merged).toBe(2); // Added 2 from base
|
|
1688
|
-
expect(result.skipped).toBe(0);
|
|
1689
|
-
|
|
1690
|
-
// Verify issues.jsonl now has all 4 beads
|
|
1691
|
-
const issuesContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
|
|
1692
|
-
const lines = issuesContent.trim().split("\n").filter(l => l);
|
|
1693
|
-
expect(lines).toHaveLength(4);
|
|
1694
|
-
|
|
1695
|
-
const beads = lines.map(line => JSON.parse(line));
|
|
1696
|
-
expect(beads.find(b => b.id === "bd-base-1")).toBeDefined();
|
|
1697
|
-
expect(beads.find(b => b.id === "bd-base-2")).toBeDefined();
|
|
1698
|
-
expect(beads.find(b => b.id === "bd-current-1")).toBeDefined();
|
|
1699
|
-
expect(beads.find(b => b.id === "bd-current-2")).toBeDefined();
|
|
1700
|
-
|
|
1701
|
-
// Cleanup
|
|
1702
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1703
|
-
});
|
|
1704
|
-
|
|
1705
|
-
it("missing base file - graceful handling", async () => {
|
|
1706
|
-
const { mergeHistoricBeads } = await import("./hive");
|
|
1707
|
-
const { mkdirSync, rmSync, writeFileSync } = await import("node:fs");
|
|
1708
|
-
const { join } = await import("node:path");
|
|
1709
|
-
const { tmpdir } = await import("node:os");
|
|
1710
|
-
|
|
1711
|
-
// Create temp project with .hive but NO base file
|
|
1712
|
-
const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
|
|
1713
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1714
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1715
|
-
|
|
1716
|
-
// Issues exists, base doesn't
|
|
1717
|
-
const issuesBead = { id: "bd-current", title: "Current" };
|
|
1718
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), JSON.stringify(issuesBead) + "\n");
|
|
1719
|
-
|
|
1720
|
-
const result = await mergeHistoricBeads(tempProject);
|
|
1721
|
-
|
|
1722
|
-
// Should return zeros, not throw
|
|
1723
|
-
expect(result.merged).toBe(0);
|
|
1724
|
-
expect(result.skipped).toBe(0);
|
|
1725
|
-
|
|
1726
|
-
// Cleanup
|
|
1727
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1728
|
-
});
|
|
1729
|
-
|
|
1730
|
-
it("missing issues file - creates it from base", async () => {
|
|
1731
|
-
const { mergeHistoricBeads } = await import("./hive");
|
|
1732
|
-
const { mkdirSync, rmSync, writeFileSync, readFileSync, existsSync } = await import("node:fs");
|
|
1733
|
-
const { join } = await import("node:path");
|
|
1734
|
-
const { tmpdir } = await import("node:os");
|
|
1735
|
-
|
|
1736
|
-
// Create temp project with base but NO issues file
|
|
1737
|
-
const tempProject = join(tmpdir(), `hive-merge-test-${Date.now()}`);
|
|
1738
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1739
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1740
|
-
|
|
1741
|
-
// Base exists, issues doesn't
|
|
1742
|
-
const baseBead = { id: "bd-base", title: "Historic" };
|
|
1743
|
-
writeFileSync(
|
|
1744
|
-
join(hiveDir, "beads.base.jsonl"),
|
|
1745
|
-
JSON.stringify(baseBead) + "\n"
|
|
1746
|
-
);
|
|
1747
|
-
|
|
1748
|
-
const issuesPath = join(hiveDir, "issues.jsonl");
|
|
1749
|
-
expect(existsSync(issuesPath)).toBe(false);
|
|
1750
|
-
|
|
1751
|
-
const result = await mergeHistoricBeads(tempProject);
|
|
1752
|
-
|
|
1753
|
-
expect(result.merged).toBe(1);
|
|
1754
|
-
expect(result.skipped).toBe(0);
|
|
1755
|
-
|
|
1756
|
-
// Verify issues.jsonl was created
|
|
1757
|
-
expect(existsSync(issuesPath)).toBe(true);
|
|
1758
|
-
const content = readFileSync(issuesPath, "utf-8");
|
|
1759
|
-
const bead = JSON.parse(content.trim());
|
|
1760
|
-
expect(bead.id).toBe("bd-base");
|
|
1761
|
-
|
|
1762
|
-
// Cleanup
|
|
1763
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1764
|
-
});
|
|
1765
|
-
});
|
|
1766
|
-
|
|
1767
|
-
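  // --- Editorial sketch, not part of the package ------------------------
  // The merge rule the mergeHistoricBeads tests pin down: base records whose
  // id already appears in issues.jsonl are skipped (issues.jsonl wins), all
  // others are appended.
  function sketchMergeHistoric(
    baseLines: string[],
    issueLines: string[],
  ): { merged: string[]; skipped: number } {
    const seen = new Set(issueLines.map((l) => (JSON.parse(l) as { id: string }).id));
    const merged: string[] = [];
    let skipped = 0;
    for (const line of baseLines) {
      const { id } = JSON.parse(line) as { id: string };
      if (seen.has(id)) skipped += 1; // newer record already present
      else merged.push(line);
    }
    return { merged, skipped };
  }
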
describe("process exit hook", () => {
|
|
1768
|
-
it("registers beforeExit hook that syncs dirty cells", async () => {
|
|
1769
|
-
const { mkdirSync, rmSync, writeFileSync, readFileSync, existsSync } = await import("node:fs");
|
|
1770
|
-
const { join } = await import("node:path");
|
|
1771
|
-
const { tmpdir } = await import("node:os");
|
|
1772
|
-
const { execSync } = await import("node:child_process");
|
|
1773
|
-
|
|
1774
|
-
// Create temp project
|
|
1775
|
-
const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
|
|
1776
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1777
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1778
|
-
|
|
1779
|
-
// Initialize git repo
|
|
1780
|
-
execSync("git init", { cwd: tempProject });
|
|
1781
|
-
execSync('git config user.email "test@example.com"', { cwd: tempProject });
|
|
1782
|
-
execSync('git config user.name "Test User"', { cwd: tempProject });
|
|
1783
|
-
|
|
1784
|
-
// Initial commit with empty issues.jsonl
|
|
1785
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), "");
|
|
1786
|
-
execSync("git add .", { cwd: tempProject });
|
|
1787
|
-
execSync('git commit -m "initial"', { cwd: tempProject });
|
|
1788
|
-
|
|
1789
|
-
// Set working directory
|
|
1790
|
-
const originalDir = getHiveWorkingDirectory();
|
|
1791
|
-
setHiveWorkingDirectory(tempProject);
|
|
1792
|
-
|
|
1793
|
-
try {
|
|
1794
|
-
// Create a cell (marks it dirty but don't sync)
|
|
1795
|
-
await hive_create.execute(
|
|
1796
|
-
{ title: "Exit hook test cell", type: "task" },
|
|
1797
|
-
mockContext,
|
|
1798
|
-
);
|
|
1799
|
-
|
|
1800
|
-
// Verify cell is NOT in JSONL yet (only in PGLite)
|
|
1801
|
-
const beforeContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
|
|
1802
|
-
expect(beforeContent.trim()).toBe("");
|
|
1803
|
-
|
|
1804
|
-
// Simulate process exit by triggering beforeExit event
|
|
1805
|
-
process.emit("beforeExit", 0);
|
|
1806
|
-
|
|
1807
|
-
// Wait for async flush to complete
|
|
1808
|
-
await new Promise(resolve => setTimeout(resolve, 100));
|
|
1809
|
-
|
|
1810
|
-
// Verify cell was synced to JSONL by the exit hook
|
|
1811
|
-
const afterContent = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
|
|
1812
|
-
expect(afterContent.trim()).not.toBe("");
|
|
1813
|
-
|
|
1814
|
-
const cells = afterContent.trim().split("\n").map(line => JSON.parse(line));
|
|
1815
|
-
expect(cells).toHaveLength(1);
|
|
1816
|
-
expect(cells[0].title).toBe("Exit hook test cell");
|
|
1817
|
-
} finally {
|
|
1818
|
-
setHiveWorkingDirectory(originalDir);
|
|
1819
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1820
|
-
}
|
|
1821
|
-
});
|
|
1822
|
-
|
|
1823
|
-
it("exit hook is idempotent - safe to call multiple times", async () => {
|
|
1824
|
-
const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
|
|
1825
|
-
const { join } = await import("node:path");
|
|
1826
|
-
const { tmpdir } = await import("node:os");
|
|
1827
|
-
|
|
1828
|
-
// Create temp project
|
|
1829
|
-
const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
|
|
1830
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1831
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1832
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), "");
|
|
1833
|
-
|
|
1834
|
-
// Set working directory
|
|
1835
|
-
const originalDir = getHiveWorkingDirectory();
|
|
1836
|
-
setHiveWorkingDirectory(tempProject);
|
|
1837
|
-
|
|
1838
|
-
try {
|
|
1839
|
-
// Create a cell
|
|
1840
|
-
await hive_create.execute(
|
|
1841
|
-
{ title: "Idempotent test cell", type: "task" },
|
|
1842
|
-
mockContext,
|
|
1843
|
-
);
|
|
1844
|
-
|
|
1845
|
-
// Trigger exit hook multiple times
|
|
1846
|
-
process.emit("beforeExit", 0);
|
|
1847
|
-
await new Promise(resolve => setTimeout(resolve, 50));
|
|
1848
|
-
|
|
1849
|
-
process.emit("beforeExit", 0);
|
|
1850
|
-
await new Promise(resolve => setTimeout(resolve, 50));
|
|
1851
|
-
|
|
1852
|
-
// Verify cell is written only once (no duplication)
|
|
1853
|
-
const content = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
|
|
1854
|
-
const lines = content.trim().split("\n").filter(l => l);
|
|
1855
|
-
|
|
1856
|
-
// Should have exactly one cell (even though we triggered hook twice)
|
|
1857
|
-
expect(lines.length).toBeGreaterThanOrEqual(1);
|
|
1858
|
-
|
|
1859
|
-
// All cells should have unique IDs
|
|
1860
|
-
const cells = lines.map(line => JSON.parse(line));
|
|
1861
|
-
const uniqueIds = new Set(cells.map(c => c.id));
|
|
1862
|
-
expect(uniqueIds.size).toBe(cells.length);
|
|
1863
|
-
} finally {
|
|
1864
|
-
setHiveWorkingDirectory(originalDir);
|
|
1865
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1866
|
-
}
|
|
1867
|
-
});
|
|
1868
|
-
|
|
1869
|
-
it("exit hook handles case with no dirty cells gracefully", async () => {
|
|
1870
|
-
const { mkdirSync, rmSync, writeFileSync, readFileSync } = await import("node:fs");
|
|
1871
|
-
const { join } = await import("node:path");
|
|
1872
|
-
const { tmpdir } = await import("node:os");
|
|
1873
|
-
|
|
1874
|
-
// Create temp project with empty JSONL
|
|
1875
|
-
const tempProject = join(tmpdir(), `hive-exit-hook-test-${Date.now()}`);
|
|
1876
|
-
const hiveDir = join(tempProject, ".hive");
|
|
1877
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
1878
|
-
writeFileSync(join(hiveDir, "issues.jsonl"), "");
|
|
1879
|
-
|
|
1880
|
-
// Set working directory
|
|
1881
|
-
const originalDir = getHiveWorkingDirectory();
|
|
1882
|
-
setHiveWorkingDirectory(tempProject);
|
|
1883
|
-
|
|
1884
|
-
try {
|
|
1885
|
-
// Trigger exit hook with no dirty cells (should not throw)
|
|
1886
|
-
process.emit("beforeExit", 0);
|
|
1887
|
-
await new Promise(resolve => setTimeout(resolve, 50));
|
|
1888
|
-
|
|
1889
|
-
// JSONL should still be empty (no error thrown)
|
|
1890
|
-
const content = readFileSync(join(hiveDir, "issues.jsonl"), "utf-8");
|
|
1891
|
-
expect(content.trim()).toBe("");
|
|
1892
|
-
} finally {
|
|
1893
|
-
setHiveWorkingDirectory(originalDir);
|
|
1894
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
1895
|
-
}
|
|
1896
|
-
});
|
|
1897
|
-
});
|
|
1898
|
-
|
|
1899
|
-
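  // --- Editorial sketch, not part of the package ------------------------
  // A beforeExit flush of the kind the tests above exercise. Idempotency
  // comes from two properties: a guard against re-entrant firings, and a
  // flush that writes the full cell set rather than appending, so repeated
  // runs never duplicate records. `flushDirtyCells` is hypothetical.
  function sketchRegisterExitHook(flushDirtyCells: () => Promise<void>): void {
    let flushing = false;
    process.on("beforeExit", () => {
      if (flushing) return; // safe to fire multiple times
      flushing = true;
      void flushDirtyCells()
        .catch(() => { /* never throw during shutdown */ })
        .finally(() => { flushing = false; });
    });
  }
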
describe("hive_cells", () => {
|
|
1900
|
-
let testCellId: string;
|
|
1901
|
-
|
|
1902
|
-
beforeEach(async () => {
|
|
1903
|
-
// Create a test cell for hive_cells tests
|
|
1904
|
-
const result = await hive_create.execute(
|
|
1905
|
-
{ title: "Cells tool test", type: "task" },
|
|
1906
|
-
mockContext,
|
|
1907
|
-
);
|
|
1908
|
-
const cell = parseResponse<Cell>(result);
|
|
1909
|
-
testCellId = cell.id;
|
|
1910
|
-
createdBeadIds.push(testCellId);
|
|
1911
|
-
});
|
|
1912
|
-
|
|
1913
|
-
it("lists all cells with no filters", async () => {
|
|
1914
|
-
const { hive_cells } = await import("./hive");
|
|
1915
|
-
|
|
1916
|
-
const result = await hive_cells.execute({}, mockContext);
|
|
1917
|
-
const cells = parseResponse<Cell[]>(result);
|
|
1918
|
-
|
|
1919
|
-
expect(Array.isArray(cells)).toBe(true);
|
|
1920
|
-
expect(cells.length).toBeGreaterThan(0);
|
|
1921
|
-
});
|
|
1922
|
-
|
|
1923
|
-
it("filters by status", async () => {
|
|
1924
|
-
const { hive_cells } = await import("./hive");
|
|
1925
|
-
|
|
1926
|
-
const result = await hive_cells.execute({ status: "open" }, mockContext);
|
|
1927
|
-
const cells = parseResponse<Cell[]>(result);
|
|
1928
|
-
|
|
1929
|
-
expect(Array.isArray(cells)).toBe(true);
|
|
1930
|
-
expect(cells.every((c) => c.status === "open")).toBe(true);
|
|
1931
|
-
});
|
|
1932
|
-
|
|
1933
|
-
it("filters by type", async () => {
|
|
1934
|
-
const { hive_cells } = await import("./hive");
|
|
1935
|
-
|
|
1936
|
-
// Create a bug cell
|
|
1937
|
-
const bugResult = await hive_create.execute(
|
|
1938
|
-
{ title: "Bug for cells test", type: "bug" },
|
|
1939
|
-
mockContext,
|
|
1940
|
-
);
|
|
1941
|
-
const bug = parseResponse<Cell>(bugResult);
|
|
1942
|
-
createdBeadIds.push(bug.id);
|
|
1943
|
-
|
|
1944
|
-
const result = await hive_cells.execute({ type: "bug" }, mockContext);
|
|
1945
|
-
const cells = parseResponse<Cell[]>(result);
|
|
1946
|
-
|
|
1947
|
-
expect(Array.isArray(cells)).toBe(true);
|
|
1948
|
-
expect(cells.every((c) => c.issue_type === "bug")).toBe(true);
|
|
1949
|
-
});
|
|
1950
|
-
|
|
1951
|
-
it("returns next ready cell when ready=true", async () => {
|
|
1952
|
-
const { hive_cells } = await import("./hive");
|
|
1953
|
-
|
|
1954
|
-
const result = await hive_cells.execute({ ready: true }, mockContext);
|
|
1955
|
-
const cells = parseResponse<Cell[]>(result);
|
|
1956
|
-
|
|
1957
|
-
expect(Array.isArray(cells)).toBe(true);
|
|
1958
|
-
// Should return 0 or 1 cells (the next ready one)
|
|
1959
|
-
expect(cells.length).toBeLessThanOrEqual(1);
|
|
1960
|
-
if (cells.length === 1) {
|
|
1961
|
-
expect(["open", "in_progress"]).toContain(cells[0].status);
|
|
1962
|
-
}
|
|
1963
|
-
});
|
|
1964
|
-
|
|
1965
|
-
it("looks up cell by partial ID", async () => {
|
|
1966
|
-
const { hive_cells } = await import("./hive");
|
|
1967
|
-
|
|
1968
|
-
// Extract hash from full ID (6-char segment before the last hyphen)
|
|
1969
|
-
const lastHyphenIndex = testCellId.lastIndexOf("-");
|
|
1970
|
-
const beforeLast = testCellId.substring(0, lastHyphenIndex);
|
|
1971
|
-
const secondLastHyphenIndex = beforeLast.lastIndexOf("-");
|
|
1972
|
-
const hash = testCellId.substring(secondLastHyphenIndex + 1, lastHyphenIndex);
|
|
1973
|
-
|
|
1974
|
-
// Use last 6 chars of hash (or full hash if short)
|
|
1975
|
-
const shortHash = hash.substring(Math.max(0, hash.length - 6));
|
|
1976
|
-
|
|
1977
|
-
try {
|
|
1978
|
-
const result = await hive_cells.execute({ id: shortHash }, mockContext);
|
|
1979
|
-
const cells = parseResponse<Cell[]>(result);
|
|
1980
|
-
|
|
1981
|
-
// Should return exactly one cell matching the ID
|
|
1982
|
-
expect(cells).toHaveLength(1);
|
|
1983
|
-
expect(cells[0].id).toBe(testCellId);
|
|
1984
|
-
} catch (error) {
|
|
1985
|
-
// If ambiguous, verify error message is helpful
|
|
1986
|
-
if (error instanceof Error && error.message.includes("Ambiguous")) {
|
|
1987
|
-
expect(error.message).toMatch(/ambiguous.*multiple/i);
|
|
1988
|
-
expect(error.message).toContain(shortHash);
|
|
1989
|
-
} else {
|
|
1990
|
-
throw error;
|
|
1991
|
-
}
|
|
1992
|
-
}
|
|
1993
|
-
});
|
|
1994
|
-
|
|
1995
|
-
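    // --- Editorial sketch, not part of the package ----------------------
    // Partial-ID resolution with an explicit ambiguity error, matching what
    // the test above tolerates in its catch branch.
    function sketchResolveId(partial: string, ids: string[]): string {
      const matches = ids.filter((id) => id.includes(partial));
      if (matches.length === 0) throw new Error(`No cell found for id: ${partial}`);
      if (matches.length > 1) {
        throw new Error(
          `Ambiguous id "${partial}": multiple cells match (${matches.join(", ")})`,
        );
      }
      return matches[0];
    }
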
it("looks up cell by full ID", async () => {
|
|
1996
|
-
const { hive_cells } = await import("./hive");
|
|
1997
|
-
|
|
1998
|
-
const result = await hive_cells.execute({ id: testCellId }, mockContext);
|
|
1999
|
-
const cells = parseResponse<Cell[]>(result);
|
|
2000
|
-
|
|
2001
|
-
expect(cells).toHaveLength(1);
|
|
2002
|
-
expect(cells[0].id).toBe(testCellId);
|
|
2003
|
-
expect(cells[0].title).toBe("Cells tool test");
|
|
2004
|
-
});
|
|
2005
|
-
|
|
2006
|
-
it("throws error for non-existent ID", async () => {
|
|
2007
|
-
const { hive_cells } = await import("./hive");
|
|
2008
|
-
|
|
2009
|
-
await expect(
|
|
2010
|
-
hive_cells.execute({ id: "nonexistent999" }, mockContext),
|
|
2011
|
-
).rejects.toThrow(/not found|no cell|nonexistent999/i);
|
|
2012
|
-
});
|
|
2013
|
-
|
|
2014
|
-
it("respects limit parameter", async () => {
|
|
2015
|
-
const { hive_cells } = await import("./hive");
|
|
2016
|
-
|
|
2017
|
-
const result = await hive_cells.execute({ limit: 2 }, mockContext);
|
|
2018
|
-
const cells = parseResponse<Cell[]>(result);
|
|
2019
|
-
|
|
2020
|
-
expect(cells.length).toBeLessThanOrEqual(2);
|
|
2021
|
-
});
|
|
2022
|
-
|
|
2023
|
-
it("combines filters (status + type + limit)", async () => {
|
|
2024
|
-
const { hive_cells } = await import("./hive");
|
|
2025
|
-
|
|
2026
|
-
// Create some task cells
|
|
2027
|
-
for (let i = 0; i < 3; i++) {
|
|
2028
|
-
const r = await hive_create.execute(
|
|
2029
|
-
{ title: `Task ${i}`, type: "task" },
|
|
2030
|
-
mockContext,
|
|
2031
|
-
);
|
|
2032
|
-
const c = parseResponse<Cell>(r);
|
|
2033
|
-
createdBeadIds.push(c.id);
|
|
2034
|
-
}
|
|
2035
|
-
|
|
2036
|
-
const result = await hive_cells.execute(
|
|
2037
|
-
{ status: "open", type: "task", limit: 2 },
|
|
2038
|
-
mockContext,
|
|
2039
|
-
);
|
|
2040
|
-
const cells = parseResponse<Cell[]>(result);
|
|
2041
|
-
|
|
2042
|
-
expect(cells.length).toBeLessThanOrEqual(2);
|
|
2043
|
-
expect(cells.every((c) => c.status === "open" && c.issue_type === "task")).toBe(true);
|
|
2044
|
-
});
|
|
2045
|
-
});
|
|
2046
|
-
|
|
2047
|
-
describe("bigint to Date conversion", () => {
|
|
2048
|
-
it("should handle PGLite bigint timestamps correctly in hive_query", async () => {
|
|
2049
|
-
const { mkdirSync, rmSync } = await import("node:fs");
|
|
2050
|
-
const { join } = await import("node:path");
|
|
2051
|
-
const { tmpdir } = await import("node:os");
|
|
2052
|
-
|
|
2053
|
-
const tempProject = join(tmpdir(), `hive-bigint-test-${Date.now()}`);
|
|
2054
|
-
const hiveDir = join(tempProject, ".hive");
|
|
2055
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
2056
|
-
|
|
2057
|
-
const originalDir = getHiveWorkingDirectory();
|
|
2058
|
-
setHiveWorkingDirectory(tempProject);
|
|
2059
|
-
|
|
2060
|
-
try {
|
|
2061
|
-
// Create a cell
|
|
2062
|
-
const createResponse = await hive_create.execute(
|
|
2063
|
-
{ title: "Test bigint dates", type: "task" },
|
|
2064
|
-
mockContext
|
|
2065
|
-
);
|
|
2066
|
-
const created = parseResponse<Cell>(createResponse);
|
|
2067
|
-
|
|
2068
|
-
// Query it back - this triggers formatCellForOutput with PGLite bigint timestamps
|
|
2069
|
-
const queryResponse = await hive_query.execute({ status: "open" }, mockContext);
|
|
2070
|
-
const queried = parseResponse<Cell[]>(queryResponse);
|
|
2071
|
-
|
|
2072
|
-
expect(queried.length).toBeGreaterThan(0);
|
|
2073
|
-
const cell = queried.find(c => c.id === created.id);
|
|
2074
|
-
expect(cell).toBeDefined();
|
|
2075
|
-
|
|
2076
|
-
// These should be valid ISO date strings, not "Invalid Date"
|
|
2077
|
-
expect(cell!.created_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
|
|
2078
|
-
expect(cell!.updated_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
|
|
2079
|
-
expect(cell!.created_at).not.toBe("Invalid Date");
|
|
2080
|
-
expect(cell!.updated_at).not.toBe("Invalid Date");
|
|
2081
|
-
|
|
2082
|
-
// Verify dates are actually valid by parsing
|
|
2083
|
-
const createdDate = new Date(cell!.created_at);
|
|
2084
|
-
const updatedDate = new Date(cell!.updated_at);
|
|
2085
|
-
expect(createdDate.getTime()).toBeGreaterThan(0);
|
|
2086
|
-
expect(updatedDate.getTime()).toBeGreaterThan(0);
|
|
2087
|
-
} finally {
|
|
2088
|
-
setHiveWorkingDirectory(originalDir);
|
|
2089
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
2090
|
-
}
|
|
2091
|
-
});
|
|
2092
|
-
|
|
2093
|
-
it("should handle closed_at bigint timestamp correctly", async () => {
|
|
2094
|
-
const { mkdirSync, rmSync } = await import("node:fs");
|
|
2095
|
-
const { join } = await import("node:path");
|
|
2096
|
-
const { tmpdir } = await import("node:os");
|
|
2097
|
-
|
|
2098
|
-
const tempProject = join(tmpdir(), `hive-bigint-closed-test-${Date.now()}`);
|
|
2099
|
-
const hiveDir = join(tempProject, ".hive");
|
|
2100
|
-
mkdirSync(hiveDir, { recursive: true });
|
|
2101
|
-
|
|
2102
|
-
const originalDir = getHiveWorkingDirectory();
|
|
2103
|
-
setHiveWorkingDirectory(tempProject);
|
|
2104
|
-
|
|
2105
|
-
try {
|
|
2106
|
-
// Create and close a cell
|
|
2107
|
-
const createResponse = await hive_create.execute(
|
|
2108
|
-
{ title: "Test closed bigint date", type: "task" },
|
|
2109
|
-
mockContext
|
|
2110
|
-
);
|
|
2111
|
-
const created = parseResponse<Cell>(createResponse);
|
|
2112
|
-
|
|
2113
|
-
await hive_close.execute(
|
|
2114
|
-
{ id: created.id, reason: "Testing bigint closed_at" },
|
|
2115
|
-
mockContext
|
|
2116
|
-
);
|
|
2117
|
-
|
|
2118
|
-
// Query closed cells
|
|
2119
|
-
const queryResponse = await hive_query.execute({ status: "closed" }, mockContext);
|
|
2120
|
-
const queried = parseResponse<Cell[]>(queryResponse);
|
|
2121
|
-
|
|
2122
|
-
const cell = queried.find(c => c.id === created.id);
|
|
2123
|
-
expect(cell).toBeDefined();
|
|
2124
|
-
expect(cell!.closed_at).toBeDefined();
|
|
2125
|
-
expect(cell!.closed_at).toMatch(/^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/);
|
|
2126
|
-
expect(cell!.closed_at).not.toBe("Invalid Date");
|
|
2127
|
-
|
|
2128
|
-
// Verify closed_at is valid
|
|
2129
|
-
const closedDate = new Date(cell!.closed_at!);
|
|
2130
|
-
expect(closedDate.getTime()).toBeGreaterThan(0);
|
|
2131
|
-
} finally {
|
|
2132
|
-
setHiveWorkingDirectory(originalDir);
|
|
2133
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
2134
|
-
}
|
|
2135
|
-
});
|
|
2136
|
-
});
|
|
2137
|
-
|
|
2138
|
-
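  // --- Editorial sketch, not part of the package ------------------------
  // The conversion the bigint tests guard against: PGLite can hand back
  // epoch-millisecond timestamps as bigint, which neither direct Date
  // construction (throws) nor a string round-trip ("Invalid Date") handles.
  // Narrowing to Number first yields a valid ISO string.
  function sketchToIsoString(value: bigint | number | string | Date): string {
    if (typeof value === "bigint") return new Date(Number(value)).toISOString();
    return new Date(value).toISOString();
  }
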
describe("parent_id filter", () => {
|
|
2139
|
-
it("hive_query filters by parent_id to get epic children", async () => {
|
|
2140
|
-
const { rmSync } = await import("node:fs");
|
|
2141
|
-
const tempProject = join(tmpdir(), `hive-parent-filter-${Date.now()}`);
|
|
2142
|
-
const originalDir = getHiveWorkingDirectory();
|
|
2143
|
-
setHiveWorkingDirectory(tempProject);
|
|
2144
|
-
|
|
2145
|
-
try {
|
|
2146
|
-
// Create an epic
|
|
2147
|
-
const epicResponse = await hive_create.execute(
|
|
2148
|
-
{ title: "Epic Task", type: "epic", priority: 1 },
|
|
2149
|
-
mockContext
|
|
2150
|
-
);
|
|
2151
|
-
const epic = parseResponse<Cell>(epicResponse);
|
|
2152
|
-
|
|
2153
|
-
// Create children of the epic
|
|
2154
|
-
const child1Response = await hive_create.execute(
|
|
2155
|
-
{ title: "Subtask 1", type: "task", parent_id: epic.id },
|
|
2156
|
-
mockContext
|
|
2157
|
-
);
|
|
2158
|
-
const child1 = parseResponse<Cell>(child1Response);
|
|
2159
|
-
|
|
2160
|
-
const child2Response = await hive_create.execute(
|
|
2161
|
-
{ title: "Subtask 2", type: "task", parent_id: epic.id },
|
|
2162
|
-
mockContext
|
|
2163
|
-
);
|
|
2164
|
-
const child2 = parseResponse<Cell>(child2Response);
|
|
2165
|
-
|
|
2166
|
-
// Create unrelated cell
|
|
2167
|
-
await hive_create.execute(
|
|
2168
|
-
{ title: "Unrelated Task", type: "task" },
|
|
2169
|
-
mockContext
|
|
2170
|
-
);
|
|
2171
|
-
|
|
2172
|
-
// Query by parent_id
|
|
2173
|
-
const queryResponse = await hive_query.execute(
|
|
2174
|
-
{ parent_id: epic.id },
|
|
2175
|
-
mockContext
|
|
2176
|
-
);
|
|
2177
|
-
const children = parseResponse<Cell[]>(queryResponse);
|
|
2178
|
-
|
|
2179
|
-
// Should only return the 2 children
|
|
2180
|
-
expect(children).toHaveLength(2);
|
|
2181
|
-
expect(children.map(c => c.id)).toContain(child1.id);
|
|
2182
|
-
expect(children.map(c => c.id)).toContain(child2.id);
|
|
2183
|
-
expect(children.every(c => c.parent_id === epic.id)).toBe(true);
|
|
2184
|
-
} finally {
|
|
2185
|
-
setHiveWorkingDirectory(originalDir);
|
|
2186
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
2187
|
-
}
|
|
2188
|
-
});
|
|
2189
|
-
|
|
2190
|
-
it("hive_cells filters by parent_id to get epic children", async () => {
|
|
2191
|
-
const { rmSync } = await import("node:fs");
|
|
2192
|
-
const tempProject = join(tmpdir(), `hive-cells-parent-filter-${Date.now()}`);
|
|
2193
|
-
const originalDir = getHiveWorkingDirectory();
|
|
2194
|
-
setHiveWorkingDirectory(tempProject);
|
|
2195
|
-
|
|
2196
|
-
try {
|
|
2197
|
-
// Create an epic
|
|
2198
|
-
const epicResponse = await hive_create.execute(
|
|
2199
|
-
{ title: "Epic with Children", type: "epic", priority: 1 },
|
|
2200
|
-
mockContext
|
|
2201
|
-
);
|
|
2202
|
-
const epic = parseResponse<Cell>(epicResponse);
|
|
2203
|
-
|
|
2204
|
-
// Create children
|
|
2205
|
-
const child1Response = await hive_create.execute(
|
|
2206
|
-
{ title: "Child A", type: "task", parent_id: epic.id },
|
|
2207
|
-
mockContext
|
|
2208
|
-
);
|
|
2209
|
-
const child1 = parseResponse<Cell>(child1Response);
|
|
2210
|
-
|
|
2211
|
-
const child2Response = await hive_create.execute(
|
|
2212
|
-
{ title: "Child B", type: "bug", parent_id: epic.id },
|
|
2213
|
-
mockContext
|
|
2214
|
-
);
|
|
2215
|
-
const child2 = parseResponse<Cell>(child2Response);
|
|
2216
|
-
|
|
2217
|
-
// Create unrelated cells
|
|
2218
|
-
await hive_create.execute(
|
|
2219
|
-
{ title: "Orphan Task", type: "task" },
|
|
2220
|
-
mockContext
|
|
2221
|
-
);
|
|
2222
|
-
|
|
2223
|
-
// Query using hive_cells with parent_id
|
|
2224
|
-
const cellsResponse = await hive_cells.execute(
|
|
2225
|
-
{ parent_id: epic.id },
|
|
2226
|
-
mockContext
|
|
2227
|
-
);
|
|
2228
|
-
const cells = parseResponse<Cell[]>(cellsResponse);
|
|
2229
|
-
|
|
2230
|
-
// Should only return the 2 children
|
|
2231
|
-
expect(cells).toHaveLength(2);
|
|
2232
|
-
expect(cells.map(c => c.id)).toContain(child1.id);
|
|
2233
|
-
expect(cells.map(c => c.id)).toContain(child2.id);
|
|
2234
|
-
expect(cells.every(c => c.parent_id === epic.id)).toBe(true);
|
|
2235
|
-
} finally {
|
|
2236
|
-
setHiveWorkingDirectory(originalDir);
|
|
2237
|
-
rmSync(tempProject, { recursive: true, force: true });
|
|
2238
|
-
}
|
|
2239
|
-
});
|
|
2240
|
-
});
|
|
2241
|
-
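  // --- Editorial sketch, not part of the package ------------------------
  // At its core, the parent_id filter both tests above exercise is a
  // parameterized WHERE clause; the table and column names here are guesses
  // from the test data, not the package's actual schema.
  function sketchChildrenQuery(parentId: string): { sql: string; params: string[] } {
    return {
      sql: "SELECT * FROM cells WHERE parent_id = $1 ORDER BY created_at",
      params: [parentId],
    };
  }
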
});