opencode-swarm-plugin 0.31.6 → 0.32.0

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
Files changed (48)
  1. package/.turbo/turbo-build.log +10 -9
  2. package/.turbo/turbo-test.log +319 -317
  3. package/CHANGELOG.md +158 -0
  4. package/README.md +7 -4
  5. package/bin/swarm.ts +388 -87
  6. package/dist/compaction-hook.d.ts +1 -1
  7. package/dist/compaction-hook.d.ts.map +1 -1
  8. package/dist/hive.d.ts.map +1 -1
  9. package/dist/index.d.ts +0 -2
  10. package/dist/index.d.ts.map +1 -1
  11. package/dist/index.js +123 -134
  12. package/dist/memory-tools.d.ts.map +1 -1
  13. package/dist/memory.d.ts +5 -4
  14. package/dist/memory.d.ts.map +1 -1
  15. package/dist/plugin.js +118 -131
  16. package/dist/swarm-orchestrate.d.ts +29 -5
  17. package/dist/swarm-orchestrate.d.ts.map +1 -1
  18. package/dist/swarm-prompts.d.ts +7 -0
  19. package/dist/swarm-prompts.d.ts.map +1 -1
  20. package/dist/swarm.d.ts +0 -2
  21. package/dist/swarm.d.ts.map +1 -1
  22. package/evals/lib/{data-loader.test.ts → data-loader.evalite-test.ts} +7 -6
  23. package/evals/lib/data-loader.ts +1 -1
  24. package/evals/scorers/{outcome-scorers.test.ts → outcome-scorers.evalite-test.ts} +1 -1
  25. package/examples/plugin-wrapper-template.ts +19 -4
  26. package/global-skills/swarm-coordination/SKILL.md +118 -8
  27. package/package.json +2 -2
  28. package/src/compaction-hook.ts +5 -3
  29. package/src/hive.integration.test.ts +83 -1
  30. package/src/hive.ts +37 -12
  31. package/src/mandate-storage.integration.test.ts +601 -0
  32. package/src/memory-tools.ts +6 -4
  33. package/src/memory.integration.test.ts +117 -49
  34. package/src/memory.test.ts +41 -217
  35. package/src/memory.ts +12 -8
  36. package/src/repo-crawl.integration.test.ts +441 -0
  37. package/src/skills.integration.test.ts +1056 -0
  38. package/src/structured.integration.test.ts +817 -0
  39. package/src/swarm-deferred.integration.test.ts +157 -0
  40. package/src/swarm-deferred.test.ts +38 -0
  41. package/src/swarm-mail.integration.test.ts +15 -19
  42. package/src/swarm-orchestrate.integration.test.ts +282 -0
  43. package/src/swarm-orchestrate.ts +96 -201
  44. package/src/swarm-prompts.test.ts +92 -0
  45. package/src/swarm-prompts.ts +69 -0
  46. package/src/swarm-review.integration.test.ts +290 -0
  47. package/src/swarm.integration.test.ts +23 -20
  48. package/src/tool-adapter.integration.test.ts +1221 -0
@@ -0,0 +1,1221 @@
+/**
+ * Tool Adapter Wiring Integration Tests
+ *
+ * **THE BUG WE'RE PREVENTING:**
+ * ```
+ * Error: [streams/store] dbOverride parameter is required for this function.
+ * PGlite getDatabase() has been removed.
+ * ```
+ *
+ * This happened because:
+ * 1. Store functions required explicit `dbOverride` parameter
+ * 2. Plugin tools called store functions without passing the adapter
+ * 3. No integration test exercised the full tool → store → DB path
+ *
+ * **THESE TESTS VERIFY:**
+ * - Tools call store functions correctly (with adapter passed through)
+ * - No "dbOverride required" errors occur
+ * - Full end-to-end path works: tool.execute() → store → DB
+ * - Tests would have FAILED before the fix
+ *
+ * Run with: bun test src/tool-adapter.integration.test.ts
+ */
+
+import { describe, it, expect, beforeEach, afterEach } from "vitest";
+import { mkdir, rm } from "node:fs/promises";
+import { join } from "node:path";
+import { tmpdir } from "node:os";
+import {
+  clearAdapterCache,
+  createInMemorySwarmMailLibSQL,
+  type SwarmMailAdapter,
+} from "swarm-mail";
+
+// Import tools to test
+import {
+  clearSessionState,
+  swarmmail_inbox,
+  swarmmail_init,
+  swarmmail_read_message,
+  swarmmail_release,
+  swarmmail_reserve,
+  swarmmail_send,
+} from "./swarm-mail";
+
+import {
+  getHiveWorkingDirectory,
+  hive_close,
+  hive_create,
+  hive_create_epic,
+  hive_query,
+  hive_ready,
+  hive_start,
+  hive_update,
+  setHiveWorkingDirectory,
+} from "./hive";
+
+import {
+  swarm_broadcast,
+  swarm_checkpoint,
+  swarm_progress,
+  swarm_status,
+} from "./swarm-orchestrate";
+
+import {
+  semantic_memory_store,
+  semantic_memory_find,
+} from "./memory-tools";
+
+import type { Bead, EpicCreateResult } from "./schemas";
+
+// ============================================================================
+// Test Configuration
+// ============================================================================
+
+/** Generate unique test database path per test run */
+function testDbPath(prefix = "tool-adapter"): string {
+  return join(tmpdir(), `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`);
+}
+
+/** Track paths created during test for cleanup */
+let testPaths: string[] = [];
+
+function trackPath(path: string): string {
+  testPaths.push(path);
+  return path;
+}
+
+let TEST_DB_PATH: string;
+let swarmMail: SwarmMailAdapter;
+
+/**
+ * Mock tool context
+ */
+interface MockToolContext {
+  sessionID: string;
+}
+
+/**
+ * Generate a unique test context to avoid state collisions between tests
+ */
+function createTestContext(): MockToolContext {
+  const id = `test-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+  return { sessionID: id };
+}
+
+/**
+ * Helper to execute tool and parse JSON response
+ */
+async function executeTool<T>(
+  tool: { execute: (args: unknown, ctx: unknown) => Promise<string> },
+  args: unknown,
+  ctx: MockToolContext,
+): Promise<T> {
+  const result = await tool.execute(args, ctx);
+  return JSON.parse(result) as T;
+}
+
+/**
+ * Helper to execute tool and return raw string
+ */
+async function executeToolRaw(
+  tool: { execute: (args: unknown, ctx: unknown) => Promise<string> },
+  args: unknown,
+  ctx: MockToolContext,
+): Promise<string> {
+  return await tool.execute(args, ctx);
+}
+
+// ============================================================================
+// Test Lifecycle Hooks
+// ============================================================================
+
+beforeEach(async () => {
+  testPaths = [];
+  TEST_DB_PATH = trackPath(testDbPath());
+
+  // Create directory for test database (tools will create DB here)
+  await mkdir(TEST_DB_PATH, { recursive: true });
+
+  // Clear adapter cache to ensure clean state
+  clearAdapterCache();
+
+  // Don't create SwarmMail here - let tools create it
+  // This ensures tests use the SAME DB adapter as tools
+  swarmMail = null!;
+});
+
+afterEach(async () => {
+  // Close SwarmMail adapter if created
+  if (swarmMail) {
+    await swarmMail.close();
+  }
+
+  // Clear all cached adapters
+  clearAdapterCache();
+
+  // Clean up all test database directories
+  for (const path of testPaths) {
+    try {
+      await rm(path, { recursive: true, force: true });
+    } catch {
+      // Ignore errors during cleanup
+    }
+  }
+  testPaths = [];
+});
+
+// ============================================================================
+// SWARM MAIL TOOLS - Adapter Wiring Tests
+// ============================================================================
+
+describe("swarmmail tools adapter wiring", () => {
+  /**
+   * TEST: swarmmail_init creates adapter and registers agent
+   *
+   * This was the FIRST thing that broke when DB adapter wiring was wrong.
+   * The error manifested as:
+   * "Error: [streams/store] dbOverride parameter is required"
+   *
+   * KEY: We're NOT testing DB state - we're testing NO ERRORS.
+   * If adapter wiring is broken, this throws before returning.
+   */
+  it("swarmmail_init works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+
+    const result = await executeTool<{
+      agent_name: string;
+      project_key: string;
+      message: string;
+    }>(swarmmail_init, { project_path: TEST_DB_PATH }, ctx);
+
+    // Should succeed (no "dbOverride required" error)
+    expect(result.agent_name).toBeTruthy();
+    expect(result.project_key).toBe(TEST_DB_PATH);
+    expect(result.message).toContain(result.agent_name);
+
+    // If we got here, adapter wiring works!
+    // (The bug would have thrown "dbOverride required" before returning)
+
+    clearSessionState(ctx.sessionID);
+  });
+
+  /**
+   * TEST: swarmmail_send works after init
+   *
+   * Full flow: init → send → verify no errors
+   * KEY: We're testing adapter wiring, not DB state.
+   * The bug manifested as "dbOverride required" error during send.
+   */
+  it("swarmmail_send works without explicit dbOverride", async () => {
+    const senderCtx = createTestContext();
+    const recipientCtx = createTestContext();
+
+    // Initialize both agents
+    await executeTool<{ agent_name: string }>(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "Sender" },
+      senderCtx,
+    );
+
+    const recipient = await executeTool<{ agent_name: string }>(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "Recipient" },
+      recipientCtx,
+    );
+
+    // Send message (this calls store functions)
+    const result = await executeTool<{
+      success: boolean;
+      message_id: number;
+      thread_id?: string;
+      recipient_count: number;
+    }>(
+      swarmmail_send,
+      {
+        to: [recipient.agent_name],
+        subject: "Test message",
+        body: "This is a test message body",
+        thread_id: "bd-test-123",
+        importance: "normal",
+      },
+      senderCtx,
+    );
+
+    // Should succeed (no "dbOverride required" error)
+    expect(result.success).toBe(true);
+    expect(result.message_id).toBeGreaterThan(0);
+    expect(result.thread_id).toBe("bd-test-123");
+
+    // If we got here, adapter wiring works!
+
+    clearSessionState(senderCtx.sessionID);
+    clearSessionState(recipientCtx.sessionID);
+  });
+
+  /**
+   * TEST: swarmmail_inbox returns messages
+   *
+   * Full flow: init → send → inbox → verify
+   * Tests that message queries go through DB adapter correctly
+   */
+  it("swarmmail_inbox works without explicit dbOverride", async () => {
+    const senderCtx = createTestContext();
+    const recipientCtx = createTestContext();
+
+    await executeTool<{ agent_name: string }>(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "InboxSender" },
+      senderCtx,
+    );
+
+    const recipient = await executeTool<{ agent_name: string }>(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "InboxRecipient" },
+      recipientCtx,
+    );
+
+    // Send a message
+    await executeTool(
+      swarmmail_send,
+      {
+        to: [recipient.agent_name],
+        subject: "Inbox test message",
+        body: "This body should NOT be included by default",
+      },
+      senderCtx,
+    );
+
+    // Fetch inbox (this calls store functions to query messages)
+    const result = await executeTool<{
+      messages: Array<{
+        id: number;
+        from: string;
+        subject: string;
+        body?: string;
+      }>;
+      total: number;
+      note: string;
+    }>(swarmmail_inbox, {}, recipientCtx);
+
+    // Should succeed
+    expect(result.messages.length).toBeGreaterThan(0);
+    const testMsg = result.messages.find(
+      (m) => m.subject === "Inbox test message",
+    );
+    expect(testMsg).toBeDefined();
+    expect(testMsg?.from).toBe("InboxSender");
+    // Body should NOT be included (context-safe)
+    expect(testMsg?.body).toBeUndefined();
+
+    clearSessionState(senderCtx.sessionID);
+    clearSessionState(recipientCtx.sessionID);
+  });
+
+  /**
+   * TEST: swarmmail_read_message returns full message
+   *
+   * Tests that fetching individual message bodies works through DB adapter
+   */
+  it("swarmmail_read_message works without explicit dbOverride", async () => {
+    const senderCtx = createTestContext();
+    const recipientCtx = createTestContext();
+
+    await executeTool<{ agent_name: string }>(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "ReadSender" },
+      senderCtx,
+    );
+
+    const recipient = await executeTool<{ agent_name: string }>(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "ReadRecipient" },
+      recipientCtx,
+    );
+
+    // Send a message
+    const sent = await executeTool<{ message_id: number }>(
+      swarmmail_send,
+      {
+        to: [recipient.agent_name],
+        subject: "Read test message",
+        body: "This message body should be returned",
+      },
+      senderCtx,
+    );
+
+    // Read the message (this calls store functions)
+    const result = await executeTool<{
+      id: number;
+      from: string;
+      subject: string;
+      body: string;
+    }>(swarmmail_read_message, { message_id: sent.message_id }, recipientCtx);
+
+    // Should succeed
+    expect(result.id).toBe(sent.message_id);
+    expect(result.from).toBe("ReadSender");
+    expect(result.subject).toBe("Read test message");
+    expect(result.body).toBe("This message body should be returned");
+
+    clearSessionState(senderCtx.sessionID);
+    clearSessionState(recipientCtx.sessionID);
+  });
+
+  /**
+   * TEST: swarmmail_reserve creates reservations
+   *
+   * Full flow: init → reserve → verify no errors
+   * KEY: We're testing adapter wiring, not DB state.
+   */
+  it("swarmmail_reserve works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+
+    await executeTool(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "ReserveAgent" },
+      ctx,
+    );
+
+    // Reserve files (this calls store functions)
+    const result = await executeTool<{
+      granted: Array<{
+        id: number;
+        path_pattern: string;
+        exclusive: boolean;
+      }>;
+      conflicts?: Array<{ path: string; holders: string[] }>;
+    }>(
+      swarmmail_reserve,
+      {
+        paths: ["src/auth/**", "src/config.ts"],
+        reason: "bd-test-123: Working on auth",
+        exclusive: true,
+        ttl_seconds: 3600,
+      },
+      ctx,
+    );
+
+    // Should succeed (no "dbOverride required" error)
+    expect(result.granted.length).toBe(2);
+    expect(result.conflicts).toBeUndefined();
+    expect(result.granted[0].exclusive).toBe(true);
+
+    // If we got here, adapter wiring works!
+
+    clearSessionState(ctx.sessionID);
+  });
+
+  /**
+   * TEST: swarmmail_release releases reservations
+   *
+   * Tests that releasing file reservations works through DB adapter
+   */
+  it("swarmmail_release works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+
+    await executeTool(
+      swarmmail_init,
+      { project_path: TEST_DB_PATH, agent_name: "ReleaseAgent" },
+      ctx,
+    );
+
+    // Create reservations
+    await executeTool(
+      swarmmail_reserve,
+      {
+        paths: ["src/release-test-1.ts", "src/release-test-2.ts"],
+        exclusive: true,
+      },
+      ctx,
+    );
+
+    // Release all (this calls store functions)
+    const result = await executeTool<{
+      released: number;
+      released_at: string;
+    }>(swarmmail_release, {}, ctx);
+
+    // Should succeed
+    expect(result.released).toBe(2);
+    expect(result.released_at).toBeTruthy();
+
+    clearSessionState(ctx.sessionID);
+  });
+});
+
+// ============================================================================
+// HIVE TOOLS - Adapter Wiring Tests
+// ============================================================================
+
+describe("hive tools adapter wiring", () => {
+  const createdCellIds: string[] = [];
+
+  afterEach(async () => {
+    // Close all created cells
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    for (const id of createdCellIds) {
+      try {
+        await hive_close.execute({ id, reason: "Test cleanup" }, createTestContext());
+      } catch {
+        // Ignore cleanup errors
+      }
+    }
+    createdCellIds.length = 0;
+
+    setHiveWorkingDirectory(originalDir);
+  });
+
+  /**
+   * TEST: hive_create works end-to-end
+   *
+   * Create cell, verify in DB
+   * Tests that cell creation goes through DB adapter correctly
+   */
+  it("hive_create works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      const result = await executeTool<Bead>(
+        hive_create,
+        { title: "Test cell minimal" },
+        ctx,
+      );
+
+      createdCellIds.push(result.id);
+
+      // Should succeed
+      expect(result.title).toBe("Test cell minimal");
+      expect(result.status).toBe("open");
+      expect(result.issue_type).toBe("task");
+      expect(result.id).toMatch(/^[a-z0-9-]+-[a-z0-9]+$/);
+
+      // Get the Hive adapter that tools are using
+      const { getHiveAdapter } = await import("./hive");
+      const hiveAdapter = await getHiveAdapter(TEST_DB_PATH);
+
+      // Verify cell was created via adapter
+      const cell = await hiveAdapter.getCell(TEST_DB_PATH, result.id);
+      expect(cell).toBeDefined();
+      expect(cell!.title).toBe("Test cell minimal");
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: hive_query returns cells
+   *
+   * Create cells, query, verify results
+   * Tests that cell queries go through DB adapter correctly
+   */
+  it("hive_query works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create a test cell
+      const created = await executeTool<Bead>(
+        hive_create,
+        { title: "Query test cell", type: "task" },
+        ctx,
+      );
+      createdCellIds.push(created.id);
+
+      // Query cells (this calls store functions)
+      const result = await executeTool<Bead[]>(
+        hive_query,
+        { status: "open" },
+        ctx,
+      );
+
+      // Should succeed
+      expect(Array.isArray(result)).toBe(true);
+      expect(result.length).toBeGreaterThan(0);
+      expect(result.every((b) => b.status === "open")).toBe(true);
+
+      // Find our test cell
+      const found = result.find((b) => b.id === created.id);
+      expect(found).toBeDefined();
+      expect(found?.title).toBe("Query test cell");
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: hive_update updates cell
+   *
+   * Tests that cell updates go through DB adapter correctly
+   */
+  it("hive_update works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create a test cell
+      const created = await executeTool<Bead>(
+        hive_create,
+        { title: "Update test cell", description: "Original description" },
+        ctx,
+      );
+      createdCellIds.push(created.id);
+
+      // Update cell (this calls store functions)
+      const result = await executeTool<Bead>(
+        hive_update,
+        { id: created.id, description: "Updated description" },
+        ctx,
+      );
+
+      // Should succeed
+      expect(result.description).toContain("Updated description");
+
+      // Verify update via adapter
+      const { getHiveAdapter } = await import("./hive");
+      const hiveAdapter = await getHiveAdapter(TEST_DB_PATH);
+      const cell = await hiveAdapter.getCell(TEST_DB_PATH, created.id);
+      expect(cell!.description).toContain("Updated description");
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: hive_close closes cell
+   *
+   * Tests that closing cells goes through DB adapter correctly
+   */
+  it("hive_close works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create a test cell
+      const created = await executeTool<Bead>(
+        hive_create,
+        { title: "Close test cell" },
+        ctx,
+      );
+
+      // Close cell (this calls store functions)
+      const result = await executeToolRaw(
+        hive_close,
+        { id: created.id, reason: "Task completed" },
+        ctx,
+      );
+
+      // Should succeed
+      expect(result).toContain("Closed");
+      expect(result).toContain(created.id);
+
+      // Verify cell is closed via adapter
+      const { getHiveAdapter } = await import("./hive");
+      const hiveAdapter = await getHiveAdapter(TEST_DB_PATH);
+      const cell = await hiveAdapter.getCell(TEST_DB_PATH, created.id);
+      expect(cell!.status).toBe("closed");
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: hive_start marks cell as in_progress
+   *
+   * Tests that starting cells goes through DB adapter correctly
+   */
+  it("hive_start works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create a test cell
+      const created = await executeTool<Bead>(
+        hive_create,
+        { title: "Start test cell" },
+        ctx,
+      );
+      createdCellIds.push(created.id);
+
+      expect(created.status).toBe("open");
+
+      // Start cell (this calls store functions)
+      const result = await executeToolRaw(
+        hive_start,
+        { id: created.id },
+        ctx,
+      );
+
+      // Should succeed
+      expect(result).toContain("Started");
+      expect(result).toContain(created.id);
+
+      // Verify status changed via adapter
+      const { getHiveAdapter } = await import("./hive");
+      const hiveAdapter = await getHiveAdapter(TEST_DB_PATH);
+      const cell = await hiveAdapter.getCell(TEST_DB_PATH, created.id);
+      expect(cell!.status).toBe("in_progress");
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: hive_ready returns next unblocked cell
+   *
+   * Tests that querying ready cells goes through DB adapter correctly
+   */
+  it("hive_ready works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create a high priority cell
+      const created = await executeTool<Bead>(
+        hive_create,
+        { title: "Ready test cell", priority: 0 },
+        ctx,
+      );
+      createdCellIds.push(created.id);
+
+      // Get ready cell (this calls store functions)
+      const result = await executeToolRaw(hive_ready, {}, ctx);
+
+      // Should succeed (either returns a cell or "No ready beads")
+      if (result !== "No ready beads") {
+        const cell = JSON.parse(result) as Bead;
+        expect(cell.id).toBeDefined();
+        expect(cell.status).not.toBe("closed");
+        expect(cell.status).not.toBe("blocked");
+      } else {
+        expect(result).toBe("No ready beads");
+      }
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: hive_create_epic creates epic + subtasks atomically
+   *
+   * Tests that epic creation goes through DB adapter correctly
+   */
+  it("hive_create_epic works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create epic (this calls store functions)
+      const result = await executeTool<EpicCreateResult>(
+        hive_create_epic,
+        {
+          epic_title: "Integration test epic",
+          epic_description: "Testing epic creation",
+          subtasks: [
+            { title: "Subtask 1", priority: 2 },
+            { title: "Subtask 2", priority: 3 },
+          ],
+        },
+        ctx,
+      );
+
+      createdCellIds.push(result.epic.id);
+      for (const subtask of result.subtasks) {
+        createdCellIds.push(subtask.id);
+      }
+
+      // Should succeed
+      expect(result.success).toBe(true);
+      expect(result.epic.title).toBe("Integration test epic");
+      expect(result.epic.issue_type).toBe("epic");
+      expect(result.subtasks).toHaveLength(2);
+
+      // Verify epic and subtasks via adapter
+      const { getHiveAdapter } = await import("./hive");
+      const hiveAdapter = await getHiveAdapter(TEST_DB_PATH);
+
+      const epic = await hiveAdapter.getCell(TEST_DB_PATH, result.epic.id);
+      expect(epic).toBeDefined();
+      expect(epic!.title).toBe("Integration test epic");
+
+      for (const subtask of result.subtasks) {
+        const sub = await hiveAdapter.getCell(TEST_DB_PATH, subtask.id);
+        expect(sub).toBeDefined();
+        expect(sub!.parent_id).toBe(result.epic.id);
+      }
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+});
+
+// ============================================================================
+// SWARM ORCHESTRATE TOOLS - Adapter Wiring Tests
+// ============================================================================
+
+describe("swarm tools adapter wiring", () => {
+  /**
+   * TEST: swarm_progress works without explicit dbOverride
+   *
+   * Tests that progress reporting goes through DB adapter correctly
+   */
+  it("swarm_progress works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create a test cell first
+      const created = await executeTool<Bead>(
+        hive_create,
+        { title: "Progress test cell" },
+        ctx,
+      );
+
+      // Initialize swarm mail
+      await executeTool(
+        swarmmail_init,
+        { project_path: TEST_DB_PATH, agent_name: "ProgressAgent" },
+        ctx,
+      );
+
+      // Report progress (this calls store functions)
+      const result = await executeToolRaw(
+        swarm_progress,
+        {
+          project_key: TEST_DB_PATH,
+          agent_name: "ProgressAgent",
+          bead_id: created.id,
+          status: "in_progress",
+          message: "50% complete",
+          progress_percent: 50,
+        },
+        ctx,
+      );
+
+      // Should succeed (no "dbOverride required" error)
+      expect(result).toContain("Progress");
+      expect(result).toContain("50%");
+
+      // Just verify no error thrown (progress is logged, not necessarily stored in events table)
+      // The key test is that swarm_progress didn't throw "dbOverride required"
+
+      clearSessionState(ctx.sessionID);
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: swarm_status queries work without explicit dbOverride
+   *
+   * Tests that status queries go through DB adapter correctly
+   */
+  it("swarm_status works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create an epic with subtasks
+      const epic = await executeTool<EpicCreateResult>(
+        hive_create_epic,
+        {
+          epic_title: "Status test epic",
+          subtasks: [
+            { title: "Status subtask 1", priority: 2 },
+            { title: "Status subtask 2", priority: 2 },
+          ],
+        },
+        ctx,
+      );
+
+      // Get status (this calls store functions)
+      const result = await executeTool<{
+        epic_id: string;
+        total_agents: number;
+        agents: Array<{ bead_id: string; status: string }>;
+        progress_percent: number;
+      }>(
+        swarm_status,
+        {
+          project_key: TEST_DB_PATH,
+          epic_id: epic.epic.id,
+        },
+        ctx,
+      );
+
+      // Should succeed (no "dbOverride required" error)
+      expect(result.epic_id).toBe(epic.epic.id);
+      expect(result.total_agents).toBe(2);
+      expect(result.agents).toHaveLength(2);
+      expect(result.progress_percent).toBeGreaterThanOrEqual(0);
+
+      // If we got here, adapter wiring works!
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: swarm_broadcast works without explicit dbOverride
+   *
+   * Tests that broadcasting messages goes through DB adapter correctly
+   */
+  it("swarm_broadcast works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create an epic with subtasks
+      const epic = await executeTool<EpicCreateResult>(
+        hive_create_epic,
+        {
+          epic_title: "Broadcast test epic",
+          subtasks: [
+            { title: "Broadcast subtask 1", priority: 2 },
+          ],
+        },
+        ctx,
+      );
+
+      // Initialize swarm mail
+      await executeTool(
+        swarmmail_init,
+        { project_path: TEST_DB_PATH, agent_name: "BroadcastAgent" },
+        ctx,
+      );
+
+      // Broadcast message (this calls store functions)
+      const result = await executeToolRaw(
+        swarm_broadcast,
+        {
+          project_path: TEST_DB_PATH,
+          agent_name: "BroadcastAgent",
+          epic_id: epic.epic.id,
+          message: "Testing broadcast functionality",
+          importance: "info",
+          files_affected: ["src/test.ts"],
+        },
+        ctx,
+      );
+
+      // Should succeed (no "dbOverride required" error)
+      expect(result).toContain("Broadcast");
+
+      // If we got here, adapter wiring works!
+      clearSessionState(ctx.sessionID);
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+
+  /**
+   * TEST: swarm_checkpoint works without explicit dbOverride
+   *
+   * Tests that checkpoint creation goes through DB adapter correctly
+   * NOTE: May fail with "no such table: swarm_contexts" (expected - test DB doesn't have that table)
+   * but should NOT fail with "dbOverride required" (that's the bug we're preventing)
+   */
+  it("swarm_checkpoint works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // Create an epic with subtask
+      const epic = await executeTool<EpicCreateResult>(
+        hive_create_epic,
+        {
+          epic_title: "Checkpoint test epic",
+          subtasks: [
+            { title: "Checkpoint subtask", priority: 2 },
+          ],
+        },
+        ctx,
+      );
+
+      // Initialize swarm mail
+      await executeTool(
+        swarmmail_init,
+        { project_path: TEST_DB_PATH, agent_name: "CheckpointAgent" },
+        ctx,
+      );
+
+      // Create checkpoint (this calls store functions)
+      const result = await executeToolRaw(
+        swarm_checkpoint,
+        {
+          project_key: TEST_DB_PATH,
+          agent_name: "CheckpointAgent",
+          bead_id: epic.subtasks[0].id,
+          epic_id: epic.epic.id,
+          files_modified: ["src/test.ts"],
+          progress_percent: 50,
+          directives: {
+            shared_context: "Testing checkpoint",
+          },
+        },
+        ctx,
+      );
+
+      // Key test: did NOT throw "dbOverride required" error
+      // Result may contain error message about missing table (that's OK - test DB doesn't have swarm_contexts)
+      // But it should NOT contain "dbOverride parameter is required"
+      expect(result).not.toContain("dbOverride parameter is required");
+      expect(result).not.toContain("dbOverride required");
+
+      // If we got here, adapter wiring works!
+      // (Even if checkpoint failed for OTHER reasons like missing table)
+      clearSessionState(ctx.sessionID);
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+});
+
+// ============================================================================
+// MEMORY TOOLS - Adapter Wiring Tests
+// ============================================================================
+
+describe("memory tools adapter wiring", () => {
+  /**
+   * TEST: semantic_memory_store works without explicit dbOverride
+   *
+   * Tests that storing memories goes through DB adapter correctly
+   */
+  it("semantic_memory_store works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+
+    // Store a memory (this calls store functions)
+    const result = await executeTool<{ id: string }>(
+      semantic_memory_store,
+      {
+        information: "Test memory for adapter wiring verification",
+        tags: "test,memory",
+      },
+      ctx,
+    );
+
+    // Should succeed (no "dbOverride required" error)
+    expect(result.id).toBeTruthy();
+    expect(result.id).toMatch(/^mem_/);
+
+    // If we got here, adapter wiring works!
+  });
+
+  /**
+   * TEST: semantic_memory_find works without explicit dbOverride
+   *
+   * Tests that finding memories goes through DB adapter correctly
+   */
+  it("semantic_memory_find works without explicit dbOverride", async () => {
+    const ctx = createTestContext();
+
+    // Store a memory first
+    const stored = await executeTool<{ id: string }>(
+      semantic_memory_store,
+      {
+        information: "OAuth refresh tokens need 5min buffer before expiry",
+        metadata: "auth,tokens,oauth",
+        tags: "auth,integration-test",
+      },
+      ctx,
+    );
+
+    expect(stored.id).toBeTruthy();
+
+    // Find the memory (this calls store functions)
+    const result = await executeToolRaw(
+      semantic_memory_find,
+      {
+        query: "OAuth tokens buffer",
+        limit: 5,
+      },
+      ctx,
+    );
+
+    // Should succeed (no "dbOverride required" error)
+    expect(result).toContain("OAuth");
+    expect(result).toContain("buffer");
+
+    // If we got here, adapter wiring works!
+  });
+});
+
+// ============================================================================
+// SMOKE TEST - Full Workflow Integration
+// ============================================================================
+
+describe("smoke test - all tools in sequence", () => {
+  /**
+   * CRITICAL INTEGRATION TEST
+   *
+   * This test runs a COMPLETE workflow using multiple tools in sequence.
+   * If ANY tool has broken adapter wiring, this test fails.
+   *
+   * This catches interaction bugs that unit tests miss:
+   * - Adapter lifecycle issues
+   * - State corruption between tool calls
+   * - Context loss across layers
+   * - Resource cleanup problems
+   *
+   * The workflow simulates a real swarm agent task:
+   * 1. Initialize agent
+   * 2. Create work item
+   * 3. Reserve files
+   * 4. Report progress
+   * 5. Store learning
+   * 6. Query learnings
+   * 7. Complete and communicate
+   * 8. Close work item
+   * 9. Release resources
+   */
+  it("runs full workflow without adapter errors", async () => {
+    const ctx = createTestContext();
+    const originalDir = getHiveWorkingDirectory();
+    setHiveWorkingDirectory(TEST_DB_PATH);
+
+    try {
+      // 1. Initialize swarm mail
+      const init = await executeTool<{ agent_name: string; project_key: string }>(
+        swarmmail_init,
+        { project_path: TEST_DB_PATH, agent_name: "SmokeTestAgent" },
+        ctx,
+      );
+      expect(init.agent_name).toBe("SmokeTestAgent");
+
+      // 2. Create a cell
+      const cell = await executeTool<Bead>(
+        hive_create,
+        { title: "Smoke test workflow cell", description: "Full integration test" },
+        ctx,
+      );
+      expect(cell.id).toBeTruthy();
+
+      // 3. Reserve files
+      const reserve = await executeTool<{ granted: Array<{ id: number }> }>(
+        swarmmail_reserve,
+        {
+          paths: ["src/smoke-test-1.ts", "src/smoke-test-2.ts"],
+          reason: `${cell.id}: Smoke test workflow`,
+          exclusive: true,
+        },
+        ctx,
+      );
+      expect(reserve.granted.length).toBe(2);
+
+      // 4. Report progress
+      const progress = await executeToolRaw(
+        swarm_progress,
+        {
+          project_key: TEST_DB_PATH,
+          agent_name: "SmokeTestAgent",
+          bead_id: cell.id,
+          status: "in_progress",
+          message: "Testing workflow integration",
+          progress_percent: 50,
+        },
+        ctx,
+      );
+      expect(progress).toContain("Progress");
+
+      // 5. Store a learning
+      const memory = await executeTool<{ id: string }>(
+        semantic_memory_store,
+        {
+          information: "Smoke test verified full tool adapter wiring works end-to-end",
+          tags: "test,verification",
+        },
+        ctx,
+      );
+      expect(memory.id).toBeTruthy();
+
+      // 6. Find the learning
+      const findResult = await executeToolRaw(
+        semantic_memory_find,
+        { query: "smoke test adapter wiring", limit: 3 },
+        ctx,
+      );
+      expect(findResult).toContain("Smoke test");
+
+      // 7. Send completion message
+      const send = await executeTool<{ success: boolean; message_id: number }>(
+        swarmmail_send,
+        {
+          to: ["coordinator"],
+          subject: `Completed: ${cell.id}`,
+          body: "Smoke test workflow completed successfully",
+          thread_id: cell.id,
+        },
+        ctx,
+      );
+      expect(send.success).toBe(true);
+
+      // 8. Close the cell
+      const close = await executeToolRaw(
+        hive_close,
+        { id: cell.id, reason: "Smoke test workflow completed" },
+        ctx,
+      );
+      expect(close).toContain("Closed");
+
+      // 9. Release files
+      const release = await executeTool<{ released: number }>(
+        swarmmail_release,
+        {},
+        ctx,
+      );
+      expect(release.released).toBe(2);
+
+      // ✅ If we got here, ALL tools work correctly through the adapter!
+      // No "dbOverride required" errors occurred.
+      // This proves the full tool → store → DB path is intact.
+
+      clearSessionState(ctx.sessionID);
+    } finally {
+      setHiveWorkingDirectory(originalDir);
+    }
+  });
+});
+
+/**
+ * SUMMARY OF BUGS THESE TESTS PREVENT:
+ *
+ * 1. **DB Adapter Not Passed Through**
+ *    - Tools call store functions without dbOverride
+ *    - Store functions require explicit adapter
+ *    - Error: "dbOverride parameter is required"
+ *
+ * 2. **Store Function Signature Changes**
+ *    - Store functions change to require adapter
+ *    - Tools not updated to pass adapter
+ *    - Silent breakage until runtime
+ *
+ * 3. **Initialization Order Issues**
+ *    - Adapter not created before tools use it
+ *    - Tools assume adapter exists globally
+ *    - Error: "Cannot read property of undefined"
+ *
+ * 4. **Context Loss Across Layers**
+ *    - Tool → store → DB path breaks
+ *    - Each layer assumes next has context
+ *    - Integration gap not caught by unit tests
+ *
+ * **HOW THESE TESTS CATCH THEM:**
+ * - Exercise FULL path: tool.execute() → store → DB
+ * - No mocking of store functions
+ * - Verify actual DB operations succeed
+ * - Would have FAILED before the fix
+ */
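
For orientation, the wiring pattern these tests protect can be sketched outside the diff as plain dependency injection: the tool layer resolves one cached adapter per project and passes it explicitly into every store call, so no store function depends on a removed global database handle. The sketch below is illustrative only; `StoreAdapter`, `resolveAdapter`, `appendEvent`, and `sendMessageTool` are hypothetical names and do not reflect the package's actual API.

```ts
// Illustrative sketch only — hypothetical names, not the plugin's real API.
interface StoreAdapter {
  run(sql: string, params: unknown[]): Promise<void>;
  close(): Promise<void>;
}

// Store function: takes the adapter explicitly instead of reaching for a global.
async function appendEvent(
  db: StoreAdapter,
  event: { type: string; body: string },
): Promise<void> {
  await db.run("INSERT INTO events (type, body) VALUES (?, ?)", [event.type, event.body]);
}

// Tool layer: resolve (and cache) one adapter per project, then thread it
// through every store call — the tool → store → DB path the tests exercise.
const adapterCache = new Map<string, StoreAdapter>();

async function resolveAdapter(
  projectKey: string,
  create: () => Promise<StoreAdapter>,
): Promise<StoreAdapter> {
  let adapter = adapterCache.get(projectKey);
  if (!adapter) {
    adapter = await create();
    adapterCache.set(projectKey, adapter);
  }
  return adapter;
}

async function sendMessageTool(
  projectKey: string,
  body: string,
  create: () => Promise<StoreAdapter>,
): Promise<string> {
  const db = await resolveAdapter(projectKey, create);
  await appendEvent(db, { type: "message", body }); // adapter passed through, no global lookup
  return JSON.stringify({ success: true });
}
```

The integration tests above verify exactly this property end-to-end: every tool call succeeds without an explicit `dbOverride`, which it can only do if the adapter is threaded through the tool → store → DB path.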