swarm-mail 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (36)
  1. package/README.md +201 -0
  2. package/package.json +28 -0
  3. package/src/adapter.ts +306 -0
  4. package/src/index.ts +57 -0
  5. package/src/pglite.ts +189 -0
  6. package/src/streams/agent-mail.test.ts +777 -0
  7. package/src/streams/agent-mail.ts +535 -0
  8. package/src/streams/debug.test.ts +500 -0
  9. package/src/streams/debug.ts +727 -0
  10. package/src/streams/effect/ask.integration.test.ts +314 -0
  11. package/src/streams/effect/ask.ts +202 -0
  12. package/src/streams/effect/cursor.integration.test.ts +418 -0
  13. package/src/streams/effect/cursor.ts +288 -0
  14. package/src/streams/effect/deferred.test.ts +357 -0
  15. package/src/streams/effect/deferred.ts +445 -0
  16. package/src/streams/effect/index.ts +17 -0
  17. package/src/streams/effect/layers.ts +73 -0
  18. package/src/streams/effect/lock.test.ts +385 -0
  19. package/src/streams/effect/lock.ts +399 -0
  20. package/src/streams/effect/mailbox.test.ts +260 -0
  21. package/src/streams/effect/mailbox.ts +318 -0
  22. package/src/streams/events.test.ts +924 -0
  23. package/src/streams/events.ts +329 -0
  24. package/src/streams/index.test.ts +229 -0
  25. package/src/streams/index.ts +578 -0
  26. package/src/streams/migrations.test.ts +359 -0
  27. package/src/streams/migrations.ts +362 -0
  28. package/src/streams/projections.test.ts +611 -0
  29. package/src/streams/projections.ts +564 -0
  30. package/src/streams/store.integration.test.ts +658 -0
  31. package/src/streams/store.ts +1129 -0
  32. package/src/streams/swarm-mail.ts +552 -0
  33. package/src/types/adapter.ts +392 -0
  34. package/src/types/database.ts +127 -0
  35. package/src/types/index.ts +26 -0
  36. package/tsconfig.json +22 -0
package/src/streams/events.ts
@@ -0,0 +1,329 @@
+ /**
+  * Event Types for Swarm Mail Event Sourcing
+  *
+  * All agent coordination operations are represented as immutable events.
+  * Current state is computed by replaying events (projections).
+  *
+  * Event sourcing benefits:
+  * - Full audit trail for debugging
+  * - Replay from any point
+  * - Events ARE the training data for learning
+  * - No lost messages - append-only, durable
+  */
+ import { z } from "zod";
+
+ // ============================================================================
+ // Base Event Schema
+ // ============================================================================
+
+ /**
+  * Base fields present on all events
+  */
+ export const BaseEventSchema = z.object({
+   /** Auto-generated event ID */
+   id: z.number().optional(),
+   /** Event type discriminator */
+   type: z.string(),
+   /** Project key (usually absolute path) */
+   project_key: z.string(),
+   /** Timestamp when event occurred */
+   timestamp: z.number(), // Unix ms
+   /** Sequence number for ordering */
+   sequence: z.number().optional(),
+ });
+
+ // ============================================================================
+ // Agent Events
+ // ============================================================================
+
+ export const AgentRegisteredEventSchema = BaseEventSchema.extend({
+   type: z.literal("agent_registered"),
+   agent_name: z.string(),
+   program: z.string().default("opencode"),
+   model: z.string().default("unknown"),
+   task_description: z.string().optional(),
+ });
+
+ export const AgentActiveEventSchema = BaseEventSchema.extend({
+   type: z.literal("agent_active"),
+   agent_name: z.string(),
+ });
+
+ // ============================================================================
+ // Message Events
+ // ============================================================================
+
+ export const MessageSentEventSchema = BaseEventSchema.extend({
+   type: z.literal("message_sent"),
+   /** Message ID (auto-generated) */
+   message_id: z.number().optional(),
+   from_agent: z.string(),
+   to_agents: z.array(z.string()),
+   subject: z.string(),
+   body: z.string(),
+   thread_id: z.string().optional(),
+   importance: z.enum(["low", "normal", "high", "urgent"]).default("normal"),
+   ack_required: z.boolean().default(false),
+ });
+
+ export const MessageReadEventSchema = BaseEventSchema.extend({
+   type: z.literal("message_read"),
+   message_id: z.number(),
+   agent_name: z.string(),
+ });
+
+ export const MessageAckedEventSchema = BaseEventSchema.extend({
+   type: z.literal("message_acked"),
+   message_id: z.number(),
+   agent_name: z.string(),
+ });
+
+ // ============================================================================
+ // File Reservation Events
+ // ============================================================================
+
+ export const FileReservedEventSchema = BaseEventSchema.extend({
+   type: z.literal("file_reserved"),
+   /** Reservation ID (auto-generated) */
+   reservation_id: z.number().optional(),
+   agent_name: z.string(),
+   paths: z.array(z.string()),
+   reason: z.string().optional(),
+   exclusive: z.boolean().default(true),
+   /** TTL in seconds */
+   ttl_seconds: z.number().default(3600),
+   /** Absolute expiry timestamp */
+   expires_at: z.number(),
+ });
+
+ export const FileReleasedEventSchema = BaseEventSchema.extend({
+   type: z.literal("file_released"),
+   agent_name: z.string(),
+   /** Specific paths to release, or empty to release all */
+   paths: z.array(z.string()).optional(),
+   /** Specific reservation IDs to release */
+   reservation_ids: z.array(z.number()).optional(),
+ });
+
+ // ============================================================================
+ // Task Events (for swarm integration)
+ // ============================================================================
+
+ export const TaskStartedEventSchema = BaseEventSchema.extend({
+   type: z.literal("task_started"),
+   agent_name: z.string(),
+   bead_id: z.string(),
+   epic_id: z.string().optional(),
+ });
+
+ export const TaskProgressEventSchema = BaseEventSchema.extend({
+   type: z.literal("task_progress"),
+   agent_name: z.string(),
+   bead_id: z.string(),
+   progress_percent: z.number().min(0).max(100).optional(),
+   message: z.string().optional(),
+   files_touched: z.array(z.string()).optional(),
+ });
+
+ export const TaskCompletedEventSchema = BaseEventSchema.extend({
+   type: z.literal("task_completed"),
+   agent_name: z.string(),
+   bead_id: z.string(),
+   summary: z.string(),
+   files_touched: z.array(z.string()).optional(),
+   success: z.boolean().default(true),
+ });
+
+ export const TaskBlockedEventSchema = BaseEventSchema.extend({
+   type: z.literal("task_blocked"),
+   agent_name: z.string(),
+   bead_id: z.string(),
+   reason: z.string(),
+ });
+
+ // ============================================================================
+ // Eval Capture Events (for learning system)
+ // ============================================================================
+
+ export const DecompositionGeneratedEventSchema = BaseEventSchema.extend({
+   type: z.literal("decomposition_generated"),
+   epic_id: z.string(),
+   task: z.string(),
+   context: z.string().optional(),
+   strategy: z.enum(["file-based", "feature-based", "risk-based"]),
+   epic_title: z.string(),
+   subtasks: z.array(
+     z.object({
+       title: z.string(),
+       files: z.array(z.string()),
+       priority: z.number().min(0).max(3).optional(),
+     }),
+   ),
+   recovery_context: z
+     .object({
+       shared_context: z.string().optional(),
+       skills_to_load: z.array(z.string()).optional(),
+       coordinator_notes: z.string().optional(),
+     })
+     .optional(),
+ });
+
+ export const SubtaskOutcomeEventSchema = BaseEventSchema.extend({
+   type: z.literal("subtask_outcome"),
+   epic_id: z.string(),
+   bead_id: z.string(),
+   planned_files: z.array(z.string()),
+   actual_files: z.array(z.string()),
+   duration_ms: z.number().min(0),
+   error_count: z.number().min(0).default(0),
+   retry_count: z.number().min(0).default(0),
+   success: z.boolean(),
+ });
+
+ export const HumanFeedbackEventSchema = BaseEventSchema.extend({
+   type: z.literal("human_feedback"),
+   epic_id: z.string(),
+   accepted: z.boolean(),
+   modified: z.boolean().default(false),
+   notes: z.string().optional(),
+ });
+
+ // ============================================================================
+ // Swarm Checkpoint Events (for recovery and coordination)
+ // ============================================================================
+
+ export const SwarmCheckpointedEventSchema = BaseEventSchema.extend({
+   type: z.literal("swarm_checkpointed"),
+   epic_id: z.string(),
+   bead_id: z.string(),
+   strategy: z.enum(["file-based", "feature-based", "risk-based"]),
+   files: z.array(z.string()),
+   dependencies: z.array(z.string()),
+   directives: z.object({
+     shared_context: z.string().optional(),
+     skills_to_load: z.array(z.string()).optional(),
+     coordinator_notes: z.string().optional(),
+   }),
+   recovery: z.object({
+     last_checkpoint: z.number(),
+     files_modified: z.array(z.string()),
+     progress_percent: z.number().min(0).max(100),
+     last_message: z.string().optional(),
+     error_context: z.string().optional(),
+   }),
+ });
+
+ export const SwarmRecoveredEventSchema = BaseEventSchema.extend({
+   type: z.literal("swarm_recovered"),
+   epic_id: z.string(),
+   bead_id: z.string(),
+   recovered_from_checkpoint: z.number(), // timestamp
+ });
+
+ // ============================================================================
+ // Union Type
+ // ============================================================================
+
+ export const AgentEventSchema = z.discriminatedUnion("type", [
+   AgentRegisteredEventSchema,
+   AgentActiveEventSchema,
+   MessageSentEventSchema,
+   MessageReadEventSchema,
+   MessageAckedEventSchema,
+   FileReservedEventSchema,
+   FileReleasedEventSchema,
+   TaskStartedEventSchema,
+   TaskProgressEventSchema,
+   TaskCompletedEventSchema,
+   TaskBlockedEventSchema,
+   DecompositionGeneratedEventSchema,
+   SubtaskOutcomeEventSchema,
+   HumanFeedbackEventSchema,
+   SwarmCheckpointedEventSchema,
+   SwarmRecoveredEventSchema,
+ ]);
+
+ export type AgentEvent = z.infer<typeof AgentEventSchema>;
+
+ // Individual event types for convenience
+ export type AgentRegisteredEvent = z.infer<typeof AgentRegisteredEventSchema>;
+ export type AgentActiveEvent = z.infer<typeof AgentActiveEventSchema>;
+ export type MessageSentEvent = z.infer<typeof MessageSentEventSchema>;
+ export type MessageReadEvent = z.infer<typeof MessageReadEventSchema>;
+ export type MessageAckedEvent = z.infer<typeof MessageAckedEventSchema>;
+ export type FileReservedEvent = z.infer<typeof FileReservedEventSchema>;
+ export type FileReleasedEvent = z.infer<typeof FileReleasedEventSchema>;
+ export type TaskStartedEvent = z.infer<typeof TaskStartedEventSchema>;
+ export type TaskProgressEvent = z.infer<typeof TaskProgressEventSchema>;
+ export type TaskCompletedEvent = z.infer<typeof TaskCompletedEventSchema>;
+ export type TaskBlockedEvent = z.infer<typeof TaskBlockedEventSchema>;
+ export type DecompositionGeneratedEvent = z.infer<
+   typeof DecompositionGeneratedEventSchema
+ >;
+ export type SubtaskOutcomeEvent = z.infer<typeof SubtaskOutcomeEventSchema>;
+ export type HumanFeedbackEvent = z.infer<typeof HumanFeedbackEventSchema>;
+ export type SwarmCheckpointedEvent = z.infer<
+   typeof SwarmCheckpointedEventSchema
+ >;
+ export type SwarmRecoveredEvent = z.infer<typeof SwarmRecoveredEventSchema>;
+
+ // ============================================================================
+ // Session State Types
+ // ============================================================================
+
+ /**
+  * Shared session state for Agent Mail and Swarm Mail
+  *
+  * Common fields for tracking agent coordination session across both
+  * the MCP-based implementation (agent-mail) and the embedded event-sourced
+  * implementation (swarm-mail).
+  */
+ export interface MailSessionState {
+   /** Project key (usually absolute path) */
+   projectKey: string;
+   /** Agent name for this session */
+   agentName: string;
+   /** Active reservation IDs */
+   reservations: number[];
+   /** Session start timestamp (ISO-8601) */
+   startedAt: string;
+ }
+
+ // ============================================================================
+ // Event Helpers
+ // ============================================================================
+
+ /**
+  * Create an event with timestamp and validate
+  */
+ export function createEvent<T extends AgentEvent["type"]>(
+   type: T,
+   data: Omit<
+     Extract<AgentEvent, { type: T }>,
+     "type" | "timestamp" | "id" | "sequence"
+   >,
+ ): Extract<AgentEvent, { type: T }> {
+   const event = {
+     type,
+     timestamp: Date.now(),
+     ...data,
+   } as Extract<AgentEvent, { type: T }>;
+
+   // Validate
+   const result = AgentEventSchema.safeParse(event);
+   if (!result.success) {
+     throw new Error(`Invalid event: ${result.error.message}`);
+   }
+
+   return result.data as Extract<AgentEvent, { type: T }>;
+ }
+
+ /**
+  * Type guard for specific event types
+  */
+ export function isEventType<T extends AgentEvent["type"]>(
+   event: AgentEvent,
+   type: T,
+ ): event is Extract<AgentEvent, { type: T }> {
+   return event.type === type;
+ }
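
The `createEvent` helper stamps a timestamp and validates the result against the discriminated union, while `isEventType` narrows the union when events are replayed into current state. A minimal usage sketch (not part of the package; the relative import path and all field values are assumptions for illustration):

import { createEvent, isEventType, type AgentEvent } from "./events";

// Append side: build a validated event (timestamp is added by createEvent).
const started = createEvent("task_started", {
  project_key: "/abs/path/to/project", // hypothetical project key
  agent_name: "worker-1",              // hypothetical agent name
  bead_id: "bead-42",                  // hypothetical task id
});

// Replay side: fold events into a small projection of in-flight tasks.
function activeTasks(events: AgentEvent[]): Set<string> {
  const open = new Set<string>();
  for (const e of events) {
    if (isEventType(e, "task_started")) open.add(e.bead_id);
    if (isEventType(e, "task_completed")) open.delete(e.bead_id);
  }
  return open;
}

console.log(activeTasks([started])); // Set { "bead-42" }
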
package/src/streams/index.test.ts
@@ -0,0 +1,229 @@
+ /**
+  * Tests for database singleton management
+  *
+  * Each test uses isolated database paths to prevent cross-test pollution.
+  */
+ import { randomUUID } from "node:crypto";
+ import { afterEach, beforeEach, describe, expect, it } from "vitest";
+
+ import {
+   closeAllDatabases,
+   closeDatabase,
+   getDatabase,
+   getDatabasePath,
+   getDatabaseStats,
+   isDatabaseHealthy,
+   resetDatabase,
+ } from "./index";
+
+ // ============================================================================
+ // Test Isolation Helpers
+ // ============================================================================
+
+ /** Generate unique test database path */
+ function testDbPath(prefix = "test"): string {
+   return `/tmp/streams-${prefix}-${randomUUID()}`;
+ }
+
+ /** Track paths created during test for cleanup */
+ let testPaths: string[] = [];
+
+ function trackPath(path: string): string {
+   testPaths.push(path);
+   return path;
+ }
+
+ // ============================================================================
+ // Global Cleanup
+ // ============================================================================
+
+ beforeEach(async () => {
+   testPaths = [];
+   // Nuclear cleanup - close everything before each test
+   await closeAllDatabases();
+ });
+
+ afterEach(async () => {
+   // Clean up all test databases
+   for (const path of testPaths) {
+     try {
+       // Wipe all data before closing
+       const db = await getDatabase(path);
+       await db.exec(`
+         DELETE FROM message_recipients;
+         DELETE FROM messages;
+         DELETE FROM reservations;
+         DELETE FROM agents;
+         DELETE FROM events;
+         DELETE FROM locks;
+         DELETE FROM cursors;
+         DELETE FROM deferred;
+       `);
+     } catch {
+       // Ignore errors during cleanup
+     }
+     await closeDatabase(path);
+   }
+   testPaths = [];
+   // Nuclear cleanup after each test too
+   await closeAllDatabases();
+ });
+
+ // ============================================================================
+ // Tests
+ // ============================================================================
+
+ describe("getDatabasePath", () => {
+   it("returns project-local path when .opencode exists", () => {
+     const path = getDatabasePath(process.cwd());
+     expect(path).toMatch(/\.opencode\/streams$/);
+   });
+
+   it("falls back to global path when projectPath is undefined", () => {
+     const path = getDatabasePath();
+     expect(path).toMatch(/\.opencode\/streams$/);
+     expect(path).toContain(require("node:os").homedir());
+   });
+ });
+
+ describe("getDatabase singleton", () => {
+   it("returns same instance for same path", async () => {
+     const path = trackPath(testDbPath("same-instance"));
+     const db1 = await getDatabase(path);
+     const db2 = await getDatabase(path);
+     expect(db1).toBe(db2);
+   });
+
+   it("caches instances by path", async () => {
+     // This test verifies the caching behavior - same path returns same instance
+     // Different paths may or may not return different instances depending on
+     // whether file-based storage works or falls back to in-memory
+     const path1 = trackPath(testDbPath("cache-1"));
+     const path2 = trackPath(testDbPath("cache-2"));
+
+     const db1a = await getDatabase(path1);
+     const db1b = await getDatabase(path1);
+     const db2 = await getDatabase(path2);
+
+     // Same path MUST return same instance (this is the cache contract)
+     expect(db1a).toBe(db1b);
+
+     // Both should be functional
+     const r1 = await db1a.query("SELECT 1 as ok");
+     const r2 = await db2.query("SELECT 1 as ok");
+     expect(r1.rows[0]).toEqual({ ok: 1 });
+     expect(r2.rows[0]).toEqual({ ok: 1 });
+   });
+
+   it("initializes schema on first access", async () => {
+     const path = trackPath(testDbPath("schema-init"));
+     const db = await getDatabase(path);
+     const result = await db.query("SELECT COUNT(*) FROM events");
+     expect(result.rows).toBeDefined();
+   });
+
+   it("does not reinitialize schema on subsequent access", async () => {
+     const path = trackPath(testDbPath("no-reinit"));
+     const db1 = await getDatabase(path);
+
+     await db1.exec(
+       "INSERT INTO events (type, project_key, timestamp, data) VALUES ('test', 'test', 123, '{}')",
+     );
+
+     const db2 = await getDatabase(path);
+     const result = await db2.query<{ count: string }>(
+       "SELECT COUNT(*) as count FROM events",
+     );
+     expect(parseInt(result.rows[0].count)).toBe(1);
+   });
+
+   it("concurrent calls return the same instance (no race condition)", async () => {
+     const path = trackPath(testDbPath("race"));
+     const promises = Array.from({ length: 10 }, () => getDatabase(path));
+     const results = await Promise.all(promises);
+
+     const firstInstance = results[0];
+     const allSame = results.every((db) => db === firstInstance);
+     expect(allSame).toBe(true);
+   });
+ });
+
+ describe("closeDatabase", () => {
+   it("removes instance from cache", async () => {
+     const path = trackPath(testDbPath("close"));
+     const db1 = await getDatabase(path);
+     await closeDatabase(path);
+     const db2 = await getDatabase(path);
+     expect(db1).not.toBe(db2);
+   });
+
+   it("handles closing non-existent database gracefully", async () => {
+     const path = testDbPath("non-existent");
+     // Should not throw
+     await closeDatabase(path);
+     expect(true).toBe(true);
+   });
+ });
+
+ describe("closeAllDatabases", () => {
+   it("closes all cached instances", async () => {
+     const path1 = trackPath(testDbPath("all-1"));
+     const path2 = trackPath(testDbPath("all-2"));
+     const db1 = await getDatabase(path1);
+     const db2 = await getDatabase(path2);
+
+     await closeAllDatabases();
+
+     const db3 = await getDatabase(path1);
+     const db4 = await getDatabase(path2);
+
+     expect(db3).not.toBe(db1);
+     expect(db4).not.toBe(db2);
+   });
+ });
+
+ describe("isDatabaseHealthy", () => {
+   it("returns true for healthy database", async () => {
+     const path = trackPath(testDbPath("healthy"));
+     await getDatabase(path);
+     const healthy = await isDatabaseHealthy(path);
+     expect(healthy).toBe(true);
+   });
+ });
+
+ describe("resetDatabase", () => {
+   it("clears all data but keeps schema", async () => {
+     const path = trackPath(testDbPath("reset"));
+     const db = await getDatabase(path);
+     await db.exec(
+       "INSERT INTO events (type, project_key, timestamp, data) VALUES ('test', 'test', 123, '{}')",
+     );
+
+     await resetDatabase(path);
+
+     const result = await db.query<{ count: string }>(
+       "SELECT COUNT(*) as count FROM events",
+     );
+     expect(parseInt(result.rows[0].count)).toBe(0);
+
+     // Schema should still exist
+     await expect(
+       db.query("SELECT 1 FROM events LIMIT 0"),
+     ).resolves.toBeDefined();
+   });
+ });
+
+ describe("getDatabaseStats", () => {
+   it("returns counts for all tables", async () => {
+     const path = trackPath(testDbPath("stats"));
+     await resetDatabase(path);
+     const stats = await getDatabaseStats(path);
+
+     expect(stats).toEqual({
+       events: 0,
+       agents: 0,
+       messages: 0,
+       reservations: 0,
+     });
+   });
+ });
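
The suite pins down a specific caching contract: one instance per database path, shared initialization under concurrent callers, and eviction from the cache on close. A minimal sketch of how such a contract is commonly satisfied, assuming a promise-per-path cache; this is illustrative only, since src/streams/index.ts itself is not included in this diff and `openAndInitSchema` is a hypothetical helper:

// Sketch of the path-keyed singleton contract exercised by these tests.
interface Db {
  query(sql: string): Promise<{ rows: unknown[] }>;
  exec(sql: string): Promise<void>;
  close(): Promise<void>;
}

const cache = new Map<string, Promise<Db>>();

function getDatabase(path: string): Promise<Db> {
  let entry = cache.get(path);
  if (!entry) {
    // Cache the promise, not the resolved instance, so ten concurrent
    // callers all await the same in-flight initialization (no race).
    entry = openAndInitSchema(path);
    cache.set(path, entry);
  }
  return entry;
}

async function closeDatabase(path: string): Promise<void> {
  const entry = cache.get(path);
  cache.delete(path); // next getDatabase(path) creates a fresh instance
  if (entry) await (await entry).close();
}

async function closeAllDatabases(): Promise<void> {
  await Promise.all([...cache.keys()].map((p) => closeDatabase(p)));
}

declare function openAndInitSchema(path: string): Promise<Db>; // assumed helper
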