swarm-mail 1.2.2 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190)
  1. package/README.md +174 -144
  2. package/bin/swarm-db.ts +168 -0
  3. package/dist/adapter.d.ts +2 -2
  4. package/dist/analytics/formatters.d.ts +50 -0
  5. package/dist/analytics/formatters.d.ts.map +1 -0
  6. package/dist/analytics/index.d.ts +35 -0
  7. package/dist/analytics/index.d.ts.map +1 -0
  8. package/dist/analytics/queries/agent-activity.d.ts +21 -0
  9. package/dist/analytics/queries/agent-activity.d.ts.map +1 -0
  10. package/dist/analytics/queries/checkpoint-frequency.d.ts +26 -0
  11. package/dist/analytics/queries/checkpoint-frequency.d.ts.map +1 -0
  12. package/dist/analytics/queries/failed-decompositions.d.ts +22 -0
  13. package/dist/analytics/queries/failed-decompositions.d.ts.map +1 -0
  14. package/dist/analytics/queries/human-feedback.d.ts +26 -0
  15. package/dist/analytics/queries/human-feedback.d.ts.map +1 -0
  16. package/dist/analytics/queries/index.d.ts +21 -0
  17. package/dist/analytics/queries/index.d.ts.map +1 -0
  18. package/dist/analytics/queries/lock-contention.d.ts +20 -0
  19. package/dist/analytics/queries/lock-contention.d.ts.map +1 -0
  20. package/dist/analytics/queries/message-latency.d.ts +24 -0
  21. package/dist/analytics/queries/message-latency.d.ts.map +1 -0
  22. package/dist/analytics/queries/recovery-success.d.ts +26 -0
  23. package/dist/analytics/queries/recovery-success.d.ts.map +1 -0
  24. package/dist/analytics/queries/scope-violations.d.ts +27 -0
  25. package/dist/analytics/queries/scope-violations.d.ts.map +1 -0
  26. package/dist/analytics/queries/strategy-success-rates.d.ts +20 -0
  27. package/dist/analytics/queries/strategy-success-rates.d.ts.map +1 -0
  28. package/dist/analytics/queries/task-duration.d.ts +31 -0
  29. package/dist/analytics/queries/task-duration.d.ts.map +1 -0
  30. package/dist/analytics/query-builder.d.ts +110 -0
  31. package/dist/analytics/query-builder.d.ts.map +1 -0
  32. package/dist/analytics/types.d.ts +36 -0
  33. package/dist/analytics/types.d.ts.map +1 -0
  34. package/dist/cli/db.d.ts +73 -0
  35. package/dist/cli/db.d.ts.map +1 -0
  36. package/dist/db/client.d.ts +68 -0
  37. package/dist/db/client.d.ts.map +1 -0
  38. package/dist/db/drizzle.d.ts +24 -0
  39. package/dist/db/drizzle.d.ts.map +1 -0
  40. package/dist/db/index.d.ts +25 -0
  41. package/dist/db/index.d.ts.map +1 -0
  42. package/dist/db/migrate.d.ts +81 -0
  43. package/dist/db/migrate.d.ts.map +1 -0
  44. package/dist/db/schema/hive.d.ts +1596 -0
  45. package/dist/db/schema/hive.d.ts.map +1 -0
  46. package/dist/db/schema/index.d.ts +12 -0
  47. package/dist/db/schema/index.d.ts.map +1 -0
  48. package/dist/db/schema/memory.d.ts +214 -0
  49. package/dist/db/schema/memory.d.ts.map +1 -0
  50. package/dist/db/schema/streams.d.ts +1601 -0
  51. package/dist/db/schema/streams.d.ts.map +1 -0
  52. package/dist/debug-demo.d.ts +12 -0
  53. package/dist/debug-demo.d.ts.map +1 -0
  54. package/dist/debug.d.ts +19 -0
  55. package/dist/debug.d.ts.map +1 -0
  56. package/dist/errors/base-error.d.ts +49 -0
  57. package/dist/errors/base-error.d.ts.map +1 -0
  58. package/dist/errors/checkpoint-error.d.ts +10 -0
  59. package/dist/errors/checkpoint-error.d.ts.map +1 -0
  60. package/dist/errors/decomposition-error.d.ts +10 -0
  61. package/dist/errors/decomposition-error.d.ts.map +1 -0
  62. package/dist/errors/index.d.ts +37 -0
  63. package/dist/errors/index.d.ts.map +1 -0
  64. package/dist/errors/reservation-error.d.ts +10 -0
  65. package/dist/errors/reservation-error.d.ts.map +1 -0
  66. package/dist/errors/validation-error.d.ts +10 -0
  67. package/dist/errors/validation-error.d.ts.map +1 -0
  68. package/dist/hive/adapter.d.ts +8 -8
  69. package/dist/hive/adapter.d.ts.map +1 -1
  70. package/dist/hive/dependencies-drizzle.d.ts +31 -0
  71. package/dist/hive/dependencies-drizzle.d.ts.map +1 -0
  72. package/dist/hive/dependencies.d.ts.map +1 -1
  73. package/dist/hive/flush-manager.d.ts +2 -1
  74. package/dist/hive/flush-manager.d.ts.map +1 -1
  75. package/dist/hive/jsonl.d.ts.map +1 -1
  76. package/dist/hive/migrations.d.ts +23 -3
  77. package/dist/hive/migrations.d.ts.map +1 -1
  78. package/dist/hive/projections-drizzle.d.ts +43 -0
  79. package/dist/hive/projections-drizzle.d.ts.map +1 -0
  80. package/dist/hive/projections.d.ts +8 -0
  81. package/dist/hive/projections.d.ts.map +1 -1
  82. package/dist/hive/queries-drizzle.d.ts +52 -0
  83. package/dist/hive/queries-drizzle.d.ts.map +1 -0
  84. package/dist/hive/queries.d.ts +38 -0
  85. package/dist/hive/queries.d.ts.map +1 -1
  86. package/dist/hive/store.d.ts +36 -14
  87. package/dist/hive/store.d.ts.map +1 -1
  88. package/dist/index.d.ts +37 -18
  89. package/dist/index.d.ts.map +1 -1
  90. package/dist/index.js +84673 -38352
  91. package/dist/libsql.convenience.d.ts +155 -0
  92. package/dist/libsql.convenience.d.ts.map +1 -0
  93. package/dist/libsql.d.ts +93 -0
  94. package/dist/libsql.d.ts.map +1 -0
  95. package/dist/memory/adapter.d.ts +6 -7
  96. package/dist/memory/adapter.d.ts.map +1 -1
  97. package/dist/memory/libsql-schema.d.ts +83 -0
  98. package/dist/memory/libsql-schema.d.ts.map +1 -0
  99. package/dist/memory/migrate-legacy.d.ts +3 -0
  100. package/dist/memory/migrate-legacy.d.ts.map +1 -1
  101. package/dist/memory/migrations.d.ts +11 -0
  102. package/dist/memory/migrations.d.ts.map +1 -1
  103. package/dist/memory/store.d.ts +39 -31
  104. package/dist/memory/store.d.ts.map +1 -1
  105. package/dist/memory/test-utils.d.ts +59 -0
  106. package/dist/memory/test-utils.d.ts.map +1 -0
  107. package/dist/migrate-pglite-to-libsql.d.ts +73 -0
  108. package/dist/migrate-pglite-to-libsql.d.ts.map +1 -0
  109. package/dist/pglite.d.ts +8 -170
  110. package/dist/pglite.d.ts.map +1 -1
  111. package/dist/streams/agent-mail.d.ts +13 -0
  112. package/dist/streams/agent-mail.d.ts.map +1 -1
  113. package/dist/streams/auto-migrate.d.ts +182 -0
  114. package/dist/streams/auto-migrate.d.ts.map +1 -0
  115. package/dist/streams/effect/ask.d.ts +5 -4
  116. package/dist/streams/effect/ask.d.ts.map +1 -1
  117. package/dist/streams/effect/cursor.d.ts +3 -2
  118. package/dist/streams/effect/cursor.d.ts.map +1 -1
  119. package/dist/streams/effect/deferred.d.ts +12 -9
  120. package/dist/streams/effect/deferred.d.ts.map +1 -1
  121. package/dist/streams/effect/lock.d.ts +13 -11
  122. package/dist/streams/effect/lock.d.ts.map +1 -1
  123. package/dist/streams/effect/mailbox.d.ts +3 -2
  124. package/dist/streams/effect/mailbox.d.ts.map +1 -1
  125. package/dist/streams/events.d.ts +4 -0
  126. package/dist/streams/events.d.ts.map +1 -1
  127. package/dist/streams/index.d.ts +20 -74
  128. package/dist/streams/index.d.ts.map +1 -1
  129. package/dist/streams/libsql-schema.d.ts +77 -0
  130. package/dist/streams/libsql-schema.d.ts.map +1 -0
  131. package/dist/streams/migrations.d.ts +8 -8
  132. package/dist/streams/migrations.d.ts.map +1 -1
  133. package/dist/streams/projections-drizzle.d.ts +175 -0
  134. package/dist/streams/projections-drizzle.d.ts.map +1 -0
  135. package/dist/streams/projections.d.ts +9 -0
  136. package/dist/streams/projections.d.ts.map +1 -1
  137. package/dist/streams/store-drizzle.d.ts +96 -0
  138. package/dist/streams/store-drizzle.d.ts.map +1 -0
  139. package/dist/streams/store.d.ts +23 -0
  140. package/dist/streams/store.d.ts.map +1 -1
  141. package/dist/streams/swarm-mail.d.ts +9 -0
  142. package/dist/streams/swarm-mail.d.ts.map +1 -1
  143. package/dist/test-libsql.d.ts +76 -0
  144. package/dist/test-libsql.d.ts.map +1 -0
  145. package/dist/types/adapter.d.ts +3 -3
  146. package/dist/types/database.d.ts +12 -12
  147. package/dist/types/hive-adapter.d.ts +5 -5
  148. package/package.json +10 -6
  149. package/dist/beads/adapter.d.ts +0 -38
  150. package/dist/beads/adapter.d.ts.map +0 -1
  151. package/dist/beads/blocked-cache.d.ts +0 -21
  152. package/dist/beads/blocked-cache.d.ts.map +0 -1
  153. package/dist/beads/comments.d.ts +0 -21
  154. package/dist/beads/comments.d.ts.map +0 -1
  155. package/dist/beads/dependencies.d.ts +0 -58
  156. package/dist/beads/dependencies.d.ts.map +0 -1
  157. package/dist/beads/events.d.ts +0 -163
  158. package/dist/beads/events.d.ts.map +0 -1
  159. package/dist/beads/flush-manager.d.ts +0 -71
  160. package/dist/beads/flush-manager.d.ts.map +0 -1
  161. package/dist/beads/index.d.ts +0 -25
  162. package/dist/beads/index.d.ts.map +0 -1
  163. package/dist/beads/jsonl.d.ts +0 -103
  164. package/dist/beads/jsonl.d.ts.map +0 -1
  165. package/dist/beads/labels.d.ts +0 -21
  166. package/dist/beads/labels.d.ts.map +0 -1
  167. package/dist/beads/merge.d.ts +0 -99
  168. package/dist/beads/merge.d.ts.map +0 -1
  169. package/dist/beads/migrations.d.ts +0 -41
  170. package/dist/beads/migrations.d.ts.map +0 -1
  171. package/dist/beads/operations.d.ts +0 -56
  172. package/dist/beads/operations.d.ts.map +0 -1
  173. package/dist/beads/projections.d.ts +0 -103
  174. package/dist/beads/projections.d.ts.map +0 -1
  175. package/dist/beads/queries.d.ts +0 -77
  176. package/dist/beads/queries.d.ts.map +0 -1
  177. package/dist/beads/store.d.ts +0 -98
  178. package/dist/beads/store.d.ts.map +0 -1
  179. package/dist/beads/validation.d.ts +0 -75
  180. package/dist/beads/validation.d.ts.map +0 -1
  181. package/dist/daemon.d.ts +0 -161
  182. package/dist/daemon.d.ts.map +0 -1
  183. package/dist/socket-adapter.d.ts +0 -78
  184. package/dist/socket-adapter.d.ts.map +0 -1
  185. package/dist/streams/debug.d.ts +0 -173
  186. package/dist/streams/debug.d.ts.map +0 -1
  187. package/dist/test-server.d.ts +0 -64
  188. package/dist/test-server.d.ts.map +0 -1
  189. package/dist/types/beads-adapter.d.ts +0 -397
  190. package/dist/types/beads-adapter.d.ts.map +0 -1
package/README.md CHANGED
@@ -25,25 +25,35 @@
  🐝 🐝 ██║ ╚═╝ ██║██║ ██║██║███████╗
  ╚═╝ ╚═╝╚═╝ ╚═╝╚═╝╚══════╝ 🐝
  🐝
- ⚡ Actor-Model Primitives for Agent Coordination
+ Event Sourcing + Actor Model Primitives ⚡
  ```

- Event sourcing primitives for multi-agent coordination. Local-first, no external servers.
+ Event sourcing and actor-model primitives for multi-agent coordination. Built on **libSQL** (embedded SQLite) with **Drizzle ORM**. Local-first, no external servers.

  **[🌐 swarmtools.ai](https://swarmtools.ai)** | **[📖 Full Documentation](https://swarmtools.ai/docs)**

+ ## What is swarm-mail?
+
+ A TypeScript library providing:
+
+ 1. **Event Store** - Append-only log with automatic projection updates (agents, messages, file reservations)
+ 2. **Actor Primitives** - DurableMailbox, DurableLock, DurableCursor, DurableDeferred (Effect-TS based)
+ 3. **Hive** - Git-synced work item tracker (cells, epics, dependencies)
+ 4. **Semantic Memory** - Vector embeddings for persistent agent learnings (Ollama + pgvector)
+
  ```
  ┌─────────────────────────────────────────────────────────────┐
  │ SWARM MAIL STACK │
  ├─────────────────────────────────────────────────────────────┤
- TIER 3: COORDINATION
+ │ COORDINATION
+ │ ├── HiveAdapter - Work item tracking (cells, epics) │
  │ └── ask<Req, Res>() - Request/Response (RPC-style) │
  │ │
- TIER 2: PATTERNS
+ │ PATTERNS
  │ ├── DurableMailbox - Actor inbox with typed envelopes │
  │ └── DurableLock - CAS-based mutual exclusion │
  │ │
- TIER 1: PRIMITIVES
+ │ PRIMITIVES
  │ ├── DurableCursor - Checkpointed stream reader │
  │ └── DurableDeferred - Distributed promise │
  │ │
@@ -51,7 +61,7 @@ Event sourcing primitives for multi-agent coordination. Local-first, no external
  │ └── Semantic Memory - Vector embeddings (pgvector/Ollama) │
  │ │
  │ STORAGE │
- │ └── PGLite (Embedded Postgres) + Migrations
+ │ └── libSQL (Embedded SQLite via Drizzle ORM)
  └─────────────────────────────────────────────────────────────┘
  ```

@@ -61,17 +71,13 @@ Event sourcing primitives for multi-agent coordination. Local-first, no external
  bun add swarm-mail
  ```

- ## Usage
-
- ### Event Store
-
- Append-only event log with automatic projection updates:
+ ## Quick Start

  ```typescript
- import { getSwarmMail } from "swarm-mail";
+ import { getSwarmMailLibSQL } from "swarm-mail";

- // Create swarm mail instance (automatically creates PGlite adapter)
- const swarmMail = await getSwarmMail("/my/project");
+ // Create swarm mail instance (libSQL + Drizzle)
+ const swarmMail = await getSwarmMailLibSQL("/my/project");

  // Append events
  await swarmMail.appendEvent({
@@ -84,145 +90,141 @@ await swarmMail.appendEvent({
  // Query projections
  const agents = await swarmMail.getAgents();
  const messages = await swarmMail.getInbox("WorkerA", { limit: 5 });
+
+ // Clean shutdown
+ await swarmMail.close();
  ```

- ### Durable Primitives (Effect-TS)
+ ## Core APIs

- Built on Effect-TS for type-safe, composable coordination:
+ ### Event Store
+
+ Append-only event log with automatic projection updates:

  ```typescript
- import { DurableMailbox, DurableLock, ask } from 'swarm-mail'
- import { Effect } from 'effect'
-
- // Actor mailbox
- const mailbox = DurableMailbox.create<MyMessage>('worker-a')
- await Effect.runPromise(
- mailbox.send({ type: 'task', payload: 'do something' })
- )
-
- // File locking
- const lock = DurableLock.create('src/auth.ts')
- await Effect.runPromise(
- lock.acquire({ ttl: 60000 }).pipe(
- Effect.flatMap(() => /* do work */),
- Effect.ensuring(lock.release())
- )
- )
-
- // Request/response
- const response = await Effect.runPromise(
- ask<Request, Response>('other-agent', { type: 'get-types' })
- )
- ```
+ import { getSwarmMailLibSQL } from "swarm-mail";

- ### Database Adapter
+ const swarmMail = await getSwarmMailLibSQL("/my/project");

- Dependency injection for testing and flexibility:
+ // Append events
+ await swarmMail.appendEvent({
+ type: "message_sent",
+ from: "WorkerA",
+ to: ["WorkerB"],
+ subject: "Task complete",
+ body: "Auth flow implemented",
+ timestamp: Date.now(),
+ });

- ```typescript
- import { DatabaseAdapter, createSwarmMailAdapter } from 'swarm-mail'
-
- // Implement your own adapter
- const customAdapter: DatabaseAdapter = {
- query: async (sql, params) => /* ... */,
- exec: async (sql) => /* ... */,
- transaction: async (fn) => /* ... */,
- close: async () => /* ... */
- }
+ // Query inbox
+ const messages = await swarmMail.getInbox("WorkerB", {
+ limit: 5,
+ unreadOnly: true
+ });

- // Use custom adapter
- const swarmMail = createSwarmMailAdapter(customAdapter, '/my/project')
+ // Get thread
+ const thread = await swarmMail.getThread("epic-123");

- // Or use the convenience layer (built-in PGLite)
- import { getSwarmMail, createInMemorySwarmMail } from 'swarm-mail'
- const swarmMail = await getSwarmMail('/my/project') // persistent
- const swarmMail = await createInMemorySwarmMail() // in-memory
+ // Check file reservations
+ const conflicts = await swarmMail.checkConflicts([
+ "src/auth.ts"
+ ], "WorkerA");
  ```

- ## Deployment
+ ### Hive (Work Item Tracker)

- ### Connection Modes
+ Git-synced work item tracking with cells and epics:

- #### Daemon Mode (Default)
+ ```typescript
+ import { createHiveAdapter } from "swarm-mail";

- By default, swarm-mail starts an in-process `PGLiteSocketServer` when you call `getSwarmMail()`. All database operations go through this server, preventing multi-process corruption.
+ const hive = await createHiveAdapter({
+ projectPath: "/my/project"
+ });

- ```typescript
- import { getSwarmMail } from 'swarm-mail'
+ // Create cell
+ const cell = await hive.createCell({
+ title: "Add OAuth",
+ type: "feature",
+ priority: 2,
+ });

- // Default: starts daemon automatically
- const swarmMail = await getSwarmMail('/my/project')
+ // Query cells
+ const open = await hive.queryCells({ status: "open" });
+ const ready = await hive.queryCells({ ready: true });

- // Keep alive - handles cleanup on shutdown
- process.on('SIGTERM', async () => {
- await swarmMail.close() // Flushes WAL, closes cleanly
- process.exit(0)
- })
+ // Update cell
+ await hive.updateCell(cell.id, {
+ status: "in_progress",
+ description: "Implementing Google OAuth flow"
+ });
+
+ // Close cell
+ await hive.closeCell(cell.id, "Completed: OAuth implemented");
  ```

- **Why daemon mode is the default:**
+ ### Semantic Memory

- - **No external dependencies** - Uses `@electric-sql/pglite-socket` in-process
- - **Multi-process safe** - One PGLite instance, multiple clients can connect safely
- - **WAL safety** - Single process prevents WAL accumulation from multiple instances
- - **Proper cleanup** - Graceful shutdown triggers checkpoint, preventing unclean state
- - **Resource efficiency** - One PGLite instance shared across operations
+ Vector embeddings for persistent agent learnings:

- #### Embedded Mode (Opt-out)
+ ```typescript
+ import { createSemanticMemory } from "swarm-mail";

- For single-process use cases where you're certain only one process will access the database, you can opt out of daemon mode:
+ const memory = await createSemanticMemory("/my/project");

- ```bash
- SWARM_MAIL_SOCKET=false
- ```
+ // Store a learning
+ const { id } = await memory.store(
+ "OAuth refresh tokens need 5min buffer before expiry to avoid race conditions",
+ { tags: "auth,tokens,debugging" }
+ );

- ⚠️ **Warning:** Embedded mode is NOT safe for multi-process access. PGLite uses a single connection, so concurrent processes will cause database corruption. Use only when you're absolutely certain only one process will access the database
+ // Search by meaning (vector similarity)
+ const results = await memory.find("token refresh issues", { limit: 5 });

- ### WAL Safety Features
+ // Get memory by ID
+ const mem = await memory.get(id);

- PGLite uses PostgreSQL's Write-Ahead Log (WAL) for durability. Swarm Mail includes safeguards against WAL bloat:
+ // Validate (resets decay timer)
+ await memory.validate(id);

- **Automatic checkpointing:**
- ```typescript
- // After batch operations, force WAL flush
- await db.checkpoint()
+ // Check Ollama health
+ const health = await memory.checkHealth();
+ // { ollama: true, model: "mxbai-embed-large" }
  ```

- **Health monitoring:**
- ```typescript
- // Check WAL size (default threshold: 100MB)
- const health = await swarmMail.healthCheck({ walThresholdMb: 100 })
-
- if (!health.walHealth?.healthy) {
- console.warn(health.walHealth?.message)
- // "WAL size 120MB exceeds 100MB threshold (15 files)"
- }
+ > **Note:** Requires [Ollama](https://ollama.ai/) for vector embeddings. Falls back to full-text search if unavailable.
+ >
+ > ```bash
+ > ollama pull mxbai-embed-large
+ > ```

- // Get detailed stats
- const stats = await swarmMail.getDatabaseStats()
- // { connected: true, wal: { size: 120_000_000, fileCount: 15 } }
- ```
+ ### Custom Database Setup

- **When to checkpoint manually:**
+ Bring your own libSQL database:

- - After migrations: `await swarmMail.runMigrations(); await db.checkpoint()`
- - After bulk event appends (100+ events)
- - Before long-running operations
+ ```typescript
+ import { createLibSQLAdapter } from "swarm-mail";
+ import { drizzle } from "drizzle-orm/libsql";
+ import { createClient } from "@libsql/client";

- ### Ephemeral Instances (Testing)
+ // Create libSQL client
+ const client = createClient({
+ url: "file:///my/custom/path/swarm.db"
+ });

- For tests, create isolated in-memory instances:
+ const db = drizzle(client);

- ```typescript
- import { createInMemorySwarmMail } from 'swarm-mail'
+ // Create adapter
+ const swarmMail = createLibSQLAdapter(db, "/my/project");

- const swarmMail = await createInMemorySwarmMail('test-id')
- // ... run tests ...
- await swarmMail.close()
+ // Use as normal
+ await swarmMail.appendEvent({
+ type: "agent_registered",
+ agent_name: "CustomAgent",
+ timestamp: Date.now(),
+ });
  ```

- **Don't use ephemeral instances in production** - multiple short-lived processes compound WAL accumulation since each instance creates new PGLite connections without coordinated cleanup.
-
  ## Event Types

  ```typescript
@@ -234,6 +236,7 @@ type SwarmMailEvent =
  to: string[];
  subject: string;
  body: string;
+ thread_id?: string;
  }
  | { type: "message_read"; message_id: number; agent_name: string }
  | {
@@ -241,6 +244,7 @@ type SwarmMailEvent =
  agent_name: string;
  paths: string[];
  exclusive: boolean;
+ reason?: string;
  }
  | { type: "file_released"; agent_name: string; paths: string[] }
  | {
@@ -260,7 +264,7 @@ type SwarmMailEvent =

  ## Projections

- Materialized views derived from events:
+ Materialized views automatically updated from events:

  | Projection | Description |
  | ------------------- | ---------------------------------- |
@@ -270,46 +274,65 @@ Materialized views derived from events:
  | `swarm_contexts` | Checkpoint state for recovery |
  | `eval_records` | Outcome data for learning |

- ### Semantic Memory
+ ## Testing

- Persistent, searchable storage for agent learnings:
+ For tests, use in-memory instances:

  ```typescript
- import { createMemoryAdapter } from 'swarm-mail/memory'
+ import { createInMemorySwarmMail } from "swarm-mail";
+
+ describe("my feature", () => {
+ let swarmMail: SwarmMailAdapter;
+
+ beforeAll(async () => {
+ swarmMail = await createInMemorySwarmMail("test");
+ });
+
+ afterAll(async () => {
+ await swarmMail.close();
+ });
+
+ test("appends events", async () => {
+ const result = await swarmMail.appendEvent({
+ type: "agent_registered",
+ agent_name: "TestAgent",
+ timestamp: Date.now(),
+ });
+
+ expect(result.id).toBeGreaterThan(0);
+ });
+ });
+ ```

- const swarmMail = await getSwarmMail('/my/project')
- const db = await swarmMail.getDatabase()
- const memory = await createMemoryAdapter(db)
+ ## Architecture

- // Store a learning
- const { id } = await memory.store(
- "OAuth refresh tokens need 5min buffer before expiry...",
- { tags: "auth,tokens,debugging" }
- )
+ - **Event Sourcing** - Append-only log, projections are derived
+ - **Local-first** - libSQL embedded SQLite, no external servers
+ - **Type-safe** - TypeScript with Zod validation and Drizzle ORM
+ - **Effect-TS** - Composable, testable actor primitives
+ - **Git-synced** - Hive cells stored as JSON + git for team coordination

- // Search by meaning (vector similarity)
- const results = await memory.find("token refresh issues")
+ ## Migration from v0.31

- // Check Ollama health
- const health = await memory.checkHealth()
- // { ollama: true, model: "mxbai-embed-large" }
- ```
+ If you're migrating from PGLite-based swarm-mail:

- > **Note:** Requires [Ollama](https://ollama.ai/) for vector embeddings. Falls back to full-text search if unavailable.
- >
- > ```bash
- > ollama pull mxbai-embed-large
- > ```
+ ```typescript
+ // OLD (v0.31)
+ import { getSwarmMail } from "swarm-mail";
+ const swarmMail = await getSwarmMail("/my/project");

- > **Deprecation Notice:** The standalone [semantic-memory MCP server](https://github.com/joelhooks/semantic-memory) is deprecated. Use the embedded memory in swarm-mail instead - same API, single PGLite instance, no separate process.
+ // NEW (v0.32+)
+ import { getSwarmMailLibSQL } from "swarm-mail";
+ const swarmMail = await getSwarmMailLibSQL("/my/project");
+ ```

- ## Architecture
+ **Key changes:**
+ - Storage backend: PGLite → libSQL (SQLite-compatible)
+ - ORM: Raw SQL → Drizzle ORM
+ - Main export: `getSwarmMailLibSQL` (was `getSwarmMail`)
+ - Semantic memory: Embedded (was standalone MCP server)

- - **Append-only log** - Events are immutable, projections are derived
- - **Local-first** - PGLite embedded Postgres, no external servers
- - **Effect-TS** - Type-safe, composable, testable
- - **Exactly-once** - DurableCursor checkpoints position
- - **Semantic memory** - Vector embeddings with pgvector + Ollama
+ See [CHANGELOG.md](./CHANGELOG.md) for full migration guide.

  ## API Reference

@@ -349,6 +372,13 @@ interface SwarmMailAdapter {
  }
  ```

+ ## Resources
+
+ - **Documentation:** [swarmtools.ai/docs](https://swarmtools.ai/docs)
+ - **Architecture:** [SWARM-CONTEXT.md](../../SWARM-CONTEXT.md)
+ - **Examples:** [examples/](../../examples/)
+ - **Changelog:** [CHANGELOG.md](./CHANGELOG.md)
+
  ## License

  MIT
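The updated README lists DurableMailbox, DurableLock, DurableCursor, and DurableDeferred among the actor primitives but no longer shows them in use. The sketch below is adapted from the `### Durable Primitives (Effect-TS)` section removed above; the calls shown (`DurableMailbox.create`, `lock.acquire({ ttl })`, `ask<Req, Res>()`) follow that older example and may not match the 1.4.0 signatures exactly.

```typescript
import { DurableMailbox, DurableLock, ask } from "swarm-mail";
import { Effect } from "effect";

type MyMessage = { type: "task"; payload: string };

// Actor mailbox with typed envelopes (API as shown in the pre-1.4.0 README)
const mailbox = DurableMailbox.create<MyMessage>("worker-a");
await Effect.runPromise(
  mailbox.send({ type: "task", payload: "do something" }),
);

// CAS-based file lock with a TTL; release runs even if the work fails
const lock = DurableLock.create("src/auth.ts");
await Effect.runPromise(
  lock.acquire({ ttl: 60_000 }).pipe(
    Effect.flatMap(() => Effect.sync(() => console.log("editing src/auth.ts"))),
    Effect.ensuring(lock.release()),
  ),
);

// RPC-style request/response between agents
type TypesRequest = { type: "get-types" };
type TypesResponse = { types: string[] };
const response = await Effect.runPromise(
  ask<TypesRequest, TypesResponse>("other-agent", { type: "get-types" }),
);
```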
package/bin/swarm-db.ts ADDED
@@ -0,0 +1,168 @@
+ #!/usr/bin/env bun
+ /**
+ * swarm-db CLI - Human-facing analytics and SQL queries
+ *
+ * Commands:
+ * query <sql> Execute raw SQL (read-only, max 1000 rows)
+ * analytics <command> Run pre-built analytics query
+ * list List available analytics commands
+ *
+ * Flags:
+ * --format <fmt> Output format: table (default), json, csv, jsonl
+ * --db <path> Database path (default: ~/.config/swarm-tools/swarm.db)
+ * --since <range> Time range filter (e.g., 7d, 24h, 30m)
+ * --until <range> End time filter (e.g., 1d, 12h)
+ * --project <key> Filter by project key
+ * --epic <id> Filter by epic ID
+ * --help, -h Show help
+ *
+ * Examples:
+ * swarm-db query "SELECT type, COUNT(*) FROM events GROUP BY type"
+ * swarm-db analytics failed-decompositions --format json
+ * swarm-db analytics agent-activity --since 7d --format table
+ * swarm-db list
+ */
+
+ import { homedir } from "node:os";
+ import { join } from "node:path";
+ import { parseArgs } from "node:util";
+ import type { OutputFormat } from "../src/analytics/types.js";
+ import {
+ executeAnalyticsCommand,
+ executeQueryCommand,
+ listAnalyticsCommands,
+ } from "../src/cli/db.js";
+
+ const DEFAULT_DB = join(homedir(), ".config/swarm-tools/swarm.db");
+
+ function showHelp() {
+ console.log(`
+ swarm-db - Analytics and SQL queries for swarm coordination database
+
+ USAGE
+ swarm-db <command> [options]
+
+ COMMANDS
+ query <sql> Execute raw SQL query (read-only, max 1000 rows)
+ analytics <command> Run pre-built analytics query
+ list List all available analytics commands
+
+ ANALYTICS COMMANDS
+ Run 'swarm-db list' to see all available analytics commands with descriptions.
+
+ FLAGS
+ --format <fmt> Output format: table (default), json, csv, jsonl
+ --db <path> Database path (default: ~/.config/swarm-tools/swarm.db)
+ --since <range> Time range filter (e.g., 7d, 24h, 30m)
+ --until <range> End time filter (e.g., 1d, 12h)
+ --project <key> Filter by project key
+ --epic <id> Filter by epic ID
+ -h, --help Show this help message
+
+ EXAMPLES
+ # Raw SQL query
+ swarm-db query "SELECT type, COUNT(*) FROM events GROUP BY type"
+
+ # Analytics with default table format
+ swarm-db analytics failed-decompositions
+
+ # Analytics with time filter and JSON output
+ swarm-db analytics agent-activity --since 7d --format json
+
+ # Analytics filtered by project
+ swarm-db analytics lock-contention --project /path/to/project --format csv
+
+ # List all available analytics commands
+ swarm-db list
+
+ NOTES
+ - SQL queries are read-only for safety (SELECT only)
+ - Maximum 1000 rows returned for raw queries
+ - Analytics queries have built-in limits
+ - Time ranges: d=days, h=hours, m=minutes (e.g., 7d, 24h, 30m)
+ `);
+ }
+
+ async function main() {
+ const { values, positionals } = parseArgs({
+ args: process.argv.slice(2),
+ options: {
+ format: { type: "string", default: "table" },
+ db: { type: "string", default: DEFAULT_DB },
+ since: { type: "string" },
+ until: { type: "string" },
+ project: { type: "string" },
+ epic: { type: "string" },
+ help: { type: "boolean", short: "h", default: false },
+ },
+ allowPositionals: true,
+ });
+
+ if (values.help || positionals.length === 0) {
+ showHelp();
+ process.exit(0);
+ }
+
+ const command = positionals[0];
+ const format = values.format as OutputFormat;
+
+ try {
+ if (command === "list") {
+ // List analytics commands
+ const commands = listAnalyticsCommands();
+ console.log("\nAvailable Analytics Commands:\n");
+ for (const cmd of commands) {
+ console.log(` ${cmd.name.padEnd(25)} ${cmd.description}`);
+ }
+ console.log(
+ `\nRun 'swarm-db analytics <command>' to execute a command.\n`,
+ );
+ } else if (command === "query") {
+ // Execute raw SQL
+ const sql = positionals[1];
+ if (!sql) {
+ console.error("Error: SQL query required");
+ console.error("Usage: swarm-db query <sql>");
+ process.exit(1);
+ }
+
+ const output = await executeQueryCommand({
+ sql,
+ db: values.db,
+ format,
+ });
+
+ console.log(output);
+ } else if (command === "analytics") {
+ // Execute analytics command
+ const analyticsCmd = positionals[1];
+ if (!analyticsCmd) {
+ console.error("Error: Analytics command required");
+ console.error("Usage: swarm-db analytics <command>");
+ console.error("Run 'swarm-db list' to see available commands");
+ process.exit(1);
+ }
+
+ const output = await executeAnalyticsCommand({
+ command: analyticsCmd,
+ db: values.db,
+ format,
+ since: values.since,
+ until: values.until,
+ project: values.project,
+ epic: values.epic,
+ });
+
+ console.log(output);
+ } else {
+ console.error(`Unknown command: ${command}`);
+ console.error("Run 'swarm-db --help' for usage information");
+ process.exit(1);
+ }
+ } catch (error) {
+ console.error(`Error: ${error instanceof Error ? error.message : String(error)}`);
+ process.exit(1);
+ }
+ }
+
+ main();
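`bin/swarm-db.ts` passes `--since`/`--until` through as strings, and the parsing lives in `src/cli/db.ts`, which is not part of this diff. As a rough illustration of the documented `d`/`h`/`m` syntax (`7d`, `24h`, `30m`), a hypothetical helper that turns a range into an epoch-millisecond cutoff might look like the sketch below; the name and behavior are illustrative, not the package's implementation.

```typescript
// Hypothetical helper, not from swarm-mail: resolve "7d" / "24h" / "30m"
// into an epoch-millisecond cutoff relative to now.
function rangeToCutoffMs(range: string, now: number = Date.now()): number {
  const match = /^(\d+)([dhm])$/.exec(range.trim());
  if (!match) {
    throw new Error(`Invalid time range: ${range} (expected e.g. 7d, 24h, 30m)`);
  }
  const value = Number(match[1]);
  const unitMs = { d: 86_400_000, h: 3_600_000, m: 60_000 }[match[2] as "d" | "h" | "m"];
  return now - value * unitMs;
}

// e.g. --since 7d → only rows newer than this timestamp
const sinceMs = rangeToCutoffMs("7d");
console.log(`filtering events after ${new Date(sinceMs).toISOString()}`);
```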
package/dist/adapter.d.ts CHANGED
@@ -12,10 +12,10 @@
  *
  * ## Usage
  * ```typescript
- * import { createPGLiteAdapter } from '@opencode/swarm-mail/adapters/pglite';
+ * import { createLibSQLAdapter } from '@opencode/swarm-mail/adapters/libsql';
  * import { createSwarmMailAdapter } from '@opencode/swarm-mail';
  *
- * const dbAdapter = createPGLiteAdapter({ path: './streams.db' });
+ * const dbAdapter = createLibSQLAdapter({ path: './streams.db' });
  * const swarmMail = createSwarmMailAdapter(dbAdapter, '/path/to/project');
  *
  * // Use the adapter
package/dist/analytics/formatters.d.ts ADDED
@@ -0,0 +1,50 @@
+ /**
+ * Analytics Query Result Formatters
+ *
+ * Output formatters for query results in various formats.
+ * Each formatter takes a QueryResult and returns a string.
+ */
+ import type { QueryResult } from "./types.js";
+ /**
+ * Format query result as ASCII table with aligned columns.
+ *
+ * Produces a readable table format with headers, separators, and aligned columns.
+ * Empty results show headers and indicate 0 rows.
+ *
+ * @param result - Query result to format
+ * @returns ASCII table string
+ */
+ export declare function formatTable(result: QueryResult): string;
+ /**
+ * Format query result as pretty-printed JSON.
+ *
+ * Produces a readable JSON representation of the entire QueryResult object.
+ *
+ * @param result - Query result to format
+ * @returns Pretty-printed JSON string
+ */
+ export declare function formatJSON(result: QueryResult): string;
+ /**
+ * Format query result as RFC 4180 compliant CSV.
+ *
+ * Produces CSV with:
+ * - Header row with column names
+ * - Data rows with values
+ * - Proper escaping of quotes and commas
+ * - Empty strings for null/undefined values
+ *
+ * @param result - Query result to format
+ * @returns CSV string
+ */
+ export declare function formatCSV(result: QueryResult): string;
+ /**
+ * Format query result as newline-delimited JSON (JSONL).
+ *
+ * Produces one compact JSON object per line, one line per row.
+ * Empty results produce empty string.
+ *
+ * @param result - Query result to format
+ * @returns JSONL string (newline-delimited JSON objects)
+ */
+ export declare function formatJSONL(result: QueryResult): string;
+ //# sourceMappingURL=formatters.d.ts.map