swarm-mail 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +201 -0
- package/package.json +28 -0
- package/src/adapter.ts +306 -0
- package/src/index.ts +57 -0
- package/src/pglite.ts +189 -0
- package/src/streams/agent-mail.test.ts +777 -0
- package/src/streams/agent-mail.ts +535 -0
- package/src/streams/debug.test.ts +500 -0
- package/src/streams/debug.ts +727 -0
- package/src/streams/effect/ask.integration.test.ts +314 -0
- package/src/streams/effect/ask.ts +202 -0
- package/src/streams/effect/cursor.integration.test.ts +418 -0
- package/src/streams/effect/cursor.ts +288 -0
- package/src/streams/effect/deferred.test.ts +357 -0
- package/src/streams/effect/deferred.ts +445 -0
- package/src/streams/effect/index.ts +17 -0
- package/src/streams/effect/layers.ts +73 -0
- package/src/streams/effect/lock.test.ts +385 -0
- package/src/streams/effect/lock.ts +399 -0
- package/src/streams/effect/mailbox.test.ts +260 -0
- package/src/streams/effect/mailbox.ts +318 -0
- package/src/streams/events.test.ts +924 -0
- package/src/streams/events.ts +329 -0
- package/src/streams/index.test.ts +229 -0
- package/src/streams/index.ts +578 -0
- package/src/streams/migrations.test.ts +359 -0
- package/src/streams/migrations.ts +362 -0
- package/src/streams/projections.test.ts +611 -0
- package/src/streams/projections.ts +564 -0
- package/src/streams/store.integration.test.ts +658 -0
- package/src/streams/store.ts +1129 -0
- package/src/streams/swarm-mail.ts +552 -0
- package/src/types/adapter.ts +392 -0
- package/src/types/database.ts +127 -0
- package/src/types/index.ts +26 -0
- package/tsconfig.json +22 -0
|
@@ -0,0 +1,359 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Tests for Schema Migration System
|
|
3
|
+
*/
|
|
4
|
+
import { describe, it, expect, beforeEach, afterEach } from "vitest";
|
|
5
|
+
import { PGlite } from "@electric-sql/pglite";
|
|
6
|
+
import {
|
|
7
|
+
runMigrations,
|
|
8
|
+
getCurrentVersion,
|
|
9
|
+
rollbackTo,
|
|
10
|
+
isMigrationApplied,
|
|
11
|
+
getPendingMigrations,
|
|
12
|
+
getAppliedMigrations,
|
|
13
|
+
migrations,
|
|
14
|
+
} from "./migrations";
|
|
15
|
+
|
|
16
|
+
describe("Schema Migrations", () => {
|
|
17
|
+
let db: PGlite;
|
|
18
|
+
|
|
19
|
+
beforeEach(async () => {
|
|
20
|
+
// Use in-memory database for tests
|
|
21
|
+
db = new PGlite();
|
|
22
|
+
});
|
|
23
|
+
|
|
24
|
+
afterEach(async () => {
|
|
25
|
+
await db.close();
|
|
26
|
+
});
|
|
27
|
+
|
|
28
|
+
describe("Fresh Install", () => {
|
|
29
|
+
it("should start with version 0", async () => {
|
|
30
|
+
const version = await getCurrentVersion(db);
|
|
31
|
+
expect(version).toBe(0);
|
|
32
|
+
});
|
|
33
|
+
|
|
34
|
+
it("should run all migrations on fresh database", async () => {
|
|
35
|
+
const result = await runMigrations(db);
|
|
36
|
+
|
|
37
|
+
expect(result.applied).toEqual([1, 2, 3, 4]);
|
|
38
|
+
expect(result.current).toBe(4);
|
|
39
|
+
|
|
40
|
+
const version = await getCurrentVersion(db);
|
|
41
|
+
expect(version).toBe(4);
|
|
42
|
+
});
|
|
43
|
+
|
|
44
|
+
it("should create cursors table with correct schema", async () => {
|
|
45
|
+
await runMigrations(db);
|
|
46
|
+
|
|
47
|
+
// Verify table exists
|
|
48
|
+
const tableResult = await db.query<{ exists: boolean }>(
|
|
49
|
+
`SELECT EXISTS (
|
|
50
|
+
SELECT FROM information_schema.tables
|
|
51
|
+
WHERE table_name = 'cursors'
|
|
52
|
+
) as exists`,
|
|
53
|
+
);
|
|
54
|
+
expect(tableResult.rows[0]?.exists).toBe(true);
|
|
55
|
+
|
|
56
|
+
// Verify columns
|
|
57
|
+
const columnsResult = await db.query<{ column_name: string }>(
|
|
58
|
+
`SELECT column_name FROM information_schema.columns
|
|
59
|
+
WHERE table_name = 'cursors'
|
|
60
|
+
ORDER BY ordinal_position`,
|
|
61
|
+
);
|
|
62
|
+
const columns = columnsResult.rows.map((r) => r.column_name);
|
|
63
|
+
expect(columns).toContain("id");
|
|
64
|
+
expect(columns).toContain("stream");
|
|
65
|
+
expect(columns).toContain("checkpoint");
|
|
66
|
+
expect(columns).toContain("position");
|
|
67
|
+
expect(columns).toContain("updated_at");
|
|
68
|
+
|
|
69
|
+
// Verify unique constraint exists
|
|
70
|
+
const constraintsResult = await db.query<{ constraint_name: string }>(
|
|
71
|
+
`SELECT constraint_name FROM information_schema.table_constraints
|
|
72
|
+
WHERE table_name = 'cursors' AND constraint_type = 'UNIQUE'`,
|
|
73
|
+
);
|
|
74
|
+
expect(constraintsResult.rows.length).toBeGreaterThan(0);
|
|
75
|
+
});
|
|
76
|
+
|
|
77
|
+
it("should create deferred table with correct schema", async () => {
|
|
78
|
+
await runMigrations(db);
|
|
79
|
+
|
|
80
|
+
const tableResult = await db.query<{ exists: boolean }>(
|
|
81
|
+
`SELECT EXISTS (
|
|
82
|
+
SELECT FROM information_schema.tables
|
|
83
|
+
WHERE table_name = 'deferred'
|
|
84
|
+
) as exists`,
|
|
85
|
+
);
|
|
86
|
+
expect(tableResult.rows[0]?.exists).toBe(true);
|
|
87
|
+
|
|
88
|
+
const columnsResult = await db.query<{ column_name: string }>(
|
|
89
|
+
`SELECT column_name FROM information_schema.columns
|
|
90
|
+
WHERE table_name = 'deferred'
|
|
91
|
+
ORDER BY ordinal_position`,
|
|
92
|
+
);
|
|
93
|
+
const columns = columnsResult.rows.map((r) => r.column_name);
|
|
94
|
+
expect(columns).toContain("id");
|
|
95
|
+
expect(columns).toContain("url");
|
|
96
|
+
expect(columns).toContain("resolved");
|
|
97
|
+
expect(columns).toContain("value");
|
|
98
|
+
expect(columns).toContain("error");
|
|
99
|
+
expect(columns).toContain("expires_at");
|
|
100
|
+
expect(columns).toContain("created_at");
|
|
101
|
+
});
|
|
102
|
+
});
|
|
103
|
+
|
|
104
|
+
describe("Idempotency", () => {
|
|
105
|
+
it("should be safe to run migrations multiple times", async () => {
|
|
106
|
+
// First run
|
|
107
|
+
const result1 = await runMigrations(db);
|
|
108
|
+
expect(result1.applied).toEqual([1, 2, 3, 4]);
|
|
109
|
+
|
|
110
|
+
// Second run - should apply nothing
|
|
111
|
+
const result2 = await runMigrations(db);
|
|
112
|
+
expect(result2.applied).toEqual([]);
|
|
113
|
+
expect(result2.current).toBe(4);
|
|
114
|
+
|
|
115
|
+
// Version should still be 2
|
|
116
|
+
const version = await getCurrentVersion(db);
|
|
117
|
+
expect(version).toBe(4);
|
|
118
|
+
});
|
|
119
|
+
});
|
|
120
|
+
|
|
121
|
+
describe("Incremental Upgrade", () => {
|
|
122
|
+
it("should apply only new migrations", async () => {
|
|
123
|
+
// Manually apply migration 1
|
|
124
|
+
await db.exec(migrations[0]!.up);
|
|
125
|
+
await db.exec(`
|
|
126
|
+
CREATE TABLE IF NOT EXISTS schema_version (
|
|
127
|
+
version INTEGER PRIMARY KEY,
|
|
128
|
+
applied_at BIGINT NOT NULL,
|
|
129
|
+
description TEXT
|
|
130
|
+
);
|
|
131
|
+
`);
|
|
132
|
+
await db.query(
|
|
133
|
+
`INSERT INTO schema_version (version, applied_at, description)
|
|
134
|
+
VALUES ($1, $2, $3)`,
|
|
135
|
+
[1, Date.now(), migrations[0]!.description],
|
|
136
|
+
);
|
|
137
|
+
|
|
138
|
+
// Now run migrations - should only apply 2
|
|
139
|
+
const result = await runMigrations(db);
|
|
140
|
+
expect(result.applied).toEqual([2, 3, 4]);
|
|
141
|
+
expect(result.current).toBe(4);
|
|
142
|
+
});
|
|
143
|
+
});
|
|
144
|
+
|
|
145
|
+
describe("Rollback", () => {
|
|
146
|
+
it("should rollback to target version", async () => {
|
|
147
|
+
// Apply all migrations
|
|
148
|
+
await runMigrations(db);
|
|
149
|
+
expect(await getCurrentVersion(db)).toBe(4);
|
|
150
|
+
|
|
151
|
+
// Rollback to version 1
|
|
152
|
+
const result = await rollbackTo(db, 1);
|
|
153
|
+
expect(result.rolledBack).toEqual([4, 3, 2]);
|
|
154
|
+
expect(result.current).toBe(1);
|
|
155
|
+
|
|
156
|
+
// Version should be 1
|
|
157
|
+
const version = await getCurrentVersion(db);
|
|
158
|
+
expect(version).toBe(1);
|
|
159
|
+
|
|
160
|
+
// Cursors table should still exist
|
|
161
|
+
const cursorsExists = await db.query<{ exists: boolean }>(
|
|
162
|
+
`SELECT EXISTS (
|
|
163
|
+
SELECT FROM information_schema.tables
|
|
164
|
+
WHERE table_name = 'cursors'
|
|
165
|
+
) as exists`,
|
|
166
|
+
);
|
|
167
|
+
expect(cursorsExists.rows[0]?.exists).toBe(true);
|
|
168
|
+
|
|
169
|
+
// Deferred table should be gone
|
|
170
|
+
const deferredExists = await db.query<{ exists: boolean }>(
|
|
171
|
+
`SELECT EXISTS (
|
|
172
|
+
SELECT FROM information_schema.tables
|
|
173
|
+
WHERE table_name = 'deferred'
|
|
174
|
+
) as exists`,
|
|
175
|
+
);
|
|
176
|
+
expect(deferredExists.rows[0]?.exists).toBe(false);
|
|
177
|
+
});
|
|
178
|
+
|
|
179
|
+
it("should rollback to version 0", async () => {
|
|
180
|
+
await runMigrations(db);
|
|
181
|
+
|
|
182
|
+
const result = await rollbackTo(db, 0);
|
|
183
|
+
expect(result.rolledBack).toEqual([4, 3, 2, 1]);
|
|
184
|
+
expect(result.current).toBe(0);
|
|
185
|
+
|
|
186
|
+
// All tables should be gone
|
|
187
|
+
const cursorsExists = await db.query<{ exists: boolean }>(
|
|
188
|
+
`SELECT EXISTS (
|
|
189
|
+
SELECT FROM information_schema.tables
|
|
190
|
+
WHERE table_name = 'cursors'
|
|
191
|
+
) as exists`,
|
|
192
|
+
);
|
|
193
|
+
expect(cursorsExists.rows[0]?.exists).toBe(false);
|
|
194
|
+
});
|
|
195
|
+
|
|
196
|
+
it("should do nothing if target version >= current", async () => {
|
|
197
|
+
await runMigrations(db);
|
|
198
|
+
|
|
199
|
+
const result = await rollbackTo(db, 4);
|
|
200
|
+
expect(result.rolledBack).toEqual([]);
|
|
201
|
+
expect(result.current).toBe(4);
|
|
202
|
+
});
|
|
203
|
+
});
|
|
204
|
+
|
|
205
|
+
describe("Migration Status", () => {
|
|
206
|
+
it("should check if migration is applied", async () => {
|
|
207
|
+
expect(await isMigrationApplied(db, 1)).toBe(false);
|
|
208
|
+
|
|
209
|
+
await runMigrations(db);
|
|
210
|
+
|
|
211
|
+
expect(await isMigrationApplied(db, 1)).toBe(true);
|
|
212
|
+
expect(await isMigrationApplied(db, 2)).toBe(true);
|
|
213
|
+
expect(await isMigrationApplied(db, 3)).toBe(true);
|
|
214
|
+
expect(await isMigrationApplied(db, 4)).toBe(true);
|
|
215
|
+
expect(await isMigrationApplied(db, 3)).toBe(true);
|
|
216
|
+
expect(await isMigrationApplied(db, 4)).toBe(true);
|
|
217
|
+
});
|
|
218
|
+
|
|
219
|
+
it("should list pending migrations", async () => {
|
|
220
|
+
const pending1 = await getPendingMigrations(db);
|
|
221
|
+
expect(pending1).toHaveLength(4);
|
|
222
|
+
expect(pending1.map((m) => m.version)).toEqual([1, 2, 3, 4]);
|
|
223
|
+
|
|
224
|
+
// Apply migration 1
|
|
225
|
+
const migration = migrations[0];
|
|
226
|
+
if (!migration) throw new Error("Migration not found");
|
|
227
|
+
|
|
228
|
+
await db.exec(migration.up);
|
|
229
|
+
await db.exec(`
|
|
230
|
+
CREATE TABLE IF NOT EXISTS schema_version (
|
|
231
|
+
version INTEGER PRIMARY KEY,
|
|
232
|
+
applied_at BIGINT NOT NULL,
|
|
233
|
+
description TEXT
|
|
234
|
+
);
|
|
235
|
+
`);
|
|
236
|
+
await db.query(
|
|
237
|
+
`INSERT INTO schema_version (version, applied_at, description)
|
|
238
|
+
VALUES ($1, $2, $3)`,
|
|
239
|
+
[1, Date.now(), migration.description],
|
|
240
|
+
);
|
|
241
|
+
|
|
242
|
+
const pending2 = await getPendingMigrations(db);
|
|
243
|
+
expect(pending2).toHaveLength(3);
|
|
244
|
+
expect(pending2.map((m) => m.version)).toEqual([2, 3, 4]);
|
|
245
|
+
});
|
|
246
|
+
|
|
247
|
+
it("should list applied migrations", async () => {
|
|
248
|
+
const applied1 = await getAppliedMigrations(db);
|
|
249
|
+
expect(applied1).toHaveLength(0);
|
|
250
|
+
|
|
251
|
+
await runMigrations(db);
|
|
252
|
+
|
|
253
|
+
const applied2 = await getAppliedMigrations(db);
|
|
254
|
+
expect(applied2).toHaveLength(4);
|
|
255
|
+
expect(applied2.map((m) => m.version)).toEqual([1, 2, 3, 4]);
|
|
256
|
+
expect(applied2[0]?.description).toBe(
|
|
257
|
+
"Add cursors table for DurableCursor",
|
|
258
|
+
);
|
|
259
|
+
});
|
|
260
|
+
});
|
|
261
|
+
|
|
262
|
+
describe("Data Persistence", () => {
|
|
263
|
+
it("should preserve data across migrations", async () => {
|
|
264
|
+
// Apply migration 1 (cursors table)
|
|
265
|
+
await db.exec(migrations[0]!.up);
|
|
266
|
+
await db.exec(`
|
|
267
|
+
CREATE TABLE IF NOT EXISTS schema_version (
|
|
268
|
+
version INTEGER PRIMARY KEY,
|
|
269
|
+
applied_at BIGINT NOT NULL,
|
|
270
|
+
description TEXT
|
|
271
|
+
);
|
|
272
|
+
`);
|
|
273
|
+
await db.query(
|
|
274
|
+
`INSERT INTO schema_version (version, applied_at, description)
|
|
275
|
+
VALUES ($1, $2, $3)`,
|
|
276
|
+
[1, Date.now(), migrations[0]!.description],
|
|
277
|
+
);
|
|
278
|
+
|
|
279
|
+
// Insert test data
|
|
280
|
+
await db.query(
|
|
281
|
+
`INSERT INTO cursors (stream, checkpoint, position, updated_at)
|
|
282
|
+
VALUES ($1, $2, $3, $4)`,
|
|
283
|
+
["test-stream", "test-checkpoint", 42, Date.now()],
|
|
284
|
+
);
|
|
285
|
+
|
|
286
|
+
// Apply remaining migrations
|
|
287
|
+
await runMigrations(db);
|
|
288
|
+
|
|
289
|
+
// Data should still be there
|
|
290
|
+
const result = await db.query<{ position: number }>(
|
|
291
|
+
`SELECT position FROM cursors WHERE stream = $1`,
|
|
292
|
+
["test-stream"],
|
|
293
|
+
);
|
|
294
|
+
expect(result.rows[0]?.position).toBe(42);
|
|
295
|
+
});
|
|
296
|
+
});
|
|
297
|
+
|
|
298
|
+
describe("Error Handling", () => {
|
|
299
|
+
it("should rollback failed migrations", async () => {
|
|
300
|
+
// Apply good migration first
|
|
301
|
+
const migration = migrations[0];
|
|
302
|
+
if (!migration) throw new Error("Migration not found");
|
|
303
|
+
|
|
304
|
+
await db.exec(migration.up);
|
|
305
|
+
await db.exec(`
|
|
306
|
+
CREATE TABLE IF NOT EXISTS schema_version (
|
|
307
|
+
version INTEGER PRIMARY KEY,
|
|
308
|
+
applied_at BIGINT NOT NULL,
|
|
309
|
+
description TEXT
|
|
310
|
+
);
|
|
311
|
+
`);
|
|
312
|
+
await db.query(
|
|
313
|
+
`INSERT INTO schema_version (version, applied_at, description)
|
|
314
|
+
VALUES ($1, $2, $3)`,
|
|
315
|
+
[1, Date.now(), migration.description],
|
|
316
|
+
);
|
|
317
|
+
|
|
318
|
+
// Try to run invalid SQL in a transaction
|
|
319
|
+
try {
|
|
320
|
+
await db.exec("BEGIN");
|
|
321
|
+
await db.exec("THIS IS INVALID SQL");
|
|
322
|
+
await db.exec("COMMIT");
|
|
323
|
+
throw new Error("Should have thrown");
|
|
324
|
+
} catch {
|
|
325
|
+
await db.exec("ROLLBACK");
|
|
326
|
+
// Expected to fail
|
|
327
|
+
}
|
|
328
|
+
|
|
329
|
+
// Version should still be 1
|
|
330
|
+
const version = await getCurrentVersion(db);
|
|
331
|
+
expect(version).toBe(1);
|
|
332
|
+
});
|
|
333
|
+
});
|
|
334
|
+
|
|
335
|
+
describe("Schema Version Table", () => {
|
|
336
|
+
it("should record migration metadata", async () => {
|
|
337
|
+
await runMigrations(db);
|
|
338
|
+
|
|
339
|
+
const result = await db.query<{
|
|
340
|
+
version: number;
|
|
341
|
+
applied_at: string;
|
|
342
|
+
description: string;
|
|
343
|
+
}>(
|
|
344
|
+
`SELECT version, applied_at, description FROM schema_version ORDER BY version`,
|
|
345
|
+
);
|
|
346
|
+
|
|
347
|
+
expect(result.rows).toHaveLength(4);
|
|
348
|
+
expect(result.rows[0]?.version).toBe(1);
|
|
349
|
+
expect(result.rows[0]?.description).toBe(
|
|
350
|
+
"Add cursors table for DurableCursor",
|
|
351
|
+
);
|
|
352
|
+
expect(result.rows[1]?.version).toBe(2);
|
|
353
|
+
|
|
354
|
+
// Applied_at should be recent
|
|
355
|
+
const appliedAt = parseInt(result.rows[0]?.applied_at as string);
|
|
356
|
+
expect(appliedAt).toBeGreaterThan(Date.now() - 10000);
|
|
357
|
+
});
|
|
358
|
+
});
|
|
359
|
+
});
|
|
@@ -0,0 +1,362 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Schema Migration System
|
|
3
|
+
*
|
|
4
|
+
* Handles database schema evolution for the PGLite event store.
|
|
5
|
+
*
|
|
6
|
+
* ## How It Works
|
|
7
|
+
*
|
|
8
|
+
* 1. Each migration has a unique version number (incrementing integer)
|
|
9
|
+
* 2. On startup, `runMigrations()` checks current schema version
|
|
10
|
+
* 3. Migrations are applied in order until schema is current
|
|
11
|
+
* 4. Version is stored in `schema_version` table
|
|
12
|
+
*
|
|
13
|
+
* ## Adding a New Migration
|
|
14
|
+
*
|
|
15
|
+
* ```typescript
|
|
16
|
+
* // In migrations.ts
|
|
17
|
+
* export const migrations: Migration[] = [
|
|
18
|
+
* // ... existing migrations
|
|
19
|
+
* {
|
|
20
|
+
* version: 3,
|
|
21
|
+
* description: "add_new_column",
|
|
22
|
+
* up: `ALTER TABLE events ADD COLUMN new_col TEXT`,
|
|
23
|
+
* down: `ALTER TABLE events DROP COLUMN new_col`,
|
|
24
|
+
* },
|
|
25
|
+
* ];
|
|
26
|
+
* ```
|
|
27
|
+
*
|
|
28
|
+
* ## Rollback
|
|
29
|
+
*
|
|
30
|
+
* Rollback is supported via `rollbackTo(db, targetVersion)`.
|
|
31
|
+
* Note: Some migrations may not be fully reversible (data loss).
|
|
32
|
+
*
|
|
33
|
+
* ## Best Practices
|
|
34
|
+
*
|
|
35
|
+
* - Always test migrations on a copy of production data
|
|
36
|
+
* - Keep migrations small and focused
|
|
37
|
+
* - Include both `up` and `down` SQL
|
|
38
|
+
* - Use transactions for multi-statement migrations
|
|
39
|
+
* - Document any data transformations
|
|
40
|
+
*
|
|
41
|
+
* @module migrations
|
|
42
|
+
*/
|
|
43
|
+
import type { PGlite } from "@electric-sql/pglite";
|
|
44
|
+
|
|
45
|
+
// ============================================================================
|
|
46
|
+
// Types
|
|
47
|
+
// ============================================================================
|
|
48
|
+
|
|
49
|
+
/**
 * A database migration definition.
 *
 * Entries are applied in ascending `version` order by `runMigrations()`
 * and reversed in descending order by `rollbackTo()`.
 */
export interface Migration {
  /** Unique version number (must be sequential) */
  version: number;
  /** Human-readable migration description */
  description: string;
  /** SQL to apply the migration */
  up: string;
  /** SQL to rollback the migration (best effort) */
  down: string;
}
|
|
62
|
+
|
|
63
|
+
/** One row of the `schema_version` bookkeeping table. */
interface SchemaVersion {
  // Migration version number that was applied.
  version: number;
  // Epoch-millisecond timestamp recorded at apply time (Date.now()).
  applied_at: number;
  // Description copied from the Migration; TEXT column is nullable.
  description: string | null;
}
|
|
68
|
+
|
|
69
|
+
// ============================================================================
|
|
70
|
+
// Migration Definitions
|
|
71
|
+
// ============================================================================
|
|
72
|
+
|
|
73
|
+
// Ordered list of all schema migrations. Append new entries with the next
// sequential version; never edit an already-shipped migration in place.
export const migrations: Migration[] = [
  {
    version: 1,
    description: "Add cursors table for DurableCursor",
    // Per-(stream, checkpoint) read positions; UNIQUE(stream, checkpoint)
    // supports upsert-style cursor updates.
    up: `
      CREATE TABLE IF NOT EXISTS cursors (
        id SERIAL PRIMARY KEY,
        stream TEXT NOT NULL,
        checkpoint TEXT NOT NULL,
        position BIGINT NOT NULL DEFAULT 0,
        updated_at BIGINT NOT NULL,
        UNIQUE(stream, checkpoint)
      );
      CREATE INDEX IF NOT EXISTS idx_cursors_checkpoint ON cursors(checkpoint);
      CREATE INDEX IF NOT EXISTS idx_cursors_stream ON cursors(stream);
    `,
    down: `DROP TABLE IF EXISTS cursors;`,
  },
  {
    version: 2,
    description: "Add deferred table for DurableDeferred",
    // One row per deferred value, keyed by unique URL; `value`/`error`
    // stay NULL until resolution, `expires_at` drives cleanup.
    up: `
      CREATE TABLE IF NOT EXISTS deferred (
        id SERIAL PRIMARY KEY,
        url TEXT NOT NULL UNIQUE,
        resolved BOOLEAN NOT NULL DEFAULT FALSE,
        value JSONB,
        error TEXT,
        expires_at BIGINT NOT NULL,
        created_at BIGINT NOT NULL
      );
      CREATE INDEX IF NOT EXISTS idx_deferred_url ON deferred(url);
      CREATE INDEX IF NOT EXISTS idx_deferred_expires ON deferred(expires_at);
      CREATE INDEX IF NOT EXISTS idx_deferred_resolved ON deferred(resolved);
    `,
    down: `DROP TABLE IF EXISTS deferred;`,
  },
  {
    version: 3,
    description: "Add eval_records table for learning system",
    // Outcome/feedback metrics per task; most columns are nullable since
    // they are filled in after the run completes.
    up: `
      CREATE TABLE IF NOT EXISTS eval_records (
        id TEXT PRIMARY KEY,
        project_key TEXT NOT NULL,
        task TEXT NOT NULL,
        context TEXT,
        strategy TEXT NOT NULL,
        epic_title TEXT NOT NULL,
        subtasks JSONB NOT NULL,
        outcomes JSONB,
        overall_success BOOLEAN,
        total_duration_ms INTEGER,
        total_errors INTEGER,
        human_accepted BOOLEAN,
        human_modified BOOLEAN,
        human_notes TEXT,
        file_overlap_count INTEGER,
        scope_accuracy REAL,
        time_balance_ratio REAL,
        created_at BIGINT NOT NULL,
        updated_at BIGINT NOT NULL
      );
      CREATE INDEX IF NOT EXISTS idx_eval_records_project ON eval_records(project_key);
      CREATE INDEX IF NOT EXISTS idx_eval_records_strategy ON eval_records(strategy);
    `,
    down: `DROP TABLE IF EXISTS eval_records;`,
  },
  {
    version: 4,
    description: "Add swarm_contexts table for context recovery",
    // Snapshot of a swarm task's working context, indexed by epic and bead
    // ids for lookup during recovery.
    up: `
      CREATE TABLE IF NOT EXISTS swarm_contexts (
        id TEXT PRIMARY KEY,
        epic_id TEXT NOT NULL,
        bead_id TEXT NOT NULL,
        strategy TEXT NOT NULL,
        files JSONB NOT NULL,
        dependencies JSONB NOT NULL,
        directives JSONB NOT NULL,
        recovery JSONB NOT NULL,
        created_at BIGINT NOT NULL,
        updated_at BIGINT NOT NULL
      );
      CREATE INDEX IF NOT EXISTS idx_swarm_contexts_epic ON swarm_contexts(epic_id);
      CREATE INDEX IF NOT EXISTS idx_swarm_contexts_bead ON swarm_contexts(bead_id);
    `,
    down: `DROP TABLE IF EXISTS swarm_contexts;`,
  },
];
|
|
162
|
+
|
|
163
|
+
// ============================================================================
|
|
164
|
+
// Migration Execution
|
|
165
|
+
// ============================================================================
|
|
166
|
+
|
|
167
|
+
/**
 * Initialize schema_version table if it doesn't exist
 *
 * Called defensively before any read of schema_version so callers never
 * need to worry about bootstrap ordering; `IF NOT EXISTS` makes it
 * idempotent.
 */
async function ensureVersionTable(db: PGlite): Promise<void> {
  await db.exec(`
    CREATE TABLE IF NOT EXISTS schema_version (
      version INTEGER PRIMARY KEY,
      applied_at BIGINT NOT NULL,
      description TEXT
    );
  `);
}
|
|
179
|
+
|
|
180
|
+
/**
|
|
181
|
+
* Get the current schema version
|
|
182
|
+
*
|
|
183
|
+
* Returns 0 if no migrations have been applied
|
|
184
|
+
*/
|
|
185
|
+
export async function getCurrentVersion(db: PGlite): Promise<number> {
|
|
186
|
+
await ensureVersionTable(db);
|
|
187
|
+
|
|
188
|
+
const result = await db.query<{ version: number }>(
|
|
189
|
+
`SELECT MAX(version) as version FROM schema_version`,
|
|
190
|
+
);
|
|
191
|
+
|
|
192
|
+
return result.rows[0]?.version ?? 0;
|
|
193
|
+
}
|
|
194
|
+
|
|
195
|
+
/**
|
|
196
|
+
* Get all applied migrations
|
|
197
|
+
*/
|
|
198
|
+
export async function getAppliedMigrations(
|
|
199
|
+
db: PGlite,
|
|
200
|
+
): Promise<SchemaVersion[]> {
|
|
201
|
+
await ensureVersionTable(db);
|
|
202
|
+
|
|
203
|
+
const result = await db.query<{
|
|
204
|
+
version: number;
|
|
205
|
+
applied_at: string;
|
|
206
|
+
description: string | null;
|
|
207
|
+
}>(
|
|
208
|
+
`SELECT version, applied_at, description FROM schema_version ORDER BY version ASC`,
|
|
209
|
+
);
|
|
210
|
+
|
|
211
|
+
return result.rows.map((row) => ({
|
|
212
|
+
version: row.version,
|
|
213
|
+
applied_at: parseInt(row.applied_at as string),
|
|
214
|
+
description: row.description,
|
|
215
|
+
}));
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
/**
|
|
219
|
+
* Run all pending migrations
|
|
220
|
+
*
|
|
221
|
+
* Idempotent - safe to run multiple times.
|
|
222
|
+
* Only runs migrations that haven't been applied yet.
|
|
223
|
+
*/
|
|
224
|
+
export async function runMigrations(db: PGlite): Promise<{
|
|
225
|
+
applied: number[];
|
|
226
|
+
current: number;
|
|
227
|
+
}> {
|
|
228
|
+
await ensureVersionTable(db);
|
|
229
|
+
|
|
230
|
+
const currentVersion = await getCurrentVersion(db);
|
|
231
|
+
const applied: number[] = [];
|
|
232
|
+
|
|
233
|
+
// Find migrations that need to be applied
|
|
234
|
+
const pendingMigrations = migrations.filter(
|
|
235
|
+
(m) => m.version > currentVersion,
|
|
236
|
+
);
|
|
237
|
+
|
|
238
|
+
if (pendingMigrations.length === 0) {
|
|
239
|
+
return { applied: [], current: currentVersion };
|
|
240
|
+
}
|
|
241
|
+
|
|
242
|
+
// Sort by version to ensure correct order
|
|
243
|
+
pendingMigrations.sort((a, b) => a.version - b.version);
|
|
244
|
+
|
|
245
|
+
// Apply each migration in a transaction
|
|
246
|
+
for (const migration of pendingMigrations) {
|
|
247
|
+
await db.exec("BEGIN");
|
|
248
|
+
try {
|
|
249
|
+
// Run the migration SQL
|
|
250
|
+
await db.exec(migration.up);
|
|
251
|
+
|
|
252
|
+
// Record the migration
|
|
253
|
+
await db.query(
|
|
254
|
+
`INSERT INTO schema_version (version, applied_at, description)
|
|
255
|
+
VALUES ($1, $2, $3)`,
|
|
256
|
+
[migration.version, Date.now(), migration.description],
|
|
257
|
+
);
|
|
258
|
+
|
|
259
|
+
await db.exec("COMMIT");
|
|
260
|
+
applied.push(migration.version);
|
|
261
|
+
|
|
262
|
+
console.log(
|
|
263
|
+
`[migrations] Applied migration ${migration.version}: ${migration.description}`,
|
|
264
|
+
);
|
|
265
|
+
} catch (error) {
|
|
266
|
+
await db.exec("ROLLBACK");
|
|
267
|
+
const err = error as Error;
|
|
268
|
+
console.error(
|
|
269
|
+
`[migrations] Failed to apply migration ${migration.version}: ${err.message}`,
|
|
270
|
+
);
|
|
271
|
+
throw new Error(`Migration ${migration.version} failed: ${err.message}`);
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
|
|
275
|
+
const finalVersion = await getCurrentVersion(db);
|
|
276
|
+
return { applied, current: finalVersion };
|
|
277
|
+
}
|
|
278
|
+
|
|
279
|
+
/**
|
|
280
|
+
* Rollback to a specific version
|
|
281
|
+
*
|
|
282
|
+
* WARNING: This will DROP tables and LOSE DATA.
|
|
283
|
+
* Only use for testing or emergency recovery.
|
|
284
|
+
*/
|
|
285
|
+
export async function rollbackTo(
|
|
286
|
+
db: PGlite,
|
|
287
|
+
targetVersion: number,
|
|
288
|
+
): Promise<{
|
|
289
|
+
rolledBack: number[];
|
|
290
|
+
current: number;
|
|
291
|
+
}> {
|
|
292
|
+
const currentVersion = await getCurrentVersion(db);
|
|
293
|
+
const rolledBack: number[] = [];
|
|
294
|
+
|
|
295
|
+
if (targetVersion >= currentVersion) {
|
|
296
|
+
return { rolledBack: [], current: currentVersion };
|
|
297
|
+
}
|
|
298
|
+
|
|
299
|
+
// Find migrations to rollback (in reverse order)
|
|
300
|
+
const migrationsToRollback = migrations
|
|
301
|
+
.filter((m) => m.version > targetVersion && m.version <= currentVersion)
|
|
302
|
+
.sort((a, b) => b.version - a.version); // Descending order
|
|
303
|
+
|
|
304
|
+
for (const migration of migrationsToRollback) {
|
|
305
|
+
await db.exec("BEGIN");
|
|
306
|
+
try {
|
|
307
|
+
// Run the down migration
|
|
308
|
+
await db.exec(migration.down);
|
|
309
|
+
|
|
310
|
+
// Remove from version table
|
|
311
|
+
await db.query(`DELETE FROM schema_version WHERE version = $1`, [
|
|
312
|
+
migration.version,
|
|
313
|
+
]);
|
|
314
|
+
|
|
315
|
+
await db.exec("COMMIT");
|
|
316
|
+
rolledBack.push(migration.version);
|
|
317
|
+
|
|
318
|
+
console.log(
|
|
319
|
+
`[migrations] Rolled back migration ${migration.version}: ${migration.description}`,
|
|
320
|
+
);
|
|
321
|
+
} catch (error) {
|
|
322
|
+
await db.exec("ROLLBACK");
|
|
323
|
+
const err = error as Error;
|
|
324
|
+
console.error(
|
|
325
|
+
`[migrations] Failed to rollback migration ${migration.version}: ${err.message}`,
|
|
326
|
+
);
|
|
327
|
+
throw new Error(
|
|
328
|
+
`Rollback of migration ${migration.version} failed: ${err.message}`,
|
|
329
|
+
);
|
|
330
|
+
}
|
|
331
|
+
}
|
|
332
|
+
|
|
333
|
+
const finalVersion = await getCurrentVersion(db);
|
|
334
|
+
return { rolledBack, current: finalVersion };
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
/**
|
|
338
|
+
* Check if a specific migration has been applied
|
|
339
|
+
*/
|
|
340
|
+
export async function isMigrationApplied(
|
|
341
|
+
db: PGlite,
|
|
342
|
+
version: number,
|
|
343
|
+
): Promise<boolean> {
|
|
344
|
+
await ensureVersionTable(db);
|
|
345
|
+
|
|
346
|
+
const result = await db.query<{ count: string }>(
|
|
347
|
+
`SELECT COUNT(*) as count FROM schema_version WHERE version = $1`,
|
|
348
|
+
[version],
|
|
349
|
+
);
|
|
350
|
+
|
|
351
|
+
return parseInt(result.rows[0]?.count || "0") > 0;
|
|
352
|
+
}
|
|
353
|
+
|
|
354
|
+
/**
|
|
355
|
+
* Get pending migrations (not yet applied)
|
|
356
|
+
*/
|
|
357
|
+
export async function getPendingMigrations(db: PGlite): Promise<Migration[]> {
|
|
358
|
+
const currentVersion = await getCurrentVersion(db);
|
|
359
|
+
return migrations
|
|
360
|
+
.filter((m) => m.version > currentVersion)
|
|
361
|
+
.sort((a, b) => a.version - b.version);
|
|
362
|
+
}
|