@enbox/dwn-sql-store 0.0.10 → 0.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/dist/esm/src/data-store-s3.js +12 -21
  2. package/dist/esm/src/data-store-s3.js.map +1 -1
  3. package/dist/esm/src/data-store-sql.js +14 -37
  4. package/dist/esm/src/data-store-sql.js.map +1 -1
  5. package/dist/esm/src/main.js +1 -0
  6. package/dist/esm/src/main.js.map +1 -1
  7. package/dist/esm/src/message-store-sql.js +13 -107
  8. package/dist/esm/src/message-store-sql.js.map +1 -1
  9. package/dist/esm/src/migration-provider.js +36 -0
  10. package/dist/esm/src/migration-provider.js.map +1 -0
  11. package/dist/esm/src/migration-runner.js +26 -88
  12. package/dist/esm/src/migration-runner.js.map +1 -1
  13. package/dist/esm/src/migrations/001-initial-schema.js +4 -5
  14. package/dist/esm/src/migrations/001-initial-schema.js.map +1 -1
  15. package/dist/esm/src/migrations/002-content-addressed-datastore.js +3 -4
  16. package/dist/esm/src/migrations/002-content-addressed-datastore.js.map +1 -1
  17. package/dist/esm/src/migrations/003-add-squash-column.js +3 -4
  18. package/dist/esm/src/migrations/003-add-squash-column.js.map +1 -1
  19. package/dist/esm/src/migrations/index.js +14 -6
  20. package/dist/esm/src/migrations/index.js.map +1 -1
  21. package/dist/esm/src/resumable-task-store-sql.js +14 -21
  22. package/dist/esm/src/resumable-task-store-sql.js.map +1 -1
  23. package/dist/esm/src/state-index-sql.js +17 -58
  24. package/dist/esm/src/state-index-sql.js.map +1 -1
  25. package/dist/types/src/data-store-s3.d.ts.map +1 -1
  26. package/dist/types/src/data-store-sql.d.ts.map +1 -1
  27. package/dist/types/src/main.d.ts +1 -0
  28. package/dist/types/src/main.d.ts.map +1 -1
  29. package/dist/types/src/message-store-sql.d.ts +0 -8
  30. package/dist/types/src/message-store-sql.d.ts.map +1 -1
  31. package/dist/types/src/migration-provider.d.ts +36 -0
  32. package/dist/types/src/migration-provider.d.ts.map +1 -0
  33. package/dist/types/src/migration-runner.d.ts +13 -39
  34. package/dist/types/src/migration-runner.d.ts.map +1 -1
  35. package/dist/types/src/migrations/001-initial-schema.d.ts +3 -3
  36. package/dist/types/src/migrations/001-initial-schema.d.ts.map +1 -1
  37. package/dist/types/src/migrations/002-content-addressed-datastore.d.ts +2 -2
  38. package/dist/types/src/migrations/002-content-addressed-datastore.d.ts.map +1 -1
  39. package/dist/types/src/migrations/003-add-squash-column.d.ts +2 -2
  40. package/dist/types/src/migrations/003-add-squash-column.d.ts.map +1 -1
  41. package/dist/types/src/migrations/index.d.ts +12 -4
  42. package/dist/types/src/migrations/index.d.ts.map +1 -1
  43. package/dist/types/src/resumable-task-store-sql.d.ts.map +1 -1
  44. package/dist/types/src/state-index-sql.d.ts.map +1 -1
  45. package/package.json +2 -2
  46. package/src/data-store-s3.ts +14 -25
  47. package/src/data-store-sql.ts +15 -44
  48. package/src/main.ts +1 -0
  49. package/src/message-store-sql.ts +14 -113
  50. package/src/migration-provider.ts +52 -0
  51. package/src/migration-runner.ts +33 -123
  52. package/src/migrations/001-initial-schema.ts +6 -7
  53. package/src/migrations/002-content-addressed-datastore.ts +5 -6
  54. package/src/migrations/003-add-squash-column.ts +5 -7
  55. package/src/migrations/index.ts +15 -7
  56. package/src/resumable-task-store-sql.ts +16 -25
  57. package/src/state-index-sql.ts +18 -62
@@ -1 +1 @@
1
- {"version":3,"file":"resumable-task-store-sql.d.ts","sourceRoot":"","sources":["../../../src/resumable-task-store-sql.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAEpD,OAAO,KAAK,EAAE,oBAAoB,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAMlF,qBAAa,qBAAsB,YAAW,kBAAkB;;IAC9D,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,oBAAoB,CAAM;gBAKtC,OAAO,EAAE,OAAO;IAItB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAkCrB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAKtB,QAAQ,CAAC,IAAI,EAAE,GAAG,EAAE,gBAAgB,EAAE,MAAM,GAAG,OAAO,CAAC,oBAAoB,CAAC;IAoB5E,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,oBAAoB,EAAE,CAAC;IA0CpD,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,oBAAoB,GAAG,SAAS,CAAC;IAsB/D,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,gBAAgB,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAc/D,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAWrC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAS7B"}
1
+ {"version":3,"file":"resumable-task-store-sql.d.ts","sourceRoot":"","sources":["../../../src/resumable-task-store-sql.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAEpD,OAAO,KAAK,EAAE,oBAAoB,EAAE,kBAAkB,EAAE,MAAM,mBAAmB,CAAC;AAMlF,qBAAa,qBAAsB,YAAW,kBAAkB;;IAC9D,OAAO,CAAC,MAAM,CAAC,QAAQ,CAAC,oBAAoB,CAAM;gBAKtC,OAAO,EAAE,OAAO;IAItB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IAyBrB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAKtB,QAAQ,CAAC,IAAI,EAAE,GAAG,EAAE,gBAAgB,EAAE,MAAM,GAAG,OAAO,CAAC,oBAAoB,CAAC;IAoB5E,IAAI,CAAC,KAAK,EAAE,MAAM,GAAG,OAAO,CAAC,oBAAoB,EAAE,CAAC;IA0CpD,IAAI,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,oBAAoB,GAAG,SAAS,CAAC;IAsB/D,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,gBAAgB,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAc/D,MAAM,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAWrC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAS7B"}
@@ -1 +1 @@
1
- {"version":3,"file":"state-index-sql.d.ts","sourceRoot":"","sources":["../../../src/state-index-sql.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAEpD,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAOpD,qBAAa,aAAc,YAAW,UAAU;;gBAiBlC,OAAO,EAAE,OAAO;IAItB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IA2ErB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAOtB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAatB,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IA4B7E,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAsC5D,OAAO,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKtC,eAAe,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKhE,cAAc,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAKhE,sBAAsB,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAK1F,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAK/D,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAO/F;;;OAGG;IACH,OAAO,CAAC,aAAa;IAWrB;;;OAGG;IACH,OAAO,CAAC,eAAe;IAYvB;;OAEG;YACW,UAAU;CAWzB"}
1
+ {"version":3,"file":"state-index-sql.d.ts","sourceRoot":"","sources":["../../../src/state-index-sql.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG;AAEH,OAAO,KAAK,EAAE,OAAO,EAAE,MAAM,sBAAsB,CAAC;AAEpD,OAAO,KAAK,EAAE,IAAI,EAAE,MAAM,mBAAmB,CAAC;AAC9C,OAAO,KAAK,EAAE,SAAS,EAAE,MAAM,mBAAmB,CAAC;AACnD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,mBAAmB,CAAC;AAOpD,qBAAa,aAAc,YAAW,UAAU;;gBAiBlC,OAAO,EAAE,OAAO;IAItB,IAAI,IAAI,OAAO,CAAC,IAAI,CAAC;IA+BrB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAOtB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAatB,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM,EAAE,OAAO,EAAE,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IA4B7E,MAAM,CAAC,MAAM,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAsC5D,OAAO,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKtC,eAAe,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAKhE,cAAc,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAKhE,sBAAsB,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;IAK1F,SAAS,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAK/D,iBAAiB,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,GAAG,OAAO,CAAC,MAAM,EAAE,CAAC;IAO/F;;;OAGG;IACH,OAAO,CAAC,aAAa;IAWrB;;;OAGG;IACH,OAAO,CAAC,eAAe;IAYvB;;OAEG;YACW,UAAU;CAWzB"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@enbox/dwn-sql-store",
3
- "version": "0.0.10",
3
+ "version": "0.0.11",
4
4
  "description": "SQL backed implementations of DWN MessageStore, DataStore, and StateIndex",
5
5
  "type": "module",
6
6
  "license": "Apache-2.0",
@@ -25,7 +25,7 @@
25
25
  "dependencies": {
26
26
  "@aws-sdk/client-s3": "^3.700.0",
27
27
  "@aws-sdk/lib-storage": "^3.700.0",
28
- "@enbox/dwn-sdk-js": "0.1.1",
28
+ "@enbox/dwn-sdk-js": "0.1.2",
29
29
  "@ipld/dag-cbor": "9.0.5",
30
30
  "interface-blockstore": "5.2.3",
31
31
  "interface-store": "5.1.2",
@@ -3,7 +3,6 @@ import type { DwnDatabaseType } from './types.js';
3
3
  import type { DataStore, DataStoreGetResult, DataStorePutResult } from '@enbox/dwn-sdk-js';
4
4
 
5
5
  import { DataStream } from '@enbox/dwn-sdk-js';
6
- import { Kysely } from 'kysely';
7
6
  import { Readable } from 'stream';
8
7
  import { Upload } from '@aws-sdk/lib-storage';
9
8
  import {
@@ -14,6 +13,7 @@ import {
14
13
  PutObjectCommand,
15
14
  S3Client,
16
15
  } from '@aws-sdk/client-s3';
16
+ import { Kysely, sql } from 'kysely';
17
17
 
18
18
  /**
19
19
  * S3-backed implementation of {@link DataStore} with SQL-based reference
@@ -56,7 +56,9 @@ export class DataStoreS3 implements DataStore {
56
56
  }
57
57
 
58
58
  this.#db = new Kysely<DwnDatabaseType>({ dialect: this.#dialect });
59
- await this.#ensureRefsTable();
59
+
60
+ // Fail fast if migrations have not been run — the dataRefs table must already exist.
61
+ await this.#assertTablesExist();
60
62
  }
61
63
 
62
64
  public async close(): Promise<void> {
@@ -277,30 +279,17 @@ export class DataStoreS3 implements DataStore {
277
279
  }
278
280
 
279
281
  /**
280
- * Creates the `dataRefs` table if it doesn't already exist.
281
- * Shares the same schema as DataStoreSql's `dataRefs` table.
282
+ * Verifies that the required `dataRefs` table exists by executing a
283
+ * zero-row SELECT. Throws a clear error directing the caller to run
284
+ * migrations first.
282
285
  */
283
- async #ensureRefsTable(): Promise<void> {
284
- const db = this.#db!;
285
-
286
- if (!(await this.#dialect.hasTable(db, 'dataRefs'))) {
287
- await db.schema
288
- .createTable('dataRefs')
289
- .ifNotExists()
290
- .addColumn('tenant', 'varchar(255)', (col) => col.notNull())
291
- .addColumn('recordId', 'varchar(60)', (col) => col.notNull())
292
- .addColumn('dataCid', 'varchar(60)', (col) => col.notNull())
293
- .addColumn('dataSize', 'bigint', (col) => col.notNull())
294
- .execute();
295
-
296
- await db.schema.createIndex('index_dataRefs_tenant_recordId_dataCid')
297
- .on('dataRefs').columns(['tenant', 'recordId', 'dataCid']).unique().execute();
298
-
299
- await db.schema.createIndex('index_dataRefs_dataCid')
300
- .on('dataRefs').column('dataCid').execute();
301
-
302
- await db.schema.createIndex('index_dataRefs_tenant')
303
- .on('dataRefs').column('tenant').execute();
286
+ async #assertTablesExist(): Promise<void> {
287
+ try {
288
+ await sql`SELECT 1 FROM ${sql.table('dataRefs')} LIMIT 0`.execute(this.#db!);
289
+ } catch {
290
+ throw new Error(
291
+ 'DataStoreS3: table \'dataRefs\' does not exist. Run DWN store migrations before opening stores.'
292
+ );
304
293
  }
305
294
  }
306
295
  }
@@ -8,7 +8,7 @@ import { CID } from 'multiformats';
8
8
  import { DataStream } from '@enbox/dwn-sdk-js';
9
9
  import { exporter } from 'ipfs-unixfs-exporter';
10
10
  import { importer } from 'ipfs-unixfs-importer';
11
- import { Kysely } from 'kysely';
11
+ import { Kysely, sql } from 'kysely';
12
12
 
13
13
  /**
14
14
  * SQL-backed implementation of {@link DataStore} with content-addressed
@@ -37,10 +37,8 @@ export class DataStoreSql implements DataStore {
37
37
 
38
38
  this.#db = new Kysely<DwnDatabaseType>({ dialect: this.#dialect });
39
39
 
40
- // Create tables if they don't exist. In production the MigrationRunner
41
- // creates these before open() is called; this fallback handles standalone
42
- // usage (tests, plugins) that bypass the migration runner.
43
- await this.#ensureTables();
40
+ // Fail fast if migrations have not been run — tables must already exist.
41
+ await this.#assertTablesExist();
44
42
  }
45
43
 
46
44
  public async close(): Promise<void> {
@@ -218,46 +216,19 @@ export class DataStoreSql implements DataStore {
218
216
  }
219
217
 
220
218
  /**
221
- * Creates the `dataRefs` and `dataBlocks` tables if they don't already exist.
222
- * This is a fallback for standalone usage without the MigrationRunner.
219
+ * Verifies that the required tables exist by executing a zero-row SELECT.
220
+ * Throws a clear error directing the caller to run migrations first.
223
221
  */
224
- async #ensureTables(): Promise<void> {
225
- const db = this.#db!;
226
-
227
- // ─── dataRefs ─────────────────────────────────────────────────────
228
- if (!(await this.#dialect.hasTable(db, 'dataRefs'))) {
229
- await db.schema
230
- .createTable('dataRefs')
231
- .ifNotExists()
232
- .addColumn('tenant', 'varchar(255)', (col) => col.notNull())
233
- .addColumn('recordId', 'varchar(60)', (col) => col.notNull())
234
- .addColumn('dataCid', 'varchar(60)', (col) => col.notNull())
235
- .addColumn('dataSize', 'bigint', (col) => col.notNull())
236
- .execute();
237
-
238
- await db.schema.createIndex('index_dataRefs_tenant_recordId_dataCid')
239
- .on('dataRefs').columns(['tenant', 'recordId', 'dataCid']).unique().execute();
240
-
241
- await db.schema.createIndex('index_dataRefs_dataCid')
242
- .on('dataRefs').column('dataCid').execute();
243
-
244
- await db.schema.createIndex('index_dataRefs_tenant')
245
- .on('dataRefs').column('tenant').execute();
246
- }
247
-
248
- // ─── dataBlocks ───────────────────────────────────────────────────
249
- if (!(await this.#dialect.hasTable(db, 'dataBlocks'))) {
250
- let table = db.schema
251
- .createTable('dataBlocks')
252
- .ifNotExists()
253
- .addColumn('rootDataCid', 'varchar(60)', (col) => col.notNull())
254
- .addColumn('blockCid', 'varchar(60)', (col) => col.notNull());
255
-
256
- table = this.#dialect.addBlobColumn(table, 'data', (col) => col.notNull());
257
- await table.execute();
258
-
259
- await db.schema.createIndex('index_dataBlocks_rootDataCid_blockCid')
260
- .on('dataBlocks').columns(['rootDataCid', 'blockCid']).unique().execute();
222
+ async #assertTablesExist(): Promise<void> {
223
+ const tables = ['dataRefs', 'dataBlocks'] as const;
224
+ for (const table of tables) {
225
+ try {
226
+ await sql`SELECT 1 FROM ${sql.table(table)} LIMIT 0`.execute(this.#db!);
227
+ } catch {
228
+ throw new Error(
229
+ `DataStoreSql: table '${table}' does not exist. Run DWN store migrations before opening stores.`
230
+ );
231
+ }
261
232
  }
262
233
  }
263
234
 
package/src/main.ts CHANGED
@@ -8,6 +8,7 @@ export * from './data-store-s3.js';
8
8
  export * from './data-store-sql.js';
9
9
  export * from './state-index-sql.js';
10
10
  export * from './message-store-sql.js';
11
+ export * from './migration-provider.js';
11
12
  export * from './migration-runner.js';
12
13
  export * from './migrations/index.js';
13
14
  export * from './resumable-task-store-sql.js';
@@ -43,123 +43,24 @@ export class MessageStoreSql implements MessageStore {
43
43
 
44
44
  this.#db = new Kysely<DwnDatabaseType>({ dialect: this.#dialect });
45
45
 
46
- // create messages table if it does not exist
47
- const messagesTableName = 'messageStoreMessages';
48
- const messagesTableExists = await this.#dialect.hasTable(this.#db, messagesTableName);
49
- if (!messagesTableExists) {
50
- let createMessagesTable = this.#db.schema
51
- .createTable(messagesTableName)
52
- .ifNotExists()
53
- .addColumn('tenant', 'varchar(255)', (col) => col.notNull())
54
- .addColumn('messageCid', 'varchar(60)', (col) => col.notNull())
55
- .addColumn('interface', 'varchar(20)')
56
- .addColumn('method', 'varchar(20)')
57
- .addColumn('recordId', 'varchar(60)')
58
- .addColumn('entryId','varchar(60)')
59
- .addColumn('parentId', 'varchar(60)')
60
- .addColumn('protocol', 'varchar(200)')
61
- .addColumn('protocolPath', 'varchar(200)')
62
- .addColumn('contextId', 'varchar(600)')
63
- .addColumn('schema', 'varchar(200)')
64
- .addColumn('author', 'varchar(255)')
65
- .addColumn('recipient', 'varchar(255)')
66
- .addColumn('messageTimestamp', 'varchar(30)')
67
- .addColumn('dateCreated', 'varchar(30)')
68
- .addColumn('datePublished', 'varchar(30)')
69
- .addColumn('isLatestBaseState', 'boolean')
70
- .addColumn('published', 'boolean')
71
- .addColumn('prune', 'boolean')
72
- .addColumn('squash', 'boolean')
73
- .addColumn('dataFormat', 'varchar(30)')
74
- .addColumn('dataCid', 'varchar(60)')
75
- .addColumn('dataSize', 'integer')
76
- .addColumn('encodedData', 'text') // we optionally store encoded data if it is below a threshold
77
- .addColumn('attester', 'text')
78
- .addColumn('permissionGrantId', 'varchar(60)');
79
-
80
- // Add columns that have dialect-specific constraints
81
- createMessagesTable = this.#dialect.addAutoIncrementingColumn(createMessagesTable, 'id', (col) => col.primaryKey());
82
- createMessagesTable = this.#dialect.addBlobColumn(createMessagesTable, 'encodedMessageBytes', (col) => col.notNull());
83
- await createMessagesTable.execute();
84
-
85
- // add unique index for get() and delete() by messageCid — the most fundamental lookup path
86
- await this.#db.schema
87
- .createIndex('index_tenant_messageCid')
88
- .on(messagesTableName)
89
- .columns(['tenant', 'messageCid'])
90
- .unique()
91
- .execute();
92
-
93
- // add indexes to the table
94
- await this.createIndexes(this.#db, messagesTableName, [
95
- ['tenant', 'recordId'], // multiple uses, notably heavily depended by record chain construction for protocol authorization
96
- ['tenant', 'entryId'], // used by fetchInitialRecordsWriteMessage in RecordsRead, RecordsQuery, and RecordsDelete
97
- ['tenant', 'parentId'], // used to walk down hierarchy of records, use cases include purging of records
98
- ['tenant', 'protocol', 'published', 'messageTimestamp'], // index used for basically every external query
99
- ['tenant', 'interface'], // mainly for fast fetch of ProtocolsConfigure for authorization, not needed if protocol was a DWN Record
100
- ['tenant', 'permissionGrantId'], // for deleting grant-authorized messages though pending https://github.com/enboxorg/enbox/issues/716
101
- ['tenant', 'dateCreated'], // sort optimization for RecordsQuery with DateSort.CreatedAscending/Descending
102
- ['tenant', 'datePublished'], // sort optimization for RecordsQuery with DateSort.PublishedAscending/Descending
103
- ]);
104
-
105
- // contextId index created separately because MySQL requires a prefix length to fit within
106
- // the 3072-byte InnoDB index key limit. contextId is varchar(600) × 4 bytes (utf8mb4) = 2400 bytes,
107
- // which combined with tenant (255 × 4 = 1020) and messageTimestamp (30 × 4 = 120) = 3540 bytes,
108
- // exceeding the limit. A prefix of 480 chars (1920 bytes) brings the total to 3060 bytes.
109
- // contextId values only contain ASCII chars [a-zA-Z0-9/], so a 480-char prefix is sufficient
110
- // to distinguish most records (covers ~8 nesting levels of 59-char CID segments).
111
- if (this.#dialect.name === 'MySQL') {
112
- await sql`CREATE INDEX index_tenant_contextId_messageTimestamp
113
- ON ${sql.table(messagesTableName)} (tenant, contextId(480), messageTimestamp)`
114
- .execute(this.#db);
115
- } else {
116
- await this.createIndexes(this.#db, messagesTableName, [
117
- ['tenant', 'contextId', 'messageTimestamp'], // expected to be used for common query pattern
118
- ]);
119
- }
120
- }
121
-
122
- // create tags table
123
- const tagsTableName = 'messageStoreRecordsTags';
124
- const tagsTableExists = await this.#dialect.hasTable(this.#db, tagsTableName);
125
- if (!tagsTableExists) {
126
- let createRecordsTagsTable = this.#db.schema
127
- .createTable(tagsTableName)
128
- .ifNotExists()
129
- .addColumn('tag', 'varchar(30)', (col) => col.notNull())
130
- .addColumn('valueString', 'varchar(200)')
131
- .addColumn('valueNumber', 'decimal');
132
-
133
- // Add columns that have dialect-specific constraints
134
- const foreignMessageInsertId = 'messageInsertId';
135
- createRecordsTagsTable = this.#dialect.addAutoIncrementingColumn(createRecordsTagsTable, 'id', (col) => col.primaryKey());
136
- createRecordsTagsTable = this.#dialect.addReferencedColumn(createRecordsTagsTable, tagsTableName, foreignMessageInsertId, 'integer', 'messageStoreMessages', 'id', 'cascade');
137
- await createRecordsTagsTable.execute();
138
-
139
- // add indexes to the table
140
- await this.createIndexes(this.#db, tagsTableName, [
141
- [foreignMessageInsertId],
142
- ['tag', 'valueString'],
143
- ['tag', 'valueNumber']
144
- ]);
145
- }
46
+ // Fail fast if migrations have not been run — tables must already exist.
47
+ await this.#assertTablesExist();
146
48
  }
147
49
 
148
50
  /**
149
- * Creates indexes on the given table.
150
- * @param tableName The name of the table to create the indexes on.
151
- * @param indexes Each inner array represents a single index and contains the column names to be indexed as a composite index.
152
- * If the inner array contains only one element, it will be treated as a single column index.
51
+ * Verifies that the required tables exist by executing a zero-row SELECT.
52
+ * Throws a clear error directing the caller to run migrations first.
153
53
  */
154
- async createIndexes<T>(database: Kysely<T>, tableName: string, indexes: string[][]): Promise<void> {
155
- for (const columnNames of indexes) {
156
- const indexName = 'index_' + columnNames.join('_'); // e.g. index_tenant_protocol
157
- await database.schema
158
- .createIndex(indexName)
159
- // .ifNotExists() // intentionally kept commented out code to show that it is not supported by all dialects (ie. MySQL)
160
- .on(tableName)
161
- .columns(columnNames)
162
- .execute();
54
+ async #assertTablesExist(): Promise<void> {
55
+ const tables = ['messageStoreMessages', 'messageStoreRecordsTags'] as const;
56
+ for (const table of tables) {
57
+ try {
58
+ await sql`SELECT 1 FROM ${sql.table(table)} LIMIT 0`.execute(this.#db!);
59
+ } catch {
60
+ throw new Error(
61
+ `MessageStoreSql: table '${table}' does not exist. Run DWN store migrations before opening stores.`
62
+ );
63
+ }
163
64
  }
164
65
  }
165
66
 
@@ -0,0 +1,52 @@
1
+ import type { Dialect } from './dialect/dialect.js';
2
+ import type { Migration, MigrationProvider } from 'kysely';
3
+
4
+ /**
5
+ * Factory function type for DWN migrations. Each migration module exports a
6
+ * factory that receives the {@link Dialect} and returns a standard Kysely
7
+ * {@link Migration}. This closure pattern lets migrations use dialect-specific
8
+ * DDL helpers (blob columns, auto-increment, `hasTable()`) without requiring
9
+ * Kysely's `Migration.up()` signature to accept extra parameters.
10
+ */
11
+ export type DwnMigrationFactory = (dialect: Dialect) => Migration;
12
+
13
+ /**
14
+ * Kysely {@link MigrationProvider} for DWN store migrations.
15
+ *
16
+ * Wraps an ordered list of `(name, factory)` pairs. At resolution time each
17
+ * factory is called with the dialect, producing the concrete Kysely
18
+ * {@link Migration} objects that the `Migrator` consumes.
19
+ *
20
+ * @example
21
+ * ```ts
22
+ * const provider = new DwnMigrationProvider(dialect, allDwnMigrations);
23
+ * const migrator = new Migrator({ db, provider });
24
+ * await migrator.migrateToLatest();
25
+ * ```
26
+ */
27
+ export class DwnMigrationProvider implements MigrationProvider {
28
+ #dialect: Dialect;
29
+ #factories: ReadonlyArray<readonly [name: string, factory: DwnMigrationFactory]>;
30
+
31
+ constructor(
32
+ dialect: Dialect,
33
+ factories: ReadonlyArray<readonly [name: string, factory: DwnMigrationFactory]>,
34
+ ) {
35
+ this.#dialect = dialect;
36
+ this.#factories = factories;
37
+ }
38
+
39
+ /**
40
+ * Called by the Kysely `Migrator` to retrieve all available migrations.
41
+ * Keys are migration names (e.g. `'001-initial-schema'`); values are the
42
+ * concrete `Migration` objects produced by invoking each factory with the
43
+ * captured dialect.
44
+ */
45
+ public async getMigrations(): Promise<Record<string, Migration>> {
46
+ const migrations: Record<string, Migration> = {};
47
+ for (const [name, factory] of this.#factories) {
48
+ migrations[name] = factory(this.#dialect);
49
+ }
50
+ return migrations;
51
+ }
52
+ }
@@ -1,137 +1,47 @@
1
1
  import type { Dialect } from './dialect/dialect.js';
2
- import type { Kysely } from 'kysely';
2
+ import type { DwnMigrationFactory } from './migration-provider.js';
3
+ import type { Kysely, MigrationResultSet } from 'kysely';
3
4
 
4
- import { allMigrations } from './migrations/index.js';
5
+ import { allDwnMigrations } from './migrations/index.js';
6
+ import { DwnMigrationProvider } from './migration-provider.js';
7
+ import { Migrator } from 'kysely';
5
8
 
6
- /**
7
- * A single migration step. Migrations are TypeScript functions (not raw SQL)
8
- * so they can use the Dialect abstraction for cross-dialect column types.
9
- */
10
- export type Migration = {
11
- /** Unique sequential name, e.g. '001-initial-schema'. */
12
- name: string;
13
- /**
14
- * Apply this migration. Receives the Kysely instance and dialect for
15
- * dialect-aware DDL (blob types, auto-increment, etc.).
16
- */
17
- up(db: Kysely<any>, dialect: Dialect): Promise<void>;
18
- };
19
-
20
- type MigrationRecord = {
21
- name: string;
22
- appliedAt: string;
23
- };
24
-
25
- type MigrationDatabaseType = {
26
- dwn_migrations: MigrationRecord;
27
- };
28
-
29
- /**
30
- * Minimal forward-only migration runner for dwn-sql-store.
31
- *
32
- * Tracks applied migrations in a `dwn_migrations` table and applies
33
- * pending migrations in sequential order on each call to `run()`.
34
- *
35
- * Design decisions:
36
- * - Forward-only: no rollback support. Keep migrations simple and additive.
37
- * - TypeScript migrations: use the Dialect interface for cross-dialect DDL.
38
- * - Idempotent: calling `run()` on an up-to-date database is a no-op.
39
- * - Transaction per migration: each migration runs in its own transaction
40
- * so a failure leaves the database in the last known-good state.
41
- */
42
9
  /**
43
10
  * Convenience function to run all DWN store migrations against a database.
44
11
  *
45
- * Creates a `MigrationRunner` with the full set of built-in migrations and
46
- * runs them. Call this once during application startup, before opening any
47
- * stores e.g. in `getDwnConfig()` or equivalent initialization code.
12
+ * Uses Kysely's native {@link Migrator} with the {@link DwnMigrationProvider}
13
+ * to apply pending migrations. The Migrator handles locking (Postgres uses
14
+ * `pg_advisory_xact_lock`, MySQL uses `GET_LOCK`/`RELEASE_LOCK`, SQLite is
15
+ * single-writer) and migration tracking via its own `kysely_migration` /
16
+ * `kysely_migration_lock` tables.
17
+ *
18
+ * Call this once during application startup, before opening any stores —
19
+ * e.g. in `getDwnConfig()` or equivalent initialization code.
48
20
  *
49
21
  * @param db - An open Kysely instance connected to the target database.
50
22
  * @param dialect - The dialect for the target database.
23
+ * @param migrations - Optional custom migration list; defaults to the
24
+ * built-in {@link allDwnMigrations}.
51
25
  * @returns The names of newly applied migrations (empty if already up-to-date).
26
+ * @throws If any migration fails (the Migrator rolls back only the failed migration).
52
27
  */
53
- export async function runDwnStoreMigrations(db: Kysely<any>, dialect: Dialect): Promise<string[]> {
54
- const runner = new MigrationRunner(db, dialect, allMigrations);
55
- return runner.run();
56
- }
57
-
58
- export class MigrationRunner {
59
- #db: Kysely<MigrationDatabaseType>;
60
- #dialect: Dialect;
61
- #migrations: Migration[];
62
-
63
- constructor(db: Kysely<any>, dialect: Dialect, migrations: Migration[]) {
64
- this.#db = db as Kysely<MigrationDatabaseType>;
65
- this.#dialect = dialect;
66
- this.#migrations = migrations;
28
+ export async function runDwnStoreMigrations(
29
+ db: Kysely<any>,
30
+ dialect: Dialect,
31
+ migrations?: ReadonlyArray<readonly [name: string, factory: DwnMigrationFactory]>,
32
+ ): Promise<string[]> {
33
+ const provider = new DwnMigrationProvider(dialect, migrations ?? allDwnMigrations);
34
+ const migrator = new Migrator({ db, provider });
35
+ const resultSet: MigrationResultSet = await migrator.migrateToLatest();
36
+
37
+ if (resultSet.error) {
38
+ // Re-throw the underlying error so callers get a useful stack trace.
39
+ // The resultSet.results still contains info about which migrations
40
+ // succeeded before the failure.
41
+ throw resultSet.error;
67
42
  }
68
43
 
69
- /**
70
- * Ensure the `dwn_migrations` tracking table exists, then apply any
71
- * pending migrations in order. Returns the names of newly applied migrations.
72
- */
73
- public async run(): Promise<string[]> {
74
- await this.#ensureMigrationTable();
75
-
76
- const applied = await this.#getAppliedMigrations();
77
- const appliedSet = new Set(applied);
78
- const pending = this.#migrations.filter((m) => !appliedSet.has(m.name));
79
-
80
- const newlyApplied: string[] = [];
81
- for (const migration of pending) {
82
- await this.#applyMigration(migration);
83
- newlyApplied.push(migration.name);
84
- }
85
-
86
- return newlyApplied;
87
- }
88
-
89
- /**
90
- * Create the `dwn_migrations` table if it does not already exist.
91
- */
92
- async #ensureMigrationTable(): Promise<void> {
93
- const exists = await this.#dialect.hasTable(this.#db, 'dwn_migrations');
94
- if (exists) {
95
- return;
96
- }
97
-
98
- await this.#db.schema
99
- .createTable('dwn_migrations')
100
- .ifNotExists()
101
- .addColumn('name', 'varchar(255)', (col) => col.primaryKey().notNull())
102
- .addColumn('appliedAt', 'varchar(30)', (col) => col.notNull())
103
- .execute();
104
- }
105
-
106
- /**
107
- * Get the list of migration names that have already been applied.
108
- */
109
- async #getAppliedMigrations(): Promise<string[]> {
110
- const rows = await this.#db
111
- .selectFrom('dwn_migrations')
112
- .select('name')
113
- .orderBy('name', 'asc')
114
- .execute();
115
-
116
- return rows.map((r) => r.name);
117
- }
118
-
119
- /**
120
- * Apply a single migration within a transaction and record it.
121
- */
122
- async #applyMigration(migration: Migration): Promise<void> {
123
- await this.#db.transaction().execute(async (trx) => {
124
- // Run the migration
125
- await migration.up(trx as unknown as Kysely<any>, this.#dialect);
126
-
127
- // Record it as applied
128
- await (trx as unknown as Kysely<MigrationDatabaseType>)
129
- .insertInto('dwn_migrations')
130
- .values({
131
- name : migration.name,
132
- appliedAt : new Date().toISOString(),
133
- })
134
- .execute();
135
- });
136
- }
44
+ return (resultSet.results ?? [])
45
+ .filter((r) => r.status === 'Success')
46
+ .map((r) => r.migrationName);
137
47
  }
@@ -1,6 +1,6 @@
1
1
  import type { Dialect } from '../dialect/dialect.js';
2
- import type { Kysely } from 'kysely';
3
- import type { Migration } from '../migration-runner.js';
2
+ import type { DwnMigrationFactory } from '../migration-provider.js';
3
+ import type { Kysely, Migration } from 'kysely';
4
4
 
5
5
  import { sql } from 'kysely';
6
6
 
@@ -8,13 +8,12 @@ import { sql } from 'kysely';
8
8
  * Baseline migration: captures the schema as of the pre-migration era.
9
9
  *
10
10
  * For existing databases that already have these tables, this migration is
11
- * detected as "already applied" during the adoption bootstrap (see MigrationRunner).
11
+ * detected as "already applied" during the adoption bootstrap (see runDwnStoreMigrations).
12
12
  * For new databases, this creates the full initial schema.
13
13
  */
14
- export const migration001InitialSchema: Migration = {
15
- name: '001-initial-schema',
14
+ export const migration001InitialSchema: DwnMigrationFactory = (dialect: Dialect): Migration => ({
16
15
 
17
- async up(db: Kysely<any>, dialect: Dialect): Promise<void> {
16
+ async up(db: Kysely<any>): Promise<void> {
18
17
 
19
18
  // ─── messageStoreMessages ───────────────────────────────────────────
20
19
  if (!(await dialect.hasTable(db, 'messageStoreMessages'))) {
@@ -187,4 +186,4 @@ export const migration001InitialSchema: Migration = {
187
186
  .on('stateIndexMeta').columns(['tenant', 'messageCid']).execute();
188
187
  }
189
188
  },
190
- };
189
+ });
@@ -1,6 +1,6 @@
1
1
  import type { Dialect } from '../dialect/dialect.js';
2
- import type { Kysely } from 'kysely';
3
- import type { Migration } from '../migration-runner.js';
2
+ import type { DwnMigrationFactory } from '../migration-provider.js';
3
+ import type { Kysely, Migration } from 'kysely';
4
4
 
5
5
  import { sql } from 'kysely';
6
6
 
@@ -29,10 +29,9 @@ import { sql } from 'kysely';
29
29
  * NOTE: For large databases, the data migration may take significant time.
30
30
  * The migration runs in a single transaction for atomicity.
31
31
  */
32
- export const migration002ContentAddressedDatastore: Migration = {
33
- name: '002-content-addressed-datastore',
32
+ export const migration002ContentAddressedDatastore: DwnMigrationFactory = (dialect: Dialect): Migration => ({
34
33
 
35
- async up(db: Kysely<any>, dialect: Dialect): Promise<void> {
34
+ async up(db: Kysely<any>): Promise<void> {
36
35
 
37
36
  // ─── Create dataRefs table ──────────────────────────────────────────
38
37
  if (!(await dialect.hasTable(db, 'dataRefs'))) {
@@ -137,4 +136,4 @@ export const migration002ContentAddressedDatastore: Migration = {
137
136
  await db.schema.dropTable('dataStore').execute();
138
137
  }
139
138
  },
140
- };
139
+ });
@@ -1,6 +1,5 @@
1
- import type { Dialect } from '../dialect/dialect.js';
2
- import type { Kysely } from 'kysely';
3
- import type { Migration } from '../migration-runner.js';
1
+ import type { DwnMigrationFactory } from '../migration-provider.js';
2
+ import type { Kysely, Migration } from 'kysely';
4
3
 
5
4
  /**
6
5
  * Migration 003: Add `squash` boolean column to `messageStoreMessages`.
@@ -9,13 +8,12 @@ import type { Migration } from '../migration-runner.js';
9
8
  * introduced in the DWN spec. It follows the same pattern as `published`
10
9
  * and `prune` — a nullable boolean column used for query filtering.
11
10
  */
12
- export const migration003AddSquashColumn: Migration = {
13
- name: '003-add-squash-column',
11
+ export const migration003AddSquashColumn: DwnMigrationFactory = (): Migration => ({
14
12
 
15
- async up(db: Kysely<any>, _dialect: Dialect): Promise<void> {
13
+ async up(db: Kysely<any>): Promise<void> {
16
14
  await db.schema
17
15
  .alterTable('messageStoreMessages')
18
16
  .addColumn('squash', 'boolean')
19
17
  .execute();
20
18
  },
21
- };
19
+ });