@apibara/plugin-drizzle 2.1.0-beta.20 → 2.1.0-beta.22

package/dist/index.cjs CHANGED
@@ -5,20 +5,10 @@ const plugins = require('@apibara/indexer/plugins');
  const internal = require('@apibara/indexer/internal');
  const plugins$1 = require('@apibara/indexer/internal/plugins');
  const constants = require('./shared/plugin-drizzle.cae20704.cjs');
- const pglite$1 = require('@electric-sql/pglite');
- const nodePostgres = require('drizzle-orm/node-postgres');
- const migrator$1 = require('drizzle-orm/node-postgres/migrator');
- const pglite = require('drizzle-orm/pglite');
- const migrator = require('drizzle-orm/pglite/migrator');
- const pg = require('pg');
- const protocol = require('@apibara/protocol');
  const drizzleOrm = require('drizzle-orm');
+ const protocol = require('@apibara/protocol');
  const pgCore = require('drizzle-orm/pg-core');

- function _interopDefaultCompat (e) { return e && typeof e === 'object' && 'default' in e ? e.default : e; }
-
- const pg__default = /*#__PURE__*/_interopDefaultCompat(pg);
-
  class DrizzleStorageError extends Error {
  constructor(message, options) {
  super(message, options);
@@ -46,6 +36,12 @@ function serialize(obj) {
  function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
  }
+ const getIdColumnForTable = (tableName, idColumn) => {
+ if (idColumn[tableName]) {
+ return idColumn[tableName];
+ }
+ return idColumn["*"];
+ };

  function drizzle(options) {
  const {
@@ -55,15 +51,9 @@ function drizzle(options) {
  config,
  poolConfig
  } = options ?? {};
- if (isPostgresConnectionString(connectionString) || type === "node-postgres") {
- const pool = new pg__default.Pool({
- connectionString,
- ...poolConfig || {}
- });
- return nodePostgres.drizzle(pool, { schema, ...config || {} });
- }
- if (type === "pglite") {
- return pglite.drizzle({
+ if (isPgliteConnectionString(connectionString) && type === "pglite") {
+ const { drizzle: drizzlePGLite } = require("drizzle-orm/pglite");
+ return drizzlePGLite({
  schema,
  connection: {
  dataDir: connectionString || "memory://pglite"
@@ -71,15 +61,25 @@ function drizzle(options) {
  ...config || {}
  });
  }
- throw new Error("Invalid database type");
+ const { Pool } = require("pg");
+ const { drizzle: drizzleNode } = require("drizzle-orm/node-postgres");
+ const pool = new Pool({
+ connectionString,
+ ...poolConfig || {}
+ });
+ return drizzleNode(pool, { schema, ...config || {} });
  }
  async function migrate(db, options) {
- const isPglite = !!("$client" in db && db.$client instanceof pglite$1.PGlite);
+ const isPglite = isDrizzleKind(db, "PgliteDatabase");
  try {
  if (isPglite) {
- await migrator.migrate(db, options);
+ const { migrate: migratePGLite } = require("drizzle-orm/pglite/migrator");
+ await migratePGLite(db, options);
  } else {
- await migrator$1.migrate(db, options);
+ const {
+ migrate: migrateNode
+ } = require("drizzle-orm/node-postgres/migrator");
+ await migrateNode(db, options);
  }
  } catch (error) {
  throw new DrizzleStorageError(
@@ -90,8 +90,23 @@ async function migrate(db, options) {
  );
  }
  }
- function isPostgresConnectionString(conn) {
- return conn.startsWith("postgres://") || conn.startsWith("postgresql://");
+ function isPgliteConnectionString(conn) {
+ return conn.startsWith("memory://") || conn.startsWith("file://") || conn.startsWith("idb://");
+ }
+ function isDrizzleKind(value, entityKindValue) {
+ if (!value || typeof value !== "object") {
+ return false;
+ }
+ let cls = Object.getPrototypeOf(value).constructor;
+ if (cls) {
+ while (cls) {
+ if (drizzleOrm.entityKind in cls && cls[drizzleOrm.entityKind] === entityKindValue) {
+ return true;
+ }
+ cls = Object.getPrototypeOf(cls);
+ }
+ }
+ return false;
  }

  const CHECKPOINTS_TABLE_NAME = "checkpoints";
@@ -375,9 +390,10 @@ async function initializeReorgRollbackTable(tx, indexerId) {
  );
  }
  }
- async function registerTriggers(tx, tables, endCursor, idColumn, indexerId) {
+ async function registerTriggers(tx, tables, endCursor, idColumnMap, indexerId) {
  try {
  for (const table of tables) {
+ const tableIdColumn = getIdColumnForTable(table, idColumnMap);
  await tx.execute(
  drizzleOrm.sql.raw(
  `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`
@@ -388,7 +404,7 @@ async function registerTriggers(tx, tables, endCursor, idColumn, indexerId) {
  CREATE CONSTRAINT TRIGGER ${getReorgTriggerName(table, indexerId)}
  AFTER INSERT OR UPDATE OR DELETE ON ${table}
  DEFERRABLE INITIALLY DEFERRED
- FOR EACH ROW EXECUTE FUNCTION ${constants.SCHEMA_NAME}.reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`}, '${indexerId}');
+ FOR EACH ROW EXECUTE FUNCTION ${constants.SCHEMA_NAME}.reorg_checkpoint('${tableIdColumn}', ${Number(endCursor.orderKey)}, '${indexerId}');
  `)
  );
  }
@@ -413,7 +429,7 @@ async function removeTriggers(db, tables, indexerId) {
  });
  }
  }
- async function invalidate(tx, cursor, idColumn, indexerId) {
+ async function invalidate(tx, cursor, idColumnMap, indexerId) {
  const { rows: result } = await tx.execute(
  drizzleOrm.sql.raw(`
  WITH deleted AS (
@@ -431,6 +447,7 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  );
  }
  for (const op of result) {
+ const tableIdColumn = getIdColumnForTable(op.table_name, idColumnMap);
  switch (op.op) {
  case "I":
  try {
@@ -440,7 +457,7 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  await tx.execute(
  drizzleOrm.sql.raw(`
  DELETE FROM ${op.table_name}
- WHERE ${idColumn} = '${op.row_id}'
+ WHERE ${tableIdColumn} = '${op.row_id}'
  `)
  );
  } catch (error) {
@@ -480,7 +497,9 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  );
  }
  const rowValue = typeof op.row_value === "string" ? JSON.parse(op.row_value) : op.row_value;
- const nonIdKeys = Object.keys(rowValue).filter((k) => k !== idColumn);
+ const nonIdKeys = Object.keys(rowValue).filter(
+ (k) => k !== tableIdColumn
+ );
  const fields = nonIdKeys.map((c) => `${c} = prev.${c}`).join(", ");
  const query = drizzleOrm.sql.raw(`
  UPDATE ${op.table_name}
@@ -488,7 +507,7 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  FROM (
  SELECT * FROM json_populate_record(null::${op.table_name}, '${JSON.stringify(op.row_value)}'::json)
  ) as prev
- WHERE ${op.table_name}.${idColumn} = '${op.row_id}'
+ WHERE ${op.table_name}.${tableIdColumn} = '${op.row_id}'
  `);
  await tx.execute(query);
  } catch (error) {
@@ -566,8 +585,8 @@ function drizzleStorage({
  db,
  persistState: enablePersistence = true,
  indexerName: identifier = "default",
- schema,
- idColumn = "id",
+ schema: _schema,
+ idColumn,
  migrate: migrateOptions
  }) {
  return plugins.defineIndexerPlugin((indexer$1) => {
@@ -575,15 +594,30 @@ function drizzleStorage({
  let indexerId = "";
  const alwaysReindex = process.env["APIBARA_ALWAYS_REINDEX"] === "true";
  let prevFinality;
+ const schema = _schema ?? db._.schema ?? {};
+ const idColumnMap = {
+ "*": typeof idColumn === "string" ? idColumn : "id",
+ ...typeof idColumn === "object" ? idColumn : {}
+ };
  try {
- tableNames = Object.values(schema ?? db._.schema ?? {}).map(
- (table) => table.dbName
- );
+ tableNames = Object.values(schema).map((table) => table.dbName);
  } catch (error) {
  throw new DrizzleStorageError("Failed to get table names from schema", {
  cause: error
  });
  }
+ for (const table of Object.values(schema)) {
+ const columns = table.columns;
+ const tableIdColumn = getIdColumnForTable(table.dbName, idColumnMap);
+ const columnExists = Object.values(columns).some(
+ (column) => column.name === tableIdColumn
+ );
+ if (!columnExists) {
+ throw new DrizzleStorageError(
+ `Column \`"${tableIdColumn}"\` does not exist in table \`"${table.dbName}"\`. Make sure the table has the specified column or provide a valid \`idColumn\` mapping to \`drizzleStorage\`.`
+ );
+ }
+ }
  indexer$1.hooks.hook("run:before", async () => {
  const internalContext = plugins$1.useInternalContext();
  const context = indexer.useIndexerContext();
@@ -591,20 +625,9 @@ function drizzleStorage({
  context[constants.DRIZZLE_STORAGE_DB_PROPERTY] = db;
  const { indexerName: indexerFileName, availableIndexers } = internalContext;
  indexerId = internal.generateIndexerId(indexerFileName, identifier);
- if (alwaysReindex) {
- logger.warn(
- `Reindexing: Deleting all data from tables - ${tableNames.join(", ")}`
- );
- await withTransaction(db, async (tx) => {
- await cleanupStorage(tx, tableNames, indexerId);
- if (enablePersistence) {
- await resetPersistence({ tx, indexerId });
- }
- logger.success("Tables have been cleaned up for reindexing");
- });
- }
  let retries = 0;
  let migrationsApplied = false;
+ let cleanupApplied = false;
  while (retries <= MAX_RETRIES) {
  try {
  if (migrateOptions && !migrationsApplied) {
@@ -617,6 +640,17 @@ function drizzleStorage({
  if (enablePersistence) {
  await initializePersistentState(tx);
  }
+ if (alwaysReindex && !cleanupApplied) {
+ logger.warn(
+ `Reindexing: Deleting all data from tables - ${tableNames.join(", ")}`
+ );
+ await cleanupStorage(tx, tableNames, indexerId);
+ if (enablePersistence) {
+ await resetPersistence({ tx, indexerId });
+ }
+ cleanupApplied = true;
+ logger.success("Tables have been cleaned up for reindexing");
+ }
  });
  break;
  } catch (error) {
@@ -659,7 +693,7 @@ function drizzleStorage({
  return;
  }
  await withTransaction(db, async (tx) => {
- await invalidate(tx, cursor, idColumn, indexerId);
+ await invalidate(tx, cursor, idColumnMap, indexerId);
  if (enablePersistence) {
  await invalidateState({ tx, cursor, indexerId });
  }
@@ -697,7 +731,7 @@ function drizzleStorage({
  throw new DrizzleStorageError("Invalidate Cursor is undefined");
  }
  await withTransaction(db, async (tx) => {
- await invalidate(tx, cursor, idColumn, indexerId);
+ await invalidate(tx, cursor, idColumnMap, indexerId);
  if (enablePersistence) {
  await invalidateState({ tx, cursor, indexerId });
  }
@@ -713,14 +747,14 @@ function drizzleStorage({
  await withTransaction(db, async (tx) => {
  context[constants.DRIZZLE_PROPERTY] = { db: tx };
  if (prevFinality === "pending") {
- await invalidate(tx, cursor, idColumn, indexerId);
+ await invalidate(tx, cursor, idColumnMap, indexerId);
  }
  if (finality !== "finalized") {
  await registerTriggers(
  tx,
  tableNames,
  endCursor,
- idColumn,
+ idColumnMap,
  indexerId
  );
  }
package/dist/index.d.cts CHANGED
@@ -113,6 +113,13 @@ type MigrateOptions = MigrationConfig;
  */
  declare function migrate<TSchema extends Record<string, unknown>>(db: PgliteDatabase<TSchema> | NodePgDatabase<TSchema>, options: MigrateOptions): Promise<void>;

+ interface IdColumnMap extends Record<string, string> {
+ /**
+ * Wildcard mapping for all tables.
+ */
+ "*": string;
+ }
+
  type DrizzleStorage<TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>> = {
  db: PgTransaction<TQueryResult, TFullSchema, TSchema>;
  };
@@ -135,9 +142,32 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  */
  schema?: Record<string, unknown>;
  /**
- * The column to use as the id. Defaults to 'id'.
+ * The column to use as the primary identifier for each table.
+ *
+ * This identifier is used for tracking changes during reorgs and rollbacks.
+ *
+ * Can be specified in two ways:
+ *
+ * 1. As a single string that applies to all tables:
+ * ```ts
+ * idColumn: "_id" // Uses "_id" column for all tables
+ * ```
+ *
+ * 2. As an object mapping table names to their ID columns:
+ * ```ts
+ * idColumn: {
+ * transfers: "transaction_hash", // Use "transaction_hash" for transfers table
+ * blocks: "block_number", // Use "block_number" for blocks table
+ * "*": "_id" // Use "_id" for all other tables | defaults to "id"
+ * }
+ * ```
+ *
+ * The special "*" key acts as a fallback for any tables not explicitly mapped.
+ *
+ * @default "id"
+ * @type {string | Partial<IdColumnMap>}
  */
- idColumn?: string;
+ idColumn?: string | Partial<IdColumnMap>;
  /**
  * The options for the database migration. When provided, the database will automatically run migrations before the indexer runs.
  */
@@ -154,6 +184,6 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  * @param options.idColumn - The column to use as the id. Defaults to 'id'.
  * @param options.migrate - The options for the database migration. when provided, the database will automatically run migrations before the indexer runs.
  */
- declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema, idColumn, migrate: migrateOptions, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
+ declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema: _schema, idColumn, migrate: migrateOptions, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;

- export { type Database, type DrizzleOptions, type DrizzleStorage, type DrizzleStorageOptions, type MigrateOptions, type NodePgDatabase, type NodePgDrizzleOptions, type PgliteDatabase, type PgliteDrizzleOptions, drizzle, drizzleStorage, migrate, useDrizzleStorage };
+ export { type Database, type DrizzleOptions, type DrizzleStorage, type DrizzleStorageOptions, type IdColumnMap, type MigrateOptions, type NodePgDatabase, type NodePgDrizzleOptions, type PgliteDatabase, type PgliteDrizzleOptions, drizzle, drizzleStorage, migrate, useDrizzleStorage };
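The declarations above add the `IdColumnMap` type and widen `idColumn` to accept a per-table mapping. A minimal usage sketch, assuming a hypothetical `transfers` table keyed by `transaction_hash` and an in-memory PGlite database (the table and column names are illustrative, not part of the package; `@electric-sql/pglite` must be installed since it is now an optional peer dependency):

```ts
import { pgTable, text, bigint } from "drizzle-orm/pg-core";
import { drizzle, drizzleStorage } from "@apibara/plugin-drizzle";

// Hypothetical schema used only for illustration.
const transfers = pgTable("transfers", {
  transaction_hash: text("transaction_hash").primaryKey(),
  amount: bigint("amount", { mode: "number" }),
});

const db = drizzle({
  type: "pglite",
  connectionString: "memory://pglite",
  schema: { transfers },
});

// Per-table id mapping: "transfers" rolls back by transaction_hash,
// every other table falls back to the "*" entry.
const storagePlugin = drizzleStorage({
  db,
  idColumn: {
    transfers: "transaction_hash",
    "*": "_id",
  },
});
```

At startup the plugin now validates that each mapped column actually exists in its table and throws a `DrizzleStorageError` otherwise.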
package/dist/index.d.mts CHANGED
@@ -113,6 +113,13 @@ type MigrateOptions = MigrationConfig;
  */
  declare function migrate<TSchema extends Record<string, unknown>>(db: PgliteDatabase<TSchema> | NodePgDatabase<TSchema>, options: MigrateOptions): Promise<void>;

+ interface IdColumnMap extends Record<string, string> {
+ /**
+ * Wildcard mapping for all tables.
+ */
+ "*": string;
+ }
+
  type DrizzleStorage<TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>> = {
  db: PgTransaction<TQueryResult, TFullSchema, TSchema>;
  };
@@ -135,9 +142,32 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  */
  schema?: Record<string, unknown>;
  /**
- * The column to use as the id. Defaults to 'id'.
+ * The column to use as the primary identifier for each table.
+ *
+ * This identifier is used for tracking changes during reorgs and rollbacks.
+ *
+ * Can be specified in two ways:
+ *
+ * 1. As a single string that applies to all tables:
+ * ```ts
+ * idColumn: "_id" // Uses "_id" column for all tables
+ * ```
+ *
+ * 2. As an object mapping table names to their ID columns:
+ * ```ts
+ * idColumn: {
+ * transfers: "transaction_hash", // Use "transaction_hash" for transfers table
+ * blocks: "block_number", // Use "block_number" for blocks table
+ * "*": "_id" // Use "_id" for all other tables | defaults to "id"
+ * }
+ * ```
+ *
+ * The special "*" key acts as a fallback for any tables not explicitly mapped.
+ *
+ * @default "id"
+ * @type {string | Partial<IdColumnMap>}
  */
- idColumn?: string;
+ idColumn?: string | Partial<IdColumnMap>;
  /**
  * The options for the database migration. When provided, the database will automatically run migrations before the indexer runs.
  */
@@ -154,6 +184,6 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  * @param options.idColumn - The column to use as the id. Defaults to 'id'.
  * @param options.migrate - The options for the database migration. when provided, the database will automatically run migrations before the indexer runs.
  */
- declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema, idColumn, migrate: migrateOptions, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
+ declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema: _schema, idColumn, migrate: migrateOptions, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;

- export { type Database, type DrizzleOptions, type DrizzleStorage, type DrizzleStorageOptions, type MigrateOptions, type NodePgDatabase, type NodePgDrizzleOptions, type PgliteDatabase, type PgliteDrizzleOptions, drizzle, drizzleStorage, migrate, useDrizzleStorage };
+ export { type Database, type DrizzleOptions, type DrizzleStorage, type DrizzleStorageOptions, type IdColumnMap, type MigrateOptions, type NodePgDatabase, type NodePgDrizzleOptions, type PgliteDatabase, type PgliteDrizzleOptions, drizzle, drizzleStorage, migrate, useDrizzleStorage };
package/dist/index.d.ts CHANGED
@@ -113,6 +113,13 @@ type MigrateOptions = MigrationConfig;
  */
  declare function migrate<TSchema extends Record<string, unknown>>(db: PgliteDatabase<TSchema> | NodePgDatabase<TSchema>, options: MigrateOptions): Promise<void>;

+ interface IdColumnMap extends Record<string, string> {
+ /**
+ * Wildcard mapping for all tables.
+ */
+ "*": string;
+ }
+
  type DrizzleStorage<TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>> = {
  db: PgTransaction<TQueryResult, TFullSchema, TSchema>;
  };
@@ -135,9 +142,32 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  */
  schema?: Record<string, unknown>;
  /**
- * The column to use as the id. Defaults to 'id'.
+ * The column to use as the primary identifier for each table.
+ *
+ * This identifier is used for tracking changes during reorgs and rollbacks.
+ *
+ * Can be specified in two ways:
+ *
+ * 1. As a single string that applies to all tables:
+ * ```ts
+ * idColumn: "_id" // Uses "_id" column for all tables
+ * ```
+ *
+ * 2. As an object mapping table names to their ID columns:
+ * ```ts
+ * idColumn: {
+ * transfers: "transaction_hash", // Use "transaction_hash" for transfers table
+ * blocks: "block_number", // Use "block_number" for blocks table
+ * "*": "_id" // Use "_id" for all other tables | defaults to "id"
+ * }
+ * ```
+ *
+ * The special "*" key acts as a fallback for any tables not explicitly mapped.
+ *
+ * @default "id"
+ * @type {string | Partial<IdColumnMap>}
  */
- idColumn?: string;
+ idColumn?: string | Partial<IdColumnMap>;
  /**
  * The options for the database migration. When provided, the database will automatically run migrations before the indexer runs.
  */
@@ -154,6 +184,6 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  * @param options.idColumn - The column to use as the id. Defaults to 'id'.
  * @param options.migrate - The options for the database migration. when provided, the database will automatically run migrations before the indexer runs.
  */
- declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema, idColumn, migrate: migrateOptions, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
+ declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema: _schema, idColumn, migrate: migrateOptions, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;

- export { type Database, type DrizzleOptions, type DrizzleStorage, type DrizzleStorageOptions, type MigrateOptions, type NodePgDatabase, type NodePgDrizzleOptions, type PgliteDatabase, type PgliteDrizzleOptions, drizzle, drizzleStorage, migrate, useDrizzleStorage };
+ export { type Database, type DrizzleOptions, type DrizzleStorage, type DrizzleStorageOptions, type IdColumnMap, type MigrateOptions, type NodePgDatabase, type NodePgDrizzleOptions, type PgliteDatabase, type PgliteDrizzleOptions, drizzle, drizzleStorage, migrate, useDrizzleStorage };
package/dist/index.mjs CHANGED
@@ -3,14 +3,8 @@ import { defineIndexerPlugin, useLogger } from '@apibara/indexer/plugins';
  import { generateIndexerId } from '@apibara/indexer/internal';
  import { useInternalContext } from '@apibara/indexer/internal/plugins';
  import { S as SCHEMA_NAME, D as DRIZZLE_PROPERTY, a as DRIZZLE_STORAGE_DB_PROPERTY } from './shared/plugin-drizzle.2d226351.mjs';
- import { PGlite } from '@electric-sql/pglite';
- import { drizzle as drizzle$1 } from 'drizzle-orm/node-postgres';
- import { migrate as migrate$2 } from 'drizzle-orm/node-postgres/migrator';
- import { drizzle as drizzle$2 } from 'drizzle-orm/pglite';
- import { migrate as migrate$1 } from 'drizzle-orm/pglite/migrator';
- import pg from 'pg';
+ import { entityKind, sql, eq, and, isNull, gt, lt } from 'drizzle-orm';
  import { normalizeCursor } from '@apibara/protocol';
- import { sql, eq, and, isNull, gt, lt } from 'drizzle-orm';
  import { pgSchema, text, integer, primaryKey, serial, char, jsonb } from 'drizzle-orm/pg-core';

  class DrizzleStorageError extends Error {
@@ -40,6 +34,12 @@ function serialize(obj) {
  function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
  }
+ const getIdColumnForTable = (tableName, idColumn) => {
+ if (idColumn[tableName]) {
+ return idColumn[tableName];
+ }
+ return idColumn["*"];
+ };

  function drizzle(options) {
  const {
@@ -49,15 +49,9 @@ function drizzle(options) {
  config,
  poolConfig
  } = options ?? {};
- if (isPostgresConnectionString(connectionString) || type === "node-postgres") {
- const pool = new pg.Pool({
- connectionString,
- ...poolConfig || {}
- });
- return drizzle$1(pool, { schema, ...config || {} });
- }
- if (type === "pglite") {
- return drizzle$2({
+ if (isPgliteConnectionString(connectionString) && type === "pglite") {
+ const { drizzle: drizzlePGLite } = require("drizzle-orm/pglite");
+ return drizzlePGLite({
  schema,
  connection: {
  dataDir: connectionString || "memory://pglite"
@@ -65,15 +59,25 @@ function drizzle(options) {
  ...config || {}
  });
  }
- throw new Error("Invalid database type");
+ const { Pool } = require("pg");
+ const { drizzle: drizzleNode } = require("drizzle-orm/node-postgres");
+ const pool = new Pool({
+ connectionString,
+ ...poolConfig || {}
+ });
+ return drizzleNode(pool, { schema, ...config || {} });
  }
  async function migrate(db, options) {
- const isPglite = !!("$client" in db && db.$client instanceof PGlite);
+ const isPglite = isDrizzleKind(db, "PgliteDatabase");
  try {
  if (isPglite) {
- await migrate$1(db, options);
+ const { migrate: migratePGLite } = require("drizzle-orm/pglite/migrator");
+ await migratePGLite(db, options);
  } else {
- await migrate$2(db, options);
+ const {
+ migrate: migrateNode
+ } = require("drizzle-orm/node-postgres/migrator");
+ await migrateNode(db, options);
  }
  } catch (error) {
  throw new DrizzleStorageError(
@@ -84,8 +88,23 @@ async function migrate(db, options) {
  );
  }
  }
- function isPostgresConnectionString(conn) {
- return conn.startsWith("postgres://") || conn.startsWith("postgresql://");
+ function isPgliteConnectionString(conn) {
+ return conn.startsWith("memory://") || conn.startsWith("file://") || conn.startsWith("idb://");
+ }
+ function isDrizzleKind(value, entityKindValue) {
+ if (!value || typeof value !== "object") {
+ return false;
+ }
+ let cls = Object.getPrototypeOf(value).constructor;
+ if (cls) {
+ while (cls) {
+ if (entityKind in cls && cls[entityKind] === entityKindValue) {
+ return true;
+ }
+ cls = Object.getPrototypeOf(cls);
+ }
+ }
+ return false;
  }

  const CHECKPOINTS_TABLE_NAME = "checkpoints";
@@ -369,9 +388,10 @@ async function initializeReorgRollbackTable(tx, indexerId) {
  );
  }
  }
- async function registerTriggers(tx, tables, endCursor, idColumn, indexerId) {
+ async function registerTriggers(tx, tables, endCursor, idColumnMap, indexerId) {
  try {
  for (const table of tables) {
+ const tableIdColumn = getIdColumnForTable(table, idColumnMap);
  await tx.execute(
  sql.raw(
  `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`
@@ -382,7 +402,7 @@ async function registerTriggers(tx, tables, endCursor, idColumn, indexerId) {
  CREATE CONSTRAINT TRIGGER ${getReorgTriggerName(table, indexerId)}
  AFTER INSERT OR UPDATE OR DELETE ON ${table}
  DEFERRABLE INITIALLY DEFERRED
- FOR EACH ROW EXECUTE FUNCTION ${SCHEMA_NAME}.reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`}, '${indexerId}');
+ FOR EACH ROW EXECUTE FUNCTION ${SCHEMA_NAME}.reorg_checkpoint('${tableIdColumn}', ${Number(endCursor.orderKey)}, '${indexerId}');
  `)
  );
  }
@@ -407,7 +427,7 @@ async function removeTriggers(db, tables, indexerId) {
  });
  }
  }
- async function invalidate(tx, cursor, idColumn, indexerId) {
+ async function invalidate(tx, cursor, idColumnMap, indexerId) {
  const { rows: result } = await tx.execute(
  sql.raw(`
  WITH deleted AS (
@@ -425,6 +445,7 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  );
  }
  for (const op of result) {
+ const tableIdColumn = getIdColumnForTable(op.table_name, idColumnMap);
  switch (op.op) {
  case "I":
  try {
@@ -434,7 +455,7 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  await tx.execute(
  sql.raw(`
  DELETE FROM ${op.table_name}
- WHERE ${idColumn} = '${op.row_id}'
+ WHERE ${tableIdColumn} = '${op.row_id}'
  `)
  );
  } catch (error) {
@@ -474,7 +495,9 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  );
  }
  const rowValue = typeof op.row_value === "string" ? JSON.parse(op.row_value) : op.row_value;
- const nonIdKeys = Object.keys(rowValue).filter((k) => k !== idColumn);
+ const nonIdKeys = Object.keys(rowValue).filter(
+ (k) => k !== tableIdColumn
+ );
  const fields = nonIdKeys.map((c) => `${c} = prev.${c}`).join(", ");
  const query = sql.raw(`
  UPDATE ${op.table_name}
@@ -482,7 +505,7 @@ async function invalidate(tx, cursor, idColumn, indexerId) {
  FROM (
  SELECT * FROM json_populate_record(null::${op.table_name}, '${JSON.stringify(op.row_value)}'::json)
  ) as prev
- WHERE ${op.table_name}.${idColumn} = '${op.row_id}'
+ WHERE ${op.table_name}.${tableIdColumn} = '${op.row_id}'
  `);
  await tx.execute(query);
  } catch (error) {
@@ -560,8 +583,8 @@ function drizzleStorage({
  db,
  persistState: enablePersistence = true,
  indexerName: identifier = "default",
- schema,
- idColumn = "id",
+ schema: _schema,
+ idColumn,
  migrate: migrateOptions
  }) {
  return defineIndexerPlugin((indexer) => {
@@ -569,15 +592,30 @@ function drizzleStorage({
  let indexerId = "";
  const alwaysReindex = process.env["APIBARA_ALWAYS_REINDEX"] === "true";
  let prevFinality;
+ const schema = _schema ?? db._.schema ?? {};
+ const idColumnMap = {
+ "*": typeof idColumn === "string" ? idColumn : "id",
+ ...typeof idColumn === "object" ? idColumn : {}
+ };
  try {
- tableNames = Object.values(schema ?? db._.schema ?? {}).map(
- (table) => table.dbName
- );
+ tableNames = Object.values(schema).map((table) => table.dbName);
  } catch (error) {
  throw new DrizzleStorageError("Failed to get table names from schema", {
  cause: error
  });
  }
+ for (const table of Object.values(schema)) {
+ const columns = table.columns;
+ const tableIdColumn = getIdColumnForTable(table.dbName, idColumnMap);
+ const columnExists = Object.values(columns).some(
+ (column) => column.name === tableIdColumn
+ );
+ if (!columnExists) {
+ throw new DrizzleStorageError(
+ `Column \`"${tableIdColumn}"\` does not exist in table \`"${table.dbName}"\`. Make sure the table has the specified column or provide a valid \`idColumn\` mapping to \`drizzleStorage\`.`
+ );
+ }
+ }
  indexer.hooks.hook("run:before", async () => {
  const internalContext = useInternalContext();
  const context = useIndexerContext();
@@ -585,20 +623,9 @@ function drizzleStorage({
  context[DRIZZLE_STORAGE_DB_PROPERTY] = db;
  const { indexerName: indexerFileName, availableIndexers } = internalContext;
  indexerId = generateIndexerId(indexerFileName, identifier);
- if (alwaysReindex) {
- logger.warn(
- `Reindexing: Deleting all data from tables - ${tableNames.join(", ")}`
- );
- await withTransaction(db, async (tx) => {
- await cleanupStorage(tx, tableNames, indexerId);
- if (enablePersistence) {
- await resetPersistence({ tx, indexerId });
- }
- logger.success("Tables have been cleaned up for reindexing");
- });
- }
  let retries = 0;
  let migrationsApplied = false;
+ let cleanupApplied = false;
  while (retries <= MAX_RETRIES) {
  try {
  if (migrateOptions && !migrationsApplied) {
@@ -611,6 +638,17 @@ function drizzleStorage({
  if (enablePersistence) {
  await initializePersistentState(tx);
  }
+ if (alwaysReindex && !cleanupApplied) {
+ logger.warn(
+ `Reindexing: Deleting all data from tables - ${tableNames.join(", ")}`
+ );
+ await cleanupStorage(tx, tableNames, indexerId);
+ if (enablePersistence) {
+ await resetPersistence({ tx, indexerId });
+ }
+ cleanupApplied = true;
+ logger.success("Tables have been cleaned up for reindexing");
+ }
  });
  break;
  } catch (error) {
@@ -653,7 +691,7 @@ function drizzleStorage({
  return;
  }
  await withTransaction(db, async (tx) => {
- await invalidate(tx, cursor, idColumn, indexerId);
+ await invalidate(tx, cursor, idColumnMap, indexerId);
  if (enablePersistence) {
  await invalidateState({ tx, cursor, indexerId });
  }
@@ -691,7 +729,7 @@ function drizzleStorage({
  throw new DrizzleStorageError("Invalidate Cursor is undefined");
  }
  await withTransaction(db, async (tx) => {
- await invalidate(tx, cursor, idColumn, indexerId);
+ await invalidate(tx, cursor, idColumnMap, indexerId);
  if (enablePersistence) {
  await invalidateState({ tx, cursor, indexerId });
  }
@@ -707,14 +745,14 @@ function drizzleStorage({
  await withTransaction(db, async (tx) => {
  context[DRIZZLE_PROPERTY] = { db: tx };
  if (prevFinality === "pending") {
- await invalidate(tx, cursor, idColumn, indexerId);
+ await invalidate(tx, cursor, idColumnMap, indexerId);
  }
  if (finality !== "finalized") {
  await registerTriggers(
  tx,
  tableNames,
  endCursor,
- idColumn,
+ idColumnMap,
  indexerId
  );
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@apibara/plugin-drizzle",
- "version": "2.1.0-beta.20",
+ "version": "2.1.0-beta.22",
  "type": "module",
  "files": [
  "dist",
@@ -32,9 +32,18 @@
  "test:ci": "vitest run"
  },
  "peerDependencies": {
+ "@electric-sql/pglite": ">=0.2.0",
  "drizzle-orm": "<1",
  "pg": ">=8"
  },
+ "peerDependenciesMeta": {
+ "@electric-sql/pglite": {
+ "optional": true
+ },
+ "pg": {
+ "optional": true
+ }
+ },
  "devDependencies": {
  "@electric-sql/pglite": "^0.2.17",
  "@types/node": "^20.14.0",
@@ -45,8 +54,8 @@
  "vitest": "^1.6.0"
  },
  "dependencies": {
- "@apibara/indexer": "2.1.0-beta.20",
- "@apibara/protocol": "2.1.0-beta.20",
+ "@apibara/indexer": "2.1.0-beta.22",
+ "@apibara/protocol": "2.1.0-beta.22",
  "postgres-range": "^1.1.4"
  }
  }
package/src/helper.ts CHANGED
@@ -1,18 +1,10 @@
- import { PGlite, type PGliteOptions } from "@electric-sql/pglite";
+ import type { PGlite, PGliteOptions } from "@electric-sql/pglite";
  import type { DrizzleConfig } from "drizzle-orm";
+ import { entityKind } from "drizzle-orm";
  import type { MigrationConfig } from "drizzle-orm/migrator";
- import {
- type NodePgDatabase as OriginalNodePgDatabase,
- drizzle as drizzleNode,
- } from "drizzle-orm/node-postgres";
- import { migrate as migrateNode } from "drizzle-orm/node-postgres/migrator";
- import {} from "drizzle-orm/pg-core";
- import {
- type PgliteDatabase as OriginalPgliteDatabase,
- drizzle as drizzlePGLite,
- } from "drizzle-orm/pglite";
- import { migrate as migratePGLite } from "drizzle-orm/pglite/migrator";
- import pg from "pg";
+ import type { NodePgDatabase as OriginalNodePgDatabase } from "drizzle-orm/node-postgres";
+ import type { PgliteDatabase as OriginalPgliteDatabase } from "drizzle-orm/pglite";
+ import type pg from "pg";
  import { DrizzleStorageError } from "./utils";

  /**
@@ -129,21 +121,9 @@ export function drizzle<
  poolConfig,
  } = options ?? {};

- if (
- isPostgresConnectionString(connectionString) ||
- type === "node-postgres"
- ) {
- const pool = new pg.Pool({
- connectionString,
- ...(poolConfig || {}),
- });
- return drizzleNode(pool, { schema, ...(config || {}) }) as Database<
- TOptions,
- TSchema
- >;
- }
+ if (isPgliteConnectionString(connectionString) && type === "pglite") {
+ const { drizzle: drizzlePGLite } = require("drizzle-orm/pglite");

- if (type === "pglite") {
  return drizzlePGLite({
  schema: schema as TSchema,
  connection: {
@@ -153,7 +133,16 @@
  }) as Database<TOptions, TSchema>;
  }

- throw new Error("Invalid database type");
+ const { Pool } = require("pg");
+ const { drizzle: drizzleNode } = require("drizzle-orm/node-postgres");
+ const pool = new Pool({
+ connectionString,
+ ...(poolConfig || {}),
+ });
+ return drizzleNode(pool, { schema, ...(config || {}) }) as Database<
+ TOptions,
+ TSchema
+ >;
  }

  /**
@@ -179,12 +168,16 @@ export async function migrate<TSchema extends Record<string, unknown>>(
  db: PgliteDatabase<TSchema> | NodePgDatabase<TSchema>,
  options: MigrateOptions,
  ) {
- const isPglite = !!("$client" in db && db.$client instanceof PGlite);
+ const isPglite = isDrizzleKind(db, "PgliteDatabase");

  try {
  if (isPglite) {
+ const { migrate: migratePGLite } = require("drizzle-orm/pglite/migrator");
  await migratePGLite(db as PgliteDatabase<TSchema>, options);
  } else {
+ const {
+ migrate: migrateNode,
+ } = require("drizzle-orm/node-postgres/migrator");
  await migrateNode(db as NodePgDatabase<TSchema>, options);
  }
  } catch (error) {
@@ -197,6 +190,30 @@ export async function migrate<TSchema extends Record<string, unknown>>(
  }
  }

- function isPostgresConnectionString(conn: string) {
- return conn.startsWith("postgres://") || conn.startsWith("postgresql://");
+ function isPgliteConnectionString(conn: string) {
+ return (
+ conn.startsWith("memory://") ||
+ conn.startsWith("file://") ||
+ conn.startsWith("idb://")
+ );
+ }
+
+ function isDrizzleKind(value: unknown, entityKindValue: string) {
+ if (!value || typeof value !== "object") {
+ return false;
+ }
+ // https://github.com/drizzle-team/drizzle-orm/blob/f39f885779800982e90dd3c89aba6df3217a6fd2/drizzle-orm/src/entity.ts#L29-L41
+ let cls = Object.getPrototypeOf(value).constructor;
+ if (cls) {
+ // Traverse the prototype chain to find the entityKind
+ while (cls) {
+ // https://github.com/drizzle-team/drizzle-orm/blob/f39f885779800982e90dd3c89aba6df3217a6fd2/drizzle-orm/src/pglite/driver.ts#L41
+ if (entityKind in cls && cls[entityKind] === entityKindValue) {
+ return true;
+ }
+ cls = Object.getPrototypeOf(cls);
+ }
+ }
+
+ return false;
  }
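As refactored above, `drizzle()` now loads its driver lazily: `drizzle-orm/pglite` only when the connection string is a PGlite target (`memory://`, `file://`, or `idb://`) and `type` is `"pglite"`, and `pg` plus `drizzle-orm/node-postgres` in every other case (the old `"Invalid database type"` error is gone; node-postgres is the fallback). A usage sketch, assuming a hypothetical local schema module and that the matching optional peer dependency (`@electric-sql/pglite` or `pg`) is installed:

```ts
import { drizzle } from "@apibara/plugin-drizzle";
import * as schema from "./schema"; // hypothetical schema module

// PGlite path: only "drizzle-orm/pglite" (and @electric-sql/pglite) are required at runtime.
const localDb = drizzle({
  type: "pglite",
  connectionString: "memory://pglite",
  schema,
});

// node-postgres path: any other connection string falls through to pg.Pool.
const remoteDb = drizzle({
  connectionString: "postgres://user:pass@localhost:5432/indexer",
  schema,
  poolConfig: { max: 10 },
});
```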
package/src/index.ts CHANGED
@@ -32,10 +32,18 @@ import {
  registerTriggers,
  removeTriggers,
  } from "./storage";
- import { DrizzleStorageError, sleep, withTransaction } from "./utils";
+ import {
+ DrizzleStorageError,
+ type IdColumnMap,
+ getIdColumnForTable,
+ sleep,
+ withTransaction,
+ } from "./utils";

  export * from "./helper";

+ export type { IdColumnMap };
+
  const MAX_RETRIES = 5;

  export type DrizzleStorage<
@@ -89,9 +97,32 @@ export interface DrizzleStorageOptions<
  */
  schema?: Record<string, unknown>;
  /**
- * The column to use as the id. Defaults to 'id'.
+ * The column to use as the primary identifier for each table.
+ *
+ * This identifier is used for tracking changes during reorgs and rollbacks.
+ *
+ * Can be specified in two ways:
+ *
+ * 1. As a single string that applies to all tables:
+ * ```ts
+ * idColumn: "_id" // Uses "_id" column for all tables
+ * ```
+ *
+ * 2. As an object mapping table names to their ID columns:
+ * ```ts
+ * idColumn: {
+ * transfers: "transaction_hash", // Use "transaction_hash" for transfers table
+ * blocks: "block_number", // Use "block_number" for blocks table
+ * "*": "_id" // Use "_id" for all other tables | defaults to "id"
+ * }
+ * ```
+ *
+ * The special "*" key acts as a fallback for any tables not explicitly mapped.
+ *
+ * @default "id"
+ * @type {string | Partial<IdColumnMap>}
  */
- idColumn?: string;
+ idColumn?: string | Partial<IdColumnMap>;
  /**
  * The options for the database migration. When provided, the database will automatically run migrations before the indexer runs.
  */
@@ -120,8 +151,8 @@ export function drizzleStorage<
  db,
  persistState: enablePersistence = true,
  indexerName: identifier = "default",
- schema,
- idColumn = "id",
+ schema: _schema,
+ idColumn,
  migrate: migrateOptions,
  }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>) {
  return defineIndexerPlugin<TFilter, TBlock>((indexer) => {
@@ -129,17 +160,37 @@ export function drizzleStorage<
  let indexerId = "";
  const alwaysReindex = process.env["APIBARA_ALWAYS_REINDEX"] === "true";
  let prevFinality: DataFinality | undefined;
+ const schema: TSchema = (_schema as TSchema) ?? db._.schema ?? {};
+ const idColumnMap: IdColumnMap = {
+ "*": typeof idColumn === "string" ? idColumn : "id",
+ ...(typeof idColumn === "object" ? idColumn : {}),
+ };

  try {
- tableNames = Object.values((schema as TSchema) ?? db._.schema ?? {}).map(
- (table) => table.dbName,
- );
+ tableNames = Object.values(schema).map((table) => table.dbName);
  } catch (error) {
  throw new DrizzleStorageError("Failed to get table names from schema", {
  cause: error,
  });
  }

+ // Check if specified idColumn exists in all the tables in schema
+ for (const table of Object.values(schema)) {
+ const columns = table.columns;
+ const tableIdColumn = getIdColumnForTable(table.dbName, idColumnMap);
+
+ const columnExists = Object.values(columns).some(
+ (column) => column.name === tableIdColumn,
+ );
+
+ if (!columnExists) {
+ throw new DrizzleStorageError(
+ `Column \`"${tableIdColumn}"\` does not exist in table \`"${table.dbName}"\`. ` +
+ "Make sure the table has the specified column or provide a valid `idColumn` mapping to `drizzleStorage`.",
+ );
+ }
+ }
+
  indexer.hooks.hook("run:before", async () => {
  const internalContext = useInternalContext();
  const context = useIndexerContext();
@@ -153,25 +204,11 @@ export function drizzleStorage<

  indexerId = generateIndexerId(indexerFileName, identifier);

- if (alwaysReindex) {
- logger.warn(
- `Reindexing: Deleting all data from tables - ${tableNames.join(", ")}`,
- );
- await withTransaction(db, async (tx) => {
- await cleanupStorage(tx, tableNames, indexerId);
-
- if (enablePersistence) {
- await resetPersistence({ tx, indexerId });
- }
-
- logger.success("Tables have been cleaned up for reindexing");
- });
- }
-
  let retries = 0;

  // incase the migrations are already applied, we don't want to run them again
  let migrationsApplied = false;
+ let cleanupApplied = false;

  while (retries <= MAX_RETRIES) {
  try {
@@ -186,6 +223,22 @@
  if (enablePersistence) {
  await initializePersistentState(tx);
  }
+
+ if (alwaysReindex && !cleanupApplied) {
+ logger.warn(
+ `Reindexing: Deleting all data from tables - ${tableNames.join(", ")}`,
+ );
+
+ await cleanupStorage(tx, tableNames, indexerId);
+
+ if (enablePersistence) {
+ await resetPersistence({ tx, indexerId });
+ }
+
+ cleanupApplied = true;
+
+ logger.success("Tables have been cleaned up for reindexing");
+ }
  });
  break;
  } catch (error) {
@@ -239,7 +292,8 @@
  }

  await withTransaction(db, async (tx) => {
- await invalidate(tx, cursor, idColumn, indexerId);
+ // Use the appropriate idColumn for each table when calling invalidate
+ await invalidate(tx, cursor, idColumnMap, indexerId);

  if (enablePersistence) {
  await invalidateState({ tx, cursor, indexerId });
@@ -289,7 +343,8 @@
  }

  await withTransaction(db, async (tx) => {
- await invalidate(tx, cursor, idColumn, indexerId);
+ // Use the appropriate idColumn for each table when calling invalidate
+ await invalidate(tx, cursor, idColumnMap, indexerId);

  if (enablePersistence) {
  await invalidateState({ tx, cursor, indexerId });
@@ -319,7 +374,7 @@

  if (prevFinality === "pending") {
  // invalidate if previous block's finality was "pending"
- await invalidate(tx, cursor, idColumn, indexerId);
+ await invalidate(tx, cursor, idColumnMap, indexerId);
  }

  if (finality !== "finalized") {
@@ -327,7 +382,7 @@
  tx,
  tableNames,
  endCursor,
- idColumn,
+ idColumnMap,
  indexerId,
  );
  }
package/src/storage.ts CHANGED
@@ -16,7 +16,11 @@ import {
  text,
  } from "drizzle-orm/pg-core";
  import { SCHEMA_NAME } from "./constants";
- import { DrizzleStorageError } from "./utils";
+ import {
+ DrizzleStorageError,
+ type IdColumnMap,
+ getIdColumnForTable,
+ } from "./utils";

  const ROLLBACK_TABLE_NAME = "reorg_rollback";

@@ -125,11 +129,14 @@ export async function registerTriggers<
  tx: PgTransaction<TQueryResult, TFullSchema, TSchema>,
  tables: string[],
  endCursor: Cursor,
- idColumn: string,
+ idColumnMap: IdColumnMap,
  indexerId: string,
  ) {
  try {
  for (const table of tables) {
+ // Determine the column ID for this specific table
+ const tableIdColumn = getIdColumnForTable(table, idColumnMap);
+
  await tx.execute(
  sql.raw(
  `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`,
@@ -140,7 +147,7 @@
  CREATE CONSTRAINT TRIGGER ${getReorgTriggerName(table, indexerId)}
  AFTER INSERT OR UPDATE OR DELETE ON ${table}
  DEFERRABLE INITIALLY DEFERRED
- FOR EACH ROW EXECUTE FUNCTION ${SCHEMA_NAME}.reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`}, '${indexerId}');
+ FOR EACH ROW EXECUTE FUNCTION ${SCHEMA_NAME}.reorg_checkpoint('${tableIdColumn}', ${Number(endCursor.orderKey)}, '${indexerId}');
  `),
  );
  }
@@ -184,7 +191,7 @@ export async function invalidate<
  >(
  tx: PgTransaction<TQueryResult, TFullSchema, TSchema>,
  cursor: Cursor,
- idColumn: string,
+ idColumnMap: IdColumnMap,
  indexerId: string,
  ) {
  // Get and delete operations after cursor in one query, ordered by newest first
@@ -208,6 +215,9 @@

  // Process each operation in reverse order
  for (const op of result) {
+ // Determine the column ID for this specific table
+ const tableIdColumn = getIdColumnForTable(op.table_name, idColumnMap);
+
  switch (op.op) {
  case "I":
  try {
@@ -218,7 +228,7 @@
  await tx.execute(
  sql.raw(`
  DELETE FROM ${op.table_name}
- WHERE ${idColumn} = '${op.row_id}'
+ WHERE ${tableIdColumn} = '${op.row_id}'
  `),
  );
  } catch (error) {
@@ -271,7 +281,9 @@
  ? JSON.parse(op.row_value)
  : op.row_value;

- const nonIdKeys = Object.keys(rowValue).filter((k) => k !== idColumn);
+ const nonIdKeys = Object.keys(rowValue).filter(
+ (k) => k !== tableIdColumn,
+ );

  const fields = nonIdKeys.map((c) => `${c} = prev.${c}`).join(", ");

@@ -281,7 +293,7 @@
  FROM (
  SELECT * FROM json_populate_record(null::${op.table_name}, '${JSON.stringify(op.row_value)}'::json)
  ) as prev
- WHERE ${op.table_name}.${idColumn} = '${op.row_id}'
+ WHERE ${op.table_name}.${tableIdColumn} = '${op.row_id}'
  `);

  await tx.execute(query);
package/src/utils.ts CHANGED
@@ -48,3 +48,22 @@ export function serialize<T>(obj: T): string {
  export function sleep(ms: number) {
  return new Promise((resolve) => setTimeout(resolve, ms));
  }
+
+ export interface IdColumnMap extends Record<string, string> {
+ /**
+ * Wildcard mapping for all tables.
+ */
+ "*": string;
+ }
+
+ export const getIdColumnForTable = (
+ tableName: string,
+ idColumn: IdColumnMap,
+ ): string => {
+ // If there's a specific mapping for this table, use it
+ if (idColumn[tableName]) {
+ return idColumn[tableName];
+ }
+ // Default fallback
+ return idColumn["*"];
+ };
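Given the definitions above, id-column resolution is a straightforward lookup with a wildcard fallback. A small illustration (the helper is internal to the package and imported from `./utils` by `storage.ts` and `index.ts`; the map values below are hypothetical):

```ts
import { getIdColumnForTable, type IdColumnMap } from "./utils";

const idColumnMap: IdColumnMap = {
  "*": "_id",                    // fallback for tables not listed explicitly
  transfers: "transaction_hash", // explicit per-table mapping
};

getIdColumnForTable("transfers", idColumnMap); // => "transaction_hash"
getIdColumnForTable("balances", idColumnMap);  // => "_id" (wildcard fallback)
```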