@apibara/plugin-drizzle 2.0.0-beta.35 → 2.0.0-beta.37
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +120 -62
- package/dist/index.d.cts +1 -1
- package/dist/index.d.mts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.mjs +121 -63
- package/package.json +3 -3
- package/src/index.ts +52 -19
- package/src/persistence.ts +16 -16
- package/src/storage.ts +69 -30
- package/src/utils.ts +4 -0
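The changes below share one theme: the plugin's bookkeeping (the `__reorg_rollback` audit table, its triggers, and the checkpoint/filter rows) is now keyed by a generated indexer id instead of a single shared name, so several indexers can safely share one database. As a rough usage sketch (the `node-postgres` setup and the schema module are assumptions; only the `drizzleStorage` options themselves appear in this diff):

import { drizzle } from "drizzle-orm/node-postgres";
import pg from "pg";
import { drizzleStorage } from "@apibara/plugin-drizzle";
import * as schema from "./schema"; // hypothetical application schema

const pool = new pg.Pool({ connectionString: process.env.DATABASE_URL });
const db = drizzle(pool, { schema });

// indexerName is combined with the indexer's file name (generateIndexerId)
// into the indexerId that scopes triggers, checkpoints, and rollback rows.
export const storagePlugin = drizzleStorage({
  db,
  indexerName: "token-transfers",
  schema,
  idColumn: "id",     // default, shown for clarity
  persistState: true, // default, shown for clarity
});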
package/dist/index.cjs
CHANGED

@@ -2,6 +2,8 @@
 
 const indexer = require('@apibara/indexer');
 const plugins = require('@apibara/indexer/plugins');
+const internal = require('@apibara/indexer/internal');
+const plugins$1 = require('@apibara/indexer/internal/plugins');
 const drizzleOrm = require('drizzle-orm');
 const pgCore = require('drizzle-orm/pg-core');
 
@@ -29,6 +31,9 @@ function serialize(obj) {
     " "
   );
 }
+function sleep(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
 
 const CHECKPOINTS_TABLE_NAME = "__indexer_checkpoints";
 const FILTERS_TABLE_NAME = "__indexer_filters";
@@ -117,11 +122,11 @@ async function initializePersistentState(tx) {
   }
 }
 async function persistState(props) {
-  const { tx, endCursor, filter,
+  const { tx, endCursor, filter, indexerId } = props;
   try {
     if (endCursor) {
       await tx.insert(checkpoints).values({
-        id:
+        id: indexerId,
         orderKey: Number(endCursor.orderKey),
         uniqueKey: endCursor.uniqueKey
       }).onConflictDoUpdate({
@@ -132,9 +137,9 @@ async function persistState(props) {
         }
       });
       if (filter) {
-        await tx.update(filters).set({ toBlock: Number(endCursor.orderKey) }).where(drizzleOrm.and(drizzleOrm.eq(filters.id,
+        await tx.update(filters).set({ toBlock: Number(endCursor.orderKey) }).where(drizzleOrm.and(drizzleOrm.eq(filters.id, indexerId), drizzleOrm.isNull(filters.toBlock)));
         await tx.insert(filters).values({
-          id:
+          id: indexerId,
           filter: serialize(filter),
           fromBlock: Number(endCursor.orderKey),
           toBlock: null
@@ -155,14 +160,14 @@ async function persistState(props) {
   }
 }
 async function getState(props) {
-  const { tx,
+  const { tx, indexerId } = props;
   try {
-    const checkpointRows = await tx.select().from(checkpoints).where(drizzleOrm.eq(checkpoints.id,
+    const checkpointRows = await tx.select().from(checkpoints).where(drizzleOrm.eq(checkpoints.id, indexerId));
     const cursor = checkpointRows[0] ? {
       orderKey: BigInt(checkpointRows[0].orderKey),
       uniqueKey: checkpointRows[0].uniqueKey
     } : void 0;
-    const filterRows = await tx.select().from(filters).where(drizzleOrm.and(drizzleOrm.eq(filters.id,
+    const filterRows = await tx.select().from(filters).where(drizzleOrm.and(drizzleOrm.eq(filters.id, indexerId), drizzleOrm.isNull(filters.toBlock)));
     const filter = filterRows[0] ? deserialize(filterRows[0].filter) : void 0;
     return { cursor, filter };
   } catch (error) {
@@ -172,17 +177,17 @@ async function getState(props) {
   }
 }
 async function invalidateState(props) {
-  const { tx, cursor,
+  const { tx, cursor, indexerId } = props;
   try {
     await tx.delete(filters).where(
       drizzleOrm.and(
-        drizzleOrm.eq(filters.id,
+        drizzleOrm.eq(filters.id, indexerId),
         drizzleOrm.gt(filters.fromBlock, Number(cursor.orderKey))
       )
     );
     await tx.update(filters).set({ toBlock: null }).where(
       drizzleOrm.and(
-        drizzleOrm.eq(filters.id,
+        drizzleOrm.eq(filters.id, indexerId),
         drizzleOrm.gt(filters.toBlock, Number(cursor.orderKey))
       )
     );
@@ -193,11 +198,11 @@ async function invalidateState(props) {
   }
 }
 async function finalizeState(props) {
-  const { tx, cursor,
+  const { tx, cursor, indexerId } = props;
   try {
     await tx.delete(filters).where(
       drizzleOrm.and(
-        drizzleOrm.eq(filters.id,
+        drizzleOrm.eq(filters.id, indexerId),
         drizzleOrm.lt(filters.toBlock, Number(cursor.orderKey))
       )
     );
@@ -208,15 +213,19 @@ async function finalizeState(props) {
   }
 }
 
+function getReorgTriggerName(table, indexerId) {
+  return `${table}_reorg_${indexerId}`;
+}
 pgCore.pgTable("__reorg_rollback", {
   n: pgCore.serial("n").primaryKey(),
   op: pgCore.char("op", { length: 1 }).$type().notNull(),
   table_name: pgCore.text("table_name").notNull(),
   cursor: pgCore.integer("cursor").notNull(),
   row_id: pgCore.text("row_id"),
-  row_value: pgCore.jsonb("row_value")
+  row_value: pgCore.jsonb("row_value"),
+  indexer_id: pgCore.text("indexer_id").notNull()
 });
-async function initializeReorgRollbackTable(tx) {
+async function initializeReorgRollbackTable(tx, indexerId) {
   try {
     await tx.execute(
       drizzleOrm.sql.raw(`
@@ -226,33 +235,14 @@ async function initializeReorgRollbackTable(tx) {
         table_name TEXT NOT NULL,
         cursor INTEGER NOT NULL,
         row_id TEXT,
-        row_value JSONB
+        row_value JSONB,
+        indexer_id TEXT NOT NULL
       );
     `)
     );
     await tx.execute(
       drizzleOrm.sql.raw(`
-      CREATE
-      RETURNS TRIGGER AS $$
-      DECLARE
-        id_col TEXT := TG_ARGV[0]::TEXT;
-        order_key INTEGER := TG_ARGV[1]::INTEGER;
-        new_id_value TEXT := row_to_json(NEW.*)->>id_col;
-        old_id_value TEXT := row_to_json(OLD.*)->>id_col;
-      BEGIN
-        IF (TG_OP = 'DELETE') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'D', TG_TABLE_NAME, order_key, old_id_value, row_to_json(OLD.*);
-        ELSIF (TG_OP = 'UPDATE') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'U', TG_TABLE_NAME, order_key, new_id_value, row_to_json(OLD.*);
-        ELSIF (TG_OP = 'INSERT') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'I', TG_TABLE_NAME, order_key, new_id_value, null;
-        END IF;
-        RETURN NULL;
-      END;
-      $$ LANGUAGE plpgsql;
+      CREATE INDEX IF NOT EXISTS idx_reorg_rollback_indexer_id_cursor ON __reorg_rollback(indexer_id, cursor);
     `)
     );
   } catch (error) {
@@ -260,19 +250,56 @@ async function initializeReorgRollbackTable(tx) {
       cause: error
     });
   }
+  try {
+    await tx.execute(
+      drizzleOrm.sql.raw(`
+      CREATE OR REPLACE FUNCTION reorg_checkpoint()
+      RETURNS TRIGGER AS $$
+      DECLARE
+        id_col TEXT := TG_ARGV[0]::TEXT;
+        order_key INTEGER := TG_ARGV[1]::INTEGER;
+        indexer_id TEXT := TG_ARGV[2]::TEXT;
+        new_id_value TEXT := row_to_json(NEW.*)->>id_col;
+        old_id_value TEXT := row_to_json(OLD.*)->>id_col;
+      BEGIN
+        IF (TG_OP = 'DELETE') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'D', TG_TABLE_NAME, order_key, old_id_value, row_to_json(OLD.*), indexer_id;
+        ELSIF (TG_OP = 'UPDATE') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'U', TG_TABLE_NAME, order_key, new_id_value, row_to_json(OLD.*), indexer_id;
+        ELSIF (TG_OP = 'INSERT') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'I', TG_TABLE_NAME, order_key, new_id_value, null, indexer_id;
+        END IF;
+        RETURN NULL;
+      END;
+      $$ LANGUAGE plpgsql;
+    `)
+    );
+  } catch (error) {
+    throw new DrizzleStorageError(
+      "Failed to create reorg checkpoint function",
+      {
+        cause: error
+      }
+    );
+  }
 }
-async function registerTriggers(tx, tables, endCursor, idColumn) {
+async function registerTriggers(tx, tables, endCursor, idColumn, indexerId) {
   try {
     for (const table of tables) {
       await tx.execute(
-        drizzleOrm.sql.raw(
+        drizzleOrm.sql.raw(
+          `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`
+        )
       );
       await tx.execute(
         drizzleOrm.sql.raw(`
-        CREATE CONSTRAINT TRIGGER ${table}
+        CREATE CONSTRAINT TRIGGER ${getReorgTriggerName(table, indexerId)}
         AFTER INSERT OR UPDATE OR DELETE ON ${table}
         DEFERRABLE INITIALLY DEFERRED
-        FOR EACH ROW EXECUTE FUNCTION reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`});
+        FOR EACH ROW EXECUTE FUNCTION reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`}, '${indexerId}');
       `)
       );
     }
@@ -282,11 +309,13 @@ async function registerTriggers(tx, tables, endCursor, idColumn) {
     });
   }
 }
-async function removeTriggers(db, tables) {
+async function removeTriggers(db, tables, indexerId) {
   try {
     for (const table of tables) {
       await db.execute(
-        drizzleOrm.sql.raw(
+        drizzleOrm.sql.raw(
+          `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`
+        )
      );
    }
  } catch (error) {
@@ -295,12 +324,13 @@ async function removeTriggers(db, tables) {
    });
  }
 }
-async function invalidate(tx, cursor, idColumn) {
+async function invalidate(tx, cursor, idColumn, indexerId) {
   const { rows: result } = await tx.execute(
     drizzleOrm.sql.raw(`
     WITH deleted AS (
       DELETE FROM __reorg_rollback
       WHERE cursor > ${Number(cursor.orderKey)}
+      AND indexer_id = '${indexerId}'
       RETURNING *
     )
     SELECT * FROM deleted ORDER BY n DESC;
@@ -387,12 +417,13 @@ async function invalidate(tx, cursor, idColumn) {
     }
   }
 }
-async function finalize(tx, cursor) {
+async function finalize(tx, cursor, indexerId) {
   try {
     await tx.execute(
       drizzleOrm.sql.raw(`
       DELETE FROM __reorg_rollback
       WHERE cursor <= ${Number(cursor.orderKey)}
+      AND indexer_id = '${indexerId}'
     `)
     );
   } catch (error) {
@@ -403,6 +434,7 @@ async function finalize(tx, cursor) {
 }
 
 const DRIZZLE_PROPERTY = "_drizzle";
+const MAX_RETRIES = 5;
 function useDrizzleStorage(_db) {
   const context = indexer.useIndexerContext();
   if (!context[DRIZZLE_PROPERTY]) {
@@ -415,12 +447,13 @@ function useDrizzleStorage(_db) {
 function drizzleStorage({
   db,
   persistState: enablePersistence = true,
-  indexerName = "default",
+  indexerName: identifier = "default",
   schema,
   idColumn = "id"
 }) {
   return plugins.defineIndexerPlugin((indexer) => {
     let tableNames = [];
+    let indexerId = "";
     try {
       tableNames = Object.values(schema ?? db._.schema ?? {}).map(
         (table) => table.dbName
@@ -431,12 +464,31 @@ function drizzleStorage({
       });
     }
     indexer.hooks.hook("run:before", async () => {
-
-
-
-
+      const { indexerName: indexerFileName, availableIndexers } = plugins$1.useInternalContext();
+      indexerId = internal.generateIndexerId(indexerFileName, identifier);
+      let retries = 0;
+      while (retries <= MAX_RETRIES) {
+        try {
+          await withTransaction(db, async (tx) => {
+            await initializeReorgRollbackTable(tx, indexerId);
+            if (enablePersistence) {
+              await initializePersistentState(tx);
+            }
+          });
+          break;
+        } catch (error) {
+          if (retries === MAX_RETRIES) {
+            throw new DrizzleStorageError(
+              "Initialization failed after 5 retries",
+              {
+                cause: error
+              }
+            );
+          }
+          await sleep(retries * 1e3);
+          retries++;
         }
-      }
+      }
     });
     indexer.hooks.hook("connect:before", async ({ request }) => {
       if (!enablePersistence) {
@@ -445,7 +497,7 @@ function drizzleStorage({
       await withTransaction(db, async (tx) => {
         const { cursor, filter } = await getState({
           tx,
-
+          indexerId
         });
         if (cursor) {
           request.startingCursor = cursor;
@@ -461,9 +513,9 @@ function drizzleStorage({
         return;
       }
       await withTransaction(db, async (tx) => {
-        await invalidate(tx, cursor, idColumn);
+        await invalidate(tx, cursor, idColumn, indexerId);
         if (enablePersistence) {
-          await invalidateState({ tx, cursor,
+          await invalidateState({ tx, cursor, indexerId });
         }
       });
     });
@@ -477,7 +529,7 @@ function drizzleStorage({
           tx,
           endCursor,
           filter: request.filter[1],
-
+          indexerId
         });
       }
     });
@@ -487,9 +539,9 @@ function drizzleStorage({
         throw new DrizzleStorageError("Finalized Cursor is undefined");
       }
       await withTransaction(db, async (tx) => {
-        await finalize(tx, cursor);
+        await finalize(tx, cursor, indexerId);
         if (enablePersistence) {
-          await finalizeState({ tx, cursor,
+          await finalizeState({ tx, cursor, indexerId });
         }
       });
     });
@@ -499,9 +551,9 @@ function drizzleStorage({
         throw new DrizzleStorageError("Invalidate Cursor is undefined");
       }
       await withTransaction(db, async (tx) => {
-        await invalidate(tx, cursor, idColumn);
+        await invalidate(tx, cursor, idColumn, indexerId);
         if (enablePersistence) {
-          await invalidateState({ tx, cursor,
+          await invalidateState({ tx, cursor, indexerId });
         }
       });
     });
@@ -515,7 +567,13 @@ function drizzleStorage({
       await withTransaction(db, async (tx) => {
         context[DRIZZLE_PROPERTY] = { db: tx };
         if (finality !== "finalized") {
-          await registerTriggers(
+          await registerTriggers(
+            tx,
+            tableNames,
+            endCursor,
+            idColumn,
+            indexerId
+          );
         }
         await next();
         delete context[DRIZZLE_PROPERTY];
@@ -523,15 +581,15 @@ function drizzleStorage({
           await persistState({
             tx,
            endCursor,
-
+            indexerId
          });
        }
      });
      if (finality !== "finalized") {
-        await removeTriggers(db, tableNames);
+        await removeTriggers(db, tableNames, indexerId);
      }
    } catch (error) {
-      await removeTriggers(db, tableNames);
+      await removeTriggers(db, tableNames, indexerId);
      throw new DrizzleStorageError("Failed to run handler:middleware", {
        cause: error
      });

package/dist/index.d.cts
CHANGED

@@ -23,6 +23,6 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  * @param options.schema - The schema of the database.
  * @param options.idColumn - The column to use as the id. Defaults to 'id'.
  */
-declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName, schema, idColumn, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
+declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema, idColumn, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
 
 export { type DrizzleStorage, type DrizzleStorageOptions, drizzleStorage, useDrizzleStorage };

package/dist/index.d.mts
CHANGED

@@ -23,6 +23,6 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  * @param options.schema - The schema of the database.
  * @param options.idColumn - The column to use as the id. Defaults to 'id'.
  */
-declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName, schema, idColumn, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
+declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema, idColumn, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
 
 export { type DrizzleStorage, type DrizzleStorageOptions, drizzleStorage, useDrizzleStorage };

package/dist/index.d.ts
CHANGED

@@ -23,6 +23,6 @@ interface DrizzleStorageOptions<TQueryResult extends PgQueryResultHKT, TFullSche
  * @param options.schema - The schema of the database.
  * @param options.idColumn - The column to use as the id. Defaults to 'id'.
  */
-declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName, schema, idColumn, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
+declare function drizzleStorage<TFilter, TBlock, TQueryResult extends PgQueryResultHKT, TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>>({ db, persistState: enablePersistence, indexerName: identifier, schema, idColumn, }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>): _apibara_indexer_plugins.IndexerPlugin<TFilter, TBlock>;
 
 export { type DrizzleStorage, type DrizzleStorageOptions, drizzleStorage, useDrizzleStorage };

package/dist/index.mjs
CHANGED

@@ -1,6 +1,8 @@
 import { useIndexerContext } from '@apibara/indexer';
 import { defineIndexerPlugin } from '@apibara/indexer/plugins';
-import {
+import { generateIndexerId } from '@apibara/indexer/internal';
+import { useInternalContext } from '@apibara/indexer/internal/plugins';
+import { eq, and, isNull, gt, lt, sql } from 'drizzle-orm';
 import { pgTable, text, integer, primaryKey, serial, char, jsonb } from 'drizzle-orm/pg-core';
 
 class DrizzleStorageError extends Error {
@@ -27,6 +29,9 @@ function serialize(obj) {
     " "
   );
 }
+function sleep(ms) {
+  return new Promise((resolve) => setTimeout(resolve, ms));
+}
 
 const CHECKPOINTS_TABLE_NAME = "__indexer_checkpoints";
 const FILTERS_TABLE_NAME = "__indexer_filters";
@@ -115,11 +120,11 @@ async function initializePersistentState(tx) {
   }
 }
 async function persistState(props) {
-  const { tx, endCursor, filter,
+  const { tx, endCursor, filter, indexerId } = props;
   try {
     if (endCursor) {
       await tx.insert(checkpoints).values({
-        id:
+        id: indexerId,
         orderKey: Number(endCursor.orderKey),
         uniqueKey: endCursor.uniqueKey
       }).onConflictDoUpdate({
@@ -130,9 +135,9 @@ async function persistState(props) {
         }
       });
       if (filter) {
-        await tx.update(filters).set({ toBlock: Number(endCursor.orderKey) }).where(and(eq(filters.id,
+        await tx.update(filters).set({ toBlock: Number(endCursor.orderKey) }).where(and(eq(filters.id, indexerId), isNull(filters.toBlock)));
         await tx.insert(filters).values({
-          id:
+          id: indexerId,
           filter: serialize(filter),
           fromBlock: Number(endCursor.orderKey),
           toBlock: null
@@ -153,14 +158,14 @@ async function persistState(props) {
   }
 }
 async function getState(props) {
-  const { tx,
+  const { tx, indexerId } = props;
   try {
-    const checkpointRows = await tx.select().from(checkpoints).where(eq(checkpoints.id,
+    const checkpointRows = await tx.select().from(checkpoints).where(eq(checkpoints.id, indexerId));
     const cursor = checkpointRows[0] ? {
       orderKey: BigInt(checkpointRows[0].orderKey),
       uniqueKey: checkpointRows[0].uniqueKey
     } : void 0;
-    const filterRows = await tx.select().from(filters).where(and(eq(filters.id,
+    const filterRows = await tx.select().from(filters).where(and(eq(filters.id, indexerId), isNull(filters.toBlock)));
     const filter = filterRows[0] ? deserialize(filterRows[0].filter) : void 0;
     return { cursor, filter };
   } catch (error) {
@@ -170,17 +175,17 @@ async function getState(props) {
   }
 }
 async function invalidateState(props) {
-  const { tx, cursor,
+  const { tx, cursor, indexerId } = props;
   try {
     await tx.delete(filters).where(
       and(
-        eq(filters.id,
+        eq(filters.id, indexerId),
         gt(filters.fromBlock, Number(cursor.orderKey))
       )
     );
     await tx.update(filters).set({ toBlock: null }).where(
       and(
-        eq(filters.id,
+        eq(filters.id, indexerId),
         gt(filters.toBlock, Number(cursor.orderKey))
       )
     );
@@ -191,11 +196,11 @@ async function invalidateState(props) {
   }
 }
 async function finalizeState(props) {
-  const { tx, cursor,
+  const { tx, cursor, indexerId } = props;
   try {
     await tx.delete(filters).where(
       and(
-        eq(filters.id,
+        eq(filters.id, indexerId),
         lt(filters.toBlock, Number(cursor.orderKey))
       )
     );
@@ -206,15 +211,19 @@ async function finalizeState(props) {
   }
 }
 
+function getReorgTriggerName(table, indexerId) {
+  return `${table}_reorg_${indexerId}`;
+}
 pgTable("__reorg_rollback", {
   n: serial("n").primaryKey(),
   op: char("op", { length: 1 }).$type().notNull(),
   table_name: text("table_name").notNull(),
   cursor: integer("cursor").notNull(),
   row_id: text("row_id"),
-  row_value: jsonb("row_value")
+  row_value: jsonb("row_value"),
+  indexer_id: text("indexer_id").notNull()
 });
-async function initializeReorgRollbackTable(tx) {
+async function initializeReorgRollbackTable(tx, indexerId) {
   try {
     await tx.execute(
       sql.raw(`
@@ -224,33 +233,14 @@ async function initializeReorgRollbackTable(tx) {
         table_name TEXT NOT NULL,
         cursor INTEGER NOT NULL,
         row_id TEXT,
-        row_value JSONB
+        row_value JSONB,
+        indexer_id TEXT NOT NULL
       );
     `)
     );
     await tx.execute(
       sql.raw(`
-      CREATE
-      RETURNS TRIGGER AS $$
-      DECLARE
-        id_col TEXT := TG_ARGV[0]::TEXT;
-        order_key INTEGER := TG_ARGV[1]::INTEGER;
-        new_id_value TEXT := row_to_json(NEW.*)->>id_col;
-        old_id_value TEXT := row_to_json(OLD.*)->>id_col;
-      BEGIN
-        IF (TG_OP = 'DELETE') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'D', TG_TABLE_NAME, order_key, old_id_value, row_to_json(OLD.*);
-        ELSIF (TG_OP = 'UPDATE') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'U', TG_TABLE_NAME, order_key, new_id_value, row_to_json(OLD.*);
-        ELSIF (TG_OP = 'INSERT') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'I', TG_TABLE_NAME, order_key, new_id_value, null;
-        END IF;
-        RETURN NULL;
-      END;
-      $$ LANGUAGE plpgsql;
+      CREATE INDEX IF NOT EXISTS idx_reorg_rollback_indexer_id_cursor ON __reorg_rollback(indexer_id, cursor);
     `)
     );
   } catch (error) {
@@ -258,19 +248,56 @@ async function initializeReorgRollbackTable(tx) {
       cause: error
     });
   }
+  try {
+    await tx.execute(
+      sql.raw(`
+      CREATE OR REPLACE FUNCTION reorg_checkpoint()
+      RETURNS TRIGGER AS $$
+      DECLARE
+        id_col TEXT := TG_ARGV[0]::TEXT;
+        order_key INTEGER := TG_ARGV[1]::INTEGER;
+        indexer_id TEXT := TG_ARGV[2]::TEXT;
+        new_id_value TEXT := row_to_json(NEW.*)->>id_col;
+        old_id_value TEXT := row_to_json(OLD.*)->>id_col;
+      BEGIN
+        IF (TG_OP = 'DELETE') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'D', TG_TABLE_NAME, order_key, old_id_value, row_to_json(OLD.*), indexer_id;
+        ELSIF (TG_OP = 'UPDATE') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'U', TG_TABLE_NAME, order_key, new_id_value, row_to_json(OLD.*), indexer_id;
+        ELSIF (TG_OP = 'INSERT') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'I', TG_TABLE_NAME, order_key, new_id_value, null, indexer_id;
+        END IF;
+        RETURN NULL;
+      END;
+      $$ LANGUAGE plpgsql;
+    `)
+    );
+  } catch (error) {
+    throw new DrizzleStorageError(
+      "Failed to create reorg checkpoint function",
+      {
+        cause: error
+      }
+    );
+  }
 }
-async function registerTriggers(tx, tables, endCursor, idColumn) {
+async function registerTriggers(tx, tables, endCursor, idColumn, indexerId) {
   try {
     for (const table of tables) {
       await tx.execute(
-        sql.raw(
+        sql.raw(
+          `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`
+        )
       );
       await tx.execute(
         sql.raw(`
-        CREATE CONSTRAINT TRIGGER ${table}
+        CREATE CONSTRAINT TRIGGER ${getReorgTriggerName(table, indexerId)}
         AFTER INSERT OR UPDATE OR DELETE ON ${table}
         DEFERRABLE INITIALLY DEFERRED
-        FOR EACH ROW EXECUTE FUNCTION reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`});
+        FOR EACH ROW EXECUTE FUNCTION reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`}, '${indexerId}');
       `)
       );
     }
@@ -280,11 +307,13 @@ async function registerTriggers(tx, tables, endCursor, idColumn) {
     });
   }
 }
-async function removeTriggers(db, tables) {
+async function removeTriggers(db, tables, indexerId) {
   try {
     for (const table of tables) {
       await db.execute(
-        sql.raw(
+        sql.raw(
+          `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`
+        )
       );
     }
   } catch (error) {
@@ -293,12 +322,13 @@ async function removeTriggers(db, tables) {
     });
   }
 }
-async function invalidate(tx, cursor, idColumn) {
+async function invalidate(tx, cursor, idColumn, indexerId) {
   const { rows: result } = await tx.execute(
     sql.raw(`
     WITH deleted AS (
       DELETE FROM __reorg_rollback
       WHERE cursor > ${Number(cursor.orderKey)}
+      AND indexer_id = '${indexerId}'
       RETURNING *
     )
     SELECT * FROM deleted ORDER BY n DESC;
@@ -385,12 +415,13 @@ async function invalidate(tx, cursor, idColumn) {
     }
   }
 }
-async function finalize(tx, cursor) {
+async function finalize(tx, cursor, indexerId) {
   try {
     await tx.execute(
       sql.raw(`
       DELETE FROM __reorg_rollback
       WHERE cursor <= ${Number(cursor.orderKey)}
+      AND indexer_id = '${indexerId}'
     `)
     );
   } catch (error) {
@@ -401,6 +432,7 @@ async function finalize(tx, cursor) {
 }
 
 const DRIZZLE_PROPERTY = "_drizzle";
+const MAX_RETRIES = 5;
 function useDrizzleStorage(_db) {
   const context = useIndexerContext();
   if (!context[DRIZZLE_PROPERTY]) {
@@ -413,12 +445,13 @@ function useDrizzleStorage(_db) {
 function drizzleStorage({
   db,
   persistState: enablePersistence = true,
-  indexerName = "default",
+  indexerName: identifier = "default",
   schema,
   idColumn = "id"
 }) {
   return defineIndexerPlugin((indexer) => {
     let tableNames = [];
+    let indexerId = "";
     try {
       tableNames = Object.values(schema ?? db._.schema ?? {}).map(
         (table) => table.dbName
@@ -429,12 +462,31 @@ function drizzleStorage({
       });
     }
     indexer.hooks.hook("run:before", async () => {
-
-
-
-
+      const { indexerName: indexerFileName, availableIndexers } = useInternalContext();
+      indexerId = generateIndexerId(indexerFileName, identifier);
+      let retries = 0;
+      while (retries <= MAX_RETRIES) {
+        try {
+          await withTransaction(db, async (tx) => {
+            await initializeReorgRollbackTable(tx, indexerId);
+            if (enablePersistence) {
+              await initializePersistentState(tx);
+            }
+          });
+          break;
+        } catch (error) {
+          if (retries === MAX_RETRIES) {
+            throw new DrizzleStorageError(
+              "Initialization failed after 5 retries",
+              {
+                cause: error
+              }
+            );
+          }
+          await sleep(retries * 1e3);
+          retries++;
         }
-      }
+      }
     });
     indexer.hooks.hook("connect:before", async ({ request }) => {
       if (!enablePersistence) {
@@ -443,7 +495,7 @@ function drizzleStorage({
       await withTransaction(db, async (tx) => {
         const { cursor, filter } = await getState({
           tx,
-
+          indexerId
         });
         if (cursor) {
           request.startingCursor = cursor;
@@ -459,9 +511,9 @@ function drizzleStorage({
         return;
       }
      await withTransaction(db, async (tx) => {
-        await invalidate(tx, cursor, idColumn);
+        await invalidate(tx, cursor, idColumn, indexerId);
        if (enablePersistence) {
-          await invalidateState({ tx, cursor,
+          await invalidateState({ tx, cursor, indexerId });
        }
      });
    });
@@ -475,7 +527,7 @@ function drizzleStorage({
          tx,
          endCursor,
          filter: request.filter[1],
-
+          indexerId
        });
      }
    });
@@ -485,9 +537,9 @@ function drizzleStorage({
        throw new DrizzleStorageError("Finalized Cursor is undefined");
      }
      await withTransaction(db, async (tx) => {
-        await finalize(tx, cursor);
+        await finalize(tx, cursor, indexerId);
        if (enablePersistence) {
-          await finalizeState({ tx, cursor,
+          await finalizeState({ tx, cursor, indexerId });
        }
      });
    });
@@ -497,9 +549,9 @@ function drizzleStorage({
        throw new DrizzleStorageError("Invalidate Cursor is undefined");
      }
      await withTransaction(db, async (tx) => {
-        await invalidate(tx, cursor, idColumn);
+        await invalidate(tx, cursor, idColumn, indexerId);
        if (enablePersistence) {
-          await invalidateState({ tx, cursor,
+          await invalidateState({ tx, cursor, indexerId });
        }
      });
    });
@@ -513,7 +565,13 @@ function drizzleStorage({
      await withTransaction(db, async (tx) => {
        context[DRIZZLE_PROPERTY] = { db: tx };
        if (finality !== "finalized") {
-          await registerTriggers(
+          await registerTriggers(
+            tx,
+            tableNames,
+            endCursor,
+            idColumn,
+            indexerId
+          );
        }
        await next();
        delete context[DRIZZLE_PROPERTY];
@@ -521,15 +579,15 @@ function drizzleStorage({
          await persistState({
            tx,
            endCursor,
-
+            indexerId
          });
        }
      });
      if (finality !== "finalized") {
-        await removeTriggers(db, tableNames);
+        await removeTriggers(db, tableNames, indexerId);
      }
    } catch (error) {
-      await removeTriggers(db, tableNames);
+      await removeTriggers(db, tableNames, indexerId);
      throw new DrizzleStorageError("Failed to run handler:middleware", {
        cause: error
      });
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@apibara/plugin-drizzle",
-  "version": "2.0.0-beta.
+  "version": "2.0.0-beta.37",
   "type": "module",
   "files": [
     "dist",
@@ -39,8 +39,8 @@
     "vitest": "^1.6.0"
   },
   "dependencies": {
-    "@apibara/indexer": "2.0.0-beta.
-    "@apibara/protocol": "2.0.0-beta.
+    "@apibara/indexer": "2.0.0-beta.38",
+    "@apibara/protocol": "2.0.0-beta.38",
     "postgres-range": "^1.1.4"
   }
 }

package/src/index.ts
CHANGED

@@ -6,6 +6,8 @@ import type {
   TablesRelationalConfig,
 } from "drizzle-orm";
 
+import { generateIndexerId } from "@apibara/indexer/internal";
+import { useInternalContext } from "@apibara/indexer/internal/plugins";
 import type { Cursor, DataFinality } from "@apibara/protocol";
 import type {
   PgDatabase,
@@ -26,9 +28,10 @@ import {
   registerTriggers,
   removeTriggers,
 } from "./storage";
-import { DrizzleStorageError, withTransaction } from "./utils";
+import { DrizzleStorageError, sleep, withTransaction } from "./utils";
 
 const DRIZZLE_PROPERTY = "_drizzle";
+const MAX_RETRIES = 5;
 
 export type DrizzleStorage<
   TQueryResult extends PgQueryResultHKT,
@@ -91,12 +94,13 @@ export function drizzleStorage<
 >({
   db,
   persistState: enablePersistence = true,
-  indexerName = "default",
+  indexerName: identifier = "default",
   schema,
   idColumn = "id",
 }: DrizzleStorageOptions<TQueryResult, TFullSchema, TSchema>) {
   return defineIndexerPlugin<TFilter, TBlock>((indexer) => {
     let tableNames: string[] = [];
+    let indexerId = "";
 
     try {
       tableNames = Object.values((schema as TSchema) ?? db._.schema ?? {}).map(
@@ -109,12 +113,35 @@ export function drizzleStorage<
     }
 
     indexer.hooks.hook("run:before", async () => {
-
-
-
-
+      const { indexerName: indexerFileName, availableIndexers } =
+        useInternalContext();
+
+      indexerId = generateIndexerId(indexerFileName, identifier);
+
+      let retries = 0;
+
+      while (retries <= MAX_RETRIES) {
+        try {
+          await withTransaction(db, async (tx) => {
+            await initializeReorgRollbackTable(tx, indexerId);
+            if (enablePersistence) {
+              await initializePersistentState(tx);
+            }
+          });
+          break;
+        } catch (error) {
+          if (retries === MAX_RETRIES) {
+            throw new DrizzleStorageError(
+              "Initialization failed after 5 retries",
+              {
+                cause: error,
+              },
+            );
+          }
+          await sleep(retries * 1000);
+          retries++;
         }
-      }
+      }
     });
 
     indexer.hooks.hook("connect:before", async ({ request }) => {
@@ -130,7 +157,7 @@ export function drizzleStorage<
         TSchema
       >({
         tx,
-
+        indexerId,
       });
       if (cursor) {
         request.startingCursor = cursor;
@@ -150,10 +177,10 @@ export function drizzleStorage<
       }
 
       await withTransaction(db, async (tx) => {
-        await invalidate(tx, cursor, idColumn);
+        await invalidate(tx, cursor, idColumn, indexerId);
 
         if (enablePersistence) {
-          await invalidateState({ tx, cursor,
+          await invalidateState({ tx, cursor, indexerId });
         }
       });
     });
@@ -171,7 +198,7 @@ export function drizzleStorage<
         tx,
         endCursor,
         filter: request.filter[1],
-
+        indexerId,
       });
       }
     });
@@ -184,10 +211,10 @@ export function drizzleStorage<
       }
 
       await withTransaction(db, async (tx) => {
-        await finalize(tx, cursor);
+        await finalize(tx, cursor, indexerId);
 
         if (enablePersistence) {
-          await finalizeState({ tx, cursor,
+          await finalizeState({ tx, cursor, indexerId });
         }
       });
     });
@@ -200,10 +227,10 @@ export function drizzleStorage<
       }
 
       await withTransaction(db, async (tx) => {
-        await invalidate(tx, cursor, idColumn);
+        await invalidate(tx, cursor, idColumn, indexerId);
 
         if (enablePersistence) {
-          await invalidateState({ tx, cursor,
+          await invalidateState({ tx, cursor, indexerId });
        }
      });
    });
@@ -228,7 +255,13 @@ export function drizzleStorage<
      >;
 
      if (finality !== "finalized") {
-        await registerTriggers(
+        await registerTriggers(
+          tx,
+          tableNames,
+          endCursor,
+          idColumn,
+          indexerId,
+        );
      }
 
      await next();
@@ -238,17 +271,17 @@ export function drizzleStorage<
        await persistState({
          tx,
          endCursor,
-
+          indexerId,
        });
      }
    });
 
    if (finality !== "finalized") {
      // remove trigger outside of the transaction or it won't be triggered.
-      await removeTriggers(db, tableNames);
+      await removeTriggers(db, tableNames, indexerId);
    }
  } catch (error) {
-    await removeTriggers(db, tableNames);
+    await removeTriggers(db, tableNames, indexerId);
 
    throw new DrizzleStorageError("Failed to run handler:middleware", {
      cause: error,
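The new `run:before` hook above retries initialization with a linear backoff: up to `MAX_RETRIES` extra attempts, sleeping `retries * 1000` ms between them, and rethrowing the last error as the cause once the budget is exhausted. The same pattern in isolation (the `initialize` callback is a placeholder for illustration, not an API of this package):

const MAX_RETRIES = 5;

function sleep(ms: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, ms));
}

// Linear backoff: delays of 0s, 1s, 2s, 3s, 4s between attempts, then the
// sixth failing attempt (retries === MAX_RETRIES) propagates the error.
async function withRetries(initialize: () => Promise<void>): Promise<void> {
  let retries = 0;
  while (retries <= MAX_RETRIES) {
    try {
      await initialize();
      return;
    } catch (error) {
      if (retries === MAX_RETRIES) {
        throw new Error("Initialization failed after 5 retries", { cause: error });
      }
      await sleep(retries * 1000);
      retries++;
    }
  }
}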
package/src/persistence.ts
CHANGED

@@ -141,16 +141,16 @@ export async function persistState<
   tx: PgTransaction<TQueryResult, TFullSchema, TSchema>;
   endCursor: Cursor;
   filter?: TFilter;
-
+  indexerId: string;
 }) {
-  const { tx, endCursor, filter,
+  const { tx, endCursor, filter, indexerId } = props;
 
   try {
     if (endCursor) {
       await tx
         .insert(checkpoints)
         .values({
-          id:
+          id: indexerId,
           orderKey: Number(endCursor.orderKey),
           uniqueKey: endCursor.uniqueKey,
         })
@@ -166,12 +166,12 @@ export async function persistState<
       await tx
         .update(filters)
         .set({ toBlock: Number(endCursor.orderKey) })
-        .where(and(eq(filters.id,
+        .where(and(eq(filters.id, indexerId), isNull(filters.toBlock)));
 
       await tx
         .insert(filters)
         .values({
-          id:
+          id: indexerId,
           filter: serialize(filter),
           fromBlock: Number(endCursor.orderKey),
           toBlock: null,
@@ -201,15 +201,15 @@ export async function getState<
     TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>,
 >(props: {
   tx: PgTransaction<TQueryResult, TFullSchema, TSchema>;
-
+  indexerId: string;
 }): Promise<{ cursor?: Cursor; filter?: TFilter }> {
-  const { tx,
+  const { tx, indexerId } = props;
 
   try {
     const checkpointRows = await tx
       .select()
       .from(checkpoints)
-      .where(eq(checkpoints.id,
+      .where(eq(checkpoints.id, indexerId));
 
     const cursor = checkpointRows[0]
       ? {
@@ -221,7 +221,7 @@ export async function getState<
     const filterRows = await tx
       .select()
       .from(filters)
-      .where(and(eq(filters.id,
+      .where(and(eq(filters.id, indexerId), isNull(filters.toBlock)));
 
     const filter = filterRows[0]
       ? deserialize<TFilter>(filterRows[0].filter)
@@ -243,16 +243,16 @@ export async function invalidateState<
 >(props: {
   tx: PgTransaction<TQueryResult, TFullSchema, TSchema>;
   cursor: Cursor;
-
+  indexerId: string;
 }) {
-  const { tx, cursor,
+  const { tx, cursor, indexerId } = props;
 
   try {
     await tx
       .delete(filters)
       .where(
         and(
-          eq(filters.id,
+          eq(filters.id, indexerId),
           gt(filters.fromBlock, Number(cursor.orderKey)),
         ),
       );
@@ -262,7 +262,7 @@ export async function invalidateState<
       .set({ toBlock: null })
       .where(
         and(
-          eq(filters.id,
+          eq(filters.id, indexerId),
           gt(filters.toBlock, Number(cursor.orderKey)),
         ),
       );
@@ -281,16 +281,16 @@ export async function finalizeState<
 >(props: {
   tx: PgTransaction<TQueryResult, TFullSchema, TSchema>;
   cursor: Cursor;
-
+  indexerId: string;
 }) {
-  const { tx, cursor,
+  const { tx, cursor, indexerId } = props;
 
   try {
     await tx
       .delete(filters)
      .where(
        and(
-          eq(filters.id,
+          eq(filters.id, indexerId),
          lt(filters.toBlock, Number(cursor.orderKey)),
        ),
      );
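The persistence tables keep their shape; only the `id` column now carries the generated indexer id. A standalone sketch of the filter rotation that `persistState` performs (the drizzle property names `id`, `filter`, `fromBlock`, `toBlock` match this diff; the table definition and underlying column names below are assumptions, not the plugin's actual internals):

import { and, eq, isNull } from "drizzle-orm";
import { integer, pgTable, text } from "drizzle-orm/pg-core";
import type { NodePgDatabase } from "drizzle-orm/node-postgres";

// Stand-in for the plugin's internal __indexer_filters table.
const filters = pgTable("__indexer_filters", {
  id: text("id").notNull(),
  filter: text("filter").notNull(),
  fromBlock: integer("from_block").notNull(),
  toBlock: integer("to_block"),
});

// Close the currently-live filter row for this indexer (toBlock IS NULL),
// then insert the new one starting at the given block.
async function rotateFilter(
  db: NodePgDatabase,
  indexerId: string,
  serializedFilter: string,
  orderKey: number,
) {
  await db
    .update(filters)
    .set({ toBlock: orderKey })
    .where(and(eq(filters.id, indexerId), isNull(filters.toBlock)));

  await db.insert(filters).values({
    id: indexerId,
    filter: serializedFilter,
    fromBlock: orderKey,
    toBlock: null,
  });
}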
package/src/storage.ts
CHANGED

@@ -16,6 +16,11 @@ import {
   text,
 } from "drizzle-orm/pg-core";
 import { DrizzleStorageError } from "./utils";
+
+function getReorgTriggerName(table: string, indexerId: string) {
+  return `${table}_reorg_${indexerId}`;
+}
+
 export type ReorgOperation = "I" | "U" | "D";
 
 export const reorgRollbackTable = pgTable("__reorg_rollback", {
@@ -25,6 +30,7 @@ export const reorgRollbackTable = pgTable("__reorg_rollback", {
   cursor: integer("cursor").notNull(),
   row_id: text("row_id"),
   row_value: jsonb("row_value"),
+  indexer_id: text("indexer_id").notNull(),
 });
 
 export type ReorgRollbackRow = typeof reorgRollbackTable.$inferSelect;
@@ -34,7 +40,7 @@ export async function initializeReorgRollbackTable<
   TFullSchema extends Record<string, unknown> = Record<string, never>,
   TSchema extends
     TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>,
->(tx: PgTransaction<TQueryResult, TFullSchema, TSchema
+>(tx: PgTransaction<TQueryResult, TFullSchema, TSchema>, indexerId: string) {
   try {
     // Create the audit log table
     await tx.execute(
@@ -45,35 +51,15 @@ export async function initializeReorgRollbackTable<
         table_name TEXT NOT NULL,
         cursor INTEGER NOT NULL,
         row_id TEXT,
-        row_value JSONB
+        row_value JSONB,
+        indexer_id TEXT NOT NULL
       );
     `),
     );
 
-    // Create the trigger function
     await tx.execute(
       sql.raw(`
-      CREATE
-      RETURNS TRIGGER AS $$
-      DECLARE
-        id_col TEXT := TG_ARGV[0]::TEXT;
-        order_key INTEGER := TG_ARGV[1]::INTEGER;
-        new_id_value TEXT := row_to_json(NEW.*)->>id_col;
-        old_id_value TEXT := row_to_json(OLD.*)->>id_col;
-      BEGIN
-        IF (TG_OP = 'DELETE') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'D', TG_TABLE_NAME, order_key, old_id_value, row_to_json(OLD.*);
-        ELSIF (TG_OP = 'UPDATE') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'U', TG_TABLE_NAME, order_key, new_id_value, row_to_json(OLD.*);
-        ELSIF (TG_OP = 'INSERT') THEN
-          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value)
-          SELECT 'I', TG_TABLE_NAME, order_key, new_id_value, null;
-        END IF;
-        RETURN NULL;
-      END;
-      $$ LANGUAGE plpgsql;
+      CREATE INDEX IF NOT EXISTS idx_reorg_rollback_indexer_id_cursor ON __reorg_rollback(indexer_id, cursor);
     `),
     );
   } catch (error) {
@@ -81,6 +67,43 @@ export async function initializeReorgRollbackTable<
       cause: error,
     });
   }
+
+  try {
+    // Create the trigger function
+    await tx.execute(
+      sql.raw(`
+      CREATE OR REPLACE FUNCTION reorg_checkpoint()
+      RETURNS TRIGGER AS $$
+      DECLARE
+        id_col TEXT := TG_ARGV[0]::TEXT;
+        order_key INTEGER := TG_ARGV[1]::INTEGER;
+        indexer_id TEXT := TG_ARGV[2]::TEXT;
+        new_id_value TEXT := row_to_json(NEW.*)->>id_col;
+        old_id_value TEXT := row_to_json(OLD.*)->>id_col;
+      BEGIN
+        IF (TG_OP = 'DELETE') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'D', TG_TABLE_NAME, order_key, old_id_value, row_to_json(OLD.*), indexer_id;
+        ELSIF (TG_OP = 'UPDATE') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'U', TG_TABLE_NAME, order_key, new_id_value, row_to_json(OLD.*), indexer_id;
+        ELSIF (TG_OP = 'INSERT') THEN
+          INSERT INTO __reorg_rollback(op, table_name, cursor, row_id, row_value, indexer_id)
+          SELECT 'I', TG_TABLE_NAME, order_key, new_id_value, null, indexer_id;
+        END IF;
+        RETURN NULL;
+      END;
+      $$ LANGUAGE plpgsql;
+    `),
+    );
+  } catch (error) {
+    throw new DrizzleStorageError(
+      "Failed to create reorg checkpoint function",
+      {
+        cause: error,
+      },
+    );
+  }
 }
 
 export async function registerTriggers<
@@ -93,18 +116,21 @@ export async function registerTriggers<
   tables: string[],
   endCursor: Cursor,
   idColumn: string,
+  indexerId: string,
 ) {
   try {
     for (const table of tables) {
       await tx.execute(
-        sql.raw(
+        sql.raw(
+          `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`,
+        ),
       );
       await tx.execute(
         sql.raw(`
-        CREATE CONSTRAINT TRIGGER ${table}
+        CREATE CONSTRAINT TRIGGER ${getReorgTriggerName(table, indexerId)}
        AFTER INSERT OR UPDATE OR DELETE ON ${table}
        DEFERRABLE INITIALLY DEFERRED
-        FOR EACH ROW EXECUTE FUNCTION reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`});
+        FOR EACH ROW EXECUTE FUNCTION reorg_checkpoint('${idColumn}', ${`${Number(endCursor.orderKey)}`}, '${indexerId}');
      `),
      );
    }
@@ -120,11 +146,17 @@ export async function removeTriggers<
   TFullSchema extends Record<string, unknown> = Record<string, never>,
   TSchema extends
     TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>,
->(
+>(
+  db: PgDatabase<TQueryResult, TFullSchema, TSchema>,
+  tables: string[],
+  indexerId: string,
+) {
   try {
     for (const table of tables) {
       await db.execute(
-        sql.raw(
+        sql.raw(
+          `DROP TRIGGER IF EXISTS ${getReorgTriggerName(table, indexerId)} ON ${table};`,
+        ),
      );
    }
  } catch (error) {
@@ -143,6 +175,7 @@ export async function invalidate<
   tx: PgTransaction<TQueryResult, TFullSchema, TSchema>,
   cursor: Cursor,
   idColumn: string,
+  indexerId: string,
 ) {
   // Get and delete operations after cursor in one query, ordered by newest first
   const { rows: result } = (await tx.execute(
@@ -150,6 +183,7 @@ export async function invalidate<
     WITH deleted AS (
       DELETE FROM __reorg_rollback
       WHERE cursor > ${Number(cursor.orderKey)}
+      AND indexer_id = '${indexerId}'
       RETURNING *
     )
     SELECT * FROM deleted ORDER BY n DESC;
@@ -263,12 +297,17 @@ export async function finalize<
   TFullSchema extends Record<string, unknown> = Record<string, never>,
   TSchema extends
     TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>,
->(
+>(
+  tx: PgTransaction<TQueryResult, TFullSchema, TSchema>,
+  cursor: Cursor,
+  indexerId: string,
+) {
   try {
     await tx.execute(
       sql.raw(`
       DELETE FROM __reorg_rollback
       WHERE cursor <= ${Number(cursor.orderKey)}
+      AND indexer_id = '${indexerId}'
     `),
     );
   } catch (error) {