@thru/indexer 0.1.38
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +477 -0
- package/dist/index.cjs +1208 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +1064 -0
- package/dist/index.d.ts +1064 -0
- package/dist/index.mjs +1185 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +46 -0
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,1208 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
var pgCore = require('drizzle-orm/pg-core');
|
|
4
|
+
var zod = require('zod');
|
|
5
|
+
var replay = require('@thru/replay');
|
|
6
|
+
var drizzleOrm = require('drizzle-orm');
|
|
7
|
+
var zodOpenapi = require('@hono/zod-openapi');
|
|
8
|
+
var drizzleZod = require('drizzle-zod');
|
|
9
|
+
|
|
10
|
+
// src/schema/builder.ts
|
|
11
|
+
// src/schema/builder.ts
/**
 * Wraps a mutable column-definition state in a chainable builder.
 * Getters read `state` live, so every builder created over the same state
 * observes the same flags. `notNull()` and `primaryKey()` return a *fresh*
 * wrapper (mirroring the type-narrowing in the TypeScript source), while
 * the remaining modifiers return the same builder instance.
 */
function createBuilder(state) {
  const builder = {
    get _type() { return state._type; },
    get _columnType() { return state._columnType; },
    get _nullable() { return state._nullable; },
    get _indexed() { return state._indexed; },
    get _unique() { return state._unique; },
    get _primary() { return state._primary; },
    get _default() { return state._default; },
    get _defaultNow() { return state._defaultNow; },
    get _references() { return state._references; },
    // Marks the column NOT NULL; returns a fresh wrapper.
    notNull() {
      state._nullable = false;
      return createBuilder(state);
    },
    // Requests a secondary index on this column.
    index() {
      state._indexed = true;
      return builder;
    },
    // Adds a UNIQUE constraint.
    unique() {
      state._unique = true;
      return builder;
    },
    // Marks the column as the primary key (implies NOT NULL); fresh wrapper.
    primaryKey() {
      state._primary = true;
      state._nullable = false;
      return createBuilder(state);
    },
    // Sets a literal default value.
    default(value) {
      state._default = value;
      return builder;
    },
    // Defaults the column to now(); only honored for timestamp columns.
    defaultNow() {
      state._defaultNow = true;
      return builder;
    },
    // Declares a foreign-key reference; `table` may be a lazy thunk.
    references(table, column) {
      const target = typeof table === "function" ? table() : table;
      state._references = { table: target, column };
      return builder;
    }
  };
  return builder;
}
|
|
73
|
+
/**
 * Creates a fresh column-definition builder for the given column type.
 * The initial state is nullable with no index, constraint, default, or
 * reference; callers chain modifiers on the returned builder.
 */
function createColumnDef(type) {
  const initialState = {
    _type: undefined,
    _columnType: type,
    _nullable: true,
    _indexed: false,
    _unique: false,
    _primary: false,
    _default: undefined,
    _defaultNow: false,
    _references: undefined
  };
  return createBuilder(initialState);
}
|
|
87
|
+
// Column-type factory namespace: one entry per supported column type, each
// call returning a fresh chainable builder (see createColumnDef).
var t = {
  text: () => createColumnDef("text"),
  bigint: () => createColumnDef("bigint"),
  integer: () => createColumnDef("integer"),
  boolean: () => createColumnDef("boolean"),
  timestamp: () => createColumnDef("timestamp")
};
// Alias of `t` — presumably the name this module exports publicly; the
// export block is outside this chunk, so confirm there.
var columnBuilder = t;
|
|
95
|
+
/**
 * Converts a camelCase identifier to snake_case by prefixing each ASCII
 * uppercase letter with an underscore and lowercasing it.
 * Note: a leading capital produces a leading underscore
 * (e.g. "FooBar" -> "_foo_bar"), matching the original regex behavior.
 */
function camelToSnake(str) {
  let out = "";
  for (const ch of str) {
    out += ch >= "A" && ch <= "Z" ? `_${ch.toLowerCase()}` : ch;
  }
  return out;
}
|
|
98
|
+
/**
 * Materializes a Drizzle pgTable from a schema of column-definition builders.
 * Field names are converted to snake_case for the DB column names; per-column
 * flags (primary key, unique, not-null, defaults, references) are applied in
 * order, and each indexed column produces a `<table>_<column>_idx` index.
 * Throws for an unrecognized column type.
 */
function buildDrizzleTable(tableName, schema) {
  // Maps an internal column type to its base pgCore column builder.
  const makeColumn = (columnType, dbName) => {
    switch (columnType) {
      case "text":
        return pgCore.text(dbName);
      case "bigint":
        return pgCore.bigint(dbName, { mode: "bigint" });
      case "integer":
        return pgCore.integer(dbName);
      case "boolean":
        return pgCore.boolean(dbName);
      case "timestamp":
        return pgCore.timestamp(dbName, { withTimezone: true });
      default:
        throw new Error(`Unknown column type: ${columnType}`);
    }
  };
  const columns = {};
  const indexBuilders = [];
  for (const [name, def] of Object.entries(schema)) {
    const internal = def;
    const dbName = camelToSnake(name);
    let column = makeColumn(internal._columnType, dbName);
    if (internal._primary) column = column.primaryKey();
    if (internal._unique) column = column.unique();
    if (!internal._nullable) column = column.notNull();
    // defaultNow wins over a literal default, and only applies to timestamps.
    if (internal._defaultNow && internal._columnType === "timestamp") {
      column = column.defaultNow();
    } else if (internal._default !== undefined) {
      column = column.default(internal._default);
    }
    if (internal._references) {
      const { table: refTable, column: refColumn } = internal._references;
      column = column.references(() => refTable[refColumn]);
    }
    columns[name] = column;
    if (internal._indexed) {
      indexBuilders.push(
        (table) => pgCore.index(`${tableName}_${dbName}_idx`).on(table[name])
      );
    }
  }
  if (indexBuilders.length === 0) {
    return pgCore.pgTable(tableName, columns);
  }
  return pgCore.pgTable(
    tableName,
    columns,
    (table) => indexBuilders.map((build) => build(table))
  );
}
|
|
159
|
+
/**
 * Derives a zod object schema mirroring a column-definition schema:
 * text -> string, bigint -> bigint, integer -> int number,
 * boolean -> boolean, timestamp -> Date. Unknown types fall back to
 * z.unknown(), and nullable columns become `.nullable()`.
 */
function generateZodSchema(schema) {
  const baseTypeFor = (columnType) => {
    switch (columnType) {
      case "text":
        return zod.z.string();
      case "bigint":
        return zod.z.bigint();
      case "integer":
        return zod.z.number().int();
      case "boolean":
        return zod.z.boolean();
      case "timestamp":
        return zod.z.date();
      default:
        return zod.z.unknown();
    }
  };
  const shape = {};
  for (const [key, col] of Object.entries(schema)) {
    const colDef = col;
    const base = baseTypeFor(colDef._columnType);
    shape[key] = colDef._nullable ? base.nullable() : base;
  }
  return zod.z.object(shape);
}
|
|
190
|
+
/**
 * Validates a parsed row against the zod schema generated from the stream
 * schema. Returns { success: true, data } on success, or
 * { success: false, error } with a per-field error listing on failure.
 */
function validateParsedData(schema, data, streamName) {
  const result = generateZodSchema(schema).safeParse(data);
  if (!result.success) {
    const details = result.error.errors
      .map((issue) => ` - ${issue.path.join(".")}: ${issue.message}`)
      .join("\n");
    return {
      success: false,
      error: `Stream "${streamName}" parse returned invalid data:\n${details}`
    };
  }
  return { success: true, data: result.data };
}
|
|
203
|
+
|
|
204
|
+
// src/streams/define.ts
|
|
205
|
+
// src/streams/define.ts
/**
 * Converts a dash/underscore/space separated name to PascalCase
 * (e.g. "my-stream_name" -> "MyStreamName").
 */
function pascalCase(str) {
  const words = str.split(/[-_\s]+/);
  const capitalized = words.map(
    (word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase()
  );
  return capitalized.join("");
}
|
|
208
|
+
/**
 * Builds an event-stream definition: validates the filter configuration,
 * derives a `<singular>_events` table from the schema, and exposes a
 * lazily-resolved filter alongside the user-supplied hooks.
 * Throws when neither `filter` nor `filterFactory` is provided.
 */
function defineEventStream(definition) {
  if (!definition.filter && !definition.filterFactory) {
    throw new Error(`Stream "${definition.name}" must provide either filter or filterFactory`);
  }
  // Strip one trailing "s" so e.g. "swaps" maps to "swap_events".
  const tableName = `${definition.name.replace(/s$/, "")}_events`;
  const table = buildDrizzleTable(tableName, definition.schema);
  let resolvedFilter = definition.filter ?? null;
  // Resolves the filter on first use; the factory result is cached.
  const getFilter = () => {
    if (resolvedFilter) return resolvedFilter;
    if (!definition.filterFactory) {
      throw new Error(`Stream "${definition.name}" has no filter configured`);
    }
    resolvedFilter = definition.filterFactory();
    return resolvedFilter;
  };
  return {
    name: definition.name,
    description: definition.description ?? `${pascalCase(definition.name)} events`,
    schema: definition.schema,
    table,
    // Column accessors for Drizzle operators
    c: table,
    getFilter,
    parse: definition.parse,
    api: definition.api,
    filterBatch: definition.filterBatch,
    onCommit: definition.onCommit
  };
}
|
|
239
|
+
// Shared checkpoint table: records per-stream indexing progress so a
// processor can resume from its last committed slot after a restart.
var checkpointTable = pgCore.pgTable("indexer_checkpoints", {
  /** Stream name (primary key) */
  streamName: pgCore.text("stream_name").primaryKey(),
  /** Last indexed slot number */
  lastIndexedSlot: pgCore.bigint("last_indexed_slot", { mode: "bigint" }).notNull(),
  /** Last event ID (for cursor-based resume) */
  lastEventId: pgCore.text("last_event_id"),
  /** When the checkpoint was last updated */
  updatedAt: pgCore.timestamp("updated_at", { withTimezone: true }).defaultNow().notNull()
});
|
|
249
|
+
/**
 * Loads the checkpoint for a stream.
 * Returns { slot, eventId } (slot is a bigint per the table definition),
 * or null when the stream has never committed a checkpoint.
 */
async function getCheckpoint(db, streamName) {
  const rows = await db
    .select()
    .from(checkpointTable)
    .where(drizzleOrm.eq(checkpointTable.streamName, streamName))
    .limit(1);
  const row = rows[0];
  return row ? { slot: row.lastIndexedSlot, eventId: row.lastEventId } : null;
}
|
|
259
|
+
/**
 * Upserts the checkpoint for a stream (insert-or-update keyed on stream
 * name). `db` may be a transaction handle so the checkpoint write can run
 * atomically with the event insert.
 */
async function updateCheckpoint(db, streamName, slot, eventId = null) {
  const progress = {
    lastIndexedSlot: slot,
    lastEventId: eventId,
    updatedAt: new Date()
  };
  await db
    .insert(checkpointTable)
    .values({ streamName, ...progress })
    .onConflictDoUpdate({
      target: checkpointTable.streamName,
      set: progress
    });
}
|
|
274
|
+
/** Removes the checkpoint row for a stream, so its next run starts fresh. */
async function deleteCheckpoint(db, streamName) {
  const matchesStream = drizzleOrm.eq(checkpointTable.streamName, streamName);
  await db.delete(checkpointTable).where(matchesStream);
}
|
|
277
|
+
/**
 * Lists every stored checkpoint as { streamName, checkpoint } pairs,
 * where checkpoint is { slot, eventId }.
 */
async function getAllCheckpoints(db) {
  const rows = await db.select().from(checkpointTable);
  const toEntry = (row) => ({
    streamName: row.streamName,
    checkpoint: {
      slot: row.lastIndexedSlot,
      eventId: row.lastEventId
    }
  });
  return rows.map(toEntry);
}
|
|
287
|
+
|
|
288
|
+
// src/checkpoint/index.ts
|
|
289
|
+
// src/checkpoint/index.ts
/**
 * Collects every table that should appear in the generated schema module:
 * the shared checkpoint table plus one table per configured stream.
 * Export names default to `<name>Table` (dashes stripped for account
 * streams) and can be overridden per stream via `tableNames`.
 */
function getSchemaExports(config) {
  const { eventStreams = [], accountStreams = [], tableNames = {} } = config;
  const result = { checkpointTable };
  // Registers a stream's table under its override name or the fallback.
  const register = (stream, fallbackName) => {
    result[tableNames[stream.name] ?? fallbackName] = stream.table;
  };
  for (const stream of eventStreams) {
    register(stream, `${stream.name}Table`);
  }
  for (const stream of accountStreams) {
    register(stream, `${stream.name.replace(/-/g, "")}Table`);
  }
  return result;
}
|
|
304
|
+
|
|
305
|
+
// src/streams/processor.ts
|
|
306
|
+
// src/streams/processor.ts
/**
 * Buffers parsed events and groups them into per-slot batches.
 * A batch is emitted when the slot advances, when the buffer reaches
 * `maxPendingCount` events, or when `maxPendingMs` has elapsed since the
 * last flush.
 */
var StreamBatcher = class {
  currentSlot = null;
  pendingEvents = [];
  lastFlushTime = Date.now();
  maxPendingCount = 100;
  maxPendingMs = 5e3;
  /**
   * Buffers an event for the given slot. When the event triggers a flush,
   * the previously buffered batch is returned; otherwise null.
   */
  addEvent(event, slot) {
    if (!this.shouldFlush(slot)) {
      this.currentSlot = slot;
      this.pendingEvents.push(event);
      return null;
    }
    const batch = this.flush();
    this.currentSlot = slot;
    this.pendingEvents = [event];
    return batch;
  }
  /** True when the buffer must be flushed before accepting `newSlot`. */
  shouldFlush(newSlot) {
    if (this.pendingEvents.length === 0) return false;
    const slotAdvanced = this.currentSlot !== null && newSlot !== this.currentSlot;
    const bufferFull = this.pendingEvents.length >= this.maxPendingCount;
    const stale = Date.now() - this.lastFlushTime >= this.maxPendingMs;
    return slotAdvanced || bufferFull || stale;
  }
  /** Drains the buffer into a { slot, events } batch, or null when empty. */
  flush() {
    if (this.currentSlot === null || this.pendingEvents.length === 0) {
      return null;
    }
    const batch = {
      slot: this.currentSlot,
      events: this.pendingEvents
    };
    this.pendingEvents = [];
    this.lastFlushTime = Date.now();
    return batch;
  }
  /** Number of events currently buffered. */
  getPendingCount() {
    return this.pendingEvents.length;
  }
  /**
   * Flush if the timeout has elapsed since last flush.
   * Called by background timer to ensure events don't sit in buffer indefinitely.
   */
  flushIfStale() {
    const elapsed = Date.now() - this.lastFlushTime;
    if (this.pendingEvents.length > 0 && elapsed >= this.maxPendingMs) {
      return this.flush();
    }
    return null;
  }
};
|
|
356
|
+
/**
 * Runs the ingest loop for a single event stream.
 *
 * Resumes from the stream's checkpoint (or `defaultStartSlot` on first run),
 * replays events via `replay.createEventReplay`, parses each event through
 * the stream's `parse`, optionally validates the parsed row against the
 * stream schema, and groups rows into per-slot batches via StreamBatcher.
 * Each batch is inserted together with its checkpoint update inside a single
 * DB transaction, so a crash can never commit rows without advancing the
 * checkpoint.
 *
 * @param stream      Definition produced by defineEventStream.
 * @param options     { clientFactory, db, defaultStartSlot, safetyMargin=64,
 *                      pageSize=512, logLevel="info", validateParse=false }
 * @param abortSignal Optional AbortSignal; checked between events.
 * @returns {Promise<{eventsProcessed: number, batchesCommitted: number, lastSlot: *}>}
 */
async function runEventStreamProcessor(stream, options, abortSignal) {
  const {
    clientFactory,
    db,
    defaultStartSlot,
    safetyMargin = 64,
    pageSize = 512,
    logLevel = "info",
    validateParse = false
  } = options;
  // Minimal two-level logger: "debug" messages only appear at debug level;
  // every other level always prints.
  const log = (level, msg) => {
    if (logLevel === "debug" || level !== "debug") {
      console.log(`[${stream.name}] ${msg}`);
    }
  };
  log("info", `Starting stream processor: ${stream.description}`);
  const checkpoint = await getCheckpoint(db, stream.name);
  // The checkpointed slot is fully committed, so resume from the next one.
  const startSlot = checkpoint ? checkpoint.slot + 1n : defaultStartSlot;
  log(
    "info",
    `Starting from slot ${startSlot}${checkpoint ? " (resuming)" : " (fresh start)"}`
  );
  // At debug level, hand the replay library a real console logger; otherwise
  // suppress its debug output and route the rest through `log`.
  const logger = logLevel === "debug" ? replay.createConsoleLogger(stream.name) : {
    debug: () => {
    },
    info: (msg) => log("info", msg),
    warn: (msg) => log("warn", msg),
    error: (msg) => log("error", msg)
  };
  const replay$1 = replay.createEventReplay({
    clientFactory,
    startSlot,
    safetyMargin: BigInt(safetyMargin),
    pageSize,
    filter: stream.getFilter(),
    logger,
    resubscribeOnEnd: true
  });
  const batcher = new StreamBatcher();
  const stats = {
    eventsProcessed: 0,
    batchesCommitted: 0,
    lastSlot: null
  };
  let lastLogTime = Date.now();
  let eventsReceivedSinceLastLog = 0;
  // Commits one batch: optional user-side batch filtering, then the row
  // insert + checkpoint update in one transaction, then the onCommit hook.
  const commitBatch = async (batch) => {
    let eventsToCommit = batch.events;
    if (stream.filterBatch) {
      try {
        eventsToCommit = await stream.filterBatch(
          eventsToCommit,
          { db }
        );
        if (eventsToCommit.length === 0) {
          log(
            "debug",
            `All ${batch.events.length} events filtered out at slot ${batch.slot}`
          );
          // NOTE(review): when everything is filtered out (or filterBatch
          // throws below) the checkpoint is NOT advanced for this slot, so
          // the slot will be re-read after a restart — confirm intentional.
          return;
        }
        if (eventsToCommit.length < batch.events.length) {
          log(
            "debug",
            `Filtered ${batch.events.length - eventsToCommit.length} of ${batch.events.length} events at slot ${batch.slot}`
          );
        }
      } catch (filterErr) {
        log(
          "error",
          `filterBatch hook failed: ${filterErr instanceof Error ? filterErr.message : String(filterErr)}`
        );
        return;
      }
    }
    // Insert rows and advance the checkpoint atomically. Duplicate rows are
    // ignored (onConflictDoNothing) so replays after a crash are idempotent.
    await db.transaction(async (tx) => {
      await tx.insert(stream.table).values(eventsToCommit).onConflictDoNothing();
      const lastEvent = eventsToCommit[eventsToCommit.length - 1];
      await updateCheckpoint(
        tx,
        stream.name,
        batch.slot,
        lastEvent.id
      );
    });
    stats.batchesCommitted++;
    stats.lastSlot = batch.slot;
    // onCommit failures are logged but never abort the stream — the batch
    // is already committed at this point.
    if (stream.onCommit) {
      try {
        await stream.onCommit({ ...batch, events: eventsToCommit }, { db });
      } catch (hookErr) {
        log(
          "error",
          `onCommit hook failed: ${hookErr instanceof Error ? hookErr.message : String(hookErr)}`
        );
      }
    }
  };
  // Background timer (1s tick) flushes a stale buffer even when no new
  // events arrive, so the tail of a quiet stream still gets committed.
  const flushInterval = setInterval(async () => {
    const batch = batcher.flushIfStale();
    if (batch) {
      try {
        await commitBatch(batch);
        log(
          "debug",
          `Timeout flush: ${batch.events.length} event(s) at slot ${batch.slot}`
        );
      } catch (err) {
        log(
          "error",
          `Timeout flush failed: ${err instanceof Error ? err.message : String(err)}`
        );
      }
    }
  }, 1e3);
  try {
    for await (const event of replay$1) {
      if (abortSignal?.aborted) {
        log("info", "Abort signal received, stopping...");
        break;
      }
      eventsReceivedSinceLastLog++;
      // parse() returning a falsy value means "not ours" — skip silently.
      const parsed = stream.parse(event);
      if (!parsed) continue;
      if (validateParse) {
        const validation = validateParsedData(stream.schema, parsed, stream.name);
        if (!validation.success) {
          log("error", validation.error);
          continue;
        }
      }
      stats.eventsProcessed++;
      // addEvent returns the previous batch when this event triggers a flush.
      const batch = batcher.addEvent(parsed, event.slot);
      if (batch) {
        await commitBatch(batch);
        log(
          "info",
          `Committed ${batch.events.length} event(s) at slot ${batch.slot} (total: ${stats.eventsProcessed})`
        );
      }
      // Heartbeat every 30s so a live-but-quiet stream is distinguishable
      // from a hung one.
      const now = Date.now();
      if (now - lastLogTime >= 3e4) {
        log(
          "info",
          `Heartbeat: ${eventsReceivedSinceLastLog} events received, ${batcher.getPendingCount()} pending`
        );
        eventsReceivedSinceLastLog = 0;
        lastLogTime = now;
      }
    }
    // Drain whatever is still buffered after the replay iterator ends or
    // an abort breaks the loop.
    const finalBatch = batcher.flush();
    if (finalBatch) {
      await commitBatch(finalBatch);
      log(
        "info",
        `Final flush: ${finalBatch.events.length} event(s) at slot ${finalBatch.slot}`
      );
    }
  } catch (err) {
    log(
      "error",
      `Stream error: ${err instanceof Error ? err.message : String(err)}`
    );
    throw err;
  } finally {
    // Always stop the background flush timer, even on error paths.
    clearInterval(flushInterval);
  }
  log(
    "info",
    `Stream stopped. Processed ${stats.eventsProcessed} events in ${stats.batchesCommitted} batches.`
  );
  return stats;
}
|
|
529
|
+
|
|
530
|
+
// src/accounts/define.ts
|
|
531
|
+
// src/accounts/define.ts
/** Converts a dash/underscore/space separated name to PascalCase. */
function pascalCase2(str) {
  let result = "";
  for (const word of str.split(/[-_\s]+/)) {
    result += word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
  }
  return result;
}
|
|
534
|
+
/**
 * Builds an account-stream definition: validates the owner-program
 * configuration, derives a table from the schema (dashes in the stream name
 * become underscores), and exposes a lazily-resolved owner program.
 * Throws when neither `ownerProgram` nor `ownerProgramFactory` is provided.
 */
function defineAccountStream(definition) {
  if (!definition.ownerProgram && !definition.ownerProgramFactory) {
    throw new Error(`Stream "${definition.name}" must provide either ownerProgram or ownerProgramFactory`);
  }
  const tableName = definition.name.replace(/-/g, "_");
  const table = buildDrizzleTable(tableName, definition.schema);
  let resolvedOwner = definition.ownerProgram ?? null;
  // Resolves the owner program on first use; the factory result is cached.
  const getOwnerProgram = () => {
    if (resolvedOwner) return resolvedOwner;
    if (!definition.ownerProgramFactory) {
      throw new Error(`Stream "${definition.name}" has no ownerProgram configured`);
    }
    resolvedOwner = definition.ownerProgramFactory();
    return resolvedOwner;
  };
  return {
    name: definition.name,
    description: definition.description ?? `${pascalCase2(definition.name)} accounts`,
    schema: definition.schema,
    getOwnerProgram,
    expectedSize: definition.expectedSize,
    dataSizes: definition.dataSizes,
    table,
    // Column accessors for Drizzle operators
    c: table,
    parse: definition.parse,
    api: definition.api
  };
}
|
|
565
|
+
/**
 * Ordered log-level gate: true when `level` ranks at or above `minLevel`
 * in debug < info < warn < error. An unrecognized `level` ranks below
 * "debug" (indexOf -1) and is suppressed unless `minLevel` is also unknown.
 */
function shouldLog(level, minLevel) {
  const ranking = ["debug", "info", "warn", "error"];
  const levelRank = ranking.indexOf(level);
  const thresholdRank = ranking.indexOf(minLevel);
  return levelRank >= thresholdRank;
}
|
|
569
|
+
/**
 * Runs the ingest loop for a single account stream.
 *
 * Replays accounts owned by the stream's program via
 * `replay.createAccountsByOwnerReplay`, upserting each parsed account row
 * keyed by `api.idField` (default "address"). The `where` guard on the
 * upsert only overwrites a row whose stored slot is <= the incoming slot,
 * so older snapshots never clobber newer ones. Checkpoints are stored under
 * `account:<stream name>` on every finished block and once more on shutdown.
 *
 * @param stream      Definition produced by defineAccountStream.
 * @param options     { clientFactory, db, logLevel="info", validateParse=false }
 * @param abortSignal Optional AbortSignal; checked between events.
 * @returns {Promise<{accountsProcessed: number, accountsUpdated: number, accountsDeleted: number}>}
 */
async function runAccountStreamProcessor(stream, options, abortSignal) {
  const { clientFactory, db, logLevel = "info", validateParse = false } = options;
  // Checkpoints are namespaced so account streams never collide with an
  // event stream of the same name.
  const checkpointName = `account:${stream.name}`;
  // NOTE(review): `meta` is accepted but never printed — the trailing ""
  // placeholder suggests metadata output was stripped; confirm intent.
  const log = (level, msg, meta) => {
    if (shouldLog(level, logLevel)) {
      console.log(
        `[account-stream:${stream.name}] ${msg}`,
        ""
      );
    }
  };
  const stats = {
    accountsProcessed: 0,
    accountsUpdated: 0,
    accountsDeleted: 0
  };
  const checkpoint = await getCheckpoint(db, checkpointName);
  const minUpdatedSlot = checkpoint?.slot ?? void 0;
  if (minUpdatedSlot) {
    log("info", `Resuming from checkpoint: slot ${minUpdatedSlot}`);
  }
  log("info", `Starting account stream: ${stream.description}`);
  if (stream.expectedSize) {
    log("info", `Expected data size: ${stream.expectedSize} bytes`);
  }
  let lastProcessedSlot = minUpdatedSlot ?? 0n;
  try {
    const replay$1 = replay.createAccountsByOwnerReplay({
      clientFactory,
      owner: stream.getOwnerProgram(),
      view: replay.AccountView.FULL,
      // Explicit dataSizes win; otherwise fall back to a single-element
      // filter built from expectedSize, or no size filter at all.
      dataSizes: stream.dataSizes ?? (stream.expectedSize ? [stream.expectedSize] : void 0),
      minUpdatedSlot,
      onBackfillComplete: (highestSlot) => {
        log(
          "info",
          `Backfill complete. Highest slot: ${highestSlot}, accounts processed: ${stats.accountsProcessed}`
        );
        lastProcessedSlot = highestSlot;
      }
    });
    for await (const event of replay$1) {
      if (abortSignal?.aborted) {
        log("info", "Abort signal received, stopping");
        break;
      }
      if (event.type === "account") {
        const account = event.account;
        stats.accountsProcessed++;
        // Log the first few accounts verbosely to aid setup debugging.
        if (stats.accountsProcessed <= 3) {
          log(
            "info",
            `Account ${stats.accountsProcessed}: ${account.addressHex}, slot=${account.slot}, dataLen=${account.data.length}`
          );
        }
        // parse() returning a falsy value means "not ours" — skip.
        const parsed = stream.parse(account);
        if (!parsed) {
          log(
            "debug",
            `Skipped account ${account.addressHex} - parser returned null (dataLen=${account.data.length})`
          );
          continue;
        }
        if (validateParse) {
          const validation = validateParsedData(stream.schema, parsed, stream.name);
          if (!validation.success) {
            log("error", validation.error);
            continue;
          }
        }
        // NOTE(review): deletions are only counted — no row is removed from
        // the table here. Confirm that stale rows are acceptable.
        if (account.isDelete) {
          log("debug", `Account deleted: ${account.addressHex}`);
          stats.accountsDeleted++;
          continue;
        }
        const table = stream.table;
        const idField = stream.api?.idField ?? "address";
        try {
          // Slot-guarded upsert: only replace the row when the stored slot
          // is not newer than the incoming one.
          await db.insert(stream.table).values(parsed).onConflictDoUpdate({
            target: table[idField],
            set: parsed,
            where: drizzleOrm.sql`${table.slot} <= ${parsed.slot}`
          });
          stats.accountsUpdated++;
          if (stats.accountsUpdated <= 3) {
            log(
              "info",
              `Successfully inserted account ${stats.accountsUpdated}`
            );
          }
        } catch (err) {
          // A single bad row is logged and skipped; the stream keeps going.
          log(
            "error",
            `Failed to upsert account ${account.addressHex}: ${err}`
          );
        }
        if (account.slot > lastProcessedSlot) {
          lastProcessedSlot = account.slot;
        }
        // Coarse progress log every 100 accounts.
        if (stats.accountsProcessed % 100 === 0) {
          log(
            "info",
            `Processed ${stats.accountsProcessed} accounts, updated ${stats.accountsUpdated}`
          );
        }
      } else if (event.type === "blockFinished") {
        // Block boundaries are the durable commit points for this stream.
        const slot = event.block.slot;
        if (slot > lastProcessedSlot) {
          lastProcessedSlot = slot;
        }
        await updateCheckpoint(db, checkpointName, lastProcessedSlot, null);
        log("debug", `Block finished: slot ${slot}, checkpoint saved`);
      }
    }
    // Persist progress once more when the loop ends (abort or stream end).
    if (lastProcessedSlot > 0n) {
      await updateCheckpoint(db, checkpointName, lastProcessedSlot, null);
      log("info", `Final checkpoint saved: slot ${lastProcessedSlot}`);
    }
  } catch (err) {
    // An abort that surfaces as an exception is treated as a clean stop;
    // anything else is rethrown after logging.
    if (abortSignal?.aborted) {
      log("info", "Stream aborted");
    } else {
      log(
        "error",
        `Stream error: ${err instanceof Error ? err.message : String(err)}`
      );
      throw err;
    }
  }
  log(
    "info",
    `Finished. Processed: ${stats.accountsProcessed}, Updated: ${stats.accountsUpdated}, Deleted: ${stats.accountsDeleted}`
  );
  return stats;
}
|
|
704
|
+
// Shared query-string schema for paginated list endpoints. `z.coerce` is
// used because query parameters arrive as strings.
var paginationQuerySchema = zodOpenapi.z.object({
  limit: zodOpenapi.z.coerce.number().int().min(1).max(100).default(20).openapi({
    description: "Number of results to return (1-100)",
    example: 20
  }),
  offset: zodOpenapi.z.coerce.number().int().min(0).default(0).openapi({
    description: "Number of results to skip",
    example: 0
  }),
  cursor: zodOpenapi.z.string().optional().openapi({
    description: "Cursor for pagination (format: slot:id)",
    example: "3181195:abc123"
  })
});
|
|
718
|
+
// Pagination metadata echoed back on every list response (see paginate()).
var paginationResponseSchema = zodOpenapi.z.object({
  limit: zodOpenapi.z.number().openapi({ example: 20 }),
  offset: zodOpenapi.z.number().openapi({ example: 0 }),
  hasMore: zodOpenapi.z.boolean().openapi({ example: true }),
  nextCursor: zodOpenapi.z.string().nullable().openapi({ example: "3181195:abc123" })
});
|
|
724
|
+
/** Wraps a schema as `{ data: T }` — the single-resource response envelope. */
function dataResponse(schema) {
  const envelope = zodOpenapi.z.object({ data: schema });
  return envelope;
}
|
|
727
|
+
/**
 * Wraps a schema as `{ data: T[], pagination }` — the list response
 * envelope shared by all paginated endpoints.
 */
function listResponse(schema) {
  const shape = {
    data: zodOpenapi.z.array(schema),
    pagination: paginationResponseSchema
  };
  return zodOpenapi.z.object(shape);
}
|
|
733
|
+
// Standard error envelope, registered as the "Error" component in the
// generated OpenAPI document.
var errorSchema = zodOpenapi.z.object({
  error: zodOpenapi.z.string().openapi({ example: "Not found" }),
  code: zodOpenapi.z.string().optional().openapi({ example: "NOT_FOUND" })
}).openapi("Error");
|
|
737
|
+
/**
 * Converts a "limit + 1" query result into a paginated response envelope.
 * Callers are expected to fetch `query.limit + 1` rows; the extra row only
 * signals that another page exists and is dropped from the output.
 * The cursor defaults to `slot:id` of the last returned row unless a
 * custom `getCursor` is supplied.
 */
function paginate(rows, query, getCursor) {
  const hasMore = rows.length > query.limit;
  const data = hasMore ? rows.slice(0, -1) : rows;
  let nextCursor = null;
  if (hasMore) {
    const lastItem = data[data.length - 1];
    if (lastItem) {
      if (getCursor) {
        nextCursor = getCursor(lastItem);
      } else if (lastItem.slot !== undefined && lastItem.id !== undefined) {
        nextCursor = `${lastItem.slot}:${lastItem.id}`;
      }
    }
  }
  return {
    data,
    pagination: {
      limit: query.limit,
      offset: query.offset,
      hasMore,
      nextCursor
    }
  };
}
|
|
762
|
+
/**
 * Parses a "slot:id" cursor into { slot: bigint, id: string }.
 * Splits on the FIRST colon, so ids may themselves contain colons.
 * Returns null when there is no colon or the slot part is not a valid
 * BigInt literal.
 */
function parseCursor(cursor) {
  const sep = cursor.indexOf(":");
  if (sep < 0) return null;
  const rawSlot = cursor.slice(0, sep);
  const id = cursor.slice(sep + 1);
  try {
    return { slot: BigInt(rawSlot), id };
  } catch {
    return null;
  }
}
|
|
773
|
+
/** PascalCases a dash/underscore/space separated name (local copy). */
function pascalCase3(str) {
  const upperFirst = (word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase();
  return str.split(/[-_\s]+/).map(upperFirst).join("");
}
|
|
776
|
+
/**
 * Generates the row/insert/API zod schemas for a Drizzle table, plus a
 * `serialize` helper that converts DB rows into JSON-safe objects
 * (bigint -> decimal string, Date -> ISO 8601 string).
 * The API schema is registered under `PascalCase(name) + suffix`.
 */
function generateSchemas(table, name, suffix = "") {
  const rowSchema = drizzleZod.createSelectSchema(table);
  const insertSchema = drizzleZod.createInsertSchema(table);
  // API field builder: bigints and timestamps are exposed as strings
  // because JSON has no representation for either.
  const fieldFor = (colName, dataType) => {
    switch (dataType) {
      case "bigint":
        return zodOpenapi.z.string().openapi({ description: `${colName} (bigint)` });
      case "date":
        return zodOpenapi.z.string().openapi({ description: `${colName} (ISO timestamp)` });
      case "string":
        return zodOpenapi.z.string();
      case "number":
        return zodOpenapi.z.number();
      case "boolean":
        return zodOpenapi.z.boolean();
      default:
        return zodOpenapi.z.any();
    }
  };
  const apiFields = {};
  for (const [colName, col] of Object.entries(drizzleOrm.getTableColumns(table))) {
    let fieldSchema = fieldFor(colName, col.dataType);
    if (!col.notNull) {
      fieldSchema = fieldSchema.nullable();
    }
    apiFields[colName] = fieldSchema;
  }
  const schemaName = suffix ? `${pascalCase3(name)}${suffix}` : pascalCase3(name);
  const apiSchema = zodOpenapi.z.object(apiFields).openapi(schemaName);
  // Converts one DB row into a JSON-safe plain object.
  const serialize = (row) => {
    const result = {};
    for (const [key, value] of Object.entries(row)) {
      if (typeof value === "bigint") {
        result[key] = value.toString();
      } else if (value instanceof Date) {
        result[key] = value.toISOString();
      } else {
        result[key] = value;
      }
    }
    return result;
  };
  return {
    row: rowSchema,
    insert: insertSchema,
    api: apiSchema,
    serialize
  };
}
|
|
831
|
+
|
|
832
|
+
// src/api/routes.ts
|
|
833
|
+
// src/api/routes.ts
/** PascalCases a dash/underscore/space separated name (local copy). */
function pascalCase4(str) {
  const pieces = str.split(/[-_\s]+/);
  return pieces
    .map((word) => word.charAt(0).toUpperCase() + word.slice(1).toLowerCase())
    .join("");
}
|
|
836
|
+
/**
 * Build an OpenAPI-documented Hono router exposing two read endpoints for one
 * stream table: `GET /` (paginated, filterable list) and `GET /{idField}`
 * (single-row lookup).
 *
 * @param {object} config
 * @param {object} config.db - Drizzle database handle used to run the queries.
 * @param {string} config.name - Stream name; used for route tags/summaries.
 * @param {object} config.table - Drizzle table queried by both endpoints.
 * @param {object} config.schemas - Schema bundle (uses `schemas.api` for response
 *   docs and `schemas.serialize` to convert rows to JSON-safe values).
 * @param {string[]} config.filters - Column names exposed as optional equality
 *   query-string filters.
 * @param {string} config.idField - Column used for the single-row lookup path.
 * @param {string} config.resourceType - Wording only (e.g. "event"/"account").
 * @param {boolean} config.includeSlotFilters - Adds fromSlot/toSlot range filters.
 * @param {string} config.sortField - Primary DESC sort column for the list.
 * @param {string|undefined} config.secondarySortField - Tie-breaker DESC sort
 *   column; its presence also enables cursor (keyset) pagination.
 * @returns {object} The configured OpenAPIHono router for the caller to mount.
 */
function buildRoutes(config) {
  const {
    db,
    name,
    table,
    schemas,
    filters,
    idField,
    resourceType,
    includeSlotFilters,
    sortField,
    secondarySortField
  } = config;
  const router = new zodOpenapi.OpenAPIHono();
  const tag = pascalCase4(name);
  // Every configured filter is exposed as an optional string query param.
  const filterFields = {};
  for (const field of filters) {
    filterFields[field] = zodOpenapi.z.string().optional().openapi({
      description: `Filter by ${field}`
    });
  }
  // Slot range filters are strings in the query; parsed to BigInt in the handler.
  if (includeSlotFilters) {
    filterFields.fromSlot = zodOpenapi.z.string().optional().openapi({
      description: "Minimum slot number"
    });
    filterFields.toSlot = zodOpenapi.z.string().optional().openapi({
      description: "Maximum slot number"
    });
  }
  const listQuerySchema = paginationQuerySchema.extend(filterFields);
  const listRoute = zodOpenapi.createRoute({
    method: "get",
    path: "/",
    tags: [tag],
    summary: `List ${name} ${resourceType}s`,
    description: `Returns a paginated list of ${name} ${resourceType}s with optional filtering.`,
    request: { query: listQuerySchema },
    responses: {
      200: {
        description: `List of ${name} ${resourceType}s`,
        content: { "application/json": { schema: listResponse(schemas.api) } }
      }
    }
  });
  router.openapi(listRoute, async (c) => {
    const query = c.req.valid("query");
    const conditions = [];
    // Equality filters; empty-string values are treated as absent.
    for (const field of filters) {
      const value = query[field];
      if (value !== void 0 && value !== "") {
        conditions.push(drizzleOrm.eq(table[field], value));
      }
    }
    // Inclusive slot range bounds (gte/lte). Falsy values (undefined, "") skip.
    if (includeSlotFilters) {
      if (query.fromSlot) {
        conditions.push(drizzleOrm.gte(table.slot, BigInt(query.fromSlot)));
      }
      if (query.toSlot) {
        conditions.push(drizzleOrm.lte(table.slot, BigInt(query.toSlot)));
      }
    }
    // Keyset pagination: rows strictly "before" the cursor position under the
    // (slot DESC, secondary DESC) ordering. Only active when a tie-breaker
    // column exists; an unparsable cursor is silently ignored.
    if (query.cursor && secondarySortField) {
      const parsed = parseCursor(query.cursor);
      if (parsed) {
        conditions.push(
          drizzleOrm.sql`(${table.slot} < ${parsed.slot} OR (${table.slot} = ${parsed.slot} AND ${table[secondarySortField]} < ${parsed.id}))`
        );
      }
    }
    const orderBy = secondarySortField ? [drizzleOrm.desc(table[sortField]), drizzleOrm.desc(table[secondarySortField])] : [drizzleOrm.desc(table[sortField])];
    // Fetches limit + 1 rows — presumably so paginate() can detect whether a
    // next page exists without a COUNT query; confirm against paginate().
    // offset is ignored (forced to 0) when cursor pagination is in use.
    const rows = await db.select().from(table).where(conditions.length > 0 ? drizzleOrm.and(...conditions) : void 0).orderBy(...orderBy).limit(query.limit + 1).offset(query.cursor ? 0 : query.offset);
    const result = paginate(rows, query);
    return c.json({
      data: result.data.map(
        (row) => schemas.serialize(row)
      ),
      pagination: result.pagination
    });
  });
  const getRoute = zodOpenapi.createRoute({
    method: "get",
    path: `/{${idField}}`,
    tags: [tag],
    summary: `Get ${name} ${resourceType} by ${idField}`,
    description: `Returns a single ${name} ${resourceType} by its ${idField}.`,
    request: {
      params: zodOpenapi.z.object({
        [idField]: zodOpenapi.z.string().openapi({ description: `${pascalCase4(resourceType)} ${idField}` })
      })
    },
    responses: {
      200: {
        description: `${pascalCase4(name)} ${resourceType} found`,
        content: { "application/json": { schema: dataResponse(schemas.api) } }
      },
      404: {
        description: `${pascalCase4(resourceType)} not found`,
        content: { "application/json": { schema: errorSchema } }
      }
    }
  });
  router.openapi(getRoute, async (c) => {
    // Raw path param is compared directly against the id column; no coercion.
    const id = c.req.param(idField);
    const [row] = await db.select().from(table).where(drizzleOrm.eq(table[idField], id)).limit(1);
    if (!row) {
      return c.json({ error: "Not found" }, 404);
    }
    return c.json({
      data: schemas.serialize(row)
    });
  });
  return router;
}
|
|
949
|
+
/**
 * Mount read-only API routes for every configured event and account stream
 * onto a Hono app, under `{pathPrefix}/{stream.name}`.
 *
 * A stream can opt out via `stream.api.enabled === false`, override the filter
 * columns via `stream.api.filters`, and override the lookup column via
 * `stream.api.idField`.
 *
 * @param {object} app - Hono app (anything with `.route(path, router)`).
 * @param {object} options
 * @param {object} options.db - Drizzle database handle passed through to routes.
 * @param {Array} [options.eventStreams] - Event stream definitions.
 * @param {Array} [options.accountStreams] - Account stream definitions.
 * @param {string} [options.pathPrefix] - URL prefix; defaults to "/api/v1".
 */
function mountStreamRoutes(app, options) {
  const {
    db,
    eventStreams = [],
    accountStreams = [],
    pathPrefix = "/api/v1"
  } = options;
  // Shared mounting logic for both stream kinds. `defaults` carries the
  // per-kind schema suffix, idField fallback, and sort/filter configuration
  // that previously lived in two duplicated loop bodies.
  const mountAll = (streams, defaults) => {
    for (const stream of streams) {
      if (stream.api?.enabled === false) continue;
      const schemas = generateSchemas(stream.table, stream.name, defaults.schemaSuffix);
      const router = buildRoutes({
        db,
        name: stream.name,
        table: stream.table,
        schemas,
        filters: stream.api?.filters ?? [],
        idField: stream.api?.idField ?? defaults.idField,
        resourceType: defaults.resourceType,
        includeSlotFilters: defaults.includeSlotFilters,
        sortField: defaults.sortField,
        secondarySortField: defaults.secondarySortField
      });
      app.route(`${pathPrefix}/${stream.name}`, router);
    }
  };
  // Event streams: slot-ordered with id tie-breaker and slot range filters.
  mountAll(eventStreams, {
    schemaSuffix: "Event",
    idField: "id",
    resourceType: "event",
    includeSlotFilters: true,
    sortField: "slot",
    secondarySortField: "id"
  });
  // Account streams: keyed by address, ordered by last update, no slot filters.
  mountAll(accountStreams, {
    schemaSuffix: "Account",
    idField: "address",
    resourceType: "account",
    includeSlotFilters: false,
    sortField: "updatedAt",
    secondarySortField: void 0
  });
}
|
|
995
|
+
/**
 * Orchestrates all configured event and account stream processors.
 *
 * Lifecycle: construct with a config object, `await start()` to run every
 * stream concurrently, and call `stop()` to request a graceful shutdown
 * (a second `stop()` call force-exits the process).
 */
var Indexer = class {
  // Normalized configuration: user config spread over built-in defaults.
  config;
  // AbortController whose signal is shared by all stream processors;
  // null whenever the indexer is not running.
  abortController = null;
  // True while start() is in flight.
  running = false;
  // Set by the first stop() call; checked to detect a repeated stop().
  shutdownRequested = false;
  constructor(config) {
    // Caller-supplied keys win over these defaults via the spread.
    this.config = {
      defaultStartSlot: 0n,
      safetyMargin: 64,
      pageSize: 512,
      logLevel: "info",
      ...config
    };
  }
  /**
   * Check if the checkpoint table exists in the database.
   * Logs a warning if it doesn't exist (user may have forgotten to export it in schema).
   */
  async checkCheckpointTable() {
    try {
      // Cheap probe query; any failure (missing table, connection error) throws.
      await this.config.db.execute(
        drizzleOrm.sql`SELECT 1 FROM indexer_checkpoints LIMIT 1`
      );
    } catch (err) {
      const message = err instanceof Error ? err.message : String(err);
      // Heuristic match on Postgres-style "relation ... does not exist" text;
      // prints setup guidance, but the original error is always rethrown below.
      if (message.includes("does not exist") || message.includes("relation")) {
        console.warn(
          `[indexer] WARNING: Checkpoint table "indexer_checkpoints" not found.
[indexer] Make sure to export checkpointTable from your Drizzle schema:

// db/schema.ts
export { checkpointTable } from "@thru/indexer";

[indexer] Then run: pnpm drizzle-kit push (or generate + migrate)
`
        );
      }
      throw err;
    }
  }
  /**
   * Start the indexer.
   *
   * Runs all configured event and account streams concurrently.
   * Returns when all streams complete or when stop() is called.
   *
   * @returns Results from all stream processors
   * @throws {Error} If already running, or if the checkpoint table probe fails.
   */
  async start() {
    if (this.running) {
      throw new Error("Indexer is already running");
    }
    // Fail fast before spawning any stream work.
    await this.checkCheckpointTable();
    this.running = true;
    this.shutdownRequested = false;
    this.abortController = new AbortController();
    const {
      db,
      clientFactory,
      eventStreams = [],
      accountStreams = [],
      defaultStartSlot,
      safetyMargin,
      pageSize,
      logLevel,
      validateParse
    } = this.config;
    console.log("[indexer] Starting indexer...");
    console.log(
      `[indexer] Running ${eventStreams.length} event stream(s): ${eventStreams.map((s) => s.name).join(", ") || "none"}`
    );
    console.log(
      `[indexer] Running ${accountStreams.length} account stream(s): ${accountStreams.map((s) => s.name).join(", ") || "none"}`
    );
    try {
      const eventProcessorOptions = {
        clientFactory,
        db,
        defaultStartSlot,
        safetyMargin,
        pageSize,
        logLevel,
        validateParse
      };
      const accountProcessorOptions = {
        clientFactory,
        db,
        logLevel,
        validateParse
      };
      // All processors share the same abort signal so stop() halts everything.
      const eventStreamPromises = eventStreams.map(
        (stream) => runEventStreamProcessor(
          stream,
          eventProcessorOptions,
          this.abortController.signal
        )
      );
      const accountStreamPromises = accountStreams.map(
        (stream) => runAccountStreamProcessor(
          stream,
          accountProcessorOptions,
          this.abortController.signal
        )
      );
      // allSettled: one failing stream never cancels the others; outcomes are
      // collected per stream and reported individually below.
      const [eventResults, accountResults] = await Promise.all([
        Promise.allSettled(eventStreamPromises),
        Promise.allSettled(accountStreamPromises)
      ]);
      const result = {
        // Results are index-aligned with the stream arrays used to spawn them.
        eventStreams: eventStreams.map((stream, i) => {
          const r = eventResults[i];
          if (r.status === "fulfilled") {
            console.log(
              `[indexer] Event stream "${stream.name}" completed: ${r.value.eventsProcessed} events in ${r.value.batchesCommitted} batches`
            );
            return {
              name: stream.name,
              status: "fulfilled",
              result: r.value
            };
          } else {
            console.error(
              `[indexer] Event stream "${stream.name}" failed:`,
              r.reason
            );
            return {
              name: stream.name,
              status: "rejected",
              error: r.reason
            };
          }
        }),
        accountStreams: accountStreams.map((stream, i) => {
          const r = accountResults[i];
          if (r.status === "fulfilled") {
            console.log(
              `[indexer] Account stream "${stream.name}" completed: ${r.value.accountsUpdated} accounts updated, ${r.value.accountsDeleted} deleted`
            );
            return {
              name: stream.name,
              status: "fulfilled",
              result: r.value
            };
          } else {
            console.error(
              `[indexer] Account stream "${stream.name}" failed:`,
              r.reason
            );
            return {
              name: stream.name,
              status: "rejected",
              error: r.reason
            };
          }
        })
      };
      console.log("[indexer] All streams stopped.");
      return result;
    } finally {
      // Reset run state even when a processor rejects outside allSettled
      // (e.g. a synchronous throw), so start() can be called again.
      this.running = false;
      this.abortController = null;
    }
  }
  /**
   * Stop the indexer gracefully.
   *
   * Signals all streams to finish their current batch and stop.
   * The start() promise will resolve once all streams have stopped.
   *
   * Calling stop() a second time while a shutdown is already pending
   * force-terminates the process with exit code 1.
   */
  stop() {
    if (!this.running || !this.abortController) {
      console.log("[indexer] Not running");
      return;
    }
    if (this.shutdownRequested) {
      console.log("[indexer] Force shutdown...");
      process.exit(1);
    }
    console.log("[indexer] Shutdown requested, finishing current batches...");
    this.shutdownRequested = true;
    this.abortController.abort();
  }
  /**
   * Check if the indexer is currently running.
   */
  isRunning() {
    return this.running;
  }
};
|
|
1184
|
+
|
|
1185
|
+
// Public CommonJS API surface of @thru/indexer.
exports.Indexer = Indexer;
exports.checkpointTable = checkpointTable;
exports.columnBuilder = columnBuilder;
exports.dataResponse = dataResponse;
exports.defineAccountStream = defineAccountStream;
exports.defineEventStream = defineEventStream;
exports.deleteCheckpoint = deleteCheckpoint;
exports.errorSchema = errorSchema;
exports.generateSchemas = generateSchemas;
exports.generateZodSchema = generateZodSchema;
exports.getAllCheckpoints = getAllCheckpoints;
exports.getCheckpoint = getCheckpoint;
exports.getSchemaExports = getSchemaExports;
exports.listResponse = listResponse;
exports.mountStreamRoutes = mountStreamRoutes;
exports.paginate = paginate;
exports.paginationQuerySchema = paginationQuerySchema;
exports.paginationResponseSchema = paginationResponseSchema;
exports.parseCursor = parseCursor;
exports.t = t;
exports.updateCheckpoint = updateCheckpoint;
exports.validateParsedData = validateParsedData;
// Removed a duplicated sourceMappingURL directive (bundler artifact); tools
// honor only a single trailing directive, so emit exactly one.
//# sourceMappingURL=index.cjs.map
|