@latticexyz/store-indexer 2.2.18-f0433092876e2ac9b5b12cd0ecae9c120a2d0368 → 2.2.19-900ac35deebfa260bafb1697d15e95eef855cd69
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/bin/postgres-decoded-indexer.js +90 -1
- package/dist/bin/postgres-decoded-indexer.js.map +1 -1
- package/dist/bin/postgres-frontend.js +256 -5
- package/dist/bin/postgres-frontend.js.map +1 -1
- package/dist/bin/postgres-indexer.js +104 -1
- package/dist/bin/postgres-indexer.js.map +1 -1
- package/dist/bin/sqlite-indexer.js +241 -1
- package/dist/bin/sqlite-indexer.js.map +1 -1
- package/dist/chunk-7E7HV6WZ.js +38 -0
- package/dist/{chunk-R7HX5BT2.js.map → chunk-7E7HV6WZ.js.map} +1 -1
- package/dist/chunk-ALQNRR4A.js +99 -0
- package/dist/{chunk-JDWVOODJ.js.map → chunk-ALQNRR4A.js.map} +1 -1
- package/dist/chunk-DRMERYGH.js +53 -0
- package/dist/{chunk-O2SDU7EQ.js.map → chunk-DRMERYGH.js.map} +1 -1
- package/dist/chunk-H3UGY6JG.js +72 -0
- package/dist/{chunk-ED45N3IT.js.map → chunk-H3UGY6JG.js.map} +1 -1
- package/dist/chunk-JSDKBP77.js +16 -0
- package/dist/{chunk-AYPBOJNL.js.map → chunk-JSDKBP77.js.map} +1 -1
- package/dist/chunk-MGRTFMMG.js +44 -0
- package/dist/{chunk-YQ7E5W26.js.map → chunk-MGRTFMMG.js.map} +1 -1
- package/dist/chunk-YBZTPLEM.js +31 -0
- package/dist/{chunk-7O2ZWWUX.js.map → chunk-YBZTPLEM.js.map} +1 -1
- package/dist/healthcheck-I7MZ4QZU.js +7 -0
- package/dist/helloWorld-SETMCIYX.js +7 -0
- package/dist/metrics-UNOJV54N.js +7 -0
- package/package.json +7 -7
- package/dist/chunk-7O2ZWWUX.js +0 -2
- package/dist/chunk-AYPBOJNL.js +0 -2
- package/dist/chunk-ED45N3IT.js +0 -2
- package/dist/chunk-JDWVOODJ.js +0 -2
- package/dist/chunk-O2SDU7EQ.js +0 -7
- package/dist/chunk-R7HX5BT2.js +0 -2
- package/dist/chunk-YQ7E5W26.js +0 -2
- package/dist/healthcheck-57YETUEX.js +0 -2
- package/dist/helloWorld-4VT4FZ7F.js +0 -2
- package/dist/metrics-4BMCDEZZ.js +0 -2
- /package/dist/{healthcheck-57YETUEX.js.map → healthcheck-I7MZ4QZU.js.map} +0 -0
- /package/dist/{helloWorld-4VT4FZ7F.js.map → helloWorld-SETMCIYX.js.map} +0 -0
- /package/dist/{metrics-4BMCDEZZ.js.map → metrics-UNOJV54N.js.map} +0 -0
package/dist/bin/postgres-indexer.js.map
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/bin/postgres-indexer.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { cleanDatabase, createStorageAdapter, shouldCleanDatabase } from \"@latticexyz/store-sync/postgres\";\nimport { createStoreSync } from \"@latticexyz/store-sync\";\nimport { indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { getClientOptions } from \"./getClientOptions\";\nimport { getBlock, getChainId } from \"viem/actions\";\nimport { getRpcClient } from \"@latticexyz/block-logs-stream\";\n\nconst env = parseEnv(\n z.intersection(\n indexerEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n HEALTHCHECK_HOST: z.string().optional(),\n HEALTHCHECK_PORT: z.coerce.number().optional(),\n }),\n ),\n);\n\nconst clientOptions = await getClientOptions(env);\n\nconst chainId = await getChainId(getRpcClient(clientOptions));\nconst database = drizzle(postgres(env.DATABASE_URL, { prepare: false }));\n\nif (await shouldCleanDatabase(database, chainId)) {\n console.log(\"outdated database detected, clearing data to start fresh\");\n await cleanDatabase(database);\n}\n\nconst { storageAdapter, tables } = await createStorageAdapter({ ...clientOptions, database });\n\nlet startBlock = env.START_BLOCK;\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n // Fetch latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n // TODO: query if the DB exists instead of try/catch\n try {\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n\n return chainState?.blockNumber;\n } catch (error) {\n // ignore errors for now\n }\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n getBlock(getRpcClient(clientOptions), { blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst latestStoredBlockNumber = await getLatestStoredBlockNumber();\nif (latestStoredBlockNumber != null) {\n startBlock = latestStoredBlockNumber + 1n;\n console.log(\"resuming from block number\", startBlock);\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await createStoreSync({\n ...clientOptions,\n storageAdapter,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nstoredBlockLogs$.subscribe();\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nif (env.HEALTHCHECK_HOST != null || env.HEALTHCHECK_PORT != null) {\n const { default: Koa } = await import(\"koa\");\n const { default: cors } = await import(\"@koa/cors\");\n const { healthcheck } = await import(\"../koa-middleware/healthcheck\");\n const { metrics } = await import(\"../koa-middleware/metrics\");\n const { helloWorld } = await import(\"../koa-middleware/helloWorld\");\n\n const server = new Koa();\n\n server.use(cors());\n server.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n );\n server.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n );\n server.use(helloWorld());\n\n server.listen({ host: env.HEALTHCHECK_HOST, port: env.HEALTHCHECK_PORT });\n console.log(\n `postgres indexer healthcheck server listening on http://${env.HEALTHCHECK_HOST}:${env.HEALTHCHECK_PORT}`,\n );\n}\n"],"mappings":";
+
{"version":3,"sources":["../../src/bin/postgres-indexer.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { cleanDatabase, createStorageAdapter, shouldCleanDatabase } from \"@latticexyz/store-sync/postgres\";\nimport { createStoreSync } from \"@latticexyz/store-sync\";\nimport { indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { getClientOptions } from \"./getClientOptions\";\nimport { getBlock, getChainId } from \"viem/actions\";\nimport { getRpcClient } from \"@latticexyz/block-logs-stream\";\n\nconst env = parseEnv(\n z.intersection(\n indexerEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n HEALTHCHECK_HOST: z.string().optional(),\n HEALTHCHECK_PORT: z.coerce.number().optional(),\n }),\n ),\n);\n\nconst clientOptions = await getClientOptions(env);\n\nconst chainId = await getChainId(getRpcClient(clientOptions));\nconst database = drizzle(postgres(env.DATABASE_URL, { prepare: false }));\n\nif (await shouldCleanDatabase(database, chainId)) {\n console.log(\"outdated database detected, clearing data to start fresh\");\n await cleanDatabase(database);\n}\n\nconst { storageAdapter, tables } = await createStorageAdapter({ ...clientOptions, database });\n\nlet startBlock = env.START_BLOCK;\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n // Fetch latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n // TODO: query if the DB exists instead of try/catch\n try {\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n\n return chainState?.blockNumber;\n } catch (error) {\n // ignore errors for now\n }\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n getBlock(getRpcClient(clientOptions), { blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst latestStoredBlockNumber = await getLatestStoredBlockNumber();\nif (latestStoredBlockNumber != null) {\n startBlock = latestStoredBlockNumber + 1n;\n console.log(\"resuming from block number\", startBlock);\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await createStoreSync({\n ...clientOptions,\n storageAdapter,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nstoredBlockLogs$.subscribe();\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nif (env.HEALTHCHECK_HOST != null || env.HEALTHCHECK_PORT != null) {\n const { default: Koa } = await import(\"koa\");\n const { default: cors } = await import(\"@koa/cors\");\n const { healthcheck } = await import(\"../koa-middleware/healthcheck\");\n const { metrics } = await import(\"../koa-middleware/metrics\");\n const { helloWorld } = await import(\"../koa-middleware/helloWorld\");\n\n const server = new Koa();\n\n server.use(cors());\n server.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n );\n server.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n );\n server.use(helloWorld());\n\n server.listen({ host: env.HEALTHCHECK_HOST, port: env.HEALTHCHECK_PORT });\n console.log(\n `postgres indexer healthcheck server listening on http://${env.HEALTHCHECK_HOST}:${env.HEALTHCHECK_PORT}`,\n );\n}\n"],"mappings":";;;;;;;;;;AACA,OAAO;AACP,SAAS,SAAS;AAClB,SAAS,UAAU;AACnB,SAAS,eAAe,QAAQ,aAAa;AAC7C,SAAS,eAAe;AACxB,OAAO,cAAc;AACrB,SAAS,eAAe,sBAAsB,2BAA2B;AACzE,SAAS,uBAAuB;AAGhC,SAAS,UAAU,kBAAkB;AACrC,SAAS,oBAAoB;AAE7B,IAAM,MAAM;AAAA,EACV,EAAE;AAAA,IACA;AAAA,IACA,EAAE,OAAO;AAAA,MACP,cAAc,EAAE,OAAO;AAAA,MACvB,kBAAkB,EAAE,OAAO,EAAE,SAAS;AAAA,MACtC,kBAAkB,EAAE,OAAO,OAAO,EAAE,SAAS;AAAA,IAC/C,CAAC;AAAA,EACH;AACF;AAEA,IAAM,gBAAgB,MAAM,iBAAiB,GAAG;AAEhD,IAAM,UAAU,MAAM,WAAW,aAAa,aAAa,CAAC;AAC5D,IAAM,WAAW,QAAQ,SAAS,IAAI,cAAc,EAAE,SAAS,MAAM,CAAC,CAAC;AAEvE,IAAI,MAAM,oBAAoB,UAAU,OAAO,GAAG;AAChD,UAAQ,IAAI,0DAA0D;AACtE,QAAM,cAAc,QAAQ;AAC9B;AAEA,IAAM,EAAE,gBAAgB,OAAO,IAAI,MAAM,qBAAqB,EAAE,GAAG,eAAe,SAAS,CAAC;AAE5F,IAAI,aAAa,IAAI;AAErB,eAAe,6BAA0D;AAGvE,MAAI;AACF,UAAM,aAAa,MAAM,SACtB,OAAO,EACP,KAAK,OAAO,WAAW,EACvB,MAAM,GAAG,OAAO,YAAY,SAAS,OAAO,CAAC,EAC7C,MAAM,CAAC,EACP,QAAQ,EAGR,KAAK,CAAC,SAAS,KAAK,KAAK,MAAM,IAAI,CAAC;AAEvC,WAAO,YAAY;AAAA,EACrB,SAAS,OAAO;AAAA,EAEhB;AACF;AAEA,eAAe,6BAA8C;AAC3D,QAAM,CAACA,0BAAyB,iBAAiB,IAAI,MAAM,QAAQ,IAAI;AAAA,IACrE,2BAA2B;AAAA,IAC3B,SAAS,aAAa,aAAa,GAAG,EAAE,UAAU,IAAI,iBAAiB,CAAC;AAAA,EAC1E,CAAC;AACD,SAAO,kBAAkB,UAAUA,4BAA2B,CAAC;AACjE;AAEA,IAAM,0BAA0B,MAAM,2BAA2B;AACjE,IAAI,2BAA2B,MAAM;AACnC,eAAa,0BAA0B;AACvC,UAAQ,IAAI,8BAA8B,UAAU;AACtD;AAEA,IAAM,EAAE,oBAAoB,iBAAiB,IAAI,MAAM,gBAAgB;AAAA,EACrE,GAAG;AAAA,EACH;AAAA,EACA,gBAAgB,IAAI;AAAA,EACpB;AAAA,EACA,eAAe,IAAI;AAAA,EACnB,SAAS,IAAI;AACf,CAAC;AAED,iBAAiB,UAAU;AAE3B,IAAI,aAAa;AACjB,cAAc,CAAC,oBAAoB,gBAAgB,CAAC,EACjD;AAAA,EACC;AAAA,IACE,CAAC,CAAC,mBAAmB,EAAE,aAAa,yBAAyB,CAAC,MAC5D,sBAAsB;AAAA,EAC1B;AAAA,EACA,MAAM;AACR,EACC,UAAU,MAAM;AACf,eAAa;AACb,UAAQ,IAAI,eAAe;AAC7B,CAAC;AAEH,IAAI,IAAI,oBAAoB,QAAQ,IAAI,oBAAoB,MAAM;AAChE,QAAM,EAAE,SAAS,IAAI,IAAI,MAAM,OAAO,KAAK;AAC3C,QAAM,EAAE,SAAS,KAAK,IAA
I,MAAM,OAAO,WAAW;AAClD,QAAM,EAAE,YAAY,IAAI,MAAM,OAAO,4BAA+B;AACpE,QAAM,EAAE,QAAQ,IAAI,MAAM,OAAO,wBAA2B;AAC5D,QAAM,EAAE,WAAW,IAAI,MAAM,OAAO,2BAA8B;AAElE,QAAM,SAAS,IAAI,IAAI;AAEvB,SAAO,IAAI,KAAK,CAAC;AACjB,SAAO;AAAA,IACL,YAAY;AAAA,MACV,SAAS,MAAM;AAAA,IACjB,CAAC;AAAA,EACH;AACA,SAAO;AAAA,IACL,QAAQ;AAAA,MACN,WAAW,MAAM;AAAA,MACjB,SAAS,MAAM;AAAA,MACf;AAAA,MACA;AAAA,MACA,gBAAgB,IAAI;AAAA,IACtB,CAAC;AAAA,EACH;AACA,SAAO,IAAI,WAAW,CAAC;AAEvB,SAAO,OAAO,EAAE,MAAM,IAAI,kBAAkB,MAAM,IAAI,iBAAiB,CAAC;AACxE,UAAQ;AAAA,IACN,2DAA2D,IAAI,gBAAgB,IAAI,IAAI,gBAAgB;AAAA,EACzG;AACF;","names":["latestStoredBlockNumber"]}
package/dist/bin/sqlite-indexer.js
@@ -1,3 +1,243 @@
 #!/usr/bin/env node
-import
+import {
+  compress
+} from "../chunk-7E7HV6WZ.js";
+import {
+  debug,
+  sentry
+} from "../chunk-ALQNRR4A.js";
+import {
+  getClientOptions
+} from "../chunk-MGRTFMMG.js";
+import {
+  frontendEnvSchema,
+  indexerEnvSchema,
+  parseEnv
+} from "../chunk-DRMERYGH.js";
+import {
+  healthcheck
+} from "../chunk-YBZTPLEM.js";
+import {
+  helloWorld
+} from "../chunk-JSDKBP77.js";
+import {
+  metrics
+} from "../chunk-H3UGY6JG.js";
+
+// src/bin/sqlite-indexer.ts
+import "dotenv/config";
+import fs from "node:fs";
+import { z } from "zod";
+import { eq as eq2 } from "drizzle-orm";
+import { drizzle } from "drizzle-orm/better-sqlite3";
+import Database from "better-sqlite3";
+import Koa from "koa";
+import cors from "@koa/cors";
+import { createKoaMiddleware } from "trpc-koa-adapter";
+import { createAppRouter } from "@latticexyz/store-sync/trpc-indexer";
+import { chainState as chainState2, schemaVersion, syncToSqlite } from "@latticexyz/store-sync/sqlite";
+
+// src/sqlite/getTablesWithRecords.ts
+import { asc, eq } from "drizzle-orm";
+import { buildTable, chainState, getTables } from "@latticexyz/store-sync/sqlite";
+import { getAddress } from "viem";
+import { decodeDynamicField } from "@latticexyz/protocol-parser/internal";
+import { hexToResource } from "@latticexyz/common";
+import { mapObject } from "@latticexyz/common/utils";
+function getTablesWithRecords(database2, {
+  chainId: chainId2,
+  address,
+  filters = []
+}) {
+  const metadata = database2.select().from(chainState).where(eq(chainState.chainId, chainId2)).limit(1).all().find(() => true);
+  const tableIds = Array.from(new Set(filters.map((filter2) => filter2.tableId)));
+  const tables = getTables(database2).filter((table) => address == null || getAddress(address) === getAddress(table.address)).filter((table) => !tableIds.length || tableIds.includes(table.tableId));
+  const tablesWithRecords = tables.map((table) => {
+    const sqliteTable = buildTable(table);
+    const records = database2.select().from(sqliteTable).where(eq(sqliteTable.__isDeleted, false)).orderBy(
+      asc(sqliteTable.__lastUpdatedBlockNumber)
+      // TODO: add logIndex (https://github.com/latticexyz/mud/issues/1979)
+    ).all();
+    const filteredRecords = !filters.length ? records : records.filter((record) => {
+      const keyTuple = decodeDynamicField("bytes32[]", record.__key);
+      return filters.some(
+        (filter2) => filter2.tableId === table.tableId && (filter2.key0 == null || filter2.key0 === keyTuple[0]) && (filter2.key1 == null || filter2.key1 === keyTuple[1])
+      );
+    });
+    const resource = hexToResource(table.tableId);
+    return {
+      ...table,
+      type: resource.type,
+      schema: mapObject({ ...table.keySchema, ...table.valueSchema }, (type) => ({ type, internalType: type })),
+      key: Object.keys(table.keySchema),
+      records: filteredRecords.map((record) => {
+        const key = Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]]));
+        const value = Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]]));
+        return { key, value, fields: { ...key, ...value } };
+      })
+    };
+  });
+  return {
+    blockNumber: metadata?.lastUpdatedBlockNumber ?? null,
+    tables: tablesWithRecords
+  };
+}
+
+// src/sqlite/createQueryAdapter.ts
+import { tablesWithRecordsToLogs } from "@latticexyz/store-sync";
+async function createQueryAdapter(database2) {
+  const adapter = {
+    async getLogs(opts) {
+      const { blockNumber, tables } = getTablesWithRecords(database2, opts);
+      const logs = tablesWithRecordsToLogs(tables);
+      return { blockNumber: blockNumber ?? 0n, logs };
+    },
+    async findAll(opts) {
+      return getTablesWithRecords(database2, opts);
+    }
+  };
+  return adapter;
+}
+
+// src/bin/sqlite-indexer.ts
+import { combineLatest, filter, first } from "rxjs";
+
+// src/sqlite/apiRoutes.ts
+import Router from "@koa/router";
+import compose from "koa-compose";
+import { input } from "@latticexyz/store-sync/indexer-client";
+import { schemasTable, tablesWithRecordsToLogs as tablesWithRecordsToLogs2 } from "@latticexyz/store-sync";
+import { createBenchmark } from "@latticexyz/common";
+function apiRoutes(database2) {
+  const router = new Router();
+  router.get("/api/logs", compress(), async (ctx) => {
+    const benchmark = createBenchmark("sqlite:logs");
+    let options;
+    try {
+      options = input.parse(typeof ctx.query.input === "string" ? JSON.parse(ctx.query.input) : {});
+    } catch (error) {
+      ctx.status = 400;
+      ctx.body = JSON.stringify(error);
+      debug(error);
+      return;
+    }
+    try {
+      options.filters = options.filters.length > 0 ? [...options.filters, { tableId: schemasTable.tableId }] : [];
+      benchmark("parse config");
+      const { blockNumber, tables } = getTablesWithRecords(database2, options);
+      benchmark("query tables with records");
+      const logs = tablesWithRecordsToLogs2(tables);
+      benchmark("convert records to logs");
+      ctx.body = JSON.stringify({ blockNumber: blockNumber?.toString() ?? "-1", logs });
+      ctx.status = 200;
+    } catch (error) {
+      ctx.status = 500;
+      ctx.body = JSON.stringify(error);
+      debug(error);
+    }
+  });
+  return compose([router.routes(), router.allowedMethods()]);
+}
+
+// src/bin/sqlite-indexer.ts
+import { getRpcClient } from "@latticexyz/block-logs-stream";
+import { getBlock, getChainId } from "viem/actions";
+var env = parseEnv(
+  z.intersection(
+    z.intersection(indexerEnvSchema, frontendEnvSchema),
+    z.object({
+      SQLITE_FILENAME: z.string().default("indexer.db"),
+      SENTRY_DSN: z.string().optional()
+    })
+  )
+);
+var clientOptions = await getClientOptions(env);
+var chainId = await getChainId(getRpcClient(clientOptions));
+var database = drizzle(new Database(env.SQLITE_FILENAME));
+var startBlock = env.START_BLOCK;
+async function getCurrentChainState() {
+  try {
+    const currentChainStates = database.select().from(chainState2).where(eq2(chainState2.chainId, chainId)).all();
+    const currentChainState2 = currentChainStates[0];
+    return currentChainState2;
+  } catch (error) {
+  }
+}
+async function getLatestStoredBlockNumber() {
+  const currentChainState2 = await getCurrentChainState();
+  return currentChainState2?.lastUpdatedBlockNumber ?? void 0;
+}
+async function getDistanceFromFollowBlock() {
+  const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([
+    getLatestStoredBlockNumber(),
+    getBlock(getRpcClient(clientOptions), { blockTag: env.FOLLOW_BLOCK_TAG })
+  ]);
+  return latestFollowBlock.number - (latestStoredBlockNumber ?? -1n);
+}
+var currentChainState = await getCurrentChainState();
+if (currentChainState) {
+  if (currentChainState.schemaVersion != schemaVersion) {
+    console.log(
+      "schema version changed from",
+      currentChainState.schemaVersion,
+      "to",
+      schemaVersion,
+      "recreating database"
+    );
+    fs.truncateSync(env.SQLITE_FILENAME);
+  } else if (currentChainState.lastUpdatedBlockNumber != null) {
+    console.log("resuming from block number", currentChainState.lastUpdatedBlockNumber + 1n);
+    startBlock = currentChainState.lastUpdatedBlockNumber + 1n;
+  }
+}
+var { latestBlockNumber$, storedBlockLogs$ } = await syncToSqlite({
+  ...clientOptions,
+  database,
+  followBlockTag: env.FOLLOW_BLOCK_TAG,
+  startBlock,
+  maxBlockRange: env.MAX_BLOCK_RANGE,
+  address: env.STORE_ADDRESS
+});
+var isCaughtUp = false;
+combineLatest([latestBlockNumber$, storedBlockLogs$]).pipe(
+  filter(
+    ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) => latestBlockNumber === lastBlockNumberProcessed
+  ),
+  first()
+).subscribe(() => {
+  isCaughtUp = true;
+  console.log("all caught up");
+});
+var server = new Koa();
+if (env.SENTRY_DSN) {
+  server.use(sentry(env.SENTRY_DSN));
+}
+server.use(cors());
+server.use(
+  healthcheck({
+    isReady: () => isCaughtUp
+  })
+);
+server.use(
+  metrics({
+    isHealthy: () => true,
+    isReady: () => isCaughtUp,
+    getLatestStoredBlockNumber,
+    getDistanceFromFollowBlock,
+    followBlockTag: env.FOLLOW_BLOCK_TAG
+  })
+);
+server.use(helloWorld());
+server.use(apiRoutes(database));
+server.use(
+  createKoaMiddleware({
+    prefix: "/trpc",
+    router: createAppRouter(),
+    createContext: async () => ({
+      queryAdapter: await createQueryAdapter(database)
+    })
+  })
+);
+server.listen({ host: env.HOST, port: env.PORT });
+console.log(`sqlite indexer frontend listening on http://${env.HOST}:${env.PORT}`);
 //# sourceMappingURL=sqlite-indexer.js.map
package/dist/bin/sqlite-indexer.js.map
@@ -1 +1 @@
-
{"version":3,"sources":["../../src/bin/sqlite-indexer.ts","../../src/sqlite/getTablesWithRecords.ts","../../src/sqlite/createQueryAdapter.ts","../../src/sqlite/apiRoutes.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport fs from \"node:fs\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { drizzle } from \"drizzle-orm/better-sqlite3\";\nimport Database from \"better-sqlite3\";\nimport Koa from \"koa\";\nimport cors from \"@koa/cors\";\nimport { createKoaMiddleware } from \"trpc-koa-adapter\";\nimport { createAppRouter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { chainState, schemaVersion, syncToSqlite } from \"@latticexyz/store-sync/sqlite\";\nimport { createQueryAdapter } from \"../sqlite/createQueryAdapter\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { frontendEnvSchema, indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { healthcheck } from \"../koa-middleware/healthcheck\";\nimport { helloWorld } from \"../koa-middleware/helloWorld\";\nimport { apiRoutes } from \"../sqlite/apiRoutes\";\nimport { sentry } from \"../koa-middleware/sentry\";\nimport { metrics } from \"../koa-middleware/metrics\";\nimport { getClientOptions } from \"./getClientOptions\";\nimport { getRpcClient } from \"@latticexyz/block-logs-stream\";\nimport { getBlock, getChainId } from \"viem/actions\";\n\nconst env = parseEnv(\n z.intersection(\n z.intersection(indexerEnvSchema, frontendEnvSchema),\n z.object({\n SQLITE_FILENAME: z.string().default(\"indexer.db\"),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst clientOptions = await getClientOptions(env);\n\nconst chainId = await getChainId(getRpcClient(clientOptions));\nconst database = drizzle(new Database(env.SQLITE_FILENAME));\n\nlet startBlock = env.START_BLOCK;\n\nasync function getCurrentChainState(): Promise<\n | {\n schemaVersion: number;\n chainId: number;\n lastUpdatedBlockNumber: bigint | null;\n lastError: string | null;\n }\n | undefined\n> {\n // This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n try {\n const currentChainStates = database.select().from(chainState).where(eq(chainState.chainId, chainId)).all();\n // TODO: replace this type workaround with `noUncheckedIndexedAccess: true` when we can fix all the issues related (https://github.com/latticexyz/mud/issues/1212)\n const currentChainState: (typeof currentChainStates)[number] | undefined = currentChainStates[0];\n return currentChainState;\n } catch (error) {\n // ignore errors, this is optional\n }\n}\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n const currentChainState = await getCurrentChainState();\n return currentChainState?.lastUpdatedBlockNumber ?? undefined;\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n getBlock(getRpcClient(clientOptions), { blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst currentChainState = await getCurrentChainState();\nif (currentChainState) {\n // Reset the db if the version changed\n if (currentChainState.schemaVersion != schemaVersion) {\n console.log(\n \"schema version changed from\",\n currentChainState.schemaVersion,\n \"to\",\n schemaVersion,\n \"recreating database\",\n );\n fs.truncateSync(env.SQLITE_FILENAME);\n } else if (currentChainState.lastUpdatedBlockNumber != null) {\n // Resume from latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n console.log(\"resuming from block number\", currentChainState.lastUpdatedBlockNumber + 1n);\n startBlock = currentChainState.lastUpdatedBlockNumber + 1n;\n }\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await syncToSqlite({\n ...clientOptions,\n database,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nconst server = new Koa();\n\nif (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n}\n\nserver.use(cors());\nserver.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n);\nserver.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n);\nserver.use(helloWorld());\nserver.use(apiRoutes(database));\n\nserver.use(\n createKoaMiddleware({\n prefix: \"/trpc\",\n router: createAppRouter(),\n createContext: async () => ({\n queryAdapter: await createQueryAdapter(database),\n }),\n }),\n);\n\nserver.listen({ host: env.HOST, port: env.PORT });\nconsole.log(`sqlite indexer frontend listening on http://${env.HOST}:${env.PORT}`);\n","import { asc, eq } from \"drizzle-orm\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { buildTable, chainState, getTables } from \"@latticexyz/store-sync/sqlite\";\nimport { Hex, getAddress } from \"viem\";\nimport { decodeDynamicField } from \"@latticexyz/protocol-parser/internal\";\nimport { SyncFilter, TableRecord, TableWithRecords } from \"@latticexyz/store-sync\";\nimport { hexToResource } from \"@latticexyz/common\";\nimport { mapObject } from \"@latticexyz/common/utils\";\n\n// TODO: refactor sqlite and replace this with getLogs to match postgres (https://github.com/latticexyz/mud/issues/1970)\n\n/**\n * @deprecated\n * */\nexport function getTablesWithRecords(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n database: BaseSQLiteDatabase<\"sync\", any>,\n {\n chainId,\n address,\n filters = [],\n }: {\n readonly chainId: number;\n readonly address?: Hex;\n readonly filters?: readonly SyncFilter[];\n },\n): { blockNumber: bigint | null; tables: readonly TableWithRecords[] } {\n const metadata = database\n .select()\n .from(chainState)\n .where(eq(chainState.chainId, chainId))\n .limit(1)\n .all()\n .find(() => true);\n\n // If _any_ filter has a table ID, this will filter down all data to just those tables. 
Which mean we can't yet mix table filters with key-only filters.\n // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily)\n const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId)));\n const tables = getTables(database)\n .filter((table) => address == null || getAddress(address) === getAddress(table.address))\n .filter((table) => !tableIds.length || tableIds.includes(table.tableId));\n\n const tablesWithRecords = tables.map((table) => {\n const sqliteTable = buildTable(table);\n const records = database\n .select()\n .from(sqliteTable)\n .where(eq(sqliteTable.__isDeleted, false))\n .orderBy(\n asc(sqliteTable.__lastUpdatedBlockNumber),\n // TODO: add logIndex (https://github.com/latticexyz/mud/issues/1979)\n )\n .all();\n const filteredRecords = !filters.length\n ? records\n : records.filter((record) => {\n const keyTuple = decodeDynamicField(\"bytes32[]\", record.__key);\n return filters.some(\n (filter) =>\n filter.tableId === table.tableId &&\n (filter.key0 == null || filter.key0 === keyTuple[0]) &&\n (filter.key1 == null || filter.key1 === keyTuple[1]),\n );\n });\n const resource = hexToResource(table.tableId);\n return {\n ...table,\n type: resource.type as never,\n schema: mapObject({ ...table.keySchema, ...table.valueSchema }, (type) => ({ type, internalType: type })),\n key: Object.keys(table.keySchema),\n records: filteredRecords.map((record): TableRecord => {\n const key = Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]]));\n const value = Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]]));\n return { key, value, fields: { ...key, ...value } };\n }),\n } satisfies TableWithRecords;\n });\n\n return {\n blockNumber: metadata?.lastUpdatedBlockNumber ?? null,\n tables: tablesWithRecords,\n };\n}\n","import { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { QueryAdapter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\n\n/**\n * Creates a storage adapter for the tRPC server/client to query data from SQLite.\n *\n * @param {BaseSQLiteDatabase<\"sync\", any>} database SQLite database object from Drizzle\n * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function createQueryAdapter(database: BaseSQLiteDatabase<\"sync\", any>): Promise<QueryAdapter> {\n const adapter: QueryAdapter = {\n async getLogs(opts) {\n const { blockNumber, tables } = getTablesWithRecords(database, opts);\n const logs = tablesWithRecordsToLogs(tables);\n return { blockNumber: blockNumber ?? 
0n, logs };\n },\n async findAll(opts) {\n return getTablesWithRecords(database, opts);\n },\n };\n return adapter;\n}\n","import { Middleware } from \"koa\";\nimport Router from \"@koa/router\";\nimport compose from \"koa-compose\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { schemasTable, tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\nimport { debug } from \"../debug\";\nimport { createBenchmark } from \"@latticexyz/common\";\nimport { compress } from \"../koa-middleware/compress\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function apiRoutes(database: BaseSQLiteDatabase<\"sync\", any>): Middleware {\n const router = new Router();\n\n router.get(\"/api/logs\", compress(), async (ctx) => {\n const benchmark = createBenchmark(\"sqlite:logs\");\n\n let options: ReturnType<typeof input.parse>;\n\n try {\n options = input.parse(typeof ctx.query.input === \"string\" ? JSON.parse(ctx.query.input) : {});\n } catch (error) {\n ctx.status = 400;\n ctx.body = JSON.stringify(error);\n debug(error);\n return;\n }\n\n try {\n options.filters = options.filters.length > 0 ? [...options.filters, { tableId: schemasTable.tableId }] : [];\n benchmark(\"parse config\");\n const { blockNumber, tables } = getTablesWithRecords(database, options);\n benchmark(\"query tables with records\");\n const logs = tablesWithRecordsToLogs(tables);\n benchmark(\"convert records to logs\");\n\n ctx.body = JSON.stringify({ blockNumber: blockNumber?.toString() ?? \"-1\", logs });\n ctx.status = 200;\n } catch (error) {\n ctx.status = 500;\n ctx.body = JSON.stringify(error);\n debug(error);\n }\n });\n\n return compose([router.routes(), router.allowedMethods()]) as 
Middleware;\n}\n"],"mappings":";oTACA,MAAO,gBACP,OAAOA,OAAQ,UACf,OAAS,KAAAC,MAAS,MAClB,OAAS,MAAAC,OAAU,cACnB,OAAS,WAAAC,OAAe,6BACxB,OAAOC,OAAc,iBACrB,OAAOC,OAAS,MAChB,OAAOC,OAAU,YACjB,OAAS,uBAAAC,OAA2B,mBACpC,OAAS,mBAAAC,OAAuB,sCAChC,OAAS,cAAAC,EAAY,iBAAAC,EAAe,gBAAAC,OAAoB,gCCXxD,OAAS,OAAAC,EAAK,MAAAC,MAAU,cAExB,OAAS,cAAAC,EAAY,cAAAC,EAAY,aAAAC,MAAiB,gCAClD,OAAc,cAAAC,MAAkB,OAChC,OAAS,sBAAAC,MAA0B,uCAEnC,OAAS,iBAAAC,MAAqB,qBAC9B,OAAS,aAAAC,MAAiB,2BAOnB,SAASC,EAEdC,EACA,CACE,QAAAC,EACA,QAAAC,EACA,QAAAC,EAAU,CAAC,CACb,EAKqE,CACrE,IAAMC,EAAWJ,EACd,OAAO,EACP,KAAKP,CAAU,EACf,MAAMF,EAAGE,EAAW,QAASQ,CAAO,CAAC,EACrC,MAAM,CAAC,EACP,IAAI,EACJ,KAAK,IAAM,EAAI,EAIZI,EAAW,MAAM,KAAK,IAAI,IAAIF,EAAQ,IAAKG,GAAWA,EAAO,OAAO,CAAC,CAAC,EAKtEC,EAJSb,EAAUM,CAAQ,EAC9B,OAAQQ,GAAUN,GAAW,MAAQP,EAAWO,CAAO,IAAMP,EAAWa,EAAM,OAAO,CAAC,EACtF,OAAQA,GAAU,CAACH,EAAS,QAAUA,EAAS,SAASG,EAAM,OAAO,CAAC,EAExC,IAAKA,GAAU,CAC9C,IAAMC,EAAcjB,EAAWgB,CAAK,EAC9BE,EAAUV,EACb,OAAO,EACP,KAAKS,CAAW,EAChB,MAAMlB,EAAGkB,EAAY,YAAa,EAAK,CAAC,EACxC,QACCnB,EAAImB,EAAY,wBAAwB,CAE1C,EACC,IAAI,EACDE,EAAmBR,EAAQ,OAE7BO,EAAQ,OAAQE,GAAW,CACzB,IAAMC,EAAWjB,EAAmB,YAAagB,EAAO,KAAK,EAC7D,OAAOT,EAAQ,KACZG,GACCA,EAAO,UAAYE,EAAM,UACxBF,EAAO,MAAQ,MAAQA,EAAO,OAASO,EAAS,CAAC,KACjDP,EAAO,MAAQ,MAAQA,EAAO,OAASO,EAAS,CAAC,EACtD,CACF,CAAC,EATDH,EAUEI,EAAWjB,EAAcW,EAAM,OAAO,EAC5C,MAAO,CACL,GAAGA,EACH,KAAMM,EAAS,KACf,OAAQhB,EAAU,CAAE,GAAGU,EAAM,UAAW,GAAGA,EAAM,WAAY,EAAIO,IAAU,CAAE,KAAAA,EAAM,aAAcA,CAAK,EAAE,EACxG,IAAK,OAAO,KAAKP,EAAM,SAAS,EAChC,QAASG,EAAgB,IAAKC,GAAwB,CACpD,IAAMI,EAAM,OAAO,YAAY,OAAO,QAAQR,EAAM,SAAS,EAAE,IAAI,CAAC,CAACS,CAAI,IAAM,CAACA,EAAML,EAAOK,CAAI,CAAC,CAAC,CAAC,EAC9FC,EAAQ,OAAO,YAAY,OAAO,QAAQV,EAAM,WAAW,EAAE,IAAI,CAAC,CAACS,CAAI,IAAM,CAACA,EAAML,EAAOK,CAAI,CAAC,CAAC,CAAC,EACxG,MAAO,CAAE,IAAAD,EAAK,MAAAE,EAAO,OAAQ,CAAE,GAAGF,EAAK,GAAGE,CAAM,CAAE,CACpD,CAAC,CACH,CACF,CAAC,EAED,MAAO,CACL,YAAad,GAAU,wBAA0B,KACjD,OAAQG,CACV,CACF,CC/EA,OAAS,2BAAAY,MAA+B,yBASxC,eAAsBC,EAAmBC,EAAkE,CAWzG,MAV8B,CAC5B,MAAM,QAAQC,EAAM,CAClB,GAAM,CAAE,YAAAC,EAAa,OAAAC,CAAO,EAAIC,EAAqBJ,EAAUC,CAAI,EAC7DI,EAAOP,EAAwBK,CAAM,EAC3C,MAAO,CAAE,YAAaD,GAAe,GAAI,KAAAG,CAAK,CAChD,EACA,MAAM,QAAQJ,EAAM,CAClB,OAAOG,EAAqBJ,EAAUC,CAAI,CAC5C,CACF,CAEF,CFXA,OAAS,iBAAAK,GAAe,UAAAC,GAAQ,SAAAC,OAAa,OGZ7C,OAAOC,MAAY,cACnB,OAAOC,OAAa,cACpB,OAAS,SAAAC,OAAa,wCACtB,OAAS,gBAAAC,GAAc,2BAAAC,OAA+B,yBAEtD,OAAS,mBAAAC,OAAuB,qBAMzB,SAASC,EAAUC,EAAuD,CAC/E,IAAMC,EAAS,IAAIC,EAEnB,OAAAD,EAAO,IAAI,YAAaE,EAAS,EAAG,MAAOC,GAAQ,CACjD,IAAMC,EAAYC,GAAgB,aAAa,EAE3CC,EAEJ,GAAI,CACFA,EAAUC,GAAM,MAAM,OAAOJ,EAAI,MAAM,OAAU,SAAW,KAAK,MAAMA,EAAI,MAAM,KAAK,EAAI,CAAC,CAAC,CAC9F,OAASK,EAAO,CACdL,EAAI,OAAS,IACbA,EAAI,KAAO,KAAK,UAAUK,CAAK,EAC/BC,EAAMD,CAAK,EACX,MACF,CAEA,GAAI,CACFF,EAAQ,QAAUA,EAAQ,QAAQ,OAAS,EAAI,CAAC,GAAGA,EAAQ,QAAS,CAAE,QAASI,GAAa,OAAQ,CAAC,EAAI,CAAC,EAC1GN,EAAU,cAAc,EACxB,GAAM,CAAE,YAAAO,EAAa,OAAAC,CAAO,EAAIC,EAAqBd,EAAUO,CAAO,EACtEF,EAAU,2BAA2B,EACrC,IAAMU,EAAOC,GAAwBH,CAAM,EAC3CR,EAAU,yBAAyB,EAEnCD,EAAI,KAAO,KAAK,UAAU,CAAE,YAAaQ,GAAa,SAAS,GAAK,KAAM,KAAAG,CAAK,CAAC,EAChFX,EAAI,OAAS,GACf,OAASK,EAAO,CACdL,EAAI,OAAS,IACbA,EAAI,KAAO,KAAK,UAAUK,CAAK,EAC/BC,EAAMD,CAAK,CACb,CACF,CAAC,EAEMQ,GAAQ,CAAChB,EAAO,OAAO,EAAGA,EAAO,eAAe,CAAC,CAAC,CAC3D,CH1BA,OAAS,gBAAAiB,MAAoB,gCAC7B,OAAS,YAAAC,GAAU,cAAAC,OAAkB,eAErC,IAAMC,EAAMC,EACVC,EAAE,aACAA,EAAE,aAAaC,EAAkBC,CAAiB,EAClDF,EAAE,OAAO,CACP,gBAAiBA,EAAE,OAAO,EAAE,QAAQ,YAAY,EAChD,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,CACH,CACF,EAEMG,EAAgB,MAAMC,EAAiBN,CAAG,EAE1CO,GAAU,MAAMR,GAAWF,EAAaQ,CAAa,CAAC,EACtDG,EAAWC,GAAQ,IAAIC,GAASV,EAAI,eAAe,CAAC,EAEtDW,EAAaX,EAAI,YAErB,eAAeY,GAQb,CAE
A,GAAI,CAIF,OAH2BJ,EAAS,OAAO,EAAE,KAAKK,CAAU,EAAE,MAAMC,GAAGD,EAAW,QAASN,EAAO,CAAC,EAAE,IAAI,EAEX,CAAC,CAEjG,MAAgB,CAEhB,CACF,CAEA,eAAeQ,GAA0D,CAEvE,OAD0B,MAAMH,EAAqB,IAC3B,wBAA0B,MACtD,CAEA,eAAeI,IAA8C,CAC3D,GAAM,CAACC,EAAyBC,CAAiB,EAAI,MAAM,QAAQ,IAAI,CACrEH,EAA2B,EAC3BjB,GAASD,EAAaQ,CAAa,EAAG,CAAE,SAAUL,EAAI,gBAAiB,CAAC,CAC1E,CAAC,EACD,OAAOkB,EAAkB,QAAUD,GAA2B,CAAC,GACjE,CAEA,IAAME,EAAoB,MAAMP,EAAqB,EACjDO,IAEEA,EAAkB,eAAiBC,GACrC,QAAQ,IACN,8BACAD,EAAkB,cAClB,KACAC,EACA,qBACF,EACAC,GAAG,aAAarB,EAAI,eAAe,GAC1BmB,EAAkB,wBAA0B,OAErD,QAAQ,IAAI,6BAA8BA,EAAkB,uBAAyB,EAAE,EACvFR,EAAaQ,EAAkB,uBAAyB,KAI5D,GAAM,CAAE,mBAAAG,GAAoB,iBAAAC,EAAiB,EAAI,MAAMC,GAAa,CAClE,GAAGnB,EACH,SAAAG,EACA,eAAgBR,EAAI,iBACpB,WAAAW,EACA,cAAeX,EAAI,gBACnB,QAASA,EAAI,aACf,CAAC,EAEGyB,EAAa,GACjBC,GAAc,CAACJ,GAAoBC,EAAgB,CAAC,EACjD,KACCI,GACE,CAAC,CAACC,EAAmB,CAAE,YAAaC,CAAyB,CAAC,IAC5DD,IAAsBC,CAC1B,EACAC,GAAM,CACR,EACC,UAAU,IAAM,CACfL,EAAa,GACb,QAAQ,IAAI,eAAe,CAC7B,CAAC,EAEH,IAAMM,EAAS,IAAIC,GAEfhC,EAAI,YACN+B,EAAO,IAAIE,EAAOjC,EAAI,UAAU,CAAC,EAGnC+B,EAAO,IAAIG,GAAK,CAAC,EACjBH,EAAO,IACLI,EAAY,CACV,QAAS,IAAMV,CACjB,CAAC,CACH,EACAM,EAAO,IACLK,EAAQ,CACN,UAAW,IAAM,GACjB,QAAS,IAAMX,EACf,2BAAAV,EACA,2BAAAC,GACA,eAAgBhB,EAAI,gBACtB,CAAC,CACH,EACA+B,EAAO,IAAIM,EAAW,CAAC,EACvBN,EAAO,IAAIO,EAAU9B,CAAQ,CAAC,EAE9BuB,EAAO,IACLQ,GAAoB,CAClB,OAAQ,QACR,OAAQC,GAAgB,EACxB,cAAe,UAAa,CAC1B,aAAc,MAAMC,EAAmBjC,CAAQ,CACjD,EACF,CAAC,CACH,EAEAuB,EAAO,OAAO,CAAE,KAAM/B,EAAI,KAAM,KAAMA,EAAI,IAAK,CAAC,EAChD,QAAQ,IAAI,+CAA+CA,EAAI,IAAI,IAAIA,EAAI,IAAI,EAAE","names":["fs","z","eq","drizzle","Database","Koa","cors","createKoaMiddleware","createAppRouter","chainState","schemaVersion","syncToSqlite","asc","eq","buildTable","chainState","getTables","getAddress","decodeDynamicField","hexToResource","mapObject","getTablesWithRecords","database","chainId","address","filters","metadata","tableIds","filter","tablesWithRecords","table","sqliteTable","records","filteredRecords","record","keyTuple","resource","type","key","name","value","tablesWithRecordsToLogs","createQueryAdapter","database","opts","blockNumber","tables","getTablesWithRecords","logs","combineLatest","filter","first","Router","compose","input","schemasTable","tablesWithRecordsToLogs","createBenchmark","apiRoutes","database","router","Router","compress","ctx","benchmark","createBenchmark","options","input","error","debug","schemasTable","blockNumber","tables","getTablesWithRecords","logs","tablesWithRecordsToLogs","compose","getRpcClient","getBlock","getChainId","env","parseEnv","z","indexerEnvSchema","frontendEnvSchema","clientOptions","getClientOptions","chainId","database","drizzle","Database","startBlock","getCurrentChainState","chainState","eq","getLatestStoredBlockNumber","getDistanceFromFollowBlock","latestStoredBlockNumber","latestFollowBlock","currentChainState","schemaVersion","fs","latestBlockNumber$","storedBlockLogs$","syncToSqlite","isCaughtUp","combineLatest","filter","latestBlockNumber","lastBlockNumberProcessed","first","server","Koa","sentry","cors","healthcheck","metrics","helloWorld","apiRoutes","createKoaMiddleware","createAppRouter","createQueryAdapter"]}
+
{"version":3,"sources":["../../src/bin/sqlite-indexer.ts","../../src/sqlite/getTablesWithRecords.ts","../../src/sqlite/createQueryAdapter.ts","../../src/sqlite/apiRoutes.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport fs from \"node:fs\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { drizzle } from \"drizzle-orm/better-sqlite3\";\nimport Database from \"better-sqlite3\";\nimport Koa from \"koa\";\nimport cors from \"@koa/cors\";\nimport { createKoaMiddleware } from \"trpc-koa-adapter\";\nimport { createAppRouter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { chainState, schemaVersion, syncToSqlite } from \"@latticexyz/store-sync/sqlite\";\nimport { createQueryAdapter } from \"../sqlite/createQueryAdapter\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { frontendEnvSchema, indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { healthcheck } from \"../koa-middleware/healthcheck\";\nimport { helloWorld } from \"../koa-middleware/helloWorld\";\nimport { apiRoutes } from \"../sqlite/apiRoutes\";\nimport { sentry } from \"../koa-middleware/sentry\";\nimport { metrics } from \"../koa-middleware/metrics\";\nimport { getClientOptions } from \"./getClientOptions\";\nimport { getRpcClient } from \"@latticexyz/block-logs-stream\";\nimport { getBlock, getChainId } from \"viem/actions\";\n\nconst env = parseEnv(\n z.intersection(\n z.intersection(indexerEnvSchema, frontendEnvSchema),\n z.object({\n SQLITE_FILENAME: z.string().default(\"indexer.db\"),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst clientOptions = await getClientOptions(env);\n\nconst chainId = await getChainId(getRpcClient(clientOptions));\nconst database = drizzle(new Database(env.SQLITE_FILENAME));\n\nlet startBlock = env.START_BLOCK;\n\nasync function getCurrentChainState(): Promise<\n | {\n schemaVersion: number;\n chainId: number;\n lastUpdatedBlockNumber: bigint | null;\n lastError: string | null;\n }\n | undefined\n> {\n // This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n try {\n const currentChainStates = database.select().from(chainState).where(eq(chainState.chainId, chainId)).all();\n // TODO: replace this type workaround with `noUncheckedIndexedAccess: true` when we can fix all the issues related (https://github.com/latticexyz/mud/issues/1212)\n const currentChainState: (typeof currentChainStates)[number] | undefined = currentChainStates[0];\n return currentChainState;\n } catch (error) {\n // ignore errors, this is optional\n }\n}\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n const currentChainState = await getCurrentChainState();\n return currentChainState?.lastUpdatedBlockNumber ?? undefined;\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n getBlock(getRpcClient(clientOptions), { blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst currentChainState = await getCurrentChainState();\nif (currentChainState) {\n // Reset the db if the version changed\n if (currentChainState.schemaVersion != schemaVersion) {\n console.log(\n \"schema version changed from\",\n currentChainState.schemaVersion,\n \"to\",\n schemaVersion,\n \"recreating database\",\n );\n fs.truncateSync(env.SQLITE_FILENAME);\n } else if (currentChainState.lastUpdatedBlockNumber != null) {\n // Resume from latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n console.log(\"resuming from block number\", currentChainState.lastUpdatedBlockNumber + 1n);\n startBlock = currentChainState.lastUpdatedBlockNumber + 1n;\n }\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await syncToSqlite({\n ...clientOptions,\n database,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nconst server = new Koa();\n\nif (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n}\n\nserver.use(cors());\nserver.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n);\nserver.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n);\nserver.use(helloWorld());\nserver.use(apiRoutes(database));\n\nserver.use(\n createKoaMiddleware({\n prefix: \"/trpc\",\n router: createAppRouter(),\n createContext: async () => ({\n queryAdapter: await createQueryAdapter(database),\n }),\n }),\n);\n\nserver.listen({ host: env.HOST, port: env.PORT });\nconsole.log(`sqlite indexer frontend listening on http://${env.HOST}:${env.PORT}`);\n","import { asc, eq } from \"drizzle-orm\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { buildTable, chainState, getTables } from \"@latticexyz/store-sync/sqlite\";\nimport { Hex, getAddress } from \"viem\";\nimport { decodeDynamicField } from \"@latticexyz/protocol-parser/internal\";\nimport { SyncFilter, TableRecord, TableWithRecords } from \"@latticexyz/store-sync\";\nimport { hexToResource } from \"@latticexyz/common\";\nimport { mapObject } from \"@latticexyz/common/utils\";\n\n// TODO: refactor sqlite and replace this with getLogs to match postgres (https://github.com/latticexyz/mud/issues/1970)\n\n/**\n * @deprecated\n * */\nexport function getTablesWithRecords(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n database: BaseSQLiteDatabase<\"sync\", any>,\n {\n chainId,\n address,\n filters = [],\n }: {\n readonly chainId: number;\n readonly address?: Hex;\n readonly filters?: readonly SyncFilter[];\n },\n): { blockNumber: bigint | null; tables: readonly TableWithRecords[] } {\n const metadata = database\n .select()\n .from(chainState)\n .where(eq(chainState.chainId, chainId))\n .limit(1)\n .all()\n .find(() => true);\n\n // If _any_ filter has a table ID, this will filter down all data to just those tables. 
Which mean we can't yet mix table filters with key-only filters.\n // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily)\n const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId)));\n const tables = getTables(database)\n .filter((table) => address == null || getAddress(address) === getAddress(table.address))\n .filter((table) => !tableIds.length || tableIds.includes(table.tableId));\n\n const tablesWithRecords = tables.map((table) => {\n const sqliteTable = buildTable(table);\n const records = database\n .select()\n .from(sqliteTable)\n .where(eq(sqliteTable.__isDeleted, false))\n .orderBy(\n asc(sqliteTable.__lastUpdatedBlockNumber),\n // TODO: add logIndex (https://github.com/latticexyz/mud/issues/1979)\n )\n .all();\n const filteredRecords = !filters.length\n ? records\n : records.filter((record) => {\n const keyTuple = decodeDynamicField(\"bytes32[]\", record.__key);\n return filters.some(\n (filter) =>\n filter.tableId === table.tableId &&\n (filter.key0 == null || filter.key0 === keyTuple[0]) &&\n (filter.key1 == null || filter.key1 === keyTuple[1]),\n );\n });\n const resource = hexToResource(table.tableId);\n return {\n ...table,\n type: resource.type as never,\n schema: mapObject({ ...table.keySchema, ...table.valueSchema }, (type) => ({ type, internalType: type })),\n key: Object.keys(table.keySchema),\n records: filteredRecords.map((record): TableRecord => {\n const key = Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]]));\n const value = Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]]));\n return { key, value, fields: { ...key, ...value } };\n }),\n } satisfies TableWithRecords;\n });\n\n return {\n blockNumber: metadata?.lastUpdatedBlockNumber ?? null,\n tables: tablesWithRecords,\n };\n}\n","import { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { QueryAdapter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\n\n/**\n * Creates a storage adapter for the tRPC server/client to query data from SQLite.\n *\n * @param {BaseSQLiteDatabase<\"sync\", any>} database SQLite database object from Drizzle\n * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function createQueryAdapter(database: BaseSQLiteDatabase<\"sync\", any>): Promise<QueryAdapter> {\n const adapter: QueryAdapter = {\n async getLogs(opts) {\n const { blockNumber, tables } = getTablesWithRecords(database, opts);\n const logs = tablesWithRecordsToLogs(tables);\n return { blockNumber: blockNumber ?? 
0n, logs };\n },\n async findAll(opts) {\n return getTablesWithRecords(database, opts);\n },\n };\n return adapter;\n}\n","import { Middleware } from \"koa\";\nimport Router from \"@koa/router\";\nimport compose from \"koa-compose\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { schemasTable, tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\nimport { debug } from \"../debug\";\nimport { createBenchmark } from \"@latticexyz/common\";\nimport { compress } from \"../koa-middleware/compress\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function apiRoutes(database: BaseSQLiteDatabase<\"sync\", any>): Middleware {\n const router = new Router();\n\n router.get(\"/api/logs\", compress(), async (ctx) => {\n const benchmark = createBenchmark(\"sqlite:logs\");\n\n let options: ReturnType<typeof input.parse>;\n\n try {\n options = input.parse(typeof ctx.query.input === \"string\" ? JSON.parse(ctx.query.input) : {});\n } catch (error) {\n ctx.status = 400;\n ctx.body = JSON.stringify(error);\n debug(error);\n return;\n }\n\n try {\n options.filters = options.filters.length > 0 ? [...options.filters, { tableId: schemasTable.tableId }] : [];\n benchmark(\"parse config\");\n const { blockNumber, tables } = getTablesWithRecords(database, options);\n benchmark(\"query tables with records\");\n const logs = tablesWithRecordsToLogs(tables);\n benchmark(\"convert records to logs\");\n\n ctx.body = JSON.stringify({ blockNumber: blockNumber?.toString() ?? \"-1\", logs });\n ctx.status = 200;\n } catch (error) {\n ctx.status = 500;\n ctx.body = JSON.stringify(error);\n debug(error);\n }\n });\n\n return compose([router.routes(), router.allowedMethods()]) as 
Middleware;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;AACA,OAAO;AACP,OAAO,QAAQ;AACf,SAAS,SAAS;AAClB,SAAS,MAAAA,WAAU;AACnB,SAAS,eAAe;AACxB,OAAO,cAAc;AACrB,OAAO,SAAS;AAChB,OAAO,UAAU;AACjB,SAAS,2BAA2B;AACpC,SAAS,uBAAuB;AAChC,SAAS,cAAAC,aAAY,eAAe,oBAAoB;;;ACXxD,SAAS,KAAK,UAAU;AAExB,SAAS,YAAY,YAAY,iBAAiB;AAClD,SAAc,kBAAkB;AAChC,SAAS,0BAA0B;AAEnC,SAAS,qBAAqB;AAC9B,SAAS,iBAAiB;AAOnB,SAAS,qBAEdC,WACA;AAAA,EACE,SAAAC;AAAA,EACA;AAAA,EACA,UAAU,CAAC;AACb,GAKqE;AACrE,QAAM,WAAWD,UACd,OAAO,EACP,KAAK,UAAU,EACf,MAAM,GAAG,WAAW,SAASC,QAAO,CAAC,EACrC,MAAM,CAAC,EACP,IAAI,EACJ,KAAK,MAAM,IAAI;AAIlB,QAAM,WAAW,MAAM,KAAK,IAAI,IAAI,QAAQ,IAAI,CAACC,YAAWA,QAAO,OAAO,CAAC,CAAC;AAC5E,QAAM,SAAS,UAAUF,SAAQ,EAC9B,OAAO,CAAC,UAAU,WAAW,QAAQ,WAAW,OAAO,MAAM,WAAW,MAAM,OAAO,CAAC,EACtF,OAAO,CAAC,UAAU,CAAC,SAAS,UAAU,SAAS,SAAS,MAAM,OAAO,CAAC;AAEzE,QAAM,oBAAoB,OAAO,IAAI,CAAC,UAAU;AAC9C,UAAM,cAAc,WAAW,KAAK;AACpC,UAAM,UAAUA,UACb,OAAO,EACP,KAAK,WAAW,EAChB,MAAM,GAAG,YAAY,aAAa,KAAK,CAAC,EACxC;AAAA,MACC,IAAI,YAAY,wBAAwB;AAAA;AAAA,IAE1C,EACC,IAAI;AACP,UAAM,kBAAkB,CAAC,QAAQ,SAC7B,UACA,QAAQ,OAAO,CAAC,WAAW;AACzB,YAAM,WAAW,mBAAmB,aAAa,OAAO,KAAK;AAC7D,aAAO,QAAQ;AAAA,QACb,CAACE,YACCA,QAAO,YAAY,MAAM,YACxBA,QAAO,QAAQ,QAAQA,QAAO,SAAS,SAAS,CAAC,OACjDA,QAAO,QAAQ,QAAQA,QAAO,SAAS,SAAS,CAAC;AAAA,MACtD;AAAA,IACF,CAAC;AACL,UAAM,WAAW,cAAc,MAAM,OAAO;AAC5C,WAAO;AAAA,MACL,GAAG;AAAA,MACH,MAAM,SAAS;AAAA,MACf,QAAQ,UAAU,EAAE,GAAG,MAAM,WAAW,GAAG,MAAM,YAAY,GAAG,CAAC,UAAU,EAAE,MAAM,cAAc,KAAK,EAAE;AAAA,MACxG,KAAK,OAAO,KAAK,MAAM,SAAS;AAAA,MAChC,SAAS,gBAAgB,IAAI,CAAC,WAAwB;AACpD,cAAM,MAAM,OAAO,YAAY,OAAO,QAAQ,MAAM,SAAS,EAAE,IAAI,CAAC,CAAC,IAAI,MAAM,CAAC,MAAM,OAAO,IAAI,CAAC,CAAC,CAAC;AACpG,cAAM,QAAQ,OAAO,YAAY,OAAO,QAAQ,MAAM,WAAW,EAAE,IAAI,CAAC,CAAC,IAAI,MAAM,CAAC,MAAM,OAAO,IAAI,CAAC,CAAC,CAAC;AACxG,eAAO,EAAE,KAAK,OAAO,QAAQ,EAAE,GAAG,KAAK,GAAG,MAAM,EAAE;AAAA,MACpD,CAAC;AAAA,IACH;AAAA,EACF,CAAC;AAED,SAAO;AAAA,IACL,aAAa,UAAU,0BAA0B;AAAA,IACjD,QAAQ;AAAA,EACV;AACF;;;AC/EA,SAAS,+BAA+B;AASxC,eAAsB,mBAAmBC,WAAkE;AACzG,QAAM,UAAwB;AAAA,IAC5B,MAAM,QAAQ,MAAM;AAClB,YAAM,EAAE,aAAa,OAAO,IAAI,qBAAqBA,WAAU,IAAI;AACnE,YAAM,OAAO,wBAAwB,MAAM;AAC3C,aAAO,EAAE,aAAa,eAAe,IAAI,KAAK;AAAA,IAChD;AAAA,IACA,MAAM,QAAQ,MAAM;AAClB,aAAO,qBAAqBA,WAAU,IAAI;AAAA,IAC5C;AAAA,EACF;AACA,SAAO;AACT;;;AFXA,SAAS,eAAe,QAAQ,aAAa;;;AGZ7C,OAAO,YAAY;AACnB,OAAO,aAAa;AACpB,SAAS,aAAa;AACtB,SAAS,cAAc,2BAAAC,gCAA+B;AAEtD,SAAS,uBAAuB;AAMzB,SAAS,UAAUC,WAAuD;AAC/E,QAAM,SAAS,IAAI,OAAO;AAE1B,SAAO,IAAI,aAAa,SAAS,GAAG,OAAO,QAAQ;AACjD,UAAM,YAAY,gBAAgB,aAAa;AAE/C,QAAI;AAEJ,QAAI;AACF,gBAAU,MAAM,MAAM,OAAO,IAAI,MAAM,UAAU,WAAW,KAAK,MAAM,IAAI,MAAM,KAAK,IAAI,CAAC,CAAC;AAAA,IAC9F,SAAS,OAAO;AACd,UAAI,SAAS;AACb,UAAI,OAAO,KAAK,UAAU,KAAK;AAC/B,YAAM,KAAK;AACX;AAAA,IACF;AAEA,QAAI;AACF,cAAQ,UAAU,QAAQ,QAAQ,SAAS,IAAI,CAAC,GAAG,QAAQ,SAAS,EAAE,SAAS,aAAa,QAAQ,CAAC,IAAI,CAAC;AAC1G,gBAAU,cAAc;AACxB,YAAM,EAAE,aAAa,OAAO,IAAI,qBAAqBA,WAAU,OAAO;AACtE,gBAAU,2BAA2B;AACrC,YAAM,OAAOC,yBAAwB,MAAM;AAC3C,gBAAU,yBAAyB;AAEnC,UAAI,OAAO,KAAK,UAAU,EAAE,aAAa,aAAa,SAAS,KAAK,MAAM,KAAK,CAAC;AAChF,UAAI,SAAS;AAAA,IACf,SAAS,OAAO;AACd,UAAI,SAAS;AACb,UAAI,OAAO,KAAK,UAAU,KAAK;AAC/B,YAAM,KAAK;AAAA,IACb;AAAA,EACF,CAAC;AAED,SAAO,QAAQ,CAAC,OAAO,OAAO,GAAG,OAAO,eAAe,CAAC,CAAC;AAC3D;;;AH1BA,SAAS,oBAAoB;AAC7B,SAAS,UAAU,kBAAkB;AAErC,IAAM,MAAM;AAAA,EACV,EAAE;AAAA,IACA,EAAE,aAAa,kBAAkB,iBAAiB;AAAA,IAClD,EAAE,OAAO;AAAA,MACP,iBAAiB,EAAE,OAAO,EAAE,QAAQ,YAAY;AAAA,MAChD,YAAY,EAAE,OAAO,EAAE,SAAS;AAAA,IAClC,CAAC;AAAA,EACH;AACF;AAEA,IAAM,gBAAgB,MAAM,iBAAiB,GAAG;AAEhD,IAAM,UAAU,MAAM,WAAW,aAAa,aAAa,CAAC;AAC5D,IAAM,WAAW,QAAQ,IAAI,SAAS,IAAI,eAAe,CAAC;AAE1D,IAAI,aAAa,IAAI;AAErB,eAAe,uBAQb;AAEA,MAAI;AAC
F,UAAM,qBAAqB,SAAS,OAAO,EAAE,KAAKC,WAAU,EAAE,MAAMC,IAAGD,YAAW,SAAS,OAAO,CAAC,EAAE,IAAI;AAEzG,UAAME,qBAAqE,mBAAmB,CAAC;AAC/F,WAAOA;AAAA,EACT,SAAS,OAAO;AAAA,EAEhB;AACF;AAEA,eAAe,6BAA0D;AACvE,QAAMA,qBAAoB,MAAM,qBAAqB;AACrD,SAAOA,oBAAmB,0BAA0B;AACtD;AAEA,eAAe,6BAA8C;AAC3D,QAAM,CAAC,yBAAyB,iBAAiB,IAAI,MAAM,QAAQ,IAAI;AAAA,IACrE,2BAA2B;AAAA,IAC3B,SAAS,aAAa,aAAa,GAAG,EAAE,UAAU,IAAI,iBAAiB,CAAC;AAAA,EAC1E,CAAC;AACD,SAAO,kBAAkB,UAAU,2BAA2B,CAAC;AACjE;AAEA,IAAM,oBAAoB,MAAM,qBAAqB;AACrD,IAAI,mBAAmB;AAErB,MAAI,kBAAkB,iBAAiB,eAAe;AACpD,YAAQ;AAAA,MACN;AAAA,MACA,kBAAkB;AAAA,MAClB;AAAA,MACA;AAAA,MACA;AAAA,IACF;AACA,OAAG,aAAa,IAAI,eAAe;AAAA,EACrC,WAAW,kBAAkB,0BAA0B,MAAM;AAE3D,YAAQ,IAAI,8BAA8B,kBAAkB,yBAAyB,EAAE;AACvF,iBAAa,kBAAkB,yBAAyB;AAAA,EAC1D;AACF;AAEA,IAAM,EAAE,oBAAoB,iBAAiB,IAAI,MAAM,aAAa;AAAA,EAClE,GAAG;AAAA,EACH;AAAA,EACA,gBAAgB,IAAI;AAAA,EACpB;AAAA,EACA,eAAe,IAAI;AAAA,EACnB,SAAS,IAAI;AACf,CAAC;AAED,IAAI,aAAa;AACjB,cAAc,CAAC,oBAAoB,gBAAgB,CAAC,EACjD;AAAA,EACC;AAAA,IACE,CAAC,CAAC,mBAAmB,EAAE,aAAa,yBAAyB,CAAC,MAC5D,sBAAsB;AAAA,EAC1B;AAAA,EACA,MAAM;AACR,EACC,UAAU,MAAM;AACf,eAAa;AACb,UAAQ,IAAI,eAAe;AAC7B,CAAC;AAEH,IAAM,SAAS,IAAI,IAAI;AAEvB,IAAI,IAAI,YAAY;AAClB,SAAO,IAAI,OAAO,IAAI,UAAU,CAAC;AACnC;AAEA,OAAO,IAAI,KAAK,CAAC;AACjB,OAAO;AAAA,EACL,YAAY;AAAA,IACV,SAAS,MAAM;AAAA,EACjB,CAAC;AACH;AACA,OAAO;AAAA,EACL,QAAQ;AAAA,IACN,WAAW,MAAM;AAAA,IACjB,SAAS,MAAM;AAAA,IACf;AAAA,IACA;AAAA,IACA,gBAAgB,IAAI;AAAA,EACtB,CAAC;AACH;AACA,OAAO,IAAI,WAAW,CAAC;AACvB,OAAO,IAAI,UAAU,QAAQ,CAAC;AAE9B,OAAO;AAAA,EACL,oBAAoB;AAAA,IAClB,QAAQ;AAAA,IACR,QAAQ,gBAAgB;AAAA,IACxB,eAAe,aAAa;AAAA,MAC1B,cAAc,MAAM,mBAAmB,QAAQ;AAAA,IACjD;AAAA,EACF,CAAC;AACH;AAEA,OAAO,OAAO,EAAE,MAAM,IAAI,MAAM,MAAM,IAAI,KAAK,CAAC;AAChD,QAAQ,IAAI,+CAA+C,IAAI,IAAI,IAAI,IAAI,IAAI,EAAE;","names":["eq","chainState","database","chainId","filter","database","tablesWithRecordsToLogs","database","tablesWithRecordsToLogs","chainState","eq","currentChainState"]}
@@ -0,0 +1,38 @@
+// src/koa-middleware/compress.ts
+import { Stream } from "node:stream";
+import accepts from "accepts";
+import { createBrotliCompress, createDeflate, createGzip } from "node:zlib";
+import { includes } from "@latticexyz/common/utils";
+var encodings = {
+  br: createBrotliCompress,
+  gzip: createGzip,
+  deflate: createDeflate
+};
+var encodingNames = Object.keys(encodings);
+function flushEvery(stream, bytesThreshold) {
+  let bytesSinceFlush = 0;
+  stream.on("data", (data) => {
+    bytesSinceFlush += data.length;
+    if (bytesSinceFlush > bytesThreshold) {
+      bytesSinceFlush = 0;
+      stream.flush();
+    }
+  });
+  return stream;
+}
+function compress({ flushThreshold = 1024 * 4 } = {}) {
+  return async function compressMiddleware(ctx, next) {
+    ctx.vary("Accept-Encoding");
+    await next();
+    const encoding = accepts(ctx.req).encoding(encodingNames);
+    if (!includes(encodingNames, encoding)) return;
+    const compressed = flushEvery(encodings[encoding](), flushThreshold);
+    ctx.set("Content-Encoding", encoding);
+    ctx.body = ctx.body instanceof Stream ? ctx.body.pipe(compressed) : compressed.end(ctx.body);
+  };
+}
+
+export {
+  compress
+};
+//# sourceMappingURL=chunk-7E7HV6WZ.js.map
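The compress middleware added in this chunk negotiates an Accept-Encoding (br, gzip, or deflate) and flushes the compressed stream roughly every 4 KiB so long-lived streamed responses keep arriving instead of buffering until the end. A minimal usage sketch, assuming a local Koa app; the deep import path is illustrative only, since this chunk is an internal build artifact that the package's own bins consume rather than a public entry point:

import Koa from "koa";
// Hypothetical deep import for illustration; in the published package this chunk
// is used internally by the indexer frontends rather than imported directly.
import { compress } from "@latticexyz/store-indexer/dist/chunk-7E7HV6WZ.js";

const app = new Koa();
// flushThreshold matches the middleware's default of 1024 * 4 bytes.
app.use(compress({ flushThreshold: 1024 * 4 }));
app.use(async (ctx) => {
  ctx.body = "hello world";
});
app.listen(3001);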
@@ -1 +1 @@
-
{"version":3,"sources":["../src/koa-middleware/compress.ts"],"sourcesContent":["import { Middleware } from \"koa\";\nimport { Readable, Stream } from \"node:stream\";\nimport accepts from \"accepts\";\nimport { Zlib, createBrotliCompress, createDeflate, createGzip } from \"node:zlib\";\nimport { includes } from \"@latticexyz/common/utils\";\n\n// Loosely based on https://github.com/holic/koa-compress/blob/master/lib/index.js\n// with better handling of streams better with occasional flushing\n\nconst encodings = {\n br: createBrotliCompress,\n gzip: createGzip,\n deflate: createDeflate,\n} as const;\n\nconst encodingNames = Object.keys(encodings) as (keyof typeof encodings)[];\n\nfunction flushEvery<stream extends Zlib & Readable>(stream: stream, bytesThreshold: number): stream {\n let bytesSinceFlush = 0;\n stream.on(\"data\", (data) => {\n bytesSinceFlush += data.length;\n if (bytesSinceFlush > bytesThreshold) {\n bytesSinceFlush = 0;\n stream.flush();\n }\n });\n return stream;\n}\n\ntype CompressOptions = {\n flushThreshold?: number;\n};\n\nexport function compress({ flushThreshold = 1024 * 4 }: CompressOptions = {}): Middleware {\n return async function compressMiddleware(ctx, next) {\n ctx.vary(\"Accept-Encoding\");\n\n await next();\n\n const encoding = accepts(ctx.req).encoding(encodingNames);\n if (!includes(encodingNames, encoding)) return;\n\n const compressed = flushEvery(encodings[encoding](), flushThreshold);\n\n ctx.set(\"Content-Encoding\", encoding);\n ctx.body = ctx.body instanceof Stream ? ctx.body.pipe(compressed) : compressed.end(ctx.body);\n };\n}\n"],"mappings":"AACA,
+
{"version":3,"sources":["../src/koa-middleware/compress.ts"],"sourcesContent":["import { Middleware } from \"koa\";\nimport { Readable, Stream } from \"node:stream\";\nimport accepts from \"accepts\";\nimport { Zlib, createBrotliCompress, createDeflate, createGzip } from \"node:zlib\";\nimport { includes } from \"@latticexyz/common/utils\";\n\n// Loosely based on https://github.com/holic/koa-compress/blob/master/lib/index.js\n// with better handling of streams better with occasional flushing\n\nconst encodings = {\n br: createBrotliCompress,\n gzip: createGzip,\n deflate: createDeflate,\n} as const;\n\nconst encodingNames = Object.keys(encodings) as (keyof typeof encodings)[];\n\nfunction flushEvery<stream extends Zlib & Readable>(stream: stream, bytesThreshold: number): stream {\n let bytesSinceFlush = 0;\n stream.on(\"data\", (data) => {\n bytesSinceFlush += data.length;\n if (bytesSinceFlush > bytesThreshold) {\n bytesSinceFlush = 0;\n stream.flush();\n }\n });\n return stream;\n}\n\ntype CompressOptions = {\n flushThreshold?: number;\n};\n\nexport function compress({ flushThreshold = 1024 * 4 }: CompressOptions = {}): Middleware {\n return async function compressMiddleware(ctx, next) {\n ctx.vary(\"Accept-Encoding\");\n\n await next();\n\n const encoding = accepts(ctx.req).encoding(encodingNames);\n if (!includes(encodingNames, encoding)) return;\n\n const compressed = flushEvery(encodings[encoding](), flushThreshold);\n\n ctx.set(\"Content-Encoding\", encoding);\n ctx.body = ctx.body instanceof Stream ? ctx.body.pipe(compressed) : compressed.end(ctx.body);\n };\n}\n"],"mappings":";AACA,SAAmB,cAAc;AACjC,OAAO,aAAa;AACpB,SAAe,sBAAsB,eAAe,kBAAkB;AACtE,SAAS,gBAAgB;AAKzB,IAAM,YAAY;AAAA,EAChB,IAAI;AAAA,EACJ,MAAM;AAAA,EACN,SAAS;AACX;AAEA,IAAM,gBAAgB,OAAO,KAAK,SAAS;AAE3C,SAAS,WAA2C,QAAgB,gBAAgC;AAClG,MAAI,kBAAkB;AACtB,SAAO,GAAG,QAAQ,CAAC,SAAS;AAC1B,uBAAmB,KAAK;AACxB,QAAI,kBAAkB,gBAAgB;AACpC,wBAAkB;AAClB,aAAO,MAAM;AAAA,IACf;AAAA,EACF,CAAC;AACD,SAAO;AACT;AAMO,SAAS,SAAS,EAAE,iBAAiB,OAAO,EAAE,IAAqB,CAAC,GAAe;AACxF,SAAO,eAAe,mBAAmB,KAAK,MAAM;AAClD,QAAI,KAAK,iBAAiB;AAE1B,UAAM,KAAK;AAEX,UAAM,WAAW,QAAQ,IAAI,GAAG,EAAE,SAAS,aAAa;AACxD,QAAI,CAAC,SAAS,eAAe,QAAQ,EAAG;AAExC,UAAM,aAAa,WAAW,UAAU,QAAQ,EAAE,GAAG,cAAc;AAEnE,QAAI,IAAI,oBAAoB,QAAQ;AACpC,QAAI,OAAO,IAAI,gBAAgB,SAAS,IAAI,KAAK,KAAK,UAAU,IAAI,WAAW,IAAI,IAAI,IAAI;AAAA,EAC7F;AACF;","names":[]}
@@ -0,0 +1,99 @@
+// src/koa-middleware/sentry.ts
+import * as Sentry from "@sentry/node";
+import { ProfilingIntegration } from "@sentry/profiling-node";
+import { stripUrlQueryAndFragment } from "@sentry/utils";
+
+// src/debug.ts
+import createDebug from "debug";
+var debug = createDebug("mud:store-indexer");
+var error = createDebug("mud:store-indexer");
+debug.log = console.debug.bind(console);
+error.log = console.error.bind(console);
+
+// src/koa-middleware/sentry.ts
+import compose from "koa-compose";
+function errorHandler() {
+  return async function errorHandlerMiddleware(ctx, next) {
+    try {
+      await next();
+    } catch (err) {
+      Sentry.withScope((scope) => {
+        scope.addEventProcessor((event) => {
+          return Sentry.addRequestDataToEvent(event, ctx.request);
+        });
+        Sentry.captureException(err);
+      });
+      throw err;
+    }
+  };
+}
+function requestHandler() {
+  return async function requestHandlerMiddleware(ctx, next) {
+    await Sentry.runWithAsyncContext(async () => {
+      const hub = Sentry.getCurrentHub();
+      hub.configureScope(
+        (scope) => scope.addEventProcessor(
+          (event) => Sentry.addRequestDataToEvent(event, ctx.request, {
+            include: {
+              user: false
+            }
+          })
+        )
+      );
+      await next();
+    });
+  };
+}
+function tracing() {
+  return async function tracingMiddleware(ctx, next) {
+    const reqMethod = (ctx.method || "").toUpperCase();
+    const reqUrl = ctx.url && stripUrlQueryAndFragment(ctx.url);
+    let traceparentData;
+    if (ctx.request.get("sentry-trace")) {
+      traceparentData = Sentry.extractTraceparentData(ctx.request.get("sentry-trace"));
+    }
+    const transaction = Sentry.startTransaction({
+      name: `${reqMethod} ${reqUrl}`,
+      op: "http.server",
+      ...traceparentData
+    });
+    ctx.__sentry_transaction = transaction;
+    Sentry.getCurrentHub().configureScope((scope) => {
+      scope.setSpan(transaction);
+    });
+    ctx.res.on("finish", () => {
+      setImmediate(() => {
+        if (ctx._matchedRoute) {
+          const mountPath = ctx.mountPath || "";
+          transaction.setName(`${reqMethod} ${mountPath}${ctx._matchedRoute}`);
+        }
+        transaction.setHttpStatus(ctx.status);
+        transaction.finish();
+      });
+    });
+    await next();
+  };
+}
+function sentry(dsn) {
+  debug("Initializing Sentry");
+  Sentry.init({
+    dsn,
+    integrations: [
+      // Automatically instrument Node.js libraries and frameworks
+      ...Sentry.autoDiscoverNodePerformanceMonitoringIntegrations(),
+      new ProfilingIntegration()
+    ],
+    // Performance Monitoring
+    tracesSampleRate: 1,
+    // Set sampling rate for profiling - this is relative to tracesSampleRate
+    profilesSampleRate: 1
+  });
+  return compose([errorHandler(), requestHandler(), tracing()]);
+}
+
+export {
+  debug,
+  error,
+  sentry
+};
+//# sourceMappingURL=chunk-ALQNRR4A.js.map
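This chunk composes three Koa middlewares (error capture, request scoping, and a per-request transaction) behind a single sentry(dsn) helper. A minimal wiring sketch, assuming a Koa server and a SENTRY_DSN environment variable; as above, the deep import is illustrative only, since the published indexer frontends apply this middleware themselves when a DSN is configured:

import Koa from "koa";
// Hypothetical deep import for illustration; not a public entry point of the package.
import { sentry } from "@latticexyz/store-indexer/dist/chunk-ALQNRR4A.js";

const server = new Koa();
if (process.env.SENTRY_DSN) {
  // Registers the composed errorHandler + requestHandler + tracing middleware.
  server.use(sentry(process.env.SENTRY_DSN));
}
server.use(async (ctx) => {
  ctx.body = "ok";
});
server.listen(3001);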
@@ -1 +1 @@
-
{"version":3,"sources":["../src/koa-middleware/sentry.ts","../src/debug.ts"],"sourcesContent":["import * as Sentry from \"@sentry/node\";\nimport { ProfilingIntegration } from \"@sentry/profiling-node\";\nimport { stripUrlQueryAndFragment } from \"@sentry/utils\";\nimport { debug } from \"../debug\";\nimport Koa from \"koa\";\nimport compose from \"koa-compose\";\n\nexport function errorHandler(): Koa.Middleware {\n return async function errorHandlerMiddleware(ctx, next) {\n try {\n await next();\n } catch (err) {\n Sentry.withScope((scope) => {\n scope.addEventProcessor((event) => {\n return Sentry.addRequestDataToEvent(event, ctx.request);\n });\n Sentry.captureException(err);\n });\n throw err;\n }\n };\n}\n\nexport function requestHandler(): Koa.Middleware {\n return async function requestHandlerMiddleware(ctx, next) {\n await Sentry.runWithAsyncContext(async () => {\n const hub = Sentry.getCurrentHub();\n hub.configureScope((scope) =>\n scope.addEventProcessor((event) =>\n Sentry.addRequestDataToEvent(event, ctx.request, {\n include: {\n user: false,\n },\n }),\n ),\n );\n await next();\n });\n };\n}\n\nexport function tracing(): Koa.Middleware {\n // creates a Sentry transaction per request\n return async function tracingMiddleware(ctx, next) {\n const reqMethod = (ctx.method || \"\").toUpperCase();\n const reqUrl = ctx.url && stripUrlQueryAndFragment(ctx.url);\n\n // Connect to trace of upstream app\n let traceparentData;\n if (ctx.request.get(\"sentry-trace\")) {\n traceparentData = Sentry.extractTraceparentData(ctx.request.get(\"sentry-trace\"));\n }\n\n const transaction = Sentry.startTransaction({\n name: `${reqMethod} ${reqUrl}`,\n op: \"http.server\",\n ...traceparentData,\n });\n\n ctx.__sentry_transaction = transaction;\n\n // We put the transaction on the scope so users can attach children to it\n Sentry.getCurrentHub().configureScope((scope) => {\n scope.setSpan(transaction);\n });\n\n ctx.res.on(\"finish\", () => {\n // Push `transaction.finish` to the next event loop so open spans have a chance to finish before the transaction closes\n setImmediate(() => {\n // If you're using koa router, set the matched route as transaction name\n if (ctx._matchedRoute) {\n const mountPath = ctx.mountPath || \"\";\n transaction.setName(`${reqMethod} ${mountPath}${ctx._matchedRoute}`);\n }\n\n transaction.setHttpStatus(ctx.status);\n transaction.finish();\n });\n });\n\n await next();\n };\n}\n\nexport function sentry(dsn: string): Koa.Middleware {\n debug(\"Initializing Sentry\");\n Sentry.init({\n dsn,\n integrations: [\n // Automatically instrument Node.js libraries and frameworks\n ...Sentry.autoDiscoverNodePerformanceMonitoringIntegrations(),\n new ProfilingIntegration(),\n ],\n // Performance Monitoring\n tracesSampleRate: 1.0,\n // Set sampling rate for profiling - this is relative to tracesSampleRate\n profilesSampleRate: 1.0,\n });\n\n return compose([errorHandler(), requestHandler(), tracing()]);\n}\n","import createDebug from \"debug\";\n\nexport const debug = createDebug(\"mud:store-indexer\");\nexport const error = createDebug(\"mud:store-indexer\");\n\n// Pipe debug output to stdout instead of stderr\ndebug.log = console.debug.bind(console);\n\n// Pipe error output to stderr\nerror.log = console.error.bind(console);\n"],"mappings":"AAAA,
+
{"version":3,"sources":["../src/koa-middleware/sentry.ts","../src/debug.ts"],"sourcesContent":["import * as Sentry from \"@sentry/node\";\nimport { ProfilingIntegration } from \"@sentry/profiling-node\";\nimport { stripUrlQueryAndFragment } from \"@sentry/utils\";\nimport { debug } from \"../debug\";\nimport Koa from \"koa\";\nimport compose from \"koa-compose\";\n\nexport function errorHandler(): Koa.Middleware {\n return async function errorHandlerMiddleware(ctx, next) {\n try {\n await next();\n } catch (err) {\n Sentry.withScope((scope) => {\n scope.addEventProcessor((event) => {\n return Sentry.addRequestDataToEvent(event, ctx.request);\n });\n Sentry.captureException(err);\n });\n throw err;\n }\n };\n}\n\nexport function requestHandler(): Koa.Middleware {\n return async function requestHandlerMiddleware(ctx, next) {\n await Sentry.runWithAsyncContext(async () => {\n const hub = Sentry.getCurrentHub();\n hub.configureScope((scope) =>\n scope.addEventProcessor((event) =>\n Sentry.addRequestDataToEvent(event, ctx.request, {\n include: {\n user: false,\n },\n }),\n ),\n );\n await next();\n });\n };\n}\n\nexport function tracing(): Koa.Middleware {\n // creates a Sentry transaction per request\n return async function tracingMiddleware(ctx, next) {\n const reqMethod = (ctx.method || \"\").toUpperCase();\n const reqUrl = ctx.url && stripUrlQueryAndFragment(ctx.url);\n\n // Connect to trace of upstream app\n let traceparentData;\n if (ctx.request.get(\"sentry-trace\")) {\n traceparentData = Sentry.extractTraceparentData(ctx.request.get(\"sentry-trace\"));\n }\n\n const transaction = Sentry.startTransaction({\n name: `${reqMethod} ${reqUrl}`,\n op: \"http.server\",\n ...traceparentData,\n });\n\n ctx.__sentry_transaction = transaction;\n\n // We put the transaction on the scope so users can attach children to it\n Sentry.getCurrentHub().configureScope((scope) => {\n scope.setSpan(transaction);\n });\n\n ctx.res.on(\"finish\", () => {\n // Push `transaction.finish` to the next event loop so open spans have a chance to finish before the transaction closes\n setImmediate(() => {\n // If you're using koa router, set the matched route as transaction name\n if (ctx._matchedRoute) {\n const mountPath = ctx.mountPath || \"\";\n transaction.setName(`${reqMethod} ${mountPath}${ctx._matchedRoute}`);\n }\n\n transaction.setHttpStatus(ctx.status);\n transaction.finish();\n });\n });\n\n await next();\n };\n}\n\nexport function sentry(dsn: string): Koa.Middleware {\n debug(\"Initializing Sentry\");\n Sentry.init({\n dsn,\n integrations: [\n // Automatically instrument Node.js libraries and frameworks\n ...Sentry.autoDiscoverNodePerformanceMonitoringIntegrations(),\n new ProfilingIntegration(),\n ],\n // Performance Monitoring\n tracesSampleRate: 1.0,\n // Set sampling rate for profiling - this is relative to tracesSampleRate\n profilesSampleRate: 1.0,\n });\n\n return compose([errorHandler(), requestHandler(), tracing()]);\n}\n","import createDebug from \"debug\";\n\nexport const debug = createDebug(\"mud:store-indexer\");\nexport const error = createDebug(\"mud:store-indexer\");\n\n// Pipe debug output to stdout instead of stderr\ndebug.log = console.debug.bind(console);\n\n// Pipe error output to stderr\nerror.log = 
console.error.bind(console);\n"],"mappings":";AAAA,YAAY,YAAY;AACxB,SAAS,4BAA4B;AACrC,SAAS,gCAAgC;;;ACFzC,OAAO,iBAAiB;AAEjB,IAAM,QAAQ,YAAY,mBAAmB;AAC7C,IAAM,QAAQ,YAAY,mBAAmB;AAGpD,MAAM,MAAM,QAAQ,MAAM,KAAK,OAAO;AAGtC,MAAM,MAAM,QAAQ,MAAM,KAAK,OAAO;;;ADJtC,OAAO,aAAa;AAEb,SAAS,eAA+B;AAC7C,SAAO,eAAe,uBAAuB,KAAK,MAAM;AACtD,QAAI;AACF,YAAM,KAAK;AAAA,IACb,SAAS,KAAK;AACZ,MAAO,iBAAU,CAAC,UAAU;AAC1B,cAAM,kBAAkB,CAAC,UAAU;AACjC,iBAAc,6BAAsB,OAAO,IAAI,OAAO;AAAA,QACxD,CAAC;AACD,QAAO,wBAAiB,GAAG;AAAA,MAC7B,CAAC;AACD,YAAM;AAAA,IACR;AAAA,EACF;AACF;AAEO,SAAS,iBAAiC;AAC/C,SAAO,eAAe,yBAAyB,KAAK,MAAM;AACxD,UAAa,2BAAoB,YAAY;AAC3C,YAAM,MAAa,qBAAc;AACjC,UAAI;AAAA,QAAe,CAAC,UAClB,MAAM;AAAA,UAAkB,CAAC,UAChB,6BAAsB,OAAO,IAAI,SAAS;AAAA,YAC/C,SAAS;AAAA,cACP,MAAM;AAAA,YACR;AAAA,UACF,CAAC;AAAA,QACH;AAAA,MACF;AACA,YAAM,KAAK;AAAA,IACb,CAAC;AAAA,EACH;AACF;AAEO,SAAS,UAA0B;AAExC,SAAO,eAAe,kBAAkB,KAAK,MAAM;AACjD,UAAM,aAAa,IAAI,UAAU,IAAI,YAAY;AACjD,UAAM,SAAS,IAAI,OAAO,yBAAyB,IAAI,GAAG;AAG1D,QAAI;AACJ,QAAI,IAAI,QAAQ,IAAI,cAAc,GAAG;AACnC,wBAAyB,8BAAuB,IAAI,QAAQ,IAAI,cAAc,CAAC;AAAA,IACjF;AAEA,UAAM,cAAqB,wBAAiB;AAAA,MAC1C,MAAM,GAAG,SAAS,IAAI,MAAM;AAAA,MAC5B,IAAI;AAAA,MACJ,GAAG;AAAA,IACL,CAAC;AAED,QAAI,uBAAuB;AAG3B,IAAO,qBAAc,EAAE,eAAe,CAAC,UAAU;AAC/C,YAAM,QAAQ,WAAW;AAAA,IAC3B,CAAC;AAED,QAAI,IAAI,GAAG,UAAU,MAAM;AAEzB,mBAAa,MAAM;AAEjB,YAAI,IAAI,eAAe;AACrB,gBAAM,YAAY,IAAI,aAAa;AACnC,sBAAY,QAAQ,GAAG,SAAS,IAAI,SAAS,GAAG,IAAI,aAAa,EAAE;AAAA,QACrE;AAEA,oBAAY,cAAc,IAAI,MAAM;AACpC,oBAAY,OAAO;AAAA,MACrB,CAAC;AAAA,IACH,CAAC;AAED,UAAM,KAAK;AAAA,EACb;AACF;AAEO,SAAS,OAAO,KAA6B;AAClD,QAAM,qBAAqB;AAC3B,EAAO,YAAK;AAAA,IACV;AAAA,IACA,cAAc;AAAA;AAAA,MAEZ,GAAU,yDAAkD;AAAA,MAC5D,IAAI,qBAAqB;AAAA,IAC3B;AAAA;AAAA,IAEA,kBAAkB;AAAA;AAAA,IAElB,oBAAoB;AAAA,EACtB,CAAC;AAED,SAAO,QAAQ,CAAC,aAAa,GAAG,eAAe,GAAG,QAAQ,CAAC,CAAC;AAC9D;","names":[]}