@latticexyz/store-indexer 2.2.12-pending-logs-72bb2264a91a3266f5f15bbd754d32a1e2a62ccb → 2.2.12

This diff shows the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as published in their respective public registries.
@@ -0,0 +1,3 @@
+ #!/usr/bin/env node
+ // workaround for https://github.com/pnpm/pnpm/issues/1801
+ import "../dist/bin/postgres-decoded-indexer.js";
@@ -0,0 +1,3 @@
+ #!/usr/bin/env node
+ // workaround for https://github.com/pnpm/pnpm/issues/1801
+ import "../dist/bin/postgres-frontend.js";
@@ -0,0 +1,3 @@
+ #!/usr/bin/env node
+ // workaround for https://github.com/pnpm/pnpm/issues/1801
+ import "../dist/bin/postgres-indexer.js";
@@ -0,0 +1,3 @@
+ #!/usr/bin/env node
+ // workaround for https://github.com/pnpm/pnpm/issues/1801
+ import "../dist/bin/sqlite-indexer.js";
@@ -1,3 +1,3 @@
  #!/usr/bin/env node
- import{a as c}from"../chunk-VCBWGHIO.js";import{b as s,c as l}from"../chunk-7B4KE2DO.js";import{a as m}from"../chunk-KDDXIBYJ.js";import{a as p}from"../chunk-OUZYPRYF.js";import"dotenv/config";import{z as o}from"zod";import{eq as _}from"drizzle-orm";import{createPublicClient as C,fallback as E,webSocket as b,http as L}from"viem";import{isDefined as R}from"@latticexyz/common/utils";import{combineLatest as S,filter as d,first as A}from"rxjs";import{drizzle as g}from"drizzle-orm/postgres-js";import h from"postgres";import{createStorageAdapter as O}from"@latticexyz/store-sync/postgres-decoded";import{createStoreSync as k}from"@latticexyz/store-sync";var e=l(o.intersection(s,o.object({DATABASE_URL:o.string(),HEALTHCHECK_HOST:o.string().optional(),HEALTHCHECK_PORT:o.coerce.number().optional(),SENTRY_DSN:o.string().optional()}))),N=[e.RPC_WS_URL?b(e.RPC_WS_URL):void 0,e.RPC_HTTP_URL?L(e.RPC_HTTP_URL):void 0].filter(R),a=C({transport:E(N),pollingInterval:e.POLLING_INTERVAL}),K=await a.getChainId(),f=g(h(e.DATABASE_URL,{prepare:!1})),{storageAdapter:P,tables:T}=await O({database:f,publicClient:a}),n=e.START_BLOCK;try{let t=await f.select().from(T.configTable).where(_(T.configTable.chainId,K)).limit(1).execute().then(r=>r.find(()=>!0));t?.blockNumber!=null&&(n=t.blockNumber+1n,console.log("resuming from block number",n))}catch{}var{latestBlockNumber$:w,storedBlockLogs$:H}=await k({storageAdapter:P,publicClient:a,followBlockTag:e.FOLLOW_BLOCK_TAG,startBlock:n,maxBlockRange:e.MAX_BLOCK_RANGE,address:e.STORE_ADDRESS});H.subscribe();var u=!1;S([w,H]).pipe(d(([t,{blockNumber:r}])=>t===r),A()).subscribe(()=>{u=!0,console.log("all caught up")});if(e.HEALTHCHECK_HOST!=null||e.HEALTHCHECK_PORT!=null){let{default:t}=await import("koa"),{default:r}=await import("@koa/cors"),i=new t;e.SENTRY_DSN&&i.use(c(e.SENTRY_DSN)),i.use(r()),i.use(m({isReady:()=>u})),i.use(p()),i.listen({host:e.HEALTHCHECK_HOST,port:e.HEALTHCHECK_PORT}),console.log(`postgres indexer healthcheck server listening on http://${e.HEALTHCHECK_HOST}:${e.HEALTHCHECK_PORT}`)}
+ import{c}from"../chunk-32XITDZW.js";import{b as s,c as l}from"../chunk-M2HHWHGP.js";import{a as m}from"../chunk-KDDXIBYJ.js";import{a as p}from"../chunk-OUZYPRYF.js";import"dotenv/config";import{z as o}from"zod";import{eq as _}from"drizzle-orm";import{createPublicClient as C,fallback as E,webSocket as b,http as L}from"viem";import{isDefined as R}from"@latticexyz/common/utils";import{combineLatest as S,filter as d,first as A}from"rxjs";import{drizzle as g}from"drizzle-orm/postgres-js";import h from"postgres";import{createStorageAdapter as O}from"@latticexyz/store-sync/postgres-decoded";import{createStoreSync as k}from"@latticexyz/store-sync";var e=l(o.intersection(s,o.object({DATABASE_URL:o.string(),HEALTHCHECK_HOST:o.string().optional(),HEALTHCHECK_PORT:o.coerce.number().optional(),SENTRY_DSN:o.string().optional()}))),N=[e.RPC_WS_URL?b(e.RPC_WS_URL):void 0,e.RPC_HTTP_URL?L(e.RPC_HTTP_URL):void 0].filter(R),a=C({transport:E(N),pollingInterval:e.POLLING_INTERVAL}),K=await a.getChainId(),f=g(h(e.DATABASE_URL,{prepare:!1})),{storageAdapter:P,tables:T}=await O({database:f,publicClient:a}),n=e.START_BLOCK;try{let t=await f.select().from(T.configTable).where(_(T.configTable.chainId,K)).limit(1).execute().then(r=>r.find(()=>!0));t?.blockNumber!=null&&(n=t.blockNumber+1n,console.log("resuming from block number",n))}catch{}var{latestBlockNumber$:w,storedBlockLogs$:H}=await k({storageAdapter:P,publicClient:a,followBlockTag:e.FOLLOW_BLOCK_TAG,startBlock:n,maxBlockRange:e.MAX_BLOCK_RANGE,address:e.STORE_ADDRESS});H.subscribe();var u=!1;S([w,H]).pipe(d(([t,{blockNumber:r}])=>t===r),A()).subscribe(()=>{u=!0,console.log("all caught up")});if(e.HEALTHCHECK_HOST!=null||e.HEALTHCHECK_PORT!=null){let{default:t}=await import("koa"),{default:r}=await import("@koa/cors"),i=new t;e.SENTRY_DSN&&i.use(c(e.SENTRY_DSN)),i.use(r()),i.use(m({isReady:()=>u})),i.use(p()),i.listen({host:e.HEALTHCHECK_HOST,port:e.HEALTHCHECK_PORT}),console.log(`postgres indexer healthcheck server listening on http://${e.HEALTHCHECK_HOST}:${e.HEALTHCHECK_PORT}`)}
  //# sourceMappingURL=postgres-decoded-indexer.js.map
@@ -1 +1 @@
- {"version":3,"sources":["../../bin/postgres-decoded-indexer.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { createPublicClient, fallback, webSocket, http, Transport } from \"viem\";\nimport { isDefined } from \"@latticexyz/common/utils\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { createStorageAdapter } from \"@latticexyz/store-sync/postgres-decoded\";\nimport { createStoreSync } from \"@latticexyz/store-sync\";\nimport { indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { sentry } from \"../src/koa-middleware/sentry\";\nimport { healthcheck } from \"../src/koa-middleware/healthcheck\";\nimport { helloWorld } from \"../src/koa-middleware/helloWorld\";\n\nconst env = parseEnv(\n z.intersection(\n indexerEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n HEALTHCHECK_HOST: z.string().optional(),\n HEALTHCHECK_PORT: z.coerce.number().optional(),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst transports: Transport[] = [\n // prefer WS when specified\n env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : undefined,\n // otherwise use or fallback to HTTP\n env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : undefined,\n].filter(isDefined);\n\nconst publicClient = createPublicClient({\n transport: fallback(transports),\n pollingInterval: env.POLLING_INTERVAL,\n});\n\nconst chainId = await publicClient.getChainId();\nconst database = drizzle(postgres(env.DATABASE_URL, { prepare: false }));\n\nconst { storageAdapter, tables } = await createStorageAdapter({ database, publicClient });\n\nlet startBlock = env.START_BLOCK;\n\n// Resume from latest block stored in DB. 
This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n// TODO: query if the DB exists instead of try/catch\ntry {\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n\n if (chainState?.blockNumber != null) {\n startBlock = chainState.blockNumber + 1n;\n console.log(\"resuming from block number\", startBlock);\n }\n} catch (error) {\n // ignore errors for now\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await createStoreSync({\n storageAdapter,\n publicClient,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nstoredBlockLogs$.subscribe();\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nif (env.HEALTHCHECK_HOST != null || env.HEALTHCHECK_PORT != null) {\n const { default: Koa } = await import(\"koa\");\n const { default: cors } = await import(\"@koa/cors\");\n\n const server = new Koa();\n\n if (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n }\n\n server.use(cors());\n server.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n );\n server.use(helloWorld());\n\n server.listen({ host: env.HEALTHCHECK_HOST, port: env.HEALTHCHECK_PORT });\n console.log(\n `postgres indexer healthcheck server listening on http://${env.HEALTHCHECK_HOST}:${env.HEALTHCHECK_PORT}`,\n 
);\n}\n"],"mappings":";2KACA,MAAO,gBACP,OAAS,KAAAA,MAAS,MAClB,OAAS,MAAAC,MAAU,cACnB,OAAS,sBAAAC,EAAoB,YAAAC,EAAU,aAAAC,EAAW,QAAAC,MAAuB,OACzE,OAAS,aAAAC,MAAiB,2BAC1B,OAAS,iBAAAC,EAAe,UAAAC,EAAQ,SAAAC,MAAa,OAC7C,OAAS,WAAAC,MAAe,0BACxB,OAAOC,MAAc,WACrB,OAAS,wBAAAC,MAA4B,0CACrC,OAAS,mBAAAC,MAAuB,yBAMhC,IAAMC,EAAMC,EACVC,EAAE,aACAC,EACAD,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,iBAAkBA,EAAE,OAAO,EAAE,SAAS,EACtC,iBAAkBA,EAAE,OAAO,OAAO,EAAE,SAAS,EAC7C,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,CACH,CACF,EAEME,EAA0B,CAE9BJ,EAAI,WAAaK,EAAUL,EAAI,UAAU,EAAI,OAE7CA,EAAI,aAAeM,EAAKN,EAAI,YAAY,EAAI,MAC9C,EAAE,OAAOO,CAAS,EAEZC,EAAeC,EAAmB,CACtC,UAAWC,EAASN,CAAU,EAC9B,gBAAiBJ,EAAI,gBACvB,CAAC,EAEKW,EAAU,MAAMH,EAAa,WAAW,EACxCI,EAAWC,EAAQC,EAASd,EAAI,aAAc,CAAE,QAAS,EAAM,CAAC,CAAC,EAEjE,CAAE,eAAAe,EAAgB,OAAAC,CAAO,EAAI,MAAMC,EAAqB,CAAE,SAAAL,EAAU,aAAAJ,CAAa,CAAC,EAEpFU,EAAalB,EAAI,YAIrB,GAAI,CACF,IAAMmB,EAAa,MAAMP,EACtB,OAAO,EACP,KAAKI,EAAO,WAAW,EACvB,MAAMI,EAAGJ,EAAO,YAAY,QAASL,CAAO,CAAC,EAC7C,MAAM,CAAC,EACP,QAAQ,EAGR,KAAMU,GAASA,EAAK,KAAK,IAAM,EAAI,CAAC,EAEnCF,GAAY,aAAe,OAC7BD,EAAaC,EAAW,YAAc,GACtC,QAAQ,IAAI,6BAA8BD,CAAU,EAExD,MAAE,CAEF,CAEA,GAAM,CAAE,mBAAAI,EAAoB,iBAAAC,CAAiB,EAAI,MAAMC,EAAgB,CACrE,eAAAT,EACA,aAAAP,EACA,eAAgBR,EAAI,iBACpB,WAAAkB,EACA,cAAelB,EAAI,gBACnB,QAASA,EAAI,aACf,CAAC,EAEDuB,EAAiB,UAAU,EAE3B,IAAIE,EAAa,GACjBC,EAAc,CAACJ,EAAoBC,CAAgB,CAAC,EACjD,KACCI,EACE,CAAC,CAACC,EAAmB,CAAE,YAAaC,CAAyB,CAAC,IAC5DD,IAAsBC,CAC1B,EACAC,EAAM,CACR,EACC,UAAU,IAAM,CACfL,EAAa,GACb,QAAQ,IAAI,eAAe,CAC7B,CAAC,EAEH,GAAIzB,EAAI,kBAAoB,MAAQA,EAAI,kBAAoB,KAAM,CAChE,GAAM,CAAE,QAAS+B,CAAI,EAAI,KAAM,QAAO,KAAK,EACrC,CAAE,QAASC,CAAK,EAAI,KAAM,QAAO,WAAW,EAE5CC,EAAS,IAAIF,EAEf/B,EAAI,YACNiC,EAAO,IAAIC,EAAOlC,EAAI,UAAU,CAAC,EAGnCiC,EAAO,IAAID,EAAK,CAAC,EACjBC,EAAO,IACLE,EAAY,CACV,QAAS,IAAMV,CACjB,CAAC,CACH,EACAQ,EAAO,IAAIG,EAAW,CAAC,EAEvBH,EAAO,OAAO,CAAE,KAAMjC,EAAI,iBAAkB,KAAMA,EAAI,gBAAiB,CAAC,EACxE,QAAQ,IACN,2DAA2DA,EAAI,oBAAoBA,EAAI,kBACzF","names":["z","eq","createPublicClient","fallback","webSocket","http","isDefined","combineLatest","filter","first","drizzle","postgres","createStorageAdapter","createStoreSync","env","parseEnv","z","indexerEnvSchema","transports","webSocket","http","isDefined","publicClient","createPublicClient","fallback","chainId","database","drizzle","postgres","storageAdapter","tables","createStorageAdapter","startBlock","chainState","eq","rows","latestBlockNumber$","storedBlockLogs$","createStoreSync","isCaughtUp","combineLatest","filter","latestBlockNumber","lastBlockNumberProcessed","first","Koa","cors","server","sentry","healthcheck","helloWorld"]}
+ {"version":3,"sources":["../../src/bin/postgres-decoded-indexer.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { createPublicClient, fallback, webSocket, http, Transport } from \"viem\";\nimport { isDefined } from \"@latticexyz/common/utils\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { createStorageAdapter } from \"@latticexyz/store-sync/postgres-decoded\";\nimport { createStoreSync } from \"@latticexyz/store-sync\";\nimport { indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { sentry } from \"../koa-middleware/sentry\";\nimport { healthcheck } from \"../koa-middleware/healthcheck\";\nimport { helloWorld } from \"../koa-middleware/helloWorld\";\n\nconst env = parseEnv(\n z.intersection(\n indexerEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n HEALTHCHECK_HOST: z.string().optional(),\n HEALTHCHECK_PORT: z.coerce.number().optional(),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst transports: Transport[] = [\n // prefer WS when specified\n env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : undefined,\n // otherwise use or fallback to HTTP\n env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : undefined,\n].filter(isDefined);\n\nconst publicClient = createPublicClient({\n transport: fallback(transports),\n pollingInterval: env.POLLING_INTERVAL,\n});\n\nconst chainId = await publicClient.getChainId();\nconst database = drizzle(postgres(env.DATABASE_URL, { prepare: false }));\n\nconst { storageAdapter, tables } = await createStorageAdapter({ database, publicClient });\n\nlet startBlock = env.START_BLOCK;\n\n// Resume from latest block stored in DB. 
This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n// TODO: query if the DB exists instead of try/catch\ntry {\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n\n if (chainState?.blockNumber != null) {\n startBlock = chainState.blockNumber + 1n;\n console.log(\"resuming from block number\", startBlock);\n }\n} catch (error) {\n // ignore errors for now\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await createStoreSync({\n storageAdapter,\n publicClient,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nstoredBlockLogs$.subscribe();\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nif (env.HEALTHCHECK_HOST != null || env.HEALTHCHECK_PORT != null) {\n const { default: Koa } = await import(\"koa\");\n const { default: cors } = await import(\"@koa/cors\");\n\n const server = new Koa();\n\n if (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n }\n\n server.use(cors());\n server.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n );\n server.use(helloWorld());\n\n server.listen({ host: env.HEALTHCHECK_HOST, port: env.HEALTHCHECK_PORT });\n console.log(\n `postgres indexer healthcheck server listening on http://${env.HEALTHCHECK_HOST}:${env.HEALTHCHECK_PORT}`,\n 
);\n}\n"],"mappings":";sKACA,MAAO,gBACP,OAAS,KAAAA,MAAS,MAClB,OAAS,MAAAC,MAAU,cACnB,OAAS,sBAAAC,EAAoB,YAAAC,EAAU,aAAAC,EAAW,QAAAC,MAAuB,OACzE,OAAS,aAAAC,MAAiB,2BAC1B,OAAS,iBAAAC,EAAe,UAAAC,EAAQ,SAAAC,MAAa,OAC7C,OAAS,WAAAC,MAAe,0BACxB,OAAOC,MAAc,WACrB,OAAS,wBAAAC,MAA4B,0CACrC,OAAS,mBAAAC,MAAuB,yBAMhC,IAAMC,EAAMC,EACVC,EAAE,aACAC,EACAD,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,iBAAkBA,EAAE,OAAO,EAAE,SAAS,EACtC,iBAAkBA,EAAE,OAAO,OAAO,EAAE,SAAS,EAC7C,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,CACH,CACF,EAEME,EAA0B,CAE9BJ,EAAI,WAAaK,EAAUL,EAAI,UAAU,EAAI,OAE7CA,EAAI,aAAeM,EAAKN,EAAI,YAAY,EAAI,MAC9C,EAAE,OAAOO,CAAS,EAEZC,EAAeC,EAAmB,CACtC,UAAWC,EAASN,CAAU,EAC9B,gBAAiBJ,EAAI,gBACvB,CAAC,EAEKW,EAAU,MAAMH,EAAa,WAAW,EACxCI,EAAWC,EAAQC,EAASd,EAAI,aAAc,CAAE,QAAS,EAAM,CAAC,CAAC,EAEjE,CAAE,eAAAe,EAAgB,OAAAC,CAAO,EAAI,MAAMC,EAAqB,CAAE,SAAAL,EAAU,aAAAJ,CAAa,CAAC,EAEpFU,EAAalB,EAAI,YAIrB,GAAI,CACF,IAAMmB,EAAa,MAAMP,EACtB,OAAO,EACP,KAAKI,EAAO,WAAW,EACvB,MAAMI,EAAGJ,EAAO,YAAY,QAASL,CAAO,CAAC,EAC7C,MAAM,CAAC,EACP,QAAQ,EAGR,KAAMU,GAASA,EAAK,KAAK,IAAM,EAAI,CAAC,EAEnCF,GAAY,aAAe,OAC7BD,EAAaC,EAAW,YAAc,GACtC,QAAQ,IAAI,6BAA8BD,CAAU,EAExD,MAAE,CAEF,CAEA,GAAM,CAAE,mBAAAI,EAAoB,iBAAAC,CAAiB,EAAI,MAAMC,EAAgB,CACrE,eAAAT,EACA,aAAAP,EACA,eAAgBR,EAAI,iBACpB,WAAAkB,EACA,cAAelB,EAAI,gBACnB,QAASA,EAAI,aACf,CAAC,EAEDuB,EAAiB,UAAU,EAE3B,IAAIE,EAAa,GACjBC,EAAc,CAACJ,EAAoBC,CAAgB,CAAC,EACjD,KACCI,EACE,CAAC,CAACC,EAAmB,CAAE,YAAaC,CAAyB,CAAC,IAC5DD,IAAsBC,CAC1B,EACAC,EAAM,CACR,EACC,UAAU,IAAM,CACfL,EAAa,GACb,QAAQ,IAAI,eAAe,CAC7B,CAAC,EAEH,GAAIzB,EAAI,kBAAoB,MAAQA,EAAI,kBAAoB,KAAM,CAChE,GAAM,CAAE,QAAS+B,CAAI,EAAI,KAAM,QAAO,KAAK,EACrC,CAAE,QAASC,CAAK,EAAI,KAAM,QAAO,WAAW,EAE5CC,EAAS,IAAIF,EAEf/B,EAAI,YACNiC,EAAO,IAAIC,EAAOlC,EAAI,UAAU,CAAC,EAGnCiC,EAAO,IAAID,EAAK,CAAC,EACjBC,EAAO,IACLE,EAAY,CACV,QAAS,IAAMV,CACjB,CAAC,CACH,EACAQ,EAAO,IAAIG,EAAW,CAAC,EAEvBH,EAAO,OAAO,CAAE,KAAMjC,EAAI,iBAAkB,KAAMA,EAAI,gBAAiB,CAAC,EACxE,QAAQ,IACN,2DAA2DA,EAAI,oBAAoBA,EAAI,kBACzF","names":["z","eq","createPublicClient","fallback","webSocket","http","isDefined","combineLatest","filter","first","drizzle","postgres","createStorageAdapter","createStoreSync","env","parseEnv","z","indexerEnvSchema","transports","webSocket","http","isDefined","publicClient","createPublicClient","fallback","chainId","database","drizzle","postgres","storageAdapter","tables","createStorageAdapter","startBlock","chainState","eq","rows","latestBlockNumber$","storedBlockLogs$","createStoreSync","isCaughtUp","combineLatest","filter","latestBlockNumber","lastBlockNumberProcessed","first","Koa","cors","server","sentry","healthcheck","helloWorld"]}
@@ -1,5 +1,5 @@
  #!/usr/bin/env node
- import{a as b,b as T,c as x}from"../chunk-ZS3IQEZ4.js";import{a as w}from"../chunk-VCBWGHIO.js";import{a as $,c as L}from"../chunk-7B4KE2DO.js";import{a as I}from"../chunk-KDDXIBYJ.js";import{a as D}from"../chunk-OUZYPRYF.js";import{a as _}from"../chunk-GQCEMYLA.js";import"dotenv/config";import{z as S}from"zod";import ae from"koa";import se from"@koa/cors";import{createKoaMiddleware as ie}from"trpc-koa-adapter";import{createAppRouter as de}from"@latticexyz/store-sync/trpc-indexer";import{drizzle as ce}from"drizzle-orm/postgres-js";import me from"postgres";import{getAddress as E}from"viem";import{isTableRegistrationLog as j,logToTable as Y,schemasTable as U}from"@latticexyz/store-sync";import{decodeKey as V,decodeValueArgs as q}from"@latticexyz/protocol-parser/internal";import{tables as d}from"@latticexyz/store-sync/postgres";import{and as F,asc as H,eq as u,or as W}from"drizzle-orm";import{bigIntMax as J}from"@latticexyz/common/utils";import{decodeDynamicField as z}from"@latticexyz/protocol-parser/internal";function h(e){return{address:e.address,eventName:"Store_SetRecord",args:{tableId:e.tableId,keyTuple:z("bytes32[]",e.keyBytes),staticData:e.staticData??"0x",encodedLengths:e.encodedLengths??"0x",dynamicData:e.dynamicData??"0x"}}}import{createBenchmark as K}from"@latticexyz/common";async function k(e,{chainId:t,address:r,filters:a=[]}){let o=K("drizzleGetLogs"),n=a.length?a.map(s=>F(r!=null?u(d.recordsTable.address,r):void 0,u(d.recordsTable.tableId,s.tableId),s.key0!=null?u(d.recordsTable.key0,s.key0):void 0,s.key1!=null?u(d.recordsTable.key1,s.key1):void 0)):r!=null?[u(d.recordsTable.address,r)]:[];o("parse config");let f=(await e.select().from(d.configTable).where(u(d.configTable.chainId,t)).limit(1).execute().then(s=>s.find(()=>!0)))?.blockNumber??0n;o("query chainState");let l=await e.select().from(d.recordsTable).where(W(...n)).orderBy(H(d.recordsTable.blockNumber));o("query records");let i=l.reduce((s,y)=>J(s,y.blockNumber??0n),f);o("find block number");let R=l.filter(s=>!s.isDeleted).map(h);return o("map records to logs"),{blockNumber:i,logs:R}}import{groupBy as G}from"@latticexyz/common/utils";async function O(e){return{async getLogs(r){return k(e,r)},async findAll(r){let a=r.filters??[],{blockNumber:o,logs:n}=await k(e,{...r,filters:a.length>0?[...a,{tableId:U.tableId}]:[]}),p=n.filter(j).map(Y),f=G(n,i=>`${E(i.address)}:${i.args.tableId}`),l=p.map(i=>{let s=(f.get(`${E(i.address)}:${i.tableId}`)??[]).map(y=>{let A=V(i.keySchema,y.args.keyTuple),N=q(i.valueSchema,y.args);return{key:A,value:N,fields:{...A,...N}}});return{...i,records:s}});return b("findAll: decoded %d logs across %d tables",n.length,p.length),{blockNumber:o,tables:l}}}}import ee from"@koa/router";import re from"koa-compose";import{input as oe}from"@latticexyz/store-sync/indexer-client";import{schemasTable as te}from"@latticexyz/store-sync";import{isNotNull as B}from"@latticexyz/common/utils";import{hexToBytes as g}from"viem";import{transformSchemaName as X}from"@latticexyz/store-sync/postgres";var P=X("mud");function v(e,t){return e`(${t.reduce((r,a)=>e`${r} AND ${a}`)})`}function Z(e,t){return e`(${t.reduce((r,a)=>e`${r} OR ${a}`)})`}function C(e,t){let r=t.filters.length?t.filters.map(o=>v(e,[t.address!=null?e`address = ${g(t.address)}`:null,e`table_id = ${g(o.tableId)}`,o.key0!=null?e`key0 = ${g(o.key0)}`:null,o.key1!=null?e`key1 = ${g(o.key1)}`:null].filter(B))):t.address!=null?[e`address = ${g(t.address)}`]:[],a=e`WHERE ${v(e,[e`is_deleted != true`,r.length?Z(e,r):null].filter(B))}`;return e`
+ import{a as x}from"../chunk-R7HX5BT2.js";import{a as b,b as T,c as w}from"../chunk-32XITDZW.js";import{a as $,c as L}from"../chunk-M2HHWHGP.js";import{a as I}from"../chunk-KDDXIBYJ.js";import{a as D}from"../chunk-OUZYPRYF.js";import{a as _}from"../chunk-GQCEMYLA.js";import"dotenv/config";import{z as S}from"zod";import ae from"koa";import se from"@koa/cors";import{createKoaMiddleware as ie}from"trpc-koa-adapter";import{createAppRouter as de}from"@latticexyz/store-sync/trpc-indexer";import{drizzle as ce}from"drizzle-orm/postgres-js";import me from"postgres";import{getAddress as E}from"viem";import{isTableRegistrationLog as j,logToTable as Y,schemasTable as U}from"@latticexyz/store-sync";import{decodeKey as V,decodeValueArgs as q}from"@latticexyz/protocol-parser/internal";import{tables as d}from"@latticexyz/store-sync/postgres";import{and as F,asc as H,eq as u,or as W}from"drizzle-orm";import{bigIntMax as J}from"@latticexyz/common/utils";import{decodeDynamicField as z}from"@latticexyz/protocol-parser/internal";function h(e){return{address:e.address,eventName:"Store_SetRecord",args:{tableId:e.tableId,keyTuple:z("bytes32[]",e.keyBytes),staticData:e.staticData??"0x",encodedLengths:e.encodedLengths??"0x",dynamicData:e.dynamicData??"0x"}}}import{createBenchmark as K}from"@latticexyz/common";async function k(e,{chainId:t,address:r,filters:a=[]}){let o=K("drizzleGetLogs"),n=a.length?a.map(s=>F(r!=null?u(d.recordsTable.address,r):void 0,u(d.recordsTable.tableId,s.tableId),s.key0!=null?u(d.recordsTable.key0,s.key0):void 0,s.key1!=null?u(d.recordsTable.key1,s.key1):void 0)):r!=null?[u(d.recordsTable.address,r)]:[];o("parse config");let f=(await e.select().from(d.configTable).where(u(d.configTable.chainId,t)).limit(1).execute().then(s=>s.find(()=>!0)))?.blockNumber??0n;o("query chainState");let l=await e.select().from(d.recordsTable).where(W(...n)).orderBy(H(d.recordsTable.blockNumber));o("query records");let i=l.reduce((s,y)=>J(s,y.blockNumber??0n),f);o("find block number");let R=l.filter(s=>!s.isDeleted).map(h);return o("map records to logs"),{blockNumber:i,logs:R}}import{groupBy as G}from"@latticexyz/common/utils";async function O(e){return{async getLogs(r){return k(e,r)},async findAll(r){let a=r.filters??[],{blockNumber:o,logs:n}=await k(e,{...r,filters:a.length>0?[...a,{tableId:U.tableId}]:[]}),p=n.filter(j).map(Y),f=G(n,i=>`${E(i.address)}:${i.args.tableId}`),l=p.map(i=>{let s=(f.get(`${E(i.address)}:${i.tableId}`)??[]).map(y=>{let A=V(i.keySchema,y.args.keyTuple),N=q(i.valueSchema,y.args);return{key:A,value:N,fields:{...A,...N}}});return{...i,records:s}});return b("findAll: decoded %d logs across %d tables",n.length,p.length),{blockNumber:o,tables:l}}}}import ee from"@koa/router";import re from"koa-compose";import{input as oe}from"@latticexyz/store-sync/indexer-client";import{schemasTable as te}from"@latticexyz/store-sync";import{isNotNull as B}from"@latticexyz/common/utils";import{hexToBytes as g}from"viem";import{transformSchemaName as X}from"@latticexyz/store-sync/postgres";var P=X("mud");function v(e,t){return e`(${t.reduce((r,a)=>e`${r} AND ${a}`)})`}function Z(e,t){return e`(${t.reduce((r,a)=>e`${r} OR ${a}`)})`}function C(e,t){let r=t.filters.length?t.filters.map(o=>v(e,[t.address!=null?e`address = ${g(t.address)}`:null,e`table_id = ${g(o.tableId)}`,o.key0!=null?e`key0 = ${g(o.key0)}`:null,o.key1!=null?e`key1 = ${g(o.key1)}`:null].filter(B))):t.address!=null?[e`address = ${g(t.address)}`]:[],a=e`WHERE ${v(e,[e`is_deleted != true`,r.length?Z(e,r):null].filter(B))}`;return e`
  WITH
  config AS (
  SELECT
@@ -1 +1 @@
- {"version":3,"sources":["../../bin/postgres-frontend.ts","../../src/postgres/deprecated/createQueryAdapter.ts","../../src/postgres/deprecated/getLogs.ts","../../src/postgres/recordToLog.ts","../../src/postgres/apiRoutes.ts","../../src/postgres/queryLogs.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport Koa from \"koa\";\nimport cors from \"@koa/cors\";\nimport { createKoaMiddleware } from \"trpc-koa-adapter\";\nimport { createAppRouter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { frontendEnvSchema, parseEnv } from \"./parseEnv\";\nimport { createQueryAdapter } from \"../src/postgres/deprecated/createQueryAdapter\";\nimport { apiRoutes } from \"../src/postgres/apiRoutes\";\nimport { sentry } from \"../src/koa-middleware/sentry\";\nimport { healthcheck } from \"../src/koa-middleware/healthcheck\";\nimport { helloWorld } from \"../src/koa-middleware/helloWorld\";\nimport { metrics } from \"../src/koa-middleware/metrics\";\n\nconst env = parseEnv(\n z.intersection(\n frontendEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst database = postgres(env.DATABASE_URL, { prepare: false });\n\nconst server = new Koa();\n\nif (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n}\n\nserver.use(cors());\nserver.use(healthcheck());\nserver.use(\n metrics({\n isHealthy: () => true,\n isReady: () => true,\n }),\n);\nserver.use(helloWorld());\nserver.use(apiRoutes(database));\n\nserver.use(\n createKoaMiddleware({\n prefix: \"/trpc\",\n router: createAppRouter(),\n createContext: async () => ({\n queryAdapter: await createQueryAdapter(drizzle(database)),\n }),\n }),\n);\n\nserver.listen({ host: env.HOST, port: env.PORT });\nconsole.log(`postgres indexer frontend listening on http://${env.HOST}:${env.PORT}`);\n","import { getAddress } from \"viem\";\nimport { PgDatabase } from \"drizzle-orm/pg-core\";\nimport { TableWithRecords, isTableRegistrationLog, logToTable, schemasTable } from \"@latticexyz/store-sync\";\nimport { KeySchema, decodeKey, decodeValueArgs } from \"@latticexyz/protocol-parser/internal\";\nimport { QueryAdapter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { debug } from \"../../debug\";\nimport { getLogs } from \"./getLogs\";\nimport { groupBy } from \"@latticexyz/common/utils\";\n\n/**\n * Creates a query adapter for the tRPC server/client to query data from Postgres.\n *\n * @param {PgDatabase<any>} database Postgres database object from Drizzle\n * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.\n * @deprecated\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function createQueryAdapter(database: PgDatabase<any>): Promise<QueryAdapter> {\n const adapter: QueryAdapter = {\n async getLogs(opts) {\n return getLogs(database, opts);\n },\n async findAll(opts) {\n const filters = opts.filters ?? [];\n const { blockNumber, logs } = await getLogs(database, {\n ...opts,\n // make sure we're always retrieving `store.Tables` table, so we can decode table values\n filters: filters.length > 0 ? 
[...filters, { tableId: schemasTable.tableId }] : [],\n });\n\n const tables = logs.filter(isTableRegistrationLog).map(logToTable);\n\n const logsByTable = groupBy(logs, (log) => `${getAddress(log.address)}:${log.args.tableId}`);\n\n const tablesWithRecords: readonly TableWithRecords[] = tables.map((table) => {\n const tableLogs = logsByTable.get(`${getAddress(table.address)}:${table.tableId}`) ?? [];\n const records = tableLogs.map((log) => {\n const key = decodeKey(table.keySchema as KeySchema, log.args.keyTuple);\n const value = decodeValueArgs(table.valueSchema, log.args);\n return { key, value, fields: { ...key, ...value } };\n });\n\n return {\n ...table,\n records,\n };\n });\n\n debug(\"findAll: decoded %d logs across %d tables\", logs.length, tables.length);\n\n return {\n blockNumber,\n tables: tablesWithRecords,\n };\n },\n };\n return adapter;\n}\n","import { PgDatabase } from \"drizzle-orm/pg-core\";\nimport { Hex } from \"viem\";\nimport { StorageAdapterLog, SyncFilter } from \"@latticexyz/store-sync\";\nimport { tables } from \"@latticexyz/store-sync/postgres\";\nimport { and, asc, eq, or } from \"drizzle-orm\";\nimport { bigIntMax } from \"@latticexyz/common/utils\";\nimport { recordToLog } from \"../recordToLog\";\nimport { createBenchmark } from \"@latticexyz/common\";\n\n/**\n * @deprecated\n */\nexport async function getLogs(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n database: PgDatabase<any>,\n {\n chainId,\n address,\n filters = [],\n }: {\n readonly chainId: number;\n readonly address?: Hex;\n readonly filters?: readonly SyncFilter[];\n },\n): Promise<{ blockNumber: bigint; logs: (StorageAdapterLog & { eventName: \"Store_SetRecord\" })[] }> {\n const benchmark = createBenchmark(\"drizzleGetLogs\");\n\n const conditions = filters.length\n ? filters.map((filter) =>\n and(\n address != null ? eq(tables.recordsTable.address, address) : undefined,\n eq(tables.recordsTable.tableId, filter.tableId),\n filter.key0 != null ? eq(tables.recordsTable.key0, filter.key0) : undefined,\n filter.key1 != null ? eq(tables.recordsTable.key1, filter.key1) : undefined,\n ),\n )\n : address != null\n ? [eq(tables.recordsTable.address, address)]\n : [];\n benchmark(\"parse config\");\n\n // Query for the block number that the indexer (i.e. chain) is at, in case the\n // indexer is further along in the chain than a given store/table's last updated\n // block number. We'll then take the highest block number between the indexer's\n // chain state and all the records in the query (in case the records updated\n // between these queries). Using just the highest block number from the queries\n // could potentially signal to the client an older-than-necessary block number,\n // for stores/tables that haven't seen recent activity.\n // TODO: move the block number query into the records query for atomicity so we don't have to merge them here\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n const indexerBlockNumber = chainState?.blockNumber ?? 
0n;\n benchmark(\"query chainState\");\n\n const records = await database\n .select()\n .from(tables.recordsTable)\n .where(or(...conditions))\n .orderBy(\n asc(tables.recordsTable.blockNumber),\n // TODO: add logIndex (https://github.com/latticexyz/mud/issues/1979)\n );\n benchmark(\"query records\");\n\n const blockNumber = records.reduce((max, record) => bigIntMax(max, record.blockNumber ?? 0n), indexerBlockNumber);\n benchmark(\"find block number\");\n\n const logs = records\n // TODO: add this to the query, assuming we can optimize with an index\n .filter((record) => !record.isDeleted)\n .map(recordToLog);\n benchmark(\"map records to logs\");\n\n return { blockNumber, logs };\n}\n","import { StorageAdapterLog } from \"@latticexyz/store-sync\";\nimport { decodeDynamicField } from \"@latticexyz/protocol-parser/internal\";\nimport { RecordData } from \"./common\";\n\nexport function recordToLog(\n record: Omit<RecordData, \"recordBlockNumber\">,\n): StorageAdapterLog & { eventName: \"Store_SetRecord\" } {\n return {\n address: record.address,\n eventName: \"Store_SetRecord\",\n args: {\n tableId: record.tableId,\n keyTuple: decodeDynamicField(\"bytes32[]\", record.keyBytes),\n staticData: record.staticData ?? \"0x\",\n encodedLengths: record.encodedLengths ?? \"0x\",\n dynamicData: record.dynamicData ?? \"0x\",\n },\n } as const;\n}\n","import { Sql } from \"postgres\";\nimport { Middleware } from \"koa\";\nimport Router from \"@koa/router\";\nimport compose from \"koa-compose\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { schemasTable } from \"@latticexyz/store-sync\";\nimport { queryLogs } from \"./queryLogs\";\nimport { recordToLog } from \"./recordToLog\";\nimport { debug, error } from \"../debug\";\nimport { createBenchmark } from \"@latticexyz/common\";\nimport { compress } from \"../koa-middleware/compress\";\n\nexport function apiRoutes(database: Sql): Middleware {\n const router = new Router();\n\n router.get(\"/api/logs\", compress(), async (ctx) => {\n const benchmark = createBenchmark(\"postgres:logs\");\n let options: ReturnType<typeof input.parse>;\n\n try {\n options = input.parse(typeof ctx.query.input === \"string\" ? JSON.parse(ctx.query.input) : {});\n } catch (e) {\n ctx.status = 400;\n ctx.set(\"Content-Type\", \"application/json\");\n ctx.body = JSON.stringify(e);\n debug(e);\n return;\n }\n\n try {\n options.filters = options.filters.length > 0 ? [...options.filters, { tableId: schemasTable.tableId }] : [];\n const records = await queryLogs(database, options ?? {}).execute();\n benchmark(\"query records\");\n const logs = records.map(recordToLog);\n benchmark(\"map records to logs\");\n\n // Ideally we would immediately return an error if the request is for a Store that the indexer\n // is not configured to index. 
Since we don't have easy access to this information here,\n // we return an error if there are no logs found for a given Store, since that would never\n // be the case for a Store that is being indexed (since there would at least be records for the\n // Tables table with tables created during Store initialization).\n if (records.length === 0) {\n ctx.status = 404;\n ctx.body = \"no logs found\";\n error(\n `no logs found for chainId ${options.chainId}, address ${options.address}, filters ${JSON.stringify(\n options.filters,\n )}`,\n );\n return;\n }\n\n const blockNumber = records[0].chainBlockNumber;\n ctx.status = 200;\n\n // max age is set to several multiples of the uncached response time (currently ~10s, but using 60s for wiggle room) to ensure only ~one origin request at a time\n // and stale-while-revalidate below means that the cache is refreshed under the hood while still responding fast (cached)\n const maxAgeSeconds = 60 * 5;\n // we set stale-while-revalidate to the time elapsed by the number of blocks we can fetch from the RPC in the same amount of time as an uncached response\n // meaning it would take ~the same about of time to get an uncached response from the origin as it would to catch up from the currently cached response\n // if an uncached response takes ~10 seconds, we have ~10s to catch up, so let's say we can do enough RPC calls to fetch 4000 blocks\n // with a block per 2 seconds, that means we can serve a stale/cached response for 8000 seconds before we should require the response be returned by the origin\n const staleWhileRevalidateSeconds = 4000 * 2;\n\n ctx.set(\n \"Cache-Control\",\n `public, max-age=${maxAgeSeconds}, stale-while-revalidate=${staleWhileRevalidateSeconds}`,\n );\n\n ctx.set(\"Content-Type\", \"application/json\");\n ctx.body = JSON.stringify({ blockNumber, logs });\n } catch (e) {\n ctx.status = 500;\n ctx.set(\"Content-Type\", \"application/json\");\n ctx.body = JSON.stringify(e);\n error(e);\n }\n });\n\n return compose([router.routes(), router.allowedMethods()]) as Middleware;\n}\n","import { isNotNull } from \"@latticexyz/common/utils\";\nimport { PendingQuery, Row, Sql } from \"postgres\";\nimport { hexToBytes } from \"viem\";\nimport { z } from \"zod\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { transformSchemaName } from \"@latticexyz/store-sync/postgres\";\nimport { Record } from \"./common\";\n\nconst schemaName = transformSchemaName(\"mud\");\n\nfunction and(sql: Sql, conditions: PendingQuery<Row[]>[]): PendingQuery<Row[]> {\n return sql`(${conditions.reduce((query, condition) => sql`${query} AND ${condition}`)})`;\n}\n\nfunction or(sql: Sql, conditions: PendingQuery<Row[]>[]): PendingQuery<Row[]> {\n return sql`(${conditions.reduce((query, condition) => sql`${query} OR ${condition}`)})`;\n}\n\nexport function queryLogs(sql: Sql, opts: z.infer<typeof input>): PendingQuery<Record[]> {\n const conditions = opts.filters.length\n ? opts.filters.map((filter) =>\n and(\n sql,\n [\n opts.address != null ? sql`address = ${hexToBytes(opts.address)}` : null,\n sql`table_id = ${hexToBytes(filter.tableId)}`,\n filter.key0 != null ? sql`key0 = ${hexToBytes(filter.key0)}` : null,\n filter.key1 != null ? sql`key1 = ${hexToBytes(filter.key1)}` : null,\n ].filter(isNotNull),\n ),\n )\n : opts.address != null\n ? [sql`address = ${hexToBytes(opts.address)}`]\n : [];\n\n const where = sql`WHERE ${and(\n sql,\n [sql`is_deleted != true`, conditions.length ? 
or(sql, conditions) : null].filter(isNotNull),\n )}`;\n\n // TODO: implement bytea <> hex columns via custom types: https://github.com/porsager/postgres#custom-types\n return sql<Record[]>`\n WITH\n config AS (\n SELECT\n version AS \"indexerVersion\",\n chain_id AS \"chainId\",\n block_number AS \"chainBlockNumber\"\n FROM ${sql(`${schemaName}.config`)}\n LIMIT 1\n ),\n records AS (\n SELECT\n '0x' || encode(address, 'hex') AS address,\n '0x' || encode(table_id, 'hex') AS \"tableId\",\n '0x' || encode(key_bytes, 'hex') AS \"keyBytes\",\n '0x' || encode(static_data, 'hex') AS \"staticData\",\n '0x' || encode(encoded_lengths, 'hex') AS \"encodedLengths\",\n '0x' || encode(dynamic_data, 'hex') AS \"dynamicData\",\n block_number AS \"recordBlockNumber\",\n log_index AS \"logIndex\"\n FROM ${sql(`${schemaName}.records`)}\n ${where}\n ORDER BY block_number, log_index ASC\n )\n SELECT\n (SELECT COUNT(*) FROM records) AS \"totalRows\",\n *\n FROM config, records\n `;\n}\n"],"mappings":";2QACA,MAAO,gBACP,OAAS,KAAAA,MAAS,MAClB,OAAOC,OAAS,MAChB,OAAOC,OAAU,YACjB,OAAS,uBAAAC,OAA2B,mBACpC,OAAS,mBAAAC,OAAuB,sCAChC,OAAS,WAAAC,OAAe,0BACxB,OAAOC,OAAc,WCRrB,OAAS,cAAAC,MAAkB,OAE3B,OAA2B,0BAAAC,EAAwB,cAAAC,EAAY,gBAAAC,MAAoB,yBACnF,OAAoB,aAAAC,EAAW,mBAAAC,MAAuB,uCCAtD,OAAS,UAAAC,MAAc,kCACvB,OAAS,OAAAC,EAAK,OAAAC,EAAK,MAAAC,EAAI,MAAAC,MAAU,cACjC,OAAS,aAAAC,MAAiB,2BCJ1B,OAAS,sBAAAC,MAA0B,uCAG5B,SAASC,EACdC,EACsD,CACtD,MAAO,CACL,QAASA,EAAO,QAChB,UAAW,kBACX,KAAM,CACJ,QAASA,EAAO,QAChB,SAAUF,EAAmB,YAAaE,EAAO,QAAQ,EACzD,WAAYA,EAAO,YAAc,KACjC,eAAgBA,EAAO,gBAAkB,KACzC,YAAaA,EAAO,aAAe,IACrC,CACF,CACF,CDXA,OAAS,mBAAAC,MAAuB,qBAKhC,eAAsBC,EAEpBC,EACA,CACE,QAAAC,EACA,QAAAC,EACA,QAAAC,EAAU,CAAC,CACb,EAKkG,CAClG,IAAMC,EAAYN,EAAgB,gBAAgB,EAE5CO,EAAaF,EAAQ,OACvBA,EAAQ,IAAKG,GACXC,EACEL,GAAW,KAAOM,EAAGC,EAAO,aAAa,QAASP,CAAO,EAAI,OAC7DM,EAAGC,EAAO,aAAa,QAASH,EAAO,OAAO,EAC9CA,EAAO,MAAQ,KAAOE,EAAGC,EAAO,aAAa,KAAMH,EAAO,IAAI,EAAI,OAClEA,EAAO,MAAQ,KAAOE,EAAGC,EAAO,aAAa,KAAMH,EAAO,IAAI,EAAI,MACpE,CACF,EACAJ,GAAW,KACT,CAACM,EAAGC,EAAO,aAAa,QAASP,CAAO,CAAC,EACzC,CAAC,EACPE,EAAU,cAAc,EAmBxB,IAAMM,GATa,MAAMV,EACtB,OAAO,EACP,KAAKS,EAAO,WAAW,EACvB,MAAMD,EAAGC,EAAO,YAAY,QAASR,CAAO,CAAC,EAC7C,MAAM,CAAC,EACP,QAAQ,EAGR,KAAMU,GAASA,EAAK,KAAK,IAAM,EAAI,CAAC,IACA,aAAe,GACtDP,EAAU,kBAAkB,EAE5B,IAAMQ,EAAU,MAAMZ,EACnB,OAAO,EACP,KAAKS,EAAO,YAAY,EACxB,MAAMI,EAAG,GAAGR,CAAU,CAAC,EACvB,QACCS,EAAIL,EAAO,aAAa,WAAW,CAErC,EACFL,EAAU,eAAe,EAEzB,IAAMW,EAAcH,EAAQ,OAAO,CAACI,EAAKC,IAAWC,EAAUF,EAAKC,EAAO,aAAe,EAAE,EAAGP,CAAkB,EAChHN,EAAU,mBAAmB,EAE7B,IAAMe,EAAOP,EAEV,OAAQK,GAAW,CAACA,EAAO,SAAS,EACpC,IAAIG,CAAW,EAClB,OAAAhB,EAAU,qBAAqB,EAExB,CAAE,YAAAW,EAAa,KAAAI,CAAK,CAC7B,CD1EA,OAAS,WAAAE,MAAe,2BAUxB,eAAsBC,EAAmBC,EAAkD,CAuCzF,MAtC8B,CAC5B,MAAM,QAAQC,EAAM,CAClB,OAAOC,EAAQF,EAAUC,CAAI,CAC/B,EACA,MAAM,QAAQA,EAAM,CAClB,IAAME,EAAUF,EAAK,SAAW,CAAC,EAC3B,CAAE,YAAAG,EAAa,KAAAC,CAAK,EAAI,MAAMH,EAAQF,EAAU,CACpD,GAAGC,EAEH,QAASE,EAAQ,OAAS,EAAI,CAAC,GAAGA,EAAS,CAAE,QAASG,EAAa,OAAQ,CAAC,EAAI,CAAC,CACnF,CAAC,EAEKC,EAASF,EAAK,OAAOG,CAAsB,EAAE,IAAIC,CAAU,EAE3DC,EAAcZ,EAAQO,EAAOM,GAAQ,GAAGC,EAAWD,EAAI,OAAO,KAAKA,EAAI,KAAK,SAAS,EAErFE,EAAiDN,EAAO,IAAKO,GAAU,CAE3E,IAAMC,GADYL,EAAY,IAAI,GAAGE,EAAWE,EAAM,OAAO,KAAKA,EAAM,SAAS,GAAK,CAAC,GAC7D,IAAKH,GAAQ,CACrC,IAAMK,EAAMC,EAAUH,EAAM,UAAwBH,EAAI,KAAK,QAAQ,EAC/DO,EAAQC,EAAgBL,EAAM,YAAaH,EAAI,IAAI,EACzD,MAAO,CAAE,IAAAK,EAAK,MAAAE,EAAO,OAAQ,CAAE,GAAGF,EAAK,GAAGE,CAAM,CAAE,CACpD,CAAC,EAED,MAAO,CACL,GAAGJ,EACH,QAAAC,CACF,CACF,CAAC,EAED,OAAAK,EAAM,4CAA6Cf,EAAK,OAAQE,EAAO,MAAM,EAEtE,CACL,YAAAH,EACA,OAAQS,CACV,CACF,CACF,CAEF,CGvDA,OAAOQ,OAAY,c
ACnB,OAAOC,OAAa,cACpB,OAAS,SAAAC,OAAa,wCACtB,OAAS,gBAAAC,OAAoB,yBCL7B,OAAS,aAAAC,MAAiB,2BAE1B,OAAS,cAAAC,MAAkB,OAG3B,OAAS,uBAAAC,MAA2B,kCAGpC,IAAMC,EAAaD,EAAoB,KAAK,EAE5C,SAASE,EAAIC,EAAUC,EAAwD,CAC7E,OAAOD,KAAOC,EAAW,OAAO,CAACC,EAAOC,IAAcH,IAAME,SAAaC,GAAW,IACtF,CAEA,SAASC,EAAGJ,EAAUC,EAAwD,CAC5E,OAAOD,KAAOC,EAAW,OAAO,CAACC,EAAOC,IAAcH,IAAME,QAAYC,GAAW,IACrF,CAEO,SAASE,EAAUL,EAAUM,EAAqD,CACvF,IAAML,EAAaK,EAAK,QAAQ,OAC5BA,EAAK,QAAQ,IAAKC,GAChBR,EACEC,EACA,CACEM,EAAK,SAAW,KAAON,cAAgBJ,EAAWU,EAAK,OAAO,IAAM,KACpEN,eAAiBJ,EAAWW,EAAO,OAAO,IAC1CA,EAAO,MAAQ,KAAOP,WAAaJ,EAAWW,EAAO,IAAI,IAAM,KAC/DA,EAAO,MAAQ,KAAOP,WAAaJ,EAAWW,EAAO,IAAI,IAAM,IACjE,EAAE,OAAOZ,CAAS,CACpB,CACF,EACAW,EAAK,SAAW,KACd,CAACN,cAAgBJ,EAAWU,EAAK,OAAO,GAAG,EAC3C,CAAC,EAEDE,EAAQR,UAAYD,EACxBC,EACA,CAACA,sBAAyBC,EAAW,OAASG,EAAGJ,EAAKC,CAAU,EAAI,IAAI,EAAE,OAAON,CAAS,CAC5F,IAGA,OAAOK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,eAOMA,EAAI,GAAGF,UAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,eAa1BE,EAAI,GAAGF,WAAoB;AAAA,UAChCU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAQV,CD7DA,OAAS,mBAAAC,OAAuB,qBAGzB,SAASC,EAAUC,EAA2B,CACnD,IAAMC,EAAS,IAAIC,GAEnB,OAAAD,EAAO,IAAI,YAAaE,EAAS,EAAG,MAAOC,GAAQ,CACjD,IAAMC,EAAYC,GAAgB,eAAe,EAC7CC,EAEJ,GAAI,CACFA,EAAUC,GAAM,MAAM,OAAOJ,EAAI,MAAM,OAAU,SAAW,KAAK,MAAMA,EAAI,MAAM,KAAK,EAAI,CAAC,CAAC,CAC9F,OAASK,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,IAAI,eAAgB,kBAAkB,EAC1CA,EAAI,KAAO,KAAK,UAAUK,CAAC,EAC3BC,EAAMD,CAAC,EACP,MACF,CAEA,GAAI,CACFF,EAAQ,QAAUA,EAAQ,QAAQ,OAAS,EAAI,CAAC,GAAGA,EAAQ,QAAS,CAAE,QAASI,GAAa,OAAQ,CAAC,EAAI,CAAC,EAC1G,IAAMC,EAAU,MAAMC,EAAUb,EAAUO,GAAW,CAAC,CAAC,EAAE,QAAQ,EACjEF,EAAU,eAAe,EACzB,IAAMS,EAAOF,EAAQ,IAAIG,CAAW,EAQpC,GAPAV,EAAU,qBAAqB,EAO3BO,EAAQ,SAAW,EAAG,CACxBR,EAAI,OAAS,IACbA,EAAI,KAAO,gBACXY,EACE,6BAA6BT,EAAQ,oBAAoBA,EAAQ,oBAAoB,KAAK,UACxFA,EAAQ,OACV,GACF,EACA,OAGF,IAAMU,EAAcL,EAAQ,CAAC,EAAE,iBAC/BR,EAAI,OAAS,IAIb,IAAMc,EAAgB,GAAK,EAKrBC,EAA8B,IAAO,EAE3Cf,EAAI,IACF,gBACA,mBAAmBc,6BAAyCC,GAC9D,EAEAf,EAAI,IAAI,eAAgB,kBAAkB,EAC1CA,EAAI,KAAO,KAAK,UAAU,CAAE,YAAAa,EAAa,KAAAH,CAAK,CAAC,CACjD,OAASL,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,IAAI,eAAgB,kBAAkB,EAC1CA,EAAI,KAAO,KAAK,UAAUK,CAAC,EAC3BO,EAAMP,CAAC,CACT,CACF,CAAC,EAEMW,GAAQ,CAACnB,EAAO,OAAO,EAAGA,EAAO,eAAe,CAAC,CAAC,CAC3D,CJ/DA,IAAMoB,EAAMC,EACVC,EAAE,aACAC,EACAD,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,CACH,CACF,EAEME,EAAWC,GAASL,EAAI,aAAc,CAAE,QAAS,EAAM,CAAC,EAExDM,EAAS,IAAIC,GAEfP,EAAI,YACNM,EAAO,IAAIE,EAAOR,EAAI,UAAU,CAAC,EAGnCM,EAAO,IAAIG,GAAK,CAAC,EACjBH,EAAO,IAAII,EAAY,CAAC,EACxBJ,EAAO,IACLK,EAAQ,CACN,UAAW,IAAM,GACjB,QAAS,IAAM,EACjB,CAAC,CACH,EACAL,EAAO,IAAIM,EAAW,CAAC,EACvBN,EAAO,IAAIO,EAAUT,CAAQ,CAAC,EAE9BE,EAAO,IACLQ,GAAoB,CAClB,OAAQ,QACR,OAAQC,GAAgB,EACxB,cAAe,UAAa,CAC1B,aAAc,MAAMC,EAAmBC,GAAQb,CAAQ,CAAC,CAC1D,EACF,CAAC,CACH,EAEAE,EAAO,OAAO,CAAE,KAAMN,EAAI,KAAM,KAAMA,EAAI,IAAK,CAAC,EAChD,QAAQ,IAAI,iDAAiDA,EAAI,QAAQA,EAAI,MAAM","names":["z","Koa","cors","createKoaMiddleware","createAppRouter","drizzle","postgres","getAddress","isTableRegistrationLog","logToTable","schemasTable","decodeKey","decodeValueArgs","tables","and","asc","eq","or","bigIntMax","decodeDynamicField","recordToLog","record","createBenchmark","getLogs","database","chainId","address","filters","benchmark","conditions","filter","and","eq","tables","indexerBlockNumber","rows","records","or","asc","blockNumber","max","record","bigIntMax","logs","recordToLog","groupBy","createQueryAdapter","database","opts","getLogs","filters","blockNumber","logs","schemasTable","tables","isTableRegistrationLog","logToTable","logsByTable","log","get
Address","tablesWithRecords","table","records","key","decodeKey","value","decodeValueArgs","debug","Router","compose","input","schemasTable","isNotNull","hexToBytes","transformSchemaName","schemaName","and","sql","conditions","query","condition","or","queryLogs","opts","filter","where","createBenchmark","apiRoutes","database","router","Router","compress","ctx","benchmark","createBenchmark","options","input","e","debug","schemasTable","records","queryLogs","logs","recordToLog","error","blockNumber","maxAgeSeconds","staleWhileRevalidateSeconds","compose","env","parseEnv","z","frontendEnvSchema","database","postgres","server","Koa","sentry","cors","healthcheck","metrics","helloWorld","apiRoutes","createKoaMiddleware","createAppRouter","createQueryAdapter","drizzle"]}
+ {"version":3,"sources":["../../src/bin/postgres-frontend.ts","../../src/postgres/deprecated/createQueryAdapter.ts","../../src/postgres/deprecated/getLogs.ts","../../src/postgres/recordToLog.ts","../../src/postgres/apiRoutes.ts","../../src/postgres/queryLogs.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport Koa from \"koa\";\nimport cors from \"@koa/cors\";\nimport { createKoaMiddleware } from \"trpc-koa-adapter\";\nimport { createAppRouter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { frontendEnvSchema, parseEnv } from \"./parseEnv\";\nimport { createQueryAdapter } from \"../postgres/deprecated/createQueryAdapter\";\nimport { apiRoutes } from \"../postgres/apiRoutes\";\nimport { sentry } from \"../koa-middleware/sentry\";\nimport { healthcheck } from \"../koa-middleware/healthcheck\";\nimport { helloWorld } from \"../koa-middleware/helloWorld\";\nimport { metrics } from \"../koa-middleware/metrics\";\n\nconst env = parseEnv(\n z.intersection(\n frontendEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst database = postgres(env.DATABASE_URL, { prepare: false });\n\nconst server = new Koa();\n\nif (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n}\n\nserver.use(cors());\nserver.use(healthcheck());\nserver.use(\n metrics({\n isHealthy: () => true,\n isReady: () => true,\n }),\n);\nserver.use(helloWorld());\nserver.use(apiRoutes(database));\n\nserver.use(\n createKoaMiddleware({\n prefix: \"/trpc\",\n router: createAppRouter(),\n createContext: async () => ({\n queryAdapter: await createQueryAdapter(drizzle(database)),\n }),\n }),\n);\n\nserver.listen({ host: env.HOST, port: env.PORT });\nconsole.log(`postgres indexer frontend listening on http://${env.HOST}:${env.PORT}`);\n","import { getAddress } from \"viem\";\nimport { PgDatabase } from \"drizzle-orm/pg-core\";\nimport { TableWithRecords, isTableRegistrationLog, logToTable, schemasTable } from \"@latticexyz/store-sync\";\nimport { KeySchema, decodeKey, decodeValueArgs } from \"@latticexyz/protocol-parser/internal\";\nimport { QueryAdapter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { debug } from \"../../debug\";\nimport { getLogs } from \"./getLogs\";\nimport { groupBy } from \"@latticexyz/common/utils\";\n\n/**\n * Creates a query adapter for the tRPC server/client to query data from Postgres.\n *\n * @param {PgDatabase<any>} database Postgres database object from Drizzle\n * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.\n * @deprecated\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function createQueryAdapter(database: PgDatabase<any>): Promise<QueryAdapter> {\n const adapter: QueryAdapter = {\n async getLogs(opts) {\n return getLogs(database, opts);\n },\n async findAll(opts) {\n const filters = opts.filters ?? [];\n const { blockNumber, logs } = await getLogs(database, {\n ...opts,\n // make sure we're always retrieving `store.Tables` table, so we can decode table values\n filters: filters.length > 0 ? 
[...filters, { tableId: schemasTable.tableId }] : [],\n });\n\n const tables = logs.filter(isTableRegistrationLog).map(logToTable);\n\n const logsByTable = groupBy(logs, (log) => `${getAddress(log.address)}:${log.args.tableId}`);\n\n const tablesWithRecords: readonly TableWithRecords[] = tables.map((table) => {\n const tableLogs = logsByTable.get(`${getAddress(table.address)}:${table.tableId}`) ?? [];\n const records = tableLogs.map((log) => {\n const key = decodeKey(table.keySchema as KeySchema, log.args.keyTuple);\n const value = decodeValueArgs(table.valueSchema, log.args);\n return { key, value, fields: { ...key, ...value } };\n });\n\n return {\n ...table,\n records,\n };\n });\n\n debug(\"findAll: decoded %d logs across %d tables\", logs.length, tables.length);\n\n return {\n blockNumber,\n tables: tablesWithRecords,\n };\n },\n };\n return adapter;\n}\n","import { PgDatabase } from \"drizzle-orm/pg-core\";\nimport { Hex } from \"viem\";\nimport { StorageAdapterLog, SyncFilter } from \"@latticexyz/store-sync\";\nimport { tables } from \"@latticexyz/store-sync/postgres\";\nimport { and, asc, eq, or } from \"drizzle-orm\";\nimport { bigIntMax } from \"@latticexyz/common/utils\";\nimport { recordToLog } from \"../recordToLog\";\nimport { createBenchmark } from \"@latticexyz/common\";\n\n/**\n * @deprecated\n */\nexport async function getLogs(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n database: PgDatabase<any>,\n {\n chainId,\n address,\n filters = [],\n }: {\n readonly chainId: number;\n readonly address?: Hex;\n readonly filters?: readonly SyncFilter[];\n },\n): Promise<{ blockNumber: bigint; logs: (StorageAdapterLog & { eventName: \"Store_SetRecord\" })[] }> {\n const benchmark = createBenchmark(\"drizzleGetLogs\");\n\n const conditions = filters.length\n ? filters.map((filter) =>\n and(\n address != null ? eq(tables.recordsTable.address, address) : undefined,\n eq(tables.recordsTable.tableId, filter.tableId),\n filter.key0 != null ? eq(tables.recordsTable.key0, filter.key0) : undefined,\n filter.key1 != null ? eq(tables.recordsTable.key1, filter.key1) : undefined,\n ),\n )\n : address != null\n ? [eq(tables.recordsTable.address, address)]\n : [];\n benchmark(\"parse config\");\n\n // Query for the block number that the indexer (i.e. chain) is at, in case the\n // indexer is further along in the chain than a given store/table's last updated\n // block number. We'll then take the highest block number between the indexer's\n // chain state and all the records in the query (in case the records updated\n // between these queries). Using just the highest block number from the queries\n // could potentially signal to the client an older-than-necessary block number,\n // for stores/tables that haven't seen recent activity.\n // TODO: move the block number query into the records query for atomicity so we don't have to merge them here\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n const indexerBlockNumber = chainState?.blockNumber ?? 
0n;\n benchmark(\"query chainState\");\n\n const records = await database\n .select()\n .from(tables.recordsTable)\n .where(or(...conditions))\n .orderBy(\n asc(tables.recordsTable.blockNumber),\n // TODO: add logIndex (https://github.com/latticexyz/mud/issues/1979)\n );\n benchmark(\"query records\");\n\n const blockNumber = records.reduce((max, record) => bigIntMax(max, record.blockNumber ?? 0n), indexerBlockNumber);\n benchmark(\"find block number\");\n\n const logs = records\n // TODO: add this to the query, assuming we can optimize with an index\n .filter((record) => !record.isDeleted)\n .map(recordToLog);\n benchmark(\"map records to logs\");\n\n return { blockNumber, logs };\n}\n","import { StorageAdapterLog } from \"@latticexyz/store-sync\";\nimport { decodeDynamicField } from \"@latticexyz/protocol-parser/internal\";\nimport { RecordData } from \"./common\";\n\nexport function recordToLog(\n record: Omit<RecordData, \"recordBlockNumber\">,\n): StorageAdapterLog & { eventName: \"Store_SetRecord\" } {\n return {\n address: record.address,\n eventName: \"Store_SetRecord\",\n args: {\n tableId: record.tableId,\n keyTuple: decodeDynamicField(\"bytes32[]\", record.keyBytes),\n staticData: record.staticData ?? \"0x\",\n encodedLengths: record.encodedLengths ?? \"0x\",\n dynamicData: record.dynamicData ?? \"0x\",\n },\n } as const;\n}\n","import { Sql } from \"postgres\";\nimport { Middleware } from \"koa\";\nimport Router from \"@koa/router\";\nimport compose from \"koa-compose\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { schemasTable } from \"@latticexyz/store-sync\";\nimport { queryLogs } from \"./queryLogs\";\nimport { recordToLog } from \"./recordToLog\";\nimport { debug, error } from \"../debug\";\nimport { createBenchmark } from \"@latticexyz/common\";\nimport { compress } from \"../koa-middleware/compress\";\n\nexport function apiRoutes(database: Sql): Middleware {\n const router = new Router();\n\n router.get(\"/api/logs\", compress(), async (ctx) => {\n const benchmark = createBenchmark(\"postgres:logs\");\n let options: ReturnType<typeof input.parse>;\n\n try {\n options = input.parse(typeof ctx.query.input === \"string\" ? JSON.parse(ctx.query.input) : {});\n } catch (e) {\n ctx.status = 400;\n ctx.set(\"Content-Type\", \"application/json\");\n ctx.body = JSON.stringify(e);\n debug(e);\n return;\n }\n\n try {\n options.filters = options.filters.length > 0 ? [...options.filters, { tableId: schemasTable.tableId }] : [];\n const records = await queryLogs(database, options ?? {}).execute();\n benchmark(\"query records\");\n const logs = records.map(recordToLog);\n benchmark(\"map records to logs\");\n\n // Ideally we would immediately return an error if the request is for a Store that the indexer\n // is not configured to index. 
Since we don't have easy access to this information here,\n // we return an error if there are no logs found for a given Store, since that would never\n // be the case for a Store that is being indexed (since there would at least be records for the\n // Tables table with tables created during Store initialization).\n if (records.length === 0) {\n ctx.status = 404;\n ctx.body = \"no logs found\";\n error(\n `no logs found for chainId ${options.chainId}, address ${options.address}, filters ${JSON.stringify(\n options.filters,\n )}`,\n );\n return;\n }\n\n const blockNumber = records[0].chainBlockNumber;\n ctx.status = 200;\n\n // max age is set to several multiples of the uncached response time (currently ~10s, but using 60s for wiggle room) to ensure only ~one origin request at a time\n // and stale-while-revalidate below means that the cache is refreshed under the hood while still responding fast (cached)\n const maxAgeSeconds = 60 * 5;\n // we set stale-while-revalidate to the time elapsed by the number of blocks we can fetch from the RPC in the same amount of time as an uncached response\n // meaning it would take ~the same about of time to get an uncached response from the origin as it would to catch up from the currently cached response\n // if an uncached response takes ~10 seconds, we have ~10s to catch up, so let's say we can do enough RPC calls to fetch 4000 blocks\n // with a block per 2 seconds, that means we can serve a stale/cached response for 8000 seconds before we should require the response be returned by the origin\n const staleWhileRevalidateSeconds = 4000 * 2;\n\n ctx.set(\n \"Cache-Control\",\n `public, max-age=${maxAgeSeconds}, stale-while-revalidate=${staleWhileRevalidateSeconds}`,\n );\n\n ctx.set(\"Content-Type\", \"application/json\");\n ctx.body = JSON.stringify({ blockNumber, logs });\n } catch (e) {\n ctx.status = 500;\n ctx.set(\"Content-Type\", \"application/json\");\n ctx.body = JSON.stringify(e);\n error(e);\n }\n });\n\n return compose([router.routes(), router.allowedMethods()]) as Middleware;\n}\n","import { isNotNull } from \"@latticexyz/common/utils\";\nimport { PendingQuery, Row, Sql } from \"postgres\";\nimport { hexToBytes } from \"viem\";\nimport { z } from \"zod\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { transformSchemaName } from \"@latticexyz/store-sync/postgres\";\nimport { Record } from \"./common\";\n\nconst schemaName = transformSchemaName(\"mud\");\n\nfunction and(sql: Sql, conditions: PendingQuery<Row[]>[]): PendingQuery<Row[]> {\n return sql`(${conditions.reduce((query, condition) => sql`${query} AND ${condition}`)})`;\n}\n\nfunction or(sql: Sql, conditions: PendingQuery<Row[]>[]): PendingQuery<Row[]> {\n return sql`(${conditions.reduce((query, condition) => sql`${query} OR ${condition}`)})`;\n}\n\nexport function queryLogs(sql: Sql, opts: z.infer<typeof input>): PendingQuery<Record[]> {\n const conditions = opts.filters.length\n ? opts.filters.map((filter) =>\n and(\n sql,\n [\n opts.address != null ? sql`address = ${hexToBytes(opts.address)}` : null,\n sql`table_id = ${hexToBytes(filter.tableId)}`,\n filter.key0 != null ? sql`key0 = ${hexToBytes(filter.key0)}` : null,\n filter.key1 != null ? sql`key1 = ${hexToBytes(filter.key1)}` : null,\n ].filter(isNotNull),\n ),\n )\n : opts.address != null\n ? [sql`address = ${hexToBytes(opts.address)}`]\n : [];\n\n const where = sql`WHERE ${and(\n sql,\n [sql`is_deleted != true`, conditions.length ? 
or(sql, conditions) : null].filter(isNotNull),\n )}`;\n\n // TODO: implement bytea <> hex columns via custom types: https://github.com/porsager/postgres#custom-types\n return sql<Record[]>`\n WITH\n config AS (\n SELECT\n version AS \"indexerVersion\",\n chain_id AS \"chainId\",\n block_number AS \"chainBlockNumber\"\n FROM ${sql(`${schemaName}.config`)}\n LIMIT 1\n ),\n records AS (\n SELECT\n '0x' || encode(address, 'hex') AS address,\n '0x' || encode(table_id, 'hex') AS \"tableId\",\n '0x' || encode(key_bytes, 'hex') AS \"keyBytes\",\n '0x' || encode(static_data, 'hex') AS \"staticData\",\n '0x' || encode(encoded_lengths, 'hex') AS \"encodedLengths\",\n '0x' || encode(dynamic_data, 'hex') AS \"dynamicData\",\n block_number AS \"recordBlockNumber\",\n log_index AS \"logIndex\"\n FROM ${sql(`${schemaName}.records`)}\n ${where}\n ORDER BY block_number, log_index ASC\n )\n SELECT\n (SELECT COUNT(*) FROM records) AS \"totalRows\",\n *\n FROM config, records\n `;\n}\n"],"mappings":";2QACA,MAAO,gBACP,OAAS,KAAAA,MAAS,MAClB,OAAOC,OAAS,MAChB,OAAOC,OAAU,YACjB,OAAS,uBAAAC,OAA2B,mBACpC,OAAS,mBAAAC,OAAuB,sCAChC,OAAS,WAAAC,OAAe,0BACxB,OAAOC,OAAc,WCRrB,OAAS,cAAAC,MAAkB,OAE3B,OAA2B,0BAAAC,EAAwB,cAAAC,EAAY,gBAAAC,MAAoB,yBACnF,OAAoB,aAAAC,EAAW,mBAAAC,MAAuB,uCCAtD,OAAS,UAAAC,MAAc,kCACvB,OAAS,OAAAC,EAAK,OAAAC,EAAK,MAAAC,EAAI,MAAAC,MAAU,cACjC,OAAS,aAAAC,MAAiB,2BCJ1B,OAAS,sBAAAC,MAA0B,uCAG5B,SAASC,EACdC,EACsD,CACtD,MAAO,CACL,QAASA,EAAO,QAChB,UAAW,kBACX,KAAM,CACJ,QAASA,EAAO,QAChB,SAAUF,EAAmB,YAAaE,EAAO,QAAQ,EACzD,WAAYA,EAAO,YAAc,KACjC,eAAgBA,EAAO,gBAAkB,KACzC,YAAaA,EAAO,aAAe,IACrC,CACF,CACF,CDXA,OAAS,mBAAAC,MAAuB,qBAKhC,eAAsBC,EAEpBC,EACA,CACE,QAAAC,EACA,QAAAC,EACA,QAAAC,EAAU,CAAC,CACb,EAKkG,CAClG,IAAMC,EAAYN,EAAgB,gBAAgB,EAE5CO,EAAaF,EAAQ,OACvBA,EAAQ,IAAKG,GACXC,EACEL,GAAW,KAAOM,EAAGC,EAAO,aAAa,QAASP,CAAO,EAAI,OAC7DM,EAAGC,EAAO,aAAa,QAASH,EAAO,OAAO,EAC9CA,EAAO,MAAQ,KAAOE,EAAGC,EAAO,aAAa,KAAMH,EAAO,IAAI,EAAI,OAClEA,EAAO,MAAQ,KAAOE,EAAGC,EAAO,aAAa,KAAMH,EAAO,IAAI,EAAI,MACpE,CACF,EACAJ,GAAW,KACT,CAACM,EAAGC,EAAO,aAAa,QAASP,CAAO,CAAC,EACzC,CAAC,EACPE,EAAU,cAAc,EAmBxB,IAAMM,GATa,MAAMV,EACtB,OAAO,EACP,KAAKS,EAAO,WAAW,EACvB,MAAMD,EAAGC,EAAO,YAAY,QAASR,CAAO,CAAC,EAC7C,MAAM,CAAC,EACP,QAAQ,EAGR,KAAMU,GAASA,EAAK,KAAK,IAAM,EAAI,CAAC,IACA,aAAe,GACtDP,EAAU,kBAAkB,EAE5B,IAAMQ,EAAU,MAAMZ,EACnB,OAAO,EACP,KAAKS,EAAO,YAAY,EACxB,MAAMI,EAAG,GAAGR,CAAU,CAAC,EACvB,QACCS,EAAIL,EAAO,aAAa,WAAW,CAErC,EACFL,EAAU,eAAe,EAEzB,IAAMW,EAAcH,EAAQ,OAAO,CAACI,EAAKC,IAAWC,EAAUF,EAAKC,EAAO,aAAe,EAAE,EAAGP,CAAkB,EAChHN,EAAU,mBAAmB,EAE7B,IAAMe,EAAOP,EAEV,OAAQK,GAAW,CAACA,EAAO,SAAS,EACpC,IAAIG,CAAW,EAClB,OAAAhB,EAAU,qBAAqB,EAExB,CAAE,YAAAW,EAAa,KAAAI,CAAK,CAC7B,CD1EA,OAAS,WAAAE,MAAe,2BAUxB,eAAsBC,EAAmBC,EAAkD,CAuCzF,MAtC8B,CAC5B,MAAM,QAAQC,EAAM,CAClB,OAAOC,EAAQF,EAAUC,CAAI,CAC/B,EACA,MAAM,QAAQA,EAAM,CAClB,IAAME,EAAUF,EAAK,SAAW,CAAC,EAC3B,CAAE,YAAAG,EAAa,KAAAC,CAAK,EAAI,MAAMH,EAAQF,EAAU,CACpD,GAAGC,EAEH,QAASE,EAAQ,OAAS,EAAI,CAAC,GAAGA,EAAS,CAAE,QAASG,EAAa,OAAQ,CAAC,EAAI,CAAC,CACnF,CAAC,EAEKC,EAASF,EAAK,OAAOG,CAAsB,EAAE,IAAIC,CAAU,EAE3DC,EAAcZ,EAAQO,EAAOM,GAAQ,GAAGC,EAAWD,EAAI,OAAO,KAAKA,EAAI,KAAK,SAAS,EAErFE,EAAiDN,EAAO,IAAKO,GAAU,CAE3E,IAAMC,GADYL,EAAY,IAAI,GAAGE,EAAWE,EAAM,OAAO,KAAKA,EAAM,SAAS,GAAK,CAAC,GAC7D,IAAKH,GAAQ,CACrC,IAAMK,EAAMC,EAAUH,EAAM,UAAwBH,EAAI,KAAK,QAAQ,EAC/DO,EAAQC,EAAgBL,EAAM,YAAaH,EAAI,IAAI,EACzD,MAAO,CAAE,IAAAK,EAAK,MAAAE,EAAO,OAAQ,CAAE,GAAGF,EAAK,GAAGE,CAAM,CAAE,CACpD,CAAC,EAED,MAAO,CACL,GAAGJ,EACH,QAAAC,CACF,CACF,CAAC,EAED,OAAAK,EAAM,4CAA6Cf,EAAK,OAAQE,EAAO,MAAM,EAEtE,CACL,YAAAH,EACA,OAAQS,CACV,CACF,CACF,CAEF,CGvDA,OAAOQ,OAAY,c
ACnB,OAAOC,OAAa,cACpB,OAAS,SAAAC,OAAa,wCACtB,OAAS,gBAAAC,OAAoB,yBCL7B,OAAS,aAAAC,MAAiB,2BAE1B,OAAS,cAAAC,MAAkB,OAG3B,OAAS,uBAAAC,MAA2B,kCAGpC,IAAMC,EAAaD,EAAoB,KAAK,EAE5C,SAASE,EAAIC,EAAUC,EAAwD,CAC7E,OAAOD,KAAOC,EAAW,OAAO,CAACC,EAAOC,IAAcH,IAAME,SAAaC,GAAW,IACtF,CAEA,SAASC,EAAGJ,EAAUC,EAAwD,CAC5E,OAAOD,KAAOC,EAAW,OAAO,CAACC,EAAOC,IAAcH,IAAME,QAAYC,GAAW,IACrF,CAEO,SAASE,EAAUL,EAAUM,EAAqD,CACvF,IAAML,EAAaK,EAAK,QAAQ,OAC5BA,EAAK,QAAQ,IAAKC,GAChBR,EACEC,EACA,CACEM,EAAK,SAAW,KAAON,cAAgBJ,EAAWU,EAAK,OAAO,IAAM,KACpEN,eAAiBJ,EAAWW,EAAO,OAAO,IAC1CA,EAAO,MAAQ,KAAOP,WAAaJ,EAAWW,EAAO,IAAI,IAAM,KAC/DA,EAAO,MAAQ,KAAOP,WAAaJ,EAAWW,EAAO,IAAI,IAAM,IACjE,EAAE,OAAOZ,CAAS,CACpB,CACF,EACAW,EAAK,SAAW,KACd,CAACN,cAAgBJ,EAAWU,EAAK,OAAO,GAAG,EAC3C,CAAC,EAEDE,EAAQR,UAAYD,EACxBC,EACA,CAACA,sBAAyBC,EAAW,OAASG,EAAGJ,EAAKC,CAAU,EAAI,IAAI,EAAE,OAAON,CAAS,CAC5F,IAGA,OAAOK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,eAOMA,EAAI,GAAGF,UAAmB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,eAa1BE,EAAI,GAAGF,WAAoB;AAAA,UAChCU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,GAQV,CD7DA,OAAS,mBAAAC,OAAuB,qBAGzB,SAASC,EAAUC,EAA2B,CACnD,IAAMC,EAAS,IAAIC,GAEnB,OAAAD,EAAO,IAAI,YAAaE,EAAS,EAAG,MAAOC,GAAQ,CACjD,IAAMC,EAAYC,GAAgB,eAAe,EAC7CC,EAEJ,GAAI,CACFA,EAAUC,GAAM,MAAM,OAAOJ,EAAI,MAAM,OAAU,SAAW,KAAK,MAAMA,EAAI,MAAM,KAAK,EAAI,CAAC,CAAC,CAC9F,OAASK,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,IAAI,eAAgB,kBAAkB,EAC1CA,EAAI,KAAO,KAAK,UAAUK,CAAC,EAC3BC,EAAMD,CAAC,EACP,MACF,CAEA,GAAI,CACFF,EAAQ,QAAUA,EAAQ,QAAQ,OAAS,EAAI,CAAC,GAAGA,EAAQ,QAAS,CAAE,QAASI,GAAa,OAAQ,CAAC,EAAI,CAAC,EAC1G,IAAMC,EAAU,MAAMC,EAAUb,EAAUO,GAAW,CAAC,CAAC,EAAE,QAAQ,EACjEF,EAAU,eAAe,EACzB,IAAMS,EAAOF,EAAQ,IAAIG,CAAW,EAQpC,GAPAV,EAAU,qBAAqB,EAO3BO,EAAQ,SAAW,EAAG,CACxBR,EAAI,OAAS,IACbA,EAAI,KAAO,gBACXY,EACE,6BAA6BT,EAAQ,oBAAoBA,EAAQ,oBAAoB,KAAK,UACxFA,EAAQ,OACV,GACF,EACA,OAGF,IAAMU,EAAcL,EAAQ,CAAC,EAAE,iBAC/BR,EAAI,OAAS,IAIb,IAAMc,EAAgB,GAAK,EAKrBC,EAA8B,IAAO,EAE3Cf,EAAI,IACF,gBACA,mBAAmBc,6BAAyCC,GAC9D,EAEAf,EAAI,IAAI,eAAgB,kBAAkB,EAC1CA,EAAI,KAAO,KAAK,UAAU,CAAE,YAAAa,EAAa,KAAAH,CAAK,CAAC,CACjD,OAASL,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,IAAI,eAAgB,kBAAkB,EAC1CA,EAAI,KAAO,KAAK,UAAUK,CAAC,EAC3BO,EAAMP,CAAC,CACT,CACF,CAAC,EAEMW,GAAQ,CAACnB,EAAO,OAAO,EAAGA,EAAO,eAAe,CAAC,CAAC,CAC3D,CJ/DA,IAAMoB,EAAMC,EACVC,EAAE,aACAC,EACAD,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,CACH,CACF,EAEME,EAAWC,GAASL,EAAI,aAAc,CAAE,QAAS,EAAM,CAAC,EAExDM,EAAS,IAAIC,GAEfP,EAAI,YACNM,EAAO,IAAIE,EAAOR,EAAI,UAAU,CAAC,EAGnCM,EAAO,IAAIG,GAAK,CAAC,EACjBH,EAAO,IAAII,EAAY,CAAC,EACxBJ,EAAO,IACLK,EAAQ,CACN,UAAW,IAAM,GACjB,QAAS,IAAM,EACjB,CAAC,CACH,EACAL,EAAO,IAAIM,EAAW,CAAC,EACvBN,EAAO,IAAIO,EAAUT,CAAQ,CAAC,EAE9BE,EAAO,IACLQ,GAAoB,CAClB,OAAQ,QACR,OAAQC,GAAgB,EACxB,cAAe,UAAa,CAC1B,aAAc,MAAMC,EAAmBC,GAAQb,CAAQ,CAAC,CAC1D,EACF,CAAC,CACH,EAEAE,EAAO,OAAO,CAAE,KAAMN,EAAI,KAAM,KAAMA,EAAI,IAAK,CAAC,EAChD,QAAQ,IAAI,iDAAiDA,EAAI,QAAQA,EAAI,MAAM","names":["z","Koa","cors","createKoaMiddleware","createAppRouter","drizzle","postgres","getAddress","isTableRegistrationLog","logToTable","schemasTable","decodeKey","decodeValueArgs","tables","and","asc","eq","or","bigIntMax","decodeDynamicField","recordToLog","record","createBenchmark","getLogs","database","chainId","address","filters","benchmark","conditions","filter","and","eq","tables","indexerBlockNumber","rows","records","or","asc","blockNumber","max","record","bigIntMax","logs","recordToLog","groupBy","createQueryAdapter","database","opts","getLogs","filters","blockNumber","logs","schemasTable","tables","isTableRegistrationLog","logToTable","logsByTable","log","get
Address","tablesWithRecords","table","records","key","decodeKey","value","decodeValueArgs","debug","Router","compose","input","schemasTable","isNotNull","hexToBytes","transformSchemaName","schemaName","and","sql","conditions","query","condition","or","queryLogs","opts","filter","where","createBenchmark","apiRoutes","database","router","Router","compress","ctx","benchmark","createBenchmark","options","input","e","debug","schemasTable","records","queryLogs","logs","recordToLog","error","blockNumber","maxAgeSeconds","staleWhileRevalidateSeconds","compose","env","parseEnv","z","frontendEnvSchema","database","postgres","server","Koa","sentry","cors","healthcheck","metrics","helloWorld","apiRoutes","createKoaMiddleware","createAppRouter","createQueryAdapter","drizzle"]}
@@ -1,3 +1,3 @@
1
1
  #!/usr/bin/env node
2
- import{b as m,c as u}from"../chunk-7B4KE2DO.js";import"dotenv/config";import{z as a}from"zod";import{eq as C}from"drizzle-orm";import{createPublicClient as g,fallback as _,webSocket as h,http as A}from"viem";import{isDefined as E}from"@latticexyz/common/utils";import{combineLatest as R,filter as S,first as O}from"rxjs";import{drizzle as k}from"drizzle-orm/postgres-js";import w from"postgres";import{cleanDatabase as B,createStorageAdapter as P,shouldCleanDatabase as K}from"@latticexyz/store-sync/postgres";import{createStoreSync as N}from"@latticexyz/store-sync";var t=u(a.intersection(m,a.object({DATABASE_URL:a.string(),HEALTHCHECK_HOST:a.string().optional(),HEALTHCHECK_PORT:a.coerce.number().optional()}))),D=[t.RPC_WS_URL?h(t.RPC_WS_URL):void 0,t.RPC_HTTP_URL?A(t.RPC_HTTP_URL):void 0].filter(E),n=g({transport:_(D),pollingInterval:t.POLLING_INTERVAL}),T=await n.getChainId(),i=k(w(t.DATABASE_URL,{prepare:!1}));await K(i,T)&&(console.log("outdated database detected, clearing data to start fresh"),await B(i));var{storageAdapter:v,tables:p}=await P({database:i,publicClient:n}),s=t.START_BLOCK;async function c(){try{return(await i.select().from(p.configTable).where(C(p.configTable.chainId,T)).limit(1).execute().then(o=>o.find(()=>!0)))?.blockNumber}catch{}}async function y(){let[e,o]=await Promise.all([c(),n.getBlock({blockTag:t.FOLLOW_BLOCK_TAG})]);return o.number-(e??-1n)}var d=await c();d!=null&&(s=d+1n,console.log("resuming from block number",s));var{latestBlockNumber$:U,storedBlockLogs$:f}=await N({storageAdapter:v,publicClient:n,followBlockTag:t.FOLLOW_BLOCK_TAG,startBlock:s,maxBlockRange:t.MAX_BLOCK_RANGE,address:t.STORE_ADDRESS});f.subscribe();var l=!1;R([U,f]).pipe(S(([e,{blockNumber:o}])=>e===o),O()).subscribe(()=>{l=!0,console.log("all caught up")});if(t.HEALTHCHECK_HOST!=null||t.HEALTHCHECK_PORT!=null){let{default:e}=await import("koa"),{default:o}=await import("@koa/cors"),{healthcheck:b}=await import("../healthcheck-7XXWJH5U.js"),{metrics:H}=await import("../metrics-Q6NJ3DNN.js"),{helloWorld:L}=await import("../helloWorld-BMBNVEA7.js"),r=new e;r.use(o()),r.use(b({isReady:()=>l})),r.use(H({isHealthy:()=>!0,isReady:()=>l,getLatestStoredBlockNumber:c,getDistanceFromFollowBlock:y,followBlockTag:t.FOLLOW_BLOCK_TAG})),r.use(L()),r.listen({host:t.HEALTHCHECK_HOST,port:t.HEALTHCHECK_PORT}),console.log(`postgres indexer healthcheck server listening on http://${t.HEALTHCHECK_HOST}:${t.HEALTHCHECK_PORT}`)}
2
+ import{b as m,c as u}from"../chunk-M2HHWHGP.js";import"dotenv/config";import{z as a}from"zod";import{eq as C}from"drizzle-orm";import{createPublicClient as g,fallback as _,webSocket as h,http as A}from"viem";import{isDefined as E}from"@latticexyz/common/utils";import{combineLatest as R,filter as S,first as O}from"rxjs";import{drizzle as k}from"drizzle-orm/postgres-js";import w from"postgres";import{cleanDatabase as B,createStorageAdapter as P,shouldCleanDatabase as K}from"@latticexyz/store-sync/postgres";import{createStoreSync as N}from"@latticexyz/store-sync";var t=u(a.intersection(m,a.object({DATABASE_URL:a.string(),HEALTHCHECK_HOST:a.string().optional(),HEALTHCHECK_PORT:a.coerce.number().optional()}))),D=[t.RPC_WS_URL?h(t.RPC_WS_URL):void 0,t.RPC_HTTP_URL?A(t.RPC_HTTP_URL):void 0].filter(E),n=g({transport:_(D),pollingInterval:t.POLLING_INTERVAL}),T=await n.getChainId(),i=k(w(t.DATABASE_URL,{prepare:!1}));await K(i,T)&&(console.log("outdated database detected, clearing data to start fresh"),await B(i));var{storageAdapter:v,tables:p}=await P({database:i,publicClient:n}),s=t.START_BLOCK;async function c(){try{return(await i.select().from(p.configTable).where(C(p.configTable.chainId,T)).limit(1).execute().then(o=>o.find(()=>!0)))?.blockNumber}catch{}}async function y(){let[e,o]=await Promise.all([c(),n.getBlock({blockTag:t.FOLLOW_BLOCK_TAG})]);return o.number-(e??-1n)}var d=await c();d!=null&&(s=d+1n,console.log("resuming from block number",s));var{latestBlockNumber$:U,storedBlockLogs$:f}=await N({storageAdapter:v,publicClient:n,followBlockTag:t.FOLLOW_BLOCK_TAG,startBlock:s,maxBlockRange:t.MAX_BLOCK_RANGE,address:t.STORE_ADDRESS});f.subscribe();var l=!1;R([U,f]).pipe(S(([e,{blockNumber:o}])=>e===o),O()).subscribe(()=>{l=!0,console.log("all caught up")});if(t.HEALTHCHECK_HOST!=null||t.HEALTHCHECK_PORT!=null){let{default:e}=await import("koa"),{default:o}=await import("@koa/cors"),{healthcheck:b}=await import("../healthcheck-7XXWJH5U.js"),{metrics:H}=await import("../metrics-Q6NJ3DNN.js"),{helloWorld:L}=await import("../helloWorld-BMBNVEA7.js"),r=new e;r.use(o()),r.use(b({isReady:()=>l})),r.use(H({isHealthy:()=>!0,isReady:()=>l,getLatestStoredBlockNumber:c,getDistanceFromFollowBlock:y,followBlockTag:t.FOLLOW_BLOCK_TAG})),r.use(L()),r.listen({host:t.HEALTHCHECK_HOST,port:t.HEALTHCHECK_PORT}),console.log(`postgres indexer healthcheck server listening on http://${t.HEALTHCHECK_HOST}:${t.HEALTHCHECK_PORT}`)}
3
3
  //# sourceMappingURL=postgres-indexer.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../../bin/postgres-indexer.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { createPublicClient, fallback, webSocket, http, Transport } from \"viem\";\nimport { isDefined } from \"@latticexyz/common/utils\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { cleanDatabase, createStorageAdapter, shouldCleanDatabase } from \"@latticexyz/store-sync/postgres\";\nimport { createStoreSync } from \"@latticexyz/store-sync\";\nimport { indexerEnvSchema, parseEnv } from \"./parseEnv\";\n\nconst env = parseEnv(\n z.intersection(\n indexerEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n HEALTHCHECK_HOST: z.string().optional(),\n HEALTHCHECK_PORT: z.coerce.number().optional(),\n }),\n ),\n);\n\nconst transports: Transport[] = [\n // prefer WS when specified\n env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : undefined,\n // otherwise use or fallback to HTTP\n env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : undefined,\n].filter(isDefined);\n\nconst publicClient = createPublicClient({\n transport: fallback(transports),\n pollingInterval: env.POLLING_INTERVAL,\n});\n\nconst chainId = await publicClient.getChainId();\nconst database = drizzle(postgres(env.DATABASE_URL, { prepare: false }));\n\nif (await shouldCleanDatabase(database, chainId)) {\n console.log(\"outdated database detected, clearing data to start fresh\");\n await cleanDatabase(database);\n}\n\nconst { storageAdapter, tables } = await createStorageAdapter({ database, publicClient });\n\nlet startBlock = env.START_BLOCK;\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n // Fetch latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n // TODO: query if the DB exists instead of try/catch\n try {\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n\n return chainState?.blockNumber;\n } catch (error) {\n // ignore errors for now\n }\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n publicClient.getBlock({ blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst latestStoredBlockNumber = await getLatestStoredBlockNumber();\nif (latestStoredBlockNumber != null) {\n startBlock = latestStoredBlockNumber + 1n;\n console.log(\"resuming from block number\", startBlock);\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await createStoreSync({\n storageAdapter,\n publicClient,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nstoredBlockLogs$.subscribe();\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nif (env.HEALTHCHECK_HOST != null || env.HEALTHCHECK_PORT != null) {\n const { default: Koa } = await import(\"koa\");\n const { default: cors } = await import(\"@koa/cors\");\n const { healthcheck } = await import(\"../src/koa-middleware/healthcheck\");\n const { metrics } = await import(\"../src/koa-middleware/metrics\");\n const { helloWorld } = await import(\"../src/koa-middleware/helloWorld\");\n\n const server = new Koa();\n\n server.use(cors());\n server.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n );\n server.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n );\n server.use(helloWorld());\n\n server.listen({ host: env.HEALTHCHECK_HOST, port: env.HEALTHCHECK_PORT });\n console.log(\n `postgres indexer healthcheck server listening on http://${env.HEALTHCHECK_HOST}:${env.HEALTHCHECK_PORT}`,\n );\n}\n"],"mappings":";gDACA,MAAO,gBACP,OAAS,KAAAA,MAAS,MAClB,OAAS,MAAAC,MAAU,cACnB,OAAS,sBAAAC,EAAoB,YAAAC,EAAU,aAAAC,EAAW,QAAAC,MAAuB,OACzE,OAAS,aAAAC,MAAiB,2BAC1B,OAAS,iBAAAC,EAAe,UAAAC,EAAQ,SAAAC,MAAa,OAC7C,OAAS,WAAAC,MAAe,0BACxB,OAAOC,MAAc,WACrB,OAAS,iBAAAC,EAAe,wBAAAC,EAAsB,uBAAAC,MAA2B,kCACzE,OAAS,mBAAAC,MAAuB,yBAGhC,IAAMC,EAAMC,EACVC,EAAE,aACAC,EACAD,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,iBAAkBA,EAAE,OAAO,EAAE,SAAS,EACtC,iBAAkBA,EAAE,OAAO,OAAO,EAAE,SAAS,CAC/C,CAAC,CACH,CACF,EAEME,EAA0B,CAE9BJ,EAAI,WAAaK,EAAUL,EAAI,UAAU,EAAI,OAE7CA,EAAI,aAAeM,EAAKN,EAAI,YAAY,EAAI,MAC9C,EAAE,OAAOO,CAAS,EAEZC,EAAeC,EAAmB,CACtC,UAAWC,EAASN,CAAU,EAC9B,gBAAiBJ,EAAI,gBACvB,CAAC,EAEKW,EAAU,MAAMH,EAAa,WAAW,EACxCI,EAAWC,EAAQC,EAASd,EAAI,aAAc,CAAE,QAAS,EAAM,CAAC,CAAC,EAEnE,MAAMe,EAAoBH,EAAUD,CAAO,IAC7C,QAAQ,IAAI,0DAA0D,EACtE,MAAMK,EAAcJ,CAAQ,GAG9B,GAAM,CAAE,eAAAK,EAAgB,OAAAC,CAAO,EAAI,MAAMC,EAAqB,CAAE,SAAAP,EAAU,aAAAJ,CAAa,CAAC,EAEpFY,EAAapB,EAAI,YAErB,eAAeqB,GAA0D,CAGvE,GAAI,CAWF,OAVmB,MAAMT,EACtB,OAAO,EACP,KAAKM,EAAO,WAAW,EACvB,MAAMI,EAAGJ,EAAO,YAAY,QAASP,CAAO,CAAC,EAC7C,MAAM,CAAC,EACP,QAAQ,EAGR,KAAMY,GAASA,EAAK,KAAK,IAAM,EAAI,CAAC,IAEpB,WACrB,MAAE,CAEF,CACF,CAEA,eAAeC,GAA8C,CAC3D,GAAM,CAACC,EAAyBC,CAAiB,EAAI,MAAM,QAAQ,IAAI,CACrEL,EAA2B,EAC3Bb,EAAa,SAAS,CAAE,SAAUR,EAAI,gBAAiB,CAAC,CAC1D,CAAC,EACD,OAAO0B,EAAkB,QAAUD,GAA2B,CAAC,GACjE,CAEA,IAAMA,EAA0B,MAAMJ,EAA2B,EAC7DI,GAA2B,OAC7BL,EAAaK,EAA0B,GACvC,QAAQ,IAAI,6BAA8BL,CAAU,GAGtD,GAAM,CAAE,mBAAAO,EAAoB,iBAAAC,CAAiB,EAAI,MAAMC,EAAgB,CACrE,eAAAZ,EACA,aAAAT,EACA,eAAgBR,EAAI,iBACpB,WAAAoB,EACA,cAAepB,EAAI,gBACnB,QAASA,EAAI,aACf,CAAC,EAED4B,EAAiB,UAAU,EAE3B,IAAIE,EAAa,GACjBC,EAAc,CAACJ,EAAoBC,CAAgB,CAAC,EACjD,KACCI,EACE,CAAC,CAACC,EAAmB,CAAE,YAAaC,CAAyB,CAAC,IAC5DD,IAAsBC,CAC1B,EACAC,EAAM,CACR,EACC,UAAU,IAAM,CACfL,EAAa,GACb,QAAQ,
IAAI,eAAe,CAC7B,CAAC,EAEH,GAAI9B,EAAI,kBAAoB,MAAQA,EAAI,kBAAoB,KAAM,CAChE,GAAM,CAAE,QAASoC,CAAI,EAAI,KAAM,QAAO,KAAK,EACrC,CAAE,QAASC,CAAK,EAAI,KAAM,QAAO,WAAW,EAC5C,CAAE,YAAAC,CAAY,EAAI,KAAM,QAAO,4BAAmC,EAClE,CAAE,QAAAC,CAAQ,EAAI,KAAM,QAAO,wBAA+B,EAC1D,CAAE,WAAAC,CAAW,EAAI,KAAM,QAAO,2BAAkC,EAEhEC,EAAS,IAAIL,EAEnBK,EAAO,IAAIJ,EAAK,CAAC,EACjBI,EAAO,IACLH,EAAY,CACV,QAAS,IAAMR,CACjB,CAAC,CACH,EACAW,EAAO,IACLF,EAAQ,CACN,UAAW,IAAM,GACjB,QAAS,IAAMT,EACf,2BAAAT,EACA,2BAAAG,EACA,eAAgBxB,EAAI,gBACtB,CAAC,CACH,EACAyC,EAAO,IAAID,EAAW,CAAC,EAEvBC,EAAO,OAAO,CAAE,KAAMzC,EAAI,iBAAkB,KAAMA,EAAI,gBAAiB,CAAC,EACxE,QAAQ,IACN,2DAA2DA,EAAI,oBAAoBA,EAAI,kBACzF","names":["z","eq","createPublicClient","fallback","webSocket","http","isDefined","combineLatest","filter","first","drizzle","postgres","cleanDatabase","createStorageAdapter","shouldCleanDatabase","createStoreSync","env","parseEnv","z","indexerEnvSchema","transports","webSocket","http","isDefined","publicClient","createPublicClient","fallback","chainId","database","drizzle","postgres","shouldCleanDatabase","cleanDatabase","storageAdapter","tables","createStorageAdapter","startBlock","getLatestStoredBlockNumber","eq","rows","getDistanceFromFollowBlock","latestStoredBlockNumber","latestFollowBlock","latestBlockNumber$","storedBlockLogs$","createStoreSync","isCaughtUp","combineLatest","filter","latestBlockNumber","lastBlockNumberProcessed","first","Koa","cors","healthcheck","metrics","helloWorld","server"]}
1
+ {"version":3,"sources":["../../src/bin/postgres-indexer.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { createPublicClient, fallback, webSocket, http, Transport } from \"viem\";\nimport { isDefined } from \"@latticexyz/common/utils\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { drizzle } from \"drizzle-orm/postgres-js\";\nimport postgres from \"postgres\";\nimport { cleanDatabase, createStorageAdapter, shouldCleanDatabase } from \"@latticexyz/store-sync/postgres\";\nimport { createStoreSync } from \"@latticexyz/store-sync\";\nimport { indexerEnvSchema, parseEnv } from \"./parseEnv\";\n\nconst env = parseEnv(\n z.intersection(\n indexerEnvSchema,\n z.object({\n DATABASE_URL: z.string(),\n HEALTHCHECK_HOST: z.string().optional(),\n HEALTHCHECK_PORT: z.coerce.number().optional(),\n }),\n ),\n);\n\nconst transports: Transport[] = [\n // prefer WS when specified\n env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : undefined,\n // otherwise use or fallback to HTTP\n env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : undefined,\n].filter(isDefined);\n\nconst publicClient = createPublicClient({\n transport: fallback(transports),\n pollingInterval: env.POLLING_INTERVAL,\n});\n\nconst chainId = await publicClient.getChainId();\nconst database = drizzle(postgres(env.DATABASE_URL, { prepare: false }));\n\nif (await shouldCleanDatabase(database, chainId)) {\n console.log(\"outdated database detected, clearing data to start fresh\");\n await cleanDatabase(database);\n}\n\nconst { storageAdapter, tables } = await createStorageAdapter({ database, publicClient });\n\nlet startBlock = env.START_BLOCK;\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n // Fetch latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n // TODO: query if the DB exists instead of try/catch\n try {\n const chainState = await database\n .select()\n .from(tables.configTable)\n .where(eq(tables.configTable.chainId, chainId))\n .limit(1)\n .execute()\n // Get the first record in a way that returns a possible `undefined`\n // TODO: move this to `.findFirst` after upgrading drizzle or `rows[0]` after enabling `noUncheckedIndexedAccess: true`\n .then((rows) => rows.find(() => true));\n\n return chainState?.blockNumber;\n } catch (error) {\n // ignore errors for now\n }\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n publicClient.getBlock({ blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst latestStoredBlockNumber = await getLatestStoredBlockNumber();\nif (latestStoredBlockNumber != null) {\n startBlock = latestStoredBlockNumber + 1n;\n console.log(\"resuming from block number\", startBlock);\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await createStoreSync({\n storageAdapter,\n publicClient,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nstoredBlockLogs$.subscribe();\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nif (env.HEALTHCHECK_HOST != null || env.HEALTHCHECK_PORT != null) {\n const { default: Koa } = await import(\"koa\");\n const { default: cors } = await import(\"@koa/cors\");\n const { healthcheck } = await import(\"../koa-middleware/healthcheck\");\n const { metrics } = await import(\"../koa-middleware/metrics\");\n const { helloWorld } = await import(\"../koa-middleware/helloWorld\");\n\n const server = new Koa();\n\n server.use(cors());\n server.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n );\n server.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n );\n server.use(helloWorld());\n\n server.listen({ host: env.HEALTHCHECK_HOST, port: env.HEALTHCHECK_PORT });\n console.log(\n `postgres indexer healthcheck server listening on http://${env.HEALTHCHECK_HOST}:${env.HEALTHCHECK_PORT}`,\n );\n}\n"],"mappings":";gDACA,MAAO,gBACP,OAAS,KAAAA,MAAS,MAClB,OAAS,MAAAC,MAAU,cACnB,OAAS,sBAAAC,EAAoB,YAAAC,EAAU,aAAAC,EAAW,QAAAC,MAAuB,OACzE,OAAS,aAAAC,MAAiB,2BAC1B,OAAS,iBAAAC,EAAe,UAAAC,EAAQ,SAAAC,MAAa,OAC7C,OAAS,WAAAC,MAAe,0BACxB,OAAOC,MAAc,WACrB,OAAS,iBAAAC,EAAe,wBAAAC,EAAsB,uBAAAC,MAA2B,kCACzE,OAAS,mBAAAC,MAAuB,yBAGhC,IAAMC,EAAMC,EACVC,EAAE,aACAC,EACAD,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,iBAAkBA,EAAE,OAAO,EAAE,SAAS,EACtC,iBAAkBA,EAAE,OAAO,OAAO,EAAE,SAAS,CAC/C,CAAC,CACH,CACF,EAEME,EAA0B,CAE9BJ,EAAI,WAAaK,EAAUL,EAAI,UAAU,EAAI,OAE7CA,EAAI,aAAeM,EAAKN,EAAI,YAAY,EAAI,MAC9C,EAAE,OAAOO,CAAS,EAEZC,EAAeC,EAAmB,CACtC,UAAWC,EAASN,CAAU,EAC9B,gBAAiBJ,EAAI,gBACvB,CAAC,EAEKW,EAAU,MAAMH,EAAa,WAAW,EACxCI,EAAWC,EAAQC,EAASd,EAAI,aAAc,CAAE,QAAS,EAAM,CAAC,CAAC,EAEnE,MAAMe,EAAoBH,EAAUD,CAAO,IAC7C,QAAQ,IAAI,0DAA0D,EACtE,MAAMK,EAAcJ,CAAQ,GAG9B,GAAM,CAAE,eAAAK,EAAgB,OAAAC,CAAO,EAAI,MAAMC,EAAqB,CAAE,SAAAP,EAAU,aAAAJ,CAAa,CAAC,EAEpFY,EAAapB,EAAI,YAErB,eAAeqB,GAA0D,CAGvE,GAAI,CAWF,OAVmB,MAAMT,EACtB,OAAO,EACP,KAAKM,EAAO,WAAW,EACvB,MAAMI,EAAGJ,EAAO,YAAY,QAASP,CAAO,CAAC,EAC7C,MAAM,CAAC,EACP,QAAQ,EAGR,KAAMY,GAASA,EAAK,KAAK,IAAM,EAAI,CAAC,IAEpB,WACrB,MAAE,CAEF,CACF,CAEA,eAAeC,GAA8C,CAC3D,GAAM,CAACC,EAAyBC,CAAiB,EAAI,MAAM,QAAQ,IAAI,CACrEL,EAA2B,EAC3Bb,EAAa,SAAS,CAAE,SAAUR,EAAI,gBAAiB,CAAC,CAC1D,CAAC,EACD,OAAO0B,EAAkB,QAAUD,GAA2B,CAAC,GACjE,CAEA,IAAMA,EAA0B,MAAMJ,EAA2B,EAC7DI,GAA2B,OAC7BL,EAAaK,EAA0B,GACvC,QAAQ,IAAI,6BAA8BL,CAAU,GAGtD,GAAM,CAAE,mBAAAO,EAAoB,iBAAAC,CAAiB,EAAI,MAAMC,EAAgB,CACrE,eAAAZ,EACA,aAAAT,EACA,eAAgBR,EAAI,iBACpB,WAAAoB,EACA,cAAepB,EAAI,gBACnB,QAASA,EAAI,aACf,CAAC,EAED4B,EAAiB,UAAU,EAE3B,IAAIE,EAAa,GACjBC,EAAc,CAACJ,EAAoBC,CAAgB,CAAC,EACjD,KACCI,EACE,CAAC,CAACC,EAAmB,CAAE,YAAaC,CAAyB,CAAC,IAC5DD,IAAsBC,CAC1B,EACAC,EAAM,CACR,EACC,UAAU,IAAM,CACfL,EAAa,GACb,QAAQ,IAAI,eAAe,CA
C7B,CAAC,EAEH,GAAI9B,EAAI,kBAAoB,MAAQA,EAAI,kBAAoB,KAAM,CAChE,GAAM,CAAE,QAASoC,CAAI,EAAI,KAAM,QAAO,KAAK,EACrC,CAAE,QAASC,CAAK,EAAI,KAAM,QAAO,WAAW,EAC5C,CAAE,YAAAC,CAAY,EAAI,KAAM,QAAO,4BAA+B,EAC9D,CAAE,QAAAC,CAAQ,EAAI,KAAM,QAAO,wBAA2B,EACtD,CAAE,WAAAC,CAAW,EAAI,KAAM,QAAO,2BAA8B,EAE5DC,EAAS,IAAIL,EAEnBK,EAAO,IAAIJ,EAAK,CAAC,EACjBI,EAAO,IACLH,EAAY,CACV,QAAS,IAAMR,CACjB,CAAC,CACH,EACAW,EAAO,IACLF,EAAQ,CACN,UAAW,IAAM,GACjB,QAAS,IAAMT,EACf,2BAAAT,EACA,2BAAAG,EACA,eAAgBxB,EAAI,gBACtB,CAAC,CACH,EACAyC,EAAO,IAAID,EAAW,CAAC,EAEvBC,EAAO,OAAO,CAAE,KAAMzC,EAAI,iBAAkB,KAAMA,EAAI,gBAAiB,CAAC,EACxE,QAAQ,IACN,2DAA2DA,EAAI,oBAAoBA,EAAI,kBACzF","names":["z","eq","createPublicClient","fallback","webSocket","http","isDefined","combineLatest","filter","first","drizzle","postgres","cleanDatabase","createStorageAdapter","shouldCleanDatabase","createStoreSync","env","parseEnv","z","indexerEnvSchema","transports","webSocket","http","isDefined","publicClient","createPublicClient","fallback","chainId","database","drizzle","postgres","shouldCleanDatabase","cleanDatabase","storageAdapter","tables","createStorageAdapter","startBlock","getLatestStoredBlockNumber","eq","rows","getDistanceFromFollowBlock","latestStoredBlockNumber","latestFollowBlock","latestBlockNumber$","storedBlockLogs$","createStoreSync","isCaughtUp","combineLatest","filter","latestBlockNumber","lastBlockNumberProcessed","first","Koa","cors","healthcheck","metrics","helloWorld","server"]}
@@ -1,3 +1,3 @@
1
1
  #!/usr/bin/env node
2
- import{a as g,c as O}from"../chunk-ZS3IQEZ4.js";import{a as w}from"../chunk-VCBWGHIO.js";import{a as N,b as B,c as _}from"../chunk-7B4KE2DO.js";import{a as C}from"../chunk-KDDXIBYJ.js";import{a as I}from"../chunk-OUZYPRYF.js";import{a as E}from"../chunk-GQCEMYLA.js";import"dotenv/config";import oe from"node:fs";import{z as b}from"zod";import{eq as ae}from"drizzle-orm";import{drizzle as se}from"drizzle-orm/better-sqlite3";import ne from"better-sqlite3";import{createPublicClient as ie,fallback as ce,webSocket as le,http as me}from"viem";import ue from"koa";import de from"@koa/cors";import{createKoaMiddleware as pe}from"trpc-koa-adapter";import{createAppRouter as fe}from"@latticexyz/store-sync/trpc-indexer";import{chainState as v,schemaVersion as F,syncToSqlite as be}from"@latticexyz/store-sync/sqlite";import{asc as K,eq as A}from"drizzle-orm";import{buildTable as x,chainState as D,getTables as G}from"@latticexyz/store-sync/sqlite";import{getAddress as P}from"viem";import{decodeDynamicField as V}from"@latticexyz/protocol-parser/internal";import{hexToResource as J}from"@latticexyz/common";import{mapObject as $}from"@latticexyz/common/utils";function f(t,{chainId:s,address:r,filters:n=[]}){let i=t.select().from(D).where(A(D.chainId,s)).limit(1).all().find(()=>!0),a=Array.from(new Set(n.map(o=>o.tableId))),h=G(t).filter(o=>r==null||P(r)===P(o.address)).filter(o=>!a.length||a.includes(o.tableId)).map(o=>{let S=x(o),L=t.select().from(S).where(A(S.__isDeleted,!1)).orderBy(K(S.__lastUpdatedBlockNumber)).all(),j=n.length?L.filter(m=>{let d=V("bytes32[]",m.__key);return n.some(c=>c.tableId===o.tableId&&(c.key0==null||c.key0===d[0])&&(c.key1==null||c.key1===d[1]))}):L,H=J(o.tableId);return{...o,type:H.type,schema:$({...o.keySchema,...o.valueSchema},m=>({type:m,internalType:m})),key:Object.keys(o.keySchema),records:j.map(m=>{let d=Object.fromEntries(Object.entries(o.keySchema).map(([p])=>[p,m[p]])),c=Object.fromEntries(Object.entries(o.valueSchema).map(([p])=>[p,m[p]]));return{key:d,value:c,fields:{...d,...c}}})}});return{blockNumber:i?.lastUpdatedBlockNumber??null,tables:h}}import{tablesWithRecordsToLogs as z}from"@latticexyz/store-sync";async function W(t){return{async getLogs(r){let{blockNumber:n,tables:i}=f(t,r),a=z(i);return{blockNumber:n??0n,logs:a}},async findAll(r){return f(t,r)}}}import{isDefined as ye}from"@latticexyz/common/utils";import{combineLatest as he,filter as Se,first as ge}from"rxjs";import Y from"@koa/router";import X from"koa-compose";import{input as Z}from"@latticexyz/store-sync/indexer-client";import{schemasTable as ee,tablesWithRecordsToLogs as te}from"@latticexyz/store-sync";import{createBenchmark as re}from"@latticexyz/common";function Q(t){let s=new Y;return s.get("/api/logs",O(),async r=>{let n=re("sqlite:logs"),i;try{i=Z.parse(typeof r.query.input=="string"?JSON.parse(r.query.input):{})}catch(a){r.status=400,r.body=JSON.stringify(a),g(a);return}try{i.filters=i.filters.length>0?[...i.filters,{tableId:ee.tableId}]:[],n("parse config");let{blockNumber:a,tables:R}=f(t,i);n("query tables with records");let h=te(R);n("convert records to logs"),r.body=JSON.stringify({blockNumber:a?.toString()??"-1",logs:h}),r.status=200}catch(a){r.status=500,r.body=JSON.stringify(a),g(a)}}),X([s.routes(),s.allowedMethods()])}var e=_(b.intersection(b.intersection(B,N),b.object({SQLITE_FILENAME:b.string().default("indexer.db"),SENTRY_DSN:b.string().optional()}))),Te=[e.RPC_WS_URL?le(e.RPC_WS_URL):void 0,e.RPC_HTTP_URL?me(e.RPC_HTTP_URL):void 
0].filter(ye),T=ie({transport:ce(Te),pollingInterval:e.POLLING_INTERVAL}),ke=await T.getChainId(),y=se(new ne(e.SQLITE_FILENAME)),U=e.START_BLOCK;async function q(){try{return y.select().from(v).where(ae(v.chainId,ke)).all()[0]}catch{}}async function M(){return(await q())?.lastUpdatedBlockNumber??void 0}async function Re(){let[t,s]=await Promise.all([M(),T.getBlock({blockTag:e.FOLLOW_BLOCK_TAG})]);return s.number-(t??-1n)}var u=await q();u&&(u.schemaVersion!=F?(console.log("schema version changed from",u.schemaVersion,"to",F,"recreating database"),oe.truncateSync(e.SQLITE_FILENAME)):u.lastUpdatedBlockNumber!=null&&(console.log("resuming from block number",u.lastUpdatedBlockNumber+1n),U=u.lastUpdatedBlockNumber+1n));var{latestBlockNumber$:Le,storedBlockLogs$:Ne}=await be({database:y,publicClient:T,followBlockTag:e.FOLLOW_BLOCK_TAG,startBlock:U,maxBlockRange:e.MAX_BLOCK_RANGE,address:e.STORE_ADDRESS}),k=!1;he([Le,Ne]).pipe(Se(([t,{blockNumber:s}])=>t===s),ge()).subscribe(()=>{k=!0,console.log("all caught up")});var l=new ue;e.SENTRY_DSN&&l.use(w(e.SENTRY_DSN));l.use(de());l.use(C({isReady:()=>k}));l.use(E({isHealthy:()=>!0,isReady:()=>k,getLatestStoredBlockNumber:M,getDistanceFromFollowBlock:Re,followBlockTag:e.FOLLOW_BLOCK_TAG}));l.use(I());l.use(Q(y));l.use(pe({prefix:"/trpc",router:fe(),createContext:async()=>({queryAdapter:await W(y)})}));l.listen({host:e.HOST,port:e.PORT});console.log(`sqlite indexer frontend listening on http://${e.HOST}:${e.PORT}`);
2
+ import{a as O}from"../chunk-R7HX5BT2.js";import{a as g,c as w}from"../chunk-32XITDZW.js";import{a as N,b as B,c as _}from"../chunk-M2HHWHGP.js";import{a as C}from"../chunk-KDDXIBYJ.js";import{a as I}from"../chunk-OUZYPRYF.js";import{a as E}from"../chunk-GQCEMYLA.js";import"dotenv/config";import oe from"node:fs";import{z as b}from"zod";import{eq as ae}from"drizzle-orm";import{drizzle as se}from"drizzle-orm/better-sqlite3";import ne from"better-sqlite3";import{createPublicClient as ie,fallback as ce,webSocket as le,http as me}from"viem";import ue from"koa";import de from"@koa/cors";import{createKoaMiddleware as pe}from"trpc-koa-adapter";import{createAppRouter as fe}from"@latticexyz/store-sync/trpc-indexer";import{chainState as v,schemaVersion as F,syncToSqlite as be}from"@latticexyz/store-sync/sqlite";import{asc as K,eq as A}from"drizzle-orm";import{buildTable as x,chainState as D,getTables as G}from"@latticexyz/store-sync/sqlite";import{getAddress as P}from"viem";import{decodeDynamicField as V}from"@latticexyz/protocol-parser/internal";import{hexToResource as J}from"@latticexyz/common";import{mapObject as $}from"@latticexyz/common/utils";function f(t,{chainId:s,address:r,filters:n=[]}){let i=t.select().from(D).where(A(D.chainId,s)).limit(1).all().find(()=>!0),a=Array.from(new Set(n.map(o=>o.tableId))),h=G(t).filter(o=>r==null||P(r)===P(o.address)).filter(o=>!a.length||a.includes(o.tableId)).map(o=>{let S=x(o),L=t.select().from(S).where(A(S.__isDeleted,!1)).orderBy(K(S.__lastUpdatedBlockNumber)).all(),j=n.length?L.filter(m=>{let d=V("bytes32[]",m.__key);return n.some(c=>c.tableId===o.tableId&&(c.key0==null||c.key0===d[0])&&(c.key1==null||c.key1===d[1]))}):L,H=J(o.tableId);return{...o,type:H.type,schema:$({...o.keySchema,...o.valueSchema},m=>({type:m,internalType:m})),key:Object.keys(o.keySchema),records:j.map(m=>{let d=Object.fromEntries(Object.entries(o.keySchema).map(([p])=>[p,m[p]])),c=Object.fromEntries(Object.entries(o.valueSchema).map(([p])=>[p,m[p]]));return{key:d,value:c,fields:{...d,...c}}})}});return{blockNumber:i?.lastUpdatedBlockNumber??null,tables:h}}import{tablesWithRecordsToLogs as z}from"@latticexyz/store-sync";async function W(t){return{async getLogs(r){let{blockNumber:n,tables:i}=f(t,r),a=z(i);return{blockNumber:n??0n,logs:a}},async findAll(r){return f(t,r)}}}import{isDefined as ye}from"@latticexyz/common/utils";import{combineLatest as he,filter as Se,first as ge}from"rxjs";import Y from"@koa/router";import X from"koa-compose";import{input as Z}from"@latticexyz/store-sync/indexer-client";import{schemasTable as ee,tablesWithRecordsToLogs as te}from"@latticexyz/store-sync";import{createBenchmark as re}from"@latticexyz/common";function Q(t){let s=new Y;return s.get("/api/logs",O(),async r=>{let n=re("sqlite:logs"),i;try{i=Z.parse(typeof r.query.input=="string"?JSON.parse(r.query.input):{})}catch(a){r.status=400,r.body=JSON.stringify(a),g(a);return}try{i.filters=i.filters.length>0?[...i.filters,{tableId:ee.tableId}]:[],n("parse config");let{blockNumber:a,tables:R}=f(t,i);n("query tables with records");let h=te(R);n("convert records to logs"),r.body=JSON.stringify({blockNumber:a?.toString()??"-1",logs:h}),r.status=200}catch(a){r.status=500,r.body=JSON.stringify(a),g(a)}}),X([s.routes(),s.allowedMethods()])}var e=_(b.intersection(b.intersection(B,N),b.object({SQLITE_FILENAME:b.string().default("indexer.db"),SENTRY_DSN:b.string().optional()}))),Te=[e.RPC_WS_URL?le(e.RPC_WS_URL):void 0,e.RPC_HTTP_URL?me(e.RPC_HTTP_URL):void 
0].filter(ye),T=ie({transport:ce(Te),pollingInterval:e.POLLING_INTERVAL}),ke=await T.getChainId(),y=se(new ne(e.SQLITE_FILENAME)),U=e.START_BLOCK;async function q(){try{return y.select().from(v).where(ae(v.chainId,ke)).all()[0]}catch{}}async function M(){return(await q())?.lastUpdatedBlockNumber??void 0}async function Re(){let[t,s]=await Promise.all([M(),T.getBlock({blockTag:e.FOLLOW_BLOCK_TAG})]);return s.number-(t??-1n)}var u=await q();u&&(u.schemaVersion!=F?(console.log("schema version changed from",u.schemaVersion,"to",F,"recreating database"),oe.truncateSync(e.SQLITE_FILENAME)):u.lastUpdatedBlockNumber!=null&&(console.log("resuming from block number",u.lastUpdatedBlockNumber+1n),U=u.lastUpdatedBlockNumber+1n));var{latestBlockNumber$:Le,storedBlockLogs$:Ne}=await be({database:y,publicClient:T,followBlockTag:e.FOLLOW_BLOCK_TAG,startBlock:U,maxBlockRange:e.MAX_BLOCK_RANGE,address:e.STORE_ADDRESS}),k=!1;he([Le,Ne]).pipe(Se(([t,{blockNumber:s}])=>t===s),ge()).subscribe(()=>{k=!0,console.log("all caught up")});var l=new ue;e.SENTRY_DSN&&l.use(w(e.SENTRY_DSN));l.use(de());l.use(C({isReady:()=>k}));l.use(E({isHealthy:()=>!0,isReady:()=>k,getLatestStoredBlockNumber:M,getDistanceFromFollowBlock:Re,followBlockTag:e.FOLLOW_BLOCK_TAG}));l.use(I());l.use(Q(y));l.use(pe({prefix:"/trpc",router:fe(),createContext:async()=>({queryAdapter:await W(y)})}));l.listen({host:e.HOST,port:e.PORT});console.log(`sqlite indexer frontend listening on http://${e.HOST}:${e.PORT}`);
3
3
  //# sourceMappingURL=sqlite-indexer.js.map
@@ -1 +1 @@
1
- {"version":3,"sources":["../../bin/sqlite-indexer.ts","../../src/sqlite/getTablesWithRecords.ts","../../src/sqlite/createQueryAdapter.ts","../../src/sqlite/apiRoutes.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport fs from \"node:fs\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { drizzle } from \"drizzle-orm/better-sqlite3\";\nimport Database from \"better-sqlite3\";\nimport { createPublicClient, fallback, webSocket, http, Transport } from \"viem\";\nimport Koa from \"koa\";\nimport cors from \"@koa/cors\";\nimport { createKoaMiddleware } from \"trpc-koa-adapter\";\nimport { createAppRouter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { chainState, schemaVersion, syncToSqlite } from \"@latticexyz/store-sync/sqlite\";\nimport { createQueryAdapter } from \"../src/sqlite/createQueryAdapter\";\nimport { isDefined } from \"@latticexyz/common/utils\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { frontendEnvSchema, indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { healthcheck } from \"../src/koa-middleware/healthcheck\";\nimport { helloWorld } from \"../src/koa-middleware/helloWorld\";\nimport { apiRoutes } from \"../src/sqlite/apiRoutes\";\nimport { sentry } from \"../src/koa-middleware/sentry\";\nimport { metrics } from \"../src/koa-middleware/metrics\";\n\nconst env = parseEnv(\n z.intersection(\n z.intersection(indexerEnvSchema, frontendEnvSchema),\n z.object({\n SQLITE_FILENAME: z.string().default(\"indexer.db\"),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst transports: Transport[] = [\n // prefer WS when specified\n env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : undefined,\n // otherwise use or fallback to HTTP\n env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : undefined,\n].filter(isDefined);\n\nconst publicClient = createPublicClient({\n transport: fallback(transports),\n pollingInterval: env.POLLING_INTERVAL,\n});\n\nconst chainId = await publicClient.getChainId();\nconst database = drizzle(new Database(env.SQLITE_FILENAME));\n\nlet startBlock = env.START_BLOCK;\n\nasync function getCurrentChainState(): Promise<\n | {\n schemaVersion: number;\n chainId: number;\n lastUpdatedBlockNumber: bigint | null;\n lastError: string | null;\n }\n | undefined\n> {\n // This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n try {\n const currentChainStates = database.select().from(chainState).where(eq(chainState.chainId, chainId)).all();\n // TODO: replace this type workaround with `noUncheckedIndexedAccess: true` when we can fix all the issues related (https://github.com/latticexyz/mud/issues/1212)\n const currentChainState: (typeof currentChainStates)[number] | undefined = currentChainStates[0];\n return currentChainState;\n } catch (error) {\n // ignore errors, this is optional\n }\n}\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n const currentChainState = await getCurrentChainState();\n return currentChainState?.lastUpdatedBlockNumber ?? undefined;\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n publicClient.getBlock({ blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst currentChainState = await getCurrentChainState();\nif (currentChainState) {\n // Reset the db if the version changed\n if (currentChainState.schemaVersion != schemaVersion) {\n console.log(\n \"schema version changed from\",\n currentChainState.schemaVersion,\n \"to\",\n schemaVersion,\n \"recreating database\",\n );\n fs.truncateSync(env.SQLITE_FILENAME);\n } else if (currentChainState.lastUpdatedBlockNumber != null) {\n // Resume from latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n console.log(\"resuming from block number\", currentChainState.lastUpdatedBlockNumber + 1n);\n startBlock = currentChainState.lastUpdatedBlockNumber + 1n;\n }\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await syncToSqlite({\n database,\n publicClient,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nconst server = new Koa();\n\nif (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n}\n\nserver.use(cors());\nserver.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n);\nserver.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n);\nserver.use(helloWorld());\nserver.use(apiRoutes(database));\n\nserver.use(\n createKoaMiddleware({\n prefix: \"/trpc\",\n router: createAppRouter(),\n createContext: async () => ({\n queryAdapter: await createQueryAdapter(database),\n }),\n }),\n);\n\nserver.listen({ host: env.HOST, port: env.PORT });\nconsole.log(`sqlite indexer frontend listening on http://${env.HOST}:${env.PORT}`);\n","import { asc, eq } from \"drizzle-orm\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { buildTable, chainState, getTables } from \"@latticexyz/store-sync/sqlite\";\nimport { Hex, getAddress } from \"viem\";\nimport { decodeDynamicField } from \"@latticexyz/protocol-parser/internal\";\nimport { SyncFilter, TableRecord, TableWithRecords } from \"@latticexyz/store-sync\";\nimport { hexToResource } from \"@latticexyz/common\";\nimport { mapObject } from \"@latticexyz/common/utils\";\n\n// TODO: refactor sqlite and replace this with getLogs to match postgres (https://github.com/latticexyz/mud/issues/1970)\n\n/**\n * @deprecated\n * */\nexport function getTablesWithRecords(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n database: BaseSQLiteDatabase<\"sync\", any>,\n {\n chainId,\n address,\n filters = [],\n }: {\n readonly chainId: number;\n readonly address?: Hex;\n readonly filters?: readonly SyncFilter[];\n },\n): { blockNumber: bigint | null; tables: readonly TableWithRecords[] } {\n const metadata = database\n .select()\n .from(chainState)\n .where(eq(chainState.chainId, chainId))\n .limit(1)\n .all()\n .find(() => true);\n\n // If _any_ filter has a table ID, this will filter down all data to just those tables. 
Which mean we can't yet mix table filters with key-only filters.\n // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily)\n const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId)));\n const tables = getTables(database)\n .filter((table) => address == null || getAddress(address) === getAddress(table.address))\n .filter((table) => !tableIds.length || tableIds.includes(table.tableId));\n\n const tablesWithRecords = tables.map((table) => {\n const sqliteTable = buildTable(table);\n const records = database\n .select()\n .from(sqliteTable)\n .where(eq(sqliteTable.__isDeleted, false))\n .orderBy(\n asc(sqliteTable.__lastUpdatedBlockNumber),\n // TODO: add logIndex (https://github.com/latticexyz/mud/issues/1979)\n )\n .all();\n const filteredRecords = !filters.length\n ? records\n : records.filter((record) => {\n const keyTuple = decodeDynamicField(\"bytes32[]\", record.__key);\n return filters.some(\n (filter) =>\n filter.tableId === table.tableId &&\n (filter.key0 == null || filter.key0 === keyTuple[0]) &&\n (filter.key1 == null || filter.key1 === keyTuple[1]),\n );\n });\n const resource = hexToResource(table.tableId);\n return {\n ...table,\n type: resource.type as never,\n schema: mapObject({ ...table.keySchema, ...table.valueSchema }, (type) => ({ type, internalType: type })),\n key: Object.keys(table.keySchema),\n records: filteredRecords.map((record): TableRecord => {\n const key = Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]]));\n const value = Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]]));\n return { key, value, fields: { ...key, ...value } };\n }),\n } satisfies TableWithRecords;\n });\n\n return {\n blockNumber: metadata?.lastUpdatedBlockNumber ?? null,\n tables: tablesWithRecords,\n };\n}\n","import { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { QueryAdapter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\n\n/**\n * Creates a storage adapter for the tRPC server/client to query data from SQLite.\n *\n * @param {BaseSQLiteDatabase<\"sync\", any>} database SQLite database object from Drizzle\n * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function createQueryAdapter(database: BaseSQLiteDatabase<\"sync\", any>): Promise<QueryAdapter> {\n const adapter: QueryAdapter = {\n async getLogs(opts) {\n const { blockNumber, tables } = getTablesWithRecords(database, opts);\n const logs = tablesWithRecordsToLogs(tables);\n return { blockNumber: blockNumber ?? 
0n, logs };\n },\n async findAll(opts) {\n return getTablesWithRecords(database, opts);\n },\n };\n return adapter;\n}\n","import { Middleware } from \"koa\";\nimport Router from \"@koa/router\";\nimport compose from \"koa-compose\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { schemasTable, tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\nimport { debug } from \"../debug\";\nimport { createBenchmark } from \"@latticexyz/common\";\nimport { compress } from \"../koa-middleware/compress\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function apiRoutes(database: BaseSQLiteDatabase<\"sync\", any>): Middleware {\n const router = new Router();\n\n router.get(\"/api/logs\", compress(), async (ctx) => {\n const benchmark = createBenchmark(\"sqlite:logs\");\n\n let options: ReturnType<typeof input.parse>;\n\n try {\n options = input.parse(typeof ctx.query.input === \"string\" ? JSON.parse(ctx.query.input) : {});\n } catch (error) {\n ctx.status = 400;\n ctx.body = JSON.stringify(error);\n debug(error);\n return;\n }\n\n try {\n options.filters = options.filters.length > 0 ? [...options.filters, { tableId: schemasTable.tableId }] : [];\n benchmark(\"parse config\");\n const { blockNumber, tables } = getTablesWithRecords(database, options);\n benchmark(\"query tables with records\");\n const logs = tablesWithRecordsToLogs(tables);\n benchmark(\"convert records to logs\");\n\n ctx.body = JSON.stringify({ blockNumber: blockNumber?.toString() ?? \"-1\", logs });\n ctx.status = 200;\n } catch (error) {\n ctx.status = 500;\n ctx.body = JSON.stringify(error);\n debug(error);\n }\n });\n\n return compose([router.routes(), router.allowedMethods()]) as 
Middleware;\n}\n"],"mappings":";2QACA,MAAO,gBACP,OAAOA,OAAQ,UACf,OAAS,KAAAC,MAAS,MAClB,OAAS,MAAAC,OAAU,cACnB,OAAS,WAAAC,OAAe,6BACxB,OAAOC,OAAc,iBACrB,OAAS,sBAAAC,GAAoB,YAAAC,GAAU,aAAAC,GAAW,QAAAC,OAAuB,OACzE,OAAOC,OAAS,MAChB,OAAOC,OAAU,YACjB,OAAS,uBAAAC,OAA2B,mBACpC,OAAS,mBAAAC,OAAuB,sCAChC,OAAS,cAAAC,EAAY,iBAAAC,EAAe,gBAAAC,OAAoB,gCCZxD,OAAS,OAAAC,EAAK,MAAAC,MAAU,cAExB,OAAS,cAAAC,EAAY,cAAAC,EAAY,aAAAC,MAAiB,gCAClD,OAAc,cAAAC,MAAkB,OAChC,OAAS,sBAAAC,MAA0B,uCAEnC,OAAS,iBAAAC,MAAqB,qBAC9B,OAAS,aAAAC,MAAiB,2BAOnB,SAASC,EAEdC,EACA,CACE,QAAAC,EACA,QAAAC,EACA,QAAAC,EAAU,CAAC,CACb,EAKqE,CACrE,IAAMC,EAAWJ,EACd,OAAO,EACP,KAAKP,CAAU,EACf,MAAMF,EAAGE,EAAW,QAASQ,CAAO,CAAC,EACrC,MAAM,CAAC,EACP,IAAI,EACJ,KAAK,IAAM,EAAI,EAIZI,EAAW,MAAM,KAAK,IAAI,IAAIF,EAAQ,IAAKG,GAAWA,EAAO,OAAO,CAAC,CAAC,EAKtEC,EAJSb,EAAUM,CAAQ,EAC9B,OAAQQ,GAAUN,GAAW,MAAQP,EAAWO,CAAO,IAAMP,EAAWa,EAAM,OAAO,CAAC,EACtF,OAAQA,GAAU,CAACH,EAAS,QAAUA,EAAS,SAASG,EAAM,OAAO,CAAC,EAExC,IAAKA,GAAU,CAC9C,IAAMC,EAAcjB,EAAWgB,CAAK,EAC9BE,EAAUV,EACb,OAAO,EACP,KAAKS,CAAW,EAChB,MAAMlB,EAAGkB,EAAY,YAAa,EAAK,CAAC,EACxC,QACCnB,EAAImB,EAAY,wBAAwB,CAE1C,EACC,IAAI,EACDE,EAAmBR,EAAQ,OAE7BO,EAAQ,OAAQE,GAAW,CACzB,IAAMC,EAAWjB,EAAmB,YAAagB,EAAO,KAAK,EAC7D,OAAOT,EAAQ,KACZG,GACCA,EAAO,UAAYE,EAAM,UACxBF,EAAO,MAAQ,MAAQA,EAAO,OAASO,EAAS,CAAC,KACjDP,EAAO,MAAQ,MAAQA,EAAO,OAASO,EAAS,CAAC,EACtD,CACF,CAAC,EATDH,EAUEI,EAAWjB,EAAcW,EAAM,OAAO,EAC5C,MAAO,CACL,GAAGA,EACH,KAAMM,EAAS,KACf,OAAQhB,EAAU,CAAE,GAAGU,EAAM,UAAW,GAAGA,EAAM,WAAY,EAAIO,IAAU,CAAE,KAAAA,EAAM,aAAcA,CAAK,EAAE,EACxG,IAAK,OAAO,KAAKP,EAAM,SAAS,EAChC,QAASG,EAAgB,IAAKC,GAAwB,CACpD,IAAMI,EAAM,OAAO,YAAY,OAAO,QAAQR,EAAM,SAAS,EAAE,IAAI,CAAC,CAACS,CAAI,IAAM,CAACA,EAAML,EAAOK,CAAI,CAAC,CAAC,CAAC,EAC9FC,EAAQ,OAAO,YAAY,OAAO,QAAQV,EAAM,WAAW,EAAE,IAAI,CAAC,CAACS,CAAI,IAAM,CAACA,EAAML,EAAOK,CAAI,CAAC,CAAC,CAAC,EACxG,MAAO,CAAE,IAAAD,EAAK,MAAAE,EAAO,OAAQ,CAAE,GAAGF,EAAK,GAAGE,CAAM,CAAE,CACpD,CAAC,CACH,CACF,CAAC,EAED,MAAO,CACL,YAAad,GAAU,wBAA0B,KACjD,OAAQG,CACV,CACF,CC/EA,OAAS,2BAAAY,MAA+B,yBASxC,eAAsBC,EAAmBC,EAAkE,CAWzG,MAV8B,CAC5B,MAAM,QAAQC,EAAM,CAClB,GAAM,CAAE,YAAAC,EAAa,OAAAC,CAAO,EAAIC,EAAqBJ,EAAUC,CAAI,EAC7DI,EAAOP,EAAwBK,CAAM,EAC3C,MAAO,CAAE,YAAaD,GAAe,GAAI,KAAAG,CAAK,CAChD,EACA,MAAM,QAAQJ,EAAM,CAClB,OAAOG,EAAqBJ,EAAUC,CAAI,CAC5C,CACF,CAEF,CFVA,OAAS,aAAAK,OAAiB,2BAC1B,OAAS,iBAAAC,GAAe,UAAAC,GAAQ,SAAAC,OAAa,OGd7C,OAAOC,MAAY,cACnB,OAAOC,MAAa,cACpB,OAAS,SAAAC,MAAa,wCACtB,OAAS,gBAAAC,GAAc,2BAAAC,OAA+B,yBAEtD,OAAS,mBAAAC,OAAuB,qBAMzB,SAASC,EAAUC,EAAuD,CAC/E,IAAMC,EAAS,IAAIC,EAEnB,OAAAD,EAAO,IAAI,YAAaE,EAAS,EAAG,MAAOC,GAAQ,CACjD,IAAMC,EAAYC,GAAgB,aAAa,EAE3CC,EAEJ,GAAI,CACFA,EAAUC,EAAM,MAAM,OAAOJ,EAAI,MAAM,OAAU,SAAW,KAAK,MAAMA,EAAI,MAAM,KAAK,EAAI,CAAC,CAAC,CAC9F,OAASK,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,KAAO,KAAK,UAAUK,CAAK,EAC/BC,EAAMD,CAAK,EACX,MACF,CAEA,GAAI,CACFF,EAAQ,QAAUA,EAAQ,QAAQ,OAAS,EAAI,CAAC,GAAGA,EAAQ,QAAS,CAAE,QAASI,GAAa,OAAQ,CAAC,EAAI,CAAC,EAC1GN,EAAU,cAAc,EACxB,GAAM,CAAE,YAAAO,EAAa,OAAAC,CAAO,EAAIC,EAAqBd,EAAUO,CAAO,EACtEF,EAAU,2BAA2B,EACrC,IAAMU,EAAOC,GAAwBH,CAAM,EAC3CR,EAAU,yBAAyB,EAEnCD,EAAI,KAAO,KAAK,UAAU,CAAE,YAAaQ,GAAa,SAAS,GAAK,KAAM,KAAAG,CAAK,CAAC,EAChFX,EAAI,OAAS,GACf,OAASK,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,KAAO,KAAK,UAAUK,CAAK,EAC/BC,EAAMD,CAAK,CACb,CACF,CAAC,EAEMQ,EAAQ,CAAChB,EAAO,OAAO,EAAGA,EAAO,eAAe,CAAC,CAAC,CAC3D,CHxBA,IAAMiB,EAAMC,EACVC,EAAE,aACAA,EAAE,aAAaC,EAAkBC,CAAiB,EAClDF,EAAE,OAAO,CACP,gBAAiBA,EAAE,OAAO,EAAE,QAAQ,YAAY,EAChD,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,CACH,CACF,EAEMG,GAA0B,CAE9BL,EAAI,WAAaM,GAAUN,EAAI,UAAU,EAAI,OAE7CA,EAAI,aAAeO,GAAKP,EAAI,YAAY,EAAI,MAC9C,EAAE,OAAOQ,EAAS,EAEZC,EAAeC,GAAmB,
CACtC,UAAWC,GAASN,EAAU,EAC9B,gBAAiBL,EAAI,gBACvB,CAAC,EAEKY,GAAU,MAAMH,EAAa,WAAW,EACxCI,EAAWC,GAAQ,IAAIC,GAASf,EAAI,eAAe,CAAC,EAEtDgB,EAAahB,EAAI,YAErB,eAAeiB,GAQb,CAEA,GAAI,CAIF,OAH2BJ,EAAS,OAAO,EAAE,KAAKK,CAAU,EAAE,MAAMC,GAAGD,EAAW,QAASN,EAAO,CAAC,EAAE,IAAI,EAEX,CAAC,CAEjG,MAAE,CAEF,CACF,CAEA,eAAeQ,GAA0D,CAEvE,OAD0B,MAAMH,EAAqB,IAC3B,wBAA0B,MACtD,CAEA,eAAeI,IAA8C,CAC3D,GAAM,CAACC,EAAyBC,CAAiB,EAAI,MAAM,QAAQ,IAAI,CACrEH,EAA2B,EAC3BX,EAAa,SAAS,CAAE,SAAUT,EAAI,gBAAiB,CAAC,CAC1D,CAAC,EACD,OAAOuB,EAAkB,QAAUD,GAA2B,CAAC,GACjE,CAEA,IAAME,EAAoB,MAAMP,EAAqB,EACjDO,IAEEA,EAAkB,eAAiBC,GACrC,QAAQ,IACN,8BACAD,EAAkB,cAClB,KACAC,EACA,qBACF,EACAC,GAAG,aAAa1B,EAAI,eAAe,GAC1BwB,EAAkB,wBAA0B,OAErD,QAAQ,IAAI,6BAA8BA,EAAkB,uBAAyB,EAAE,EACvFR,EAAaQ,EAAkB,uBAAyB,KAI5D,GAAM,CAAE,mBAAAG,GAAoB,iBAAAC,EAAiB,EAAI,MAAMC,GAAa,CAClE,SAAAhB,EACA,aAAAJ,EACA,eAAgBT,EAAI,iBACpB,WAAAgB,EACA,cAAehB,EAAI,gBACnB,QAASA,EAAI,aACf,CAAC,EAEG8B,EAAa,GACjBC,GAAc,CAACJ,GAAoBC,EAAgB,CAAC,EACjD,KACCI,GACE,CAAC,CAACC,EAAmB,CAAE,YAAaC,CAAyB,CAAC,IAC5DD,IAAsBC,CAC1B,EACAC,GAAM,CACR,EACC,UAAU,IAAM,CACfL,EAAa,GACb,QAAQ,IAAI,eAAe,CAC7B,CAAC,EAEH,IAAMM,EAAS,IAAIC,GAEfrC,EAAI,YACNoC,EAAO,IAAIE,EAAOtC,EAAI,UAAU,CAAC,EAGnCoC,EAAO,IAAIG,GAAK,CAAC,EACjBH,EAAO,IACLI,EAAY,CACV,QAAS,IAAMV,CACjB,CAAC,CACH,EACAM,EAAO,IACLK,EAAQ,CACN,UAAW,IAAM,GACjB,QAAS,IAAMX,EACf,2BAAAV,EACA,2BAAAC,GACA,eAAgBrB,EAAI,gBACtB,CAAC,CACH,EACAoC,EAAO,IAAIM,EAAW,CAAC,EACvBN,EAAO,IAAIO,EAAU9B,CAAQ,CAAC,EAE9BuB,EAAO,IACLQ,GAAoB,CAClB,OAAQ,QACR,OAAQC,GAAgB,EACxB,cAAe,UAAa,CAC1B,aAAc,MAAMC,EAAmBjC,CAAQ,CACjD,EACF,CAAC,CACH,EAEAuB,EAAO,OAAO,CAAE,KAAMpC,EAAI,KAAM,KAAMA,EAAI,IAAK,CAAC,EAChD,QAAQ,IAAI,+CAA+CA,EAAI,QAAQA,EAAI,MAAM","names":["fs","z","eq","drizzle","Database","createPublicClient","fallback","webSocket","http","Koa","cors","createKoaMiddleware","createAppRouter","chainState","schemaVersion","syncToSqlite","asc","eq","buildTable","chainState","getTables","getAddress","decodeDynamicField","hexToResource","mapObject","getTablesWithRecords","database","chainId","address","filters","metadata","tableIds","filter","tablesWithRecords","table","sqliteTable","records","filteredRecords","record","keyTuple","resource","type","key","name","value","tablesWithRecordsToLogs","createQueryAdapter","database","opts","blockNumber","tables","getTablesWithRecords","logs","isDefined","combineLatest","filter","first","Router","compose","input","schemasTable","tablesWithRecordsToLogs","createBenchmark","apiRoutes","database","router","Router","compress","ctx","benchmark","createBenchmark","options","input","error","debug","schemasTable","blockNumber","tables","getTablesWithRecords","logs","tablesWithRecordsToLogs","compose","env","parseEnv","z","indexerEnvSchema","frontendEnvSchema","transports","webSocket","http","isDefined","publicClient","createPublicClient","fallback","chainId","database","drizzle","Database","startBlock","getCurrentChainState","chainState","eq","getLatestStoredBlockNumber","getDistanceFromFollowBlock","latestStoredBlockNumber","latestFollowBlock","currentChainState","schemaVersion","fs","latestBlockNumber$","storedBlockLogs$","syncToSqlite","isCaughtUp","combineLatest","filter","latestBlockNumber","lastBlockNumberProcessed","first","server","Koa","sentry","cors","healthcheck","metrics","helloWorld","apiRoutes","createKoaMiddleware","createAppRouter","createQueryAdapter"]}
1
+ {"version":3,"sources":["../../src/bin/sqlite-indexer.ts","../../src/sqlite/getTablesWithRecords.ts","../../src/sqlite/createQueryAdapter.ts","../../src/sqlite/apiRoutes.ts"],"sourcesContent":["#!/usr/bin/env node\nimport \"dotenv/config\";\nimport fs from \"node:fs\";\nimport { z } from \"zod\";\nimport { eq } from \"drizzle-orm\";\nimport { drizzle } from \"drizzle-orm/better-sqlite3\";\nimport Database from \"better-sqlite3\";\nimport { createPublicClient, fallback, webSocket, http, Transport } from \"viem\";\nimport Koa from \"koa\";\nimport cors from \"@koa/cors\";\nimport { createKoaMiddleware } from \"trpc-koa-adapter\";\nimport { createAppRouter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { chainState, schemaVersion, syncToSqlite } from \"@latticexyz/store-sync/sqlite\";\nimport { createQueryAdapter } from \"../sqlite/createQueryAdapter\";\nimport { isDefined } from \"@latticexyz/common/utils\";\nimport { combineLatest, filter, first } from \"rxjs\";\nimport { frontendEnvSchema, indexerEnvSchema, parseEnv } from \"./parseEnv\";\nimport { healthcheck } from \"../koa-middleware/healthcheck\";\nimport { helloWorld } from \"../koa-middleware/helloWorld\";\nimport { apiRoutes } from \"../sqlite/apiRoutes\";\nimport { sentry } from \"../koa-middleware/sentry\";\nimport { metrics } from \"../koa-middleware/metrics\";\n\nconst env = parseEnv(\n z.intersection(\n z.intersection(indexerEnvSchema, frontendEnvSchema),\n z.object({\n SQLITE_FILENAME: z.string().default(\"indexer.db\"),\n SENTRY_DSN: z.string().optional(),\n }),\n ),\n);\n\nconst transports: Transport[] = [\n // prefer WS when specified\n env.RPC_WS_URL ? webSocket(env.RPC_WS_URL) : undefined,\n // otherwise use or fallback to HTTP\n env.RPC_HTTP_URL ? http(env.RPC_HTTP_URL) : undefined,\n].filter(isDefined);\n\nconst publicClient = createPublicClient({\n transport: fallback(transports),\n pollingInterval: env.POLLING_INTERVAL,\n});\n\nconst chainId = await publicClient.getChainId();\nconst database = drizzle(new Database(env.SQLITE_FILENAME));\n\nlet startBlock = env.START_BLOCK;\n\nasync function getCurrentChainState(): Promise<\n | {\n schemaVersion: number;\n chainId: number;\n lastUpdatedBlockNumber: bigint | null;\n lastError: string | null;\n }\n | undefined\n> {\n // This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n try {\n const currentChainStates = database.select().from(chainState).where(eq(chainState.chainId, chainId)).all();\n // TODO: replace this type workaround with `noUncheckedIndexedAccess: true` when we can fix all the issues related (https://github.com/latticexyz/mud/issues/1212)\n const currentChainState: (typeof currentChainStates)[number] | undefined = currentChainStates[0];\n return currentChainState;\n } catch (error) {\n // ignore errors, this is optional\n }\n}\n\nasync function getLatestStoredBlockNumber(): Promise<bigint | undefined> {\n const currentChainState = await getCurrentChainState();\n return currentChainState?.lastUpdatedBlockNumber ?? undefined;\n}\n\nasync function getDistanceFromFollowBlock(): Promise<bigint> {\n const [latestStoredBlockNumber, latestFollowBlock] = await Promise.all([\n getLatestStoredBlockNumber(),\n publicClient.getBlock({ blockTag: env.FOLLOW_BLOCK_TAG }),\n ]);\n return latestFollowBlock.number - (latestStoredBlockNumber ?? 
-1n);\n}\n\nconst currentChainState = await getCurrentChainState();\nif (currentChainState) {\n // Reset the db if the version changed\n if (currentChainState.schemaVersion != schemaVersion) {\n console.log(\n \"schema version changed from\",\n currentChainState.schemaVersion,\n \"to\",\n schemaVersion,\n \"recreating database\",\n );\n fs.truncateSync(env.SQLITE_FILENAME);\n } else if (currentChainState.lastUpdatedBlockNumber != null) {\n // Resume from latest block stored in DB. This will throw if the DB doesn't exist yet, so we wrap in a try/catch and ignore the error.\n console.log(\"resuming from block number\", currentChainState.lastUpdatedBlockNumber + 1n);\n startBlock = currentChainState.lastUpdatedBlockNumber + 1n;\n }\n}\n\nconst { latestBlockNumber$, storedBlockLogs$ } = await syncToSqlite({\n database,\n publicClient,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n startBlock,\n maxBlockRange: env.MAX_BLOCK_RANGE,\n address: env.STORE_ADDRESS,\n});\n\nlet isCaughtUp = false;\ncombineLatest([latestBlockNumber$, storedBlockLogs$])\n .pipe(\n filter(\n ([latestBlockNumber, { blockNumber: lastBlockNumberProcessed }]) =>\n latestBlockNumber === lastBlockNumberProcessed,\n ),\n first(),\n )\n .subscribe(() => {\n isCaughtUp = true;\n console.log(\"all caught up\");\n });\n\nconst server = new Koa();\n\nif (env.SENTRY_DSN) {\n server.use(sentry(env.SENTRY_DSN));\n}\n\nserver.use(cors());\nserver.use(\n healthcheck({\n isReady: () => isCaughtUp,\n }),\n);\nserver.use(\n metrics({\n isHealthy: () => true,\n isReady: () => isCaughtUp,\n getLatestStoredBlockNumber,\n getDistanceFromFollowBlock,\n followBlockTag: env.FOLLOW_BLOCK_TAG,\n }),\n);\nserver.use(helloWorld());\nserver.use(apiRoutes(database));\n\nserver.use(\n createKoaMiddleware({\n prefix: \"/trpc\",\n router: createAppRouter(),\n createContext: async () => ({\n queryAdapter: await createQueryAdapter(database),\n }),\n }),\n);\n\nserver.listen({ host: env.HOST, port: env.PORT });\nconsole.log(`sqlite indexer frontend listening on http://${env.HOST}:${env.PORT}`);\n","import { asc, eq } from \"drizzle-orm\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { buildTable, chainState, getTables } from \"@latticexyz/store-sync/sqlite\";\nimport { Hex, getAddress } from \"viem\";\nimport { decodeDynamicField } from \"@latticexyz/protocol-parser/internal\";\nimport { SyncFilter, TableRecord, TableWithRecords } from \"@latticexyz/store-sync\";\nimport { hexToResource } from \"@latticexyz/common\";\nimport { mapObject } from \"@latticexyz/common/utils\";\n\n// TODO: refactor sqlite and replace this with getLogs to match postgres (https://github.com/latticexyz/mud/issues/1970)\n\n/**\n * @deprecated\n * */\nexport function getTablesWithRecords(\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n database: BaseSQLiteDatabase<\"sync\", any>,\n {\n chainId,\n address,\n filters = [],\n }: {\n readonly chainId: number;\n readonly address?: Hex;\n readonly filters?: readonly SyncFilter[];\n },\n): { blockNumber: bigint | null; tables: readonly TableWithRecords[] } {\n const metadata = database\n .select()\n .from(chainState)\n .where(eq(chainState.chainId, chainId))\n .limit(1)\n .all()\n .find(() => true);\n\n // If _any_ filter has a table ID, this will filter down all data to just those tables. 
Which mean we can't yet mix table filters with key-only filters.\n // TODO: improve this so we can express this in the query (need to be able to query data across tables more easily)\n const tableIds = Array.from(new Set(filters.map((filter) => filter.tableId)));\n const tables = getTables(database)\n .filter((table) => address == null || getAddress(address) === getAddress(table.address))\n .filter((table) => !tableIds.length || tableIds.includes(table.tableId));\n\n const tablesWithRecords = tables.map((table) => {\n const sqliteTable = buildTable(table);\n const records = database\n .select()\n .from(sqliteTable)\n .where(eq(sqliteTable.__isDeleted, false))\n .orderBy(\n asc(sqliteTable.__lastUpdatedBlockNumber),\n // TODO: add logIndex (https://github.com/latticexyz/mud/issues/1979)\n )\n .all();\n const filteredRecords = !filters.length\n ? records\n : records.filter((record) => {\n const keyTuple = decodeDynamicField(\"bytes32[]\", record.__key);\n return filters.some(\n (filter) =>\n filter.tableId === table.tableId &&\n (filter.key0 == null || filter.key0 === keyTuple[0]) &&\n (filter.key1 == null || filter.key1 === keyTuple[1]),\n );\n });\n const resource = hexToResource(table.tableId);\n return {\n ...table,\n type: resource.type as never,\n schema: mapObject({ ...table.keySchema, ...table.valueSchema }, (type) => ({ type, internalType: type })),\n key: Object.keys(table.keySchema),\n records: filteredRecords.map((record): TableRecord => {\n const key = Object.fromEntries(Object.entries(table.keySchema).map(([name]) => [name, record[name]]));\n const value = Object.fromEntries(Object.entries(table.valueSchema).map(([name]) => [name, record[name]]));\n return { key, value, fields: { ...key, ...value } };\n }),\n } satisfies TableWithRecords;\n });\n\n return {\n blockNumber: metadata?.lastUpdatedBlockNumber ?? null,\n tables: tablesWithRecords,\n };\n}\n","import { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\nimport { QueryAdapter } from \"@latticexyz/store-sync/trpc-indexer\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\n\n/**\n * Creates a storage adapter for the tRPC server/client to query data from SQLite.\n *\n * @param {BaseSQLiteDatabase<\"sync\", any>} database SQLite database object from Drizzle\n * @returns {Promise<QueryAdapter>} A set of methods used by tRPC endpoints.\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport async function createQueryAdapter(database: BaseSQLiteDatabase<\"sync\", any>): Promise<QueryAdapter> {\n const adapter: QueryAdapter = {\n async getLogs(opts) {\n const { blockNumber, tables } = getTablesWithRecords(database, opts);\n const logs = tablesWithRecordsToLogs(tables);\n return { blockNumber: blockNumber ?? 
0n, logs };\n },\n async findAll(opts) {\n return getTablesWithRecords(database, opts);\n },\n };\n return adapter;\n}\n","import { Middleware } from \"koa\";\nimport Router from \"@koa/router\";\nimport compose from \"koa-compose\";\nimport { input } from \"@latticexyz/store-sync/indexer-client\";\nimport { schemasTable, tablesWithRecordsToLogs } from \"@latticexyz/store-sync\";\nimport { debug } from \"../debug\";\nimport { createBenchmark } from \"@latticexyz/common\";\nimport { compress } from \"../koa-middleware/compress\";\nimport { getTablesWithRecords } from \"./getTablesWithRecords\";\nimport { BaseSQLiteDatabase } from \"drizzle-orm/sqlite-core\";\n\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nexport function apiRoutes(database: BaseSQLiteDatabase<\"sync\", any>): Middleware {\n const router = new Router();\n\n router.get(\"/api/logs\", compress(), async (ctx) => {\n const benchmark = createBenchmark(\"sqlite:logs\");\n\n let options: ReturnType<typeof input.parse>;\n\n try {\n options = input.parse(typeof ctx.query.input === \"string\" ? JSON.parse(ctx.query.input) : {});\n } catch (error) {\n ctx.status = 400;\n ctx.body = JSON.stringify(error);\n debug(error);\n return;\n }\n\n try {\n options.filters = options.filters.length > 0 ? [...options.filters, { tableId: schemasTable.tableId }] : [];\n benchmark(\"parse config\");\n const { blockNumber, tables } = getTablesWithRecords(database, options);\n benchmark(\"query tables with records\");\n const logs = tablesWithRecordsToLogs(tables);\n benchmark(\"convert records to logs\");\n\n ctx.body = JSON.stringify({ blockNumber: blockNumber?.toString() ?? \"-1\", logs });\n ctx.status = 200;\n } catch (error) {\n ctx.status = 500;\n ctx.body = JSON.stringify(error);\n debug(error);\n }\n });\n\n return compose([router.routes(), router.allowedMethods()]) as 
Middleware;\n}\n"],"mappings":";2QACA,MAAO,gBACP,OAAOA,OAAQ,UACf,OAAS,KAAAC,MAAS,MAClB,OAAS,MAAAC,OAAU,cACnB,OAAS,WAAAC,OAAe,6BACxB,OAAOC,OAAc,iBACrB,OAAS,sBAAAC,GAAoB,YAAAC,GAAU,aAAAC,GAAW,QAAAC,OAAuB,OACzE,OAAOC,OAAS,MAChB,OAAOC,OAAU,YACjB,OAAS,uBAAAC,OAA2B,mBACpC,OAAS,mBAAAC,OAAuB,sCAChC,OAAS,cAAAC,EAAY,iBAAAC,EAAe,gBAAAC,OAAoB,gCCZxD,OAAS,OAAAC,EAAK,MAAAC,MAAU,cAExB,OAAS,cAAAC,EAAY,cAAAC,EAAY,aAAAC,MAAiB,gCAClD,OAAc,cAAAC,MAAkB,OAChC,OAAS,sBAAAC,MAA0B,uCAEnC,OAAS,iBAAAC,MAAqB,qBAC9B,OAAS,aAAAC,MAAiB,2BAOnB,SAASC,EAEdC,EACA,CACE,QAAAC,EACA,QAAAC,EACA,QAAAC,EAAU,CAAC,CACb,EAKqE,CACrE,IAAMC,EAAWJ,EACd,OAAO,EACP,KAAKP,CAAU,EACf,MAAMF,EAAGE,EAAW,QAASQ,CAAO,CAAC,EACrC,MAAM,CAAC,EACP,IAAI,EACJ,KAAK,IAAM,EAAI,EAIZI,EAAW,MAAM,KAAK,IAAI,IAAIF,EAAQ,IAAKG,GAAWA,EAAO,OAAO,CAAC,CAAC,EAKtEC,EAJSb,EAAUM,CAAQ,EAC9B,OAAQQ,GAAUN,GAAW,MAAQP,EAAWO,CAAO,IAAMP,EAAWa,EAAM,OAAO,CAAC,EACtF,OAAQA,GAAU,CAACH,EAAS,QAAUA,EAAS,SAASG,EAAM,OAAO,CAAC,EAExC,IAAKA,GAAU,CAC9C,IAAMC,EAAcjB,EAAWgB,CAAK,EAC9BE,EAAUV,EACb,OAAO,EACP,KAAKS,CAAW,EAChB,MAAMlB,EAAGkB,EAAY,YAAa,EAAK,CAAC,EACxC,QACCnB,EAAImB,EAAY,wBAAwB,CAE1C,EACC,IAAI,EACDE,EAAmBR,EAAQ,OAE7BO,EAAQ,OAAQE,GAAW,CACzB,IAAMC,EAAWjB,EAAmB,YAAagB,EAAO,KAAK,EAC7D,OAAOT,EAAQ,KACZG,GACCA,EAAO,UAAYE,EAAM,UACxBF,EAAO,MAAQ,MAAQA,EAAO,OAASO,EAAS,CAAC,KACjDP,EAAO,MAAQ,MAAQA,EAAO,OAASO,EAAS,CAAC,EACtD,CACF,CAAC,EATDH,EAUEI,EAAWjB,EAAcW,EAAM,OAAO,EAC5C,MAAO,CACL,GAAGA,EACH,KAAMM,EAAS,KACf,OAAQhB,EAAU,CAAE,GAAGU,EAAM,UAAW,GAAGA,EAAM,WAAY,EAAIO,IAAU,CAAE,KAAAA,EAAM,aAAcA,CAAK,EAAE,EACxG,IAAK,OAAO,KAAKP,EAAM,SAAS,EAChC,QAASG,EAAgB,IAAKC,GAAwB,CACpD,IAAMI,EAAM,OAAO,YAAY,OAAO,QAAQR,EAAM,SAAS,EAAE,IAAI,CAAC,CAACS,CAAI,IAAM,CAACA,EAAML,EAAOK,CAAI,CAAC,CAAC,CAAC,EAC9FC,EAAQ,OAAO,YAAY,OAAO,QAAQV,EAAM,WAAW,EAAE,IAAI,CAAC,CAACS,CAAI,IAAM,CAACA,EAAML,EAAOK,CAAI,CAAC,CAAC,CAAC,EACxG,MAAO,CAAE,IAAAD,EAAK,MAAAE,EAAO,OAAQ,CAAE,GAAGF,EAAK,GAAGE,CAAM,CAAE,CACpD,CAAC,CACH,CACF,CAAC,EAED,MAAO,CACL,YAAad,GAAU,wBAA0B,KACjD,OAAQG,CACV,CACF,CC/EA,OAAS,2BAAAY,MAA+B,yBASxC,eAAsBC,EAAmBC,EAAkE,CAWzG,MAV8B,CAC5B,MAAM,QAAQC,EAAM,CAClB,GAAM,CAAE,YAAAC,EAAa,OAAAC,CAAO,EAAIC,EAAqBJ,EAAUC,CAAI,EAC7DI,EAAOP,EAAwBK,CAAM,EAC3C,MAAO,CAAE,YAAaD,GAAe,GAAI,KAAAG,CAAK,CAChD,EACA,MAAM,QAAQJ,EAAM,CAClB,OAAOG,EAAqBJ,EAAUC,CAAI,CAC5C,CACF,CAEF,CFVA,OAAS,aAAAK,OAAiB,2BAC1B,OAAS,iBAAAC,GAAe,UAAAC,GAAQ,SAAAC,OAAa,OGd7C,OAAOC,MAAY,cACnB,OAAOC,MAAa,cACpB,OAAS,SAAAC,MAAa,wCACtB,OAAS,gBAAAC,GAAc,2BAAAC,OAA+B,yBAEtD,OAAS,mBAAAC,OAAuB,qBAMzB,SAASC,EAAUC,EAAuD,CAC/E,IAAMC,EAAS,IAAIC,EAEnB,OAAAD,EAAO,IAAI,YAAaE,EAAS,EAAG,MAAOC,GAAQ,CACjD,IAAMC,EAAYC,GAAgB,aAAa,EAE3CC,EAEJ,GAAI,CACFA,EAAUC,EAAM,MAAM,OAAOJ,EAAI,MAAM,OAAU,SAAW,KAAK,MAAMA,EAAI,MAAM,KAAK,EAAI,CAAC,CAAC,CAC9F,OAASK,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,KAAO,KAAK,UAAUK,CAAK,EAC/BC,EAAMD,CAAK,EACX,MACF,CAEA,GAAI,CACFF,EAAQ,QAAUA,EAAQ,QAAQ,OAAS,EAAI,CAAC,GAAGA,EAAQ,QAAS,CAAE,QAASI,GAAa,OAAQ,CAAC,EAAI,CAAC,EAC1GN,EAAU,cAAc,EACxB,GAAM,CAAE,YAAAO,EAAa,OAAAC,CAAO,EAAIC,EAAqBd,EAAUO,CAAO,EACtEF,EAAU,2BAA2B,EACrC,IAAMU,EAAOC,GAAwBH,CAAM,EAC3CR,EAAU,yBAAyB,EAEnCD,EAAI,KAAO,KAAK,UAAU,CAAE,YAAaQ,GAAa,SAAS,GAAK,KAAM,KAAAG,CAAK,CAAC,EAChFX,EAAI,OAAS,GACf,OAASK,EAAP,CACAL,EAAI,OAAS,IACbA,EAAI,KAAO,KAAK,UAAUK,CAAK,EAC/BC,EAAMD,CAAK,CACb,CACF,CAAC,EAEMQ,EAAQ,CAAChB,EAAO,OAAO,EAAGA,EAAO,eAAe,CAAC,CAAC,CAC3D,CHxBA,IAAMiB,EAAMC,EACVC,EAAE,aACAA,EAAE,aAAaC,EAAkBC,CAAiB,EAClDF,EAAE,OAAO,CACP,gBAAiBA,EAAE,OAAO,EAAE,QAAQ,YAAY,EAChD,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,CACH,CACF,EAEMG,GAA0B,CAE9BL,EAAI,WAAaM,GAAUN,EAAI,UAAU,EAAI,OAE7CA,EAAI,aAAeO,GAAKP,EAAI,YAAY,EAAI,MAC9C,EAAE,OAAOQ,EAAS,EAEZC,EAAeC,GAAmB,
CACtC,UAAWC,GAASN,EAAU,EAC9B,gBAAiBL,EAAI,gBACvB,CAAC,EAEKY,GAAU,MAAMH,EAAa,WAAW,EACxCI,EAAWC,GAAQ,IAAIC,GAASf,EAAI,eAAe,CAAC,EAEtDgB,EAAahB,EAAI,YAErB,eAAeiB,GAQb,CAEA,GAAI,CAIF,OAH2BJ,EAAS,OAAO,EAAE,KAAKK,CAAU,EAAE,MAAMC,GAAGD,EAAW,QAASN,EAAO,CAAC,EAAE,IAAI,EAEX,CAAC,CAEjG,MAAE,CAEF,CACF,CAEA,eAAeQ,GAA0D,CAEvE,OAD0B,MAAMH,EAAqB,IAC3B,wBAA0B,MACtD,CAEA,eAAeI,IAA8C,CAC3D,GAAM,CAACC,EAAyBC,CAAiB,EAAI,MAAM,QAAQ,IAAI,CACrEH,EAA2B,EAC3BX,EAAa,SAAS,CAAE,SAAUT,EAAI,gBAAiB,CAAC,CAC1D,CAAC,EACD,OAAOuB,EAAkB,QAAUD,GAA2B,CAAC,GACjE,CAEA,IAAME,EAAoB,MAAMP,EAAqB,EACjDO,IAEEA,EAAkB,eAAiBC,GACrC,QAAQ,IACN,8BACAD,EAAkB,cAClB,KACAC,EACA,qBACF,EACAC,GAAG,aAAa1B,EAAI,eAAe,GAC1BwB,EAAkB,wBAA0B,OAErD,QAAQ,IAAI,6BAA8BA,EAAkB,uBAAyB,EAAE,EACvFR,EAAaQ,EAAkB,uBAAyB,KAI5D,GAAM,CAAE,mBAAAG,GAAoB,iBAAAC,EAAiB,EAAI,MAAMC,GAAa,CAClE,SAAAhB,EACA,aAAAJ,EACA,eAAgBT,EAAI,iBACpB,WAAAgB,EACA,cAAehB,EAAI,gBACnB,QAASA,EAAI,aACf,CAAC,EAEG8B,EAAa,GACjBC,GAAc,CAACJ,GAAoBC,EAAgB,CAAC,EACjD,KACCI,GACE,CAAC,CAACC,EAAmB,CAAE,YAAaC,CAAyB,CAAC,IAC5DD,IAAsBC,CAC1B,EACAC,GAAM,CACR,EACC,UAAU,IAAM,CACfL,EAAa,GACb,QAAQ,IAAI,eAAe,CAC7B,CAAC,EAEH,IAAMM,EAAS,IAAIC,GAEfrC,EAAI,YACNoC,EAAO,IAAIE,EAAOtC,EAAI,UAAU,CAAC,EAGnCoC,EAAO,IAAIG,GAAK,CAAC,EACjBH,EAAO,IACLI,EAAY,CACV,QAAS,IAAMV,CACjB,CAAC,CACH,EACAM,EAAO,IACLK,EAAQ,CACN,UAAW,IAAM,GACjB,QAAS,IAAMX,EACf,2BAAAV,EACA,2BAAAC,GACA,eAAgBrB,EAAI,gBACtB,CAAC,CACH,EACAoC,EAAO,IAAIM,EAAW,CAAC,EACvBN,EAAO,IAAIO,EAAU9B,CAAQ,CAAC,EAE9BuB,EAAO,IACLQ,GAAoB,CAClB,OAAQ,QACR,OAAQC,GAAgB,EACxB,cAAe,UAAa,CAC1B,aAAc,MAAMC,EAAmBjC,CAAQ,CACjD,EACF,CAAC,CACH,EAEAuB,EAAO,OAAO,CAAE,KAAMpC,EAAI,KAAM,KAAMA,EAAI,IAAK,CAAC,EAChD,QAAQ,IAAI,+CAA+CA,EAAI,QAAQA,EAAI,MAAM","names":["fs","z","eq","drizzle","Database","createPublicClient","fallback","webSocket","http","Koa","cors","createKoaMiddleware","createAppRouter","chainState","schemaVersion","syncToSqlite","asc","eq","buildTable","chainState","getTables","getAddress","decodeDynamicField","hexToResource","mapObject","getTablesWithRecords","database","chainId","address","filters","metadata","tableIds","filter","tablesWithRecords","table","sqliteTable","records","filteredRecords","record","keyTuple","resource","type","key","name","value","tablesWithRecordsToLogs","createQueryAdapter","database","opts","blockNumber","tables","getTablesWithRecords","logs","isDefined","combineLatest","filter","first","Router","compose","input","schemasTable","tablesWithRecordsToLogs","createBenchmark","apiRoutes","database","router","Router","compress","ctx","benchmark","createBenchmark","options","input","error","debug","schemasTable","blockNumber","tables","getTablesWithRecords","logs","tablesWithRecordsToLogs","compose","env","parseEnv","z","indexerEnvSchema","frontendEnvSchema","transports","webSocket","http","isDefined","publicClient","createPublicClient","fallback","chainId","database","drizzle","Database","startBlock","getCurrentChainState","chainState","eq","getLatestStoredBlockNumber","getDistanceFromFollowBlock","latestStoredBlockNumber","latestFollowBlock","currentChainState","schemaVersion","fs","latestBlockNumber$","storedBlockLogs$","syncToSqlite","isCaughtUp","combineLatest","filter","latestBlockNumber","lastBlockNumberProcessed","first","server","Koa","sentry","cors","healthcheck","metrics","helloWorld","apiRoutes","createKoaMiddleware","createAppRouter","createQueryAdapter"]}
@@ -0,0 +1,2 @@
1
+ import*as e from"@sentry/node";import{ProfilingIntegration as m}from"@sentry/profiling-node";import{stripUrlQueryAndFragment as l}from"@sentry/utils";import c from"debug";var d=c("mud:store-indexer"),p=c("mud:store-indexer");d.log=console.debug.bind(console);p.log=console.error.bind(console);import f from"koa-compose";function y(){return async function(r,o){try{await o()}catch(t){throw e.withScope(a=>{a.addEventProcessor(n=>e.addRequestDataToEvent(n,r.request)),e.captureException(t)}),t}}}function g(){return async function(r,o){await e.runWithAsyncContext(async()=>{e.getCurrentHub().configureScope(a=>a.addEventProcessor(n=>e.addRequestDataToEvent(n,r.request,{include:{user:!1}}))),await o()})}}function S(){return async function(r,o){let t=(r.method||"").toUpperCase(),a=r.url&&l(r.url),n;r.request.get("sentry-trace")&&(n=e.extractTraceparentData(r.request.get("sentry-trace")));let i=e.startTransaction({name:`${t} ${a}`,op:"http.server",...n});r.__sentry_transaction=i,e.getCurrentHub().configureScope(u=>{u.setSpan(i)}),r.res.on("finish",()=>{setImmediate(()=>{if(r._matchedRoute){let u=r.mountPath||"";i.setName(`${t} ${u}${r._matchedRoute}`)}i.setHttpStatus(r.status),i.finish()})}),await o()}}function v(s){return d("Initializing Sentry"),e.init({dsn:s,integrations:[...e.autoDiscoverNodePerformanceMonitoringIntegrations(),new m],tracesSampleRate:1,profilesSampleRate:1}),f([y(),g(),S()])}export{d as a,p as b,v as c};
2
+ //# sourceMappingURL=chunk-32XITDZW.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/koa-middleware/sentry.ts","../src/debug.ts"],"sourcesContent":["import * as Sentry from \"@sentry/node\";\nimport { ProfilingIntegration } from \"@sentry/profiling-node\";\nimport { stripUrlQueryAndFragment } from \"@sentry/utils\";\nimport { debug } from \"../debug\";\nimport Koa from \"koa\";\nimport compose from \"koa-compose\";\n\nexport function errorHandler(): Koa.Middleware {\n return async function errorHandlerMiddleware(ctx, next) {\n try {\n await next();\n } catch (err) {\n Sentry.withScope((scope) => {\n scope.addEventProcessor((event) => {\n return Sentry.addRequestDataToEvent(event, ctx.request);\n });\n Sentry.captureException(err);\n });\n throw err;\n }\n };\n}\n\nexport function requestHandler(): Koa.Middleware {\n return async function requestHandlerMiddleware(ctx, next) {\n await Sentry.runWithAsyncContext(async () => {\n const hub = Sentry.getCurrentHub();\n hub.configureScope((scope) =>\n scope.addEventProcessor((event) =>\n Sentry.addRequestDataToEvent(event, ctx.request, {\n include: {\n user: false,\n },\n }),\n ),\n );\n await next();\n });\n };\n}\n\nexport function tracing(): Koa.Middleware {\n // creates a Sentry transaction per request\n return async function tracingMiddleware(ctx, next) {\n const reqMethod = (ctx.method || \"\").toUpperCase();\n const reqUrl = ctx.url && stripUrlQueryAndFragment(ctx.url);\n\n // Connect to trace of upstream app\n let traceparentData;\n if (ctx.request.get(\"sentry-trace\")) {\n traceparentData = Sentry.extractTraceparentData(ctx.request.get(\"sentry-trace\"));\n }\n\n const transaction = Sentry.startTransaction({\n name: `${reqMethod} ${reqUrl}`,\n op: \"http.server\",\n ...traceparentData,\n });\n\n ctx.__sentry_transaction = transaction;\n\n // We put the transaction on the scope so users can attach children to it\n Sentry.getCurrentHub().configureScope((scope) => {\n scope.setSpan(transaction);\n });\n\n ctx.res.on(\"finish\", () => {\n // Push `transaction.finish` to the next event loop so open spans have a chance to finish before the transaction closes\n setImmediate(() => {\n // If you're using koa router, set the matched route as transaction name\n if (ctx._matchedRoute) {\n const mountPath = ctx.mountPath || \"\";\n transaction.setName(`${reqMethod} ${mountPath}${ctx._matchedRoute}`);\n }\n\n transaction.setHttpStatus(ctx.status);\n transaction.finish();\n });\n });\n\n await next();\n };\n}\n\nexport function sentry(dsn: string): Koa.Middleware {\n debug(\"Initializing Sentry\");\n Sentry.init({\n dsn,\n integrations: [\n // Automatically instrument Node.js libraries and frameworks\n ...Sentry.autoDiscoverNodePerformanceMonitoringIntegrations(),\n new ProfilingIntegration(),\n ],\n // Performance Monitoring\n tracesSampleRate: 1.0,\n // Set sampling rate for profiling - this is relative to tracesSampleRate\n profilesSampleRate: 1.0,\n });\n\n return compose([errorHandler(), requestHandler(), tracing()]);\n}\n","import createDebug from \"debug\";\n\nexport const debug = createDebug(\"mud:store-indexer\");\nexport const error = createDebug(\"mud:store-indexer\");\n\n// Pipe debug output to stdout instead of stderr\ndebug.log = console.debug.bind(console);\n\n// Pipe error output to stderr\nerror.log = 
console.error.bind(console);\n"],"mappings":"AAAA,UAAYA,MAAY,eACxB,OAAS,wBAAAC,MAA4B,yBACrC,OAAS,4BAAAC,MAAgC,gBCFzC,OAAOC,MAAiB,QAEjB,IAAMC,EAAQD,EAAY,mBAAmB,EACvCE,EAAQF,EAAY,mBAAmB,EAGpDC,EAAM,IAAM,QAAQ,MAAM,KAAK,OAAO,EAGtCC,EAAM,IAAM,QAAQ,MAAM,KAAK,OAAO,EDJtC,OAAOC,MAAa,cAEb,SAASC,GAA+B,CAC7C,OAAO,eAAsCC,EAAKC,EAAM,CACtD,GAAI,CACF,MAAMA,EAAK,CACb,OAASC,EAAP,CACA,MAAO,YAAWC,GAAU,CAC1BA,EAAM,kBAAmBC,GACT,wBAAsBA,EAAOJ,EAAI,OAAO,CACvD,EACM,mBAAiBE,CAAG,CAC7B,CAAC,EACKA,CACR,CACF,CACF,CAEO,SAASG,GAAiC,CAC/C,OAAO,eAAwCL,EAAKC,EAAM,CACxD,MAAa,sBAAoB,SAAY,CACxB,gBAAc,EAC7B,eAAgBE,GAClBA,EAAM,kBAAmBC,GAChB,wBAAsBA,EAAOJ,EAAI,QAAS,CAC/C,QAAS,CACP,KAAM,EACR,CACF,CAAC,CACH,CACF,EACA,MAAMC,EAAK,CACb,CAAC,CACH,CACF,CAEO,SAASK,GAA0B,CAExC,OAAO,eAAiCN,EAAKC,EAAM,CACjD,IAAMM,GAAaP,EAAI,QAAU,IAAI,YAAY,EAC3CQ,EAASR,EAAI,KAAOS,EAAyBT,EAAI,GAAG,EAGtDU,EACAV,EAAI,QAAQ,IAAI,cAAc,IAChCU,EAAyB,yBAAuBV,EAAI,QAAQ,IAAI,cAAc,CAAC,GAGjF,IAAMW,EAAqB,mBAAiB,CAC1C,KAAM,GAAGJ,KAAaC,IACtB,GAAI,cACJ,GAAGE,CACL,CAAC,EAEDV,EAAI,qBAAuBW,EAGpB,gBAAc,EAAE,eAAgBR,GAAU,CAC/CA,EAAM,QAAQQ,CAAW,CAC3B,CAAC,EAEDX,EAAI,IAAI,GAAG,SAAU,IAAM,CAEzB,aAAa,IAAM,CAEjB,GAAIA,EAAI,cAAe,CACrB,IAAMY,EAAYZ,EAAI,WAAa,GACnCW,EAAY,QAAQ,GAAGJ,KAAaK,IAAYZ,EAAI,eAAe,EAGrEW,EAAY,cAAcX,EAAI,MAAM,EACpCW,EAAY,OAAO,CACrB,CAAC,CACH,CAAC,EAED,MAAMV,EAAK,CACb,CACF,CAEO,SAASY,EAAOC,EAA6B,CAClD,OAAAC,EAAM,qBAAqB,EACpB,OAAK,CACV,IAAAD,EACA,aAAc,CAEZ,GAAU,oDAAkD,EAC5D,IAAIE,CACN,EAEA,iBAAkB,EAElB,mBAAoB,CACtB,CAAC,EAEMlB,EAAQ,CAACC,EAAa,EAAGM,EAAe,EAAGC,EAAQ,CAAC,CAAC,CAC9D","names":["Sentry","ProfilingIntegration","stripUrlQueryAndFragment","createDebug","debug","error","compose","errorHandler","ctx","next","err","scope","event","requestHandler","tracing","reqMethod","reqUrl","stripUrlQueryAndFragment","traceparentData","transaction","mountPath","sentry","dsn","debug","ProfilingIntegration"]}
@@ -4,4 +4,4 @@ Missing or invalid environment variables:
4
4
  ${Object.keys(r).join(`
5
5
  `)}
6
6
  `),process.exit(1)}throw o}}export{u as a,p as b,T as c};
7
- //# sourceMappingURL=chunk-7B4KE2DO.js.map
7
+ //# sourceMappingURL=chunk-M2HHWHGP.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/bin/parseEnv.ts"],"sourcesContent":["import { Hex, isHex } from \"viem\";\nimport { z, ZodError, ZodTypeAny } from \"zod\";\n\nexport const frontendEnvSchema = z.object({\n HOST: z.string().default(\"0.0.0.0\"),\n PORT: z.coerce.number().positive().default(3001),\n});\n\nfunction isHexOrUndefined(input: unknown): input is Hex | undefined {\n return input === undefined || isHex(input);\n}\n\nexport const indexerEnvSchema = z.intersection(\n z.object({\n FOLLOW_BLOCK_TAG: z.enum([\"latest\", \"safe\", \"finalized\"]).default(\"safe\"),\n START_BLOCK: z.coerce.bigint().nonnegative().default(0n),\n MAX_BLOCK_RANGE: z.coerce.bigint().positive().default(1000n),\n POLLING_INTERVAL: z.coerce.number().positive().default(1000),\n STORE_ADDRESS: z\n .string()\n .optional()\n .transform((input) => (input === \"\" ? undefined : input))\n .refine(isHexOrUndefined),\n }),\n z.union([\n z.object({\n RPC_HTTP_URL: z.string(),\n RPC_WS_URL: z.string().optional(),\n }),\n z.object({\n RPC_HTTP_URL: z.string().optional(),\n RPC_WS_URL: z.string(),\n }),\n ]),\n);\n\nexport function parseEnv<TSchema extends ZodTypeAny>(envSchema: TSchema): z.infer<TSchema> {\n try {\n return envSchema.parse(process.env);\n } catch (error) {\n if (error instanceof ZodError) {\n const { ...invalidEnvVars } = error.format();\n console.error(`\\nMissing or invalid environment variables:\\n\\n ${Object.keys(invalidEnvVars).join(\"\\n \")}\\n`);\n process.exit(1);\n }\n throw error;\n }\n}\n"],"mappings":"AAAA,OAAc,SAAAA,MAAa,OAC3B,OAAS,KAAAC,EAAG,YAAAC,MAA4B,MAEjC,IAAMC,EAAoBF,EAAE,OAAO,CACxC,KAAMA,EAAE,OAAO,EAAE,QAAQ,SAAS,EAClC,KAAMA,EAAE,OAAO,OAAO,EAAE,SAAS,EAAE,QAAQ,IAAI,CACjD,CAAC,EAED,SAASG,EAAiBC,EAA0C,CAClE,OAAOA,IAAU,QAAaL,EAAMK,CAAK,CAC3C,CAEO,IAAMC,EAAmBL,EAAE,aAChCA,EAAE,OAAO,CACP,iBAAkBA,EAAE,KAAK,CAAC,SAAU,OAAQ,WAAW,CAAC,EAAE,QAAQ,MAAM,EACxE,YAAaA,EAAE,OAAO,OAAO,EAAE,YAAY,EAAE,QAAQ,EAAE,EACvD,gBAAiBA,EAAE,OAAO,OAAO,EAAE,SAAS,EAAE,QAAQ,KAAK,EAC3D,iBAAkBA,EAAE,OAAO,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAI,EAC3D,cAAeA,EACZ,OAAO,EACP,SAAS,EACT,UAAWI,GAAWA,IAAU,GAAK,OAAYA,CAAM,EACvD,OAAOD,CAAgB,CAC5B,CAAC,EACDH,EAAE,MAAM,CACNA,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,EACDA,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EAAE,SAAS,EAClC,WAAYA,EAAE,OAAO,CACvB,CAAC,CACH,CAAC,CACH,EAEO,SAASM,EAAqCC,EAAsC,CACzF,GAAI,CACF,OAAOA,EAAU,MAAM,QAAQ,GAAG,CACpC,OAASC,EAAP,CACA,GAAIA,aAAiBP,EAAU,CAC7B,GAAM,CAAE,GAAGQ,CAAe,EAAID,EAAM,OAAO,EAC3C,QAAQ,MAAM;AAAA;AAAA;AAAA,IAAoD,OAAO,KAAKC,CAAc,EAAE,KAAK;AAAA,GAAM;AAAA,CAAK,EAC9G,QAAQ,KAAK,CAAC,EAEhB,MAAMD,CACR,CACF","names":["isHex","z","ZodError","frontendEnvSchema","isHexOrUndefined","input","indexerEnvSchema","parseEnv","envSchema","error","invalidEnvVars"]}
@@ -0,0 +1,2 @@
1
+ import{Stream as a}from"node:stream";import c from"accepts";import{createBrotliCompress as p,createDeflate as m,createGzip as f}from"node:zlib";import{includes as l}from"@latticexyz/common/utils";var d={br:p,gzip:f,deflate:m},i=Object.keys(d);function b(o,s){let e=0;return o.on("data",n=>{e+=n.length,e>s&&(e=0,o.flush())}),o}function E({flushThreshold:o=1024*4}={}){return async function(e,n){e.vary("Accept-Encoding"),await n();let r=c(e.req).encoding(i);if(!l(i,r))return;let t=b(d[r](),o);e.set("Content-Encoding",r),e.body=e.body instanceof a?e.body.pipe(t):t.end(e.body)}}export{E as a};
2
+ //# sourceMappingURL=chunk-R7HX5BT2.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"sources":["../src/koa-middleware/compress.ts"],"sourcesContent":["import { Middleware } from \"koa\";\nimport { Readable, Stream } from \"node:stream\";\nimport accepts from \"accepts\";\nimport { Zlib, createBrotliCompress, createDeflate, createGzip } from \"node:zlib\";\nimport { includes } from \"@latticexyz/common/utils\";\n\n// Loosely based on https://github.com/holic/koa-compress/blob/master/lib/index.js\n// with better handling of streams better with occasional flushing\n\nconst encodings = {\n br: createBrotliCompress,\n gzip: createGzip,\n deflate: createDeflate,\n} as const;\n\nconst encodingNames = Object.keys(encodings) as (keyof typeof encodings)[];\n\nfunction flushEvery<stream extends Zlib & Readable>(stream: stream, bytesThreshold: number): stream {\n let bytesSinceFlush = 0;\n stream.on(\"data\", (data) => {\n bytesSinceFlush += data.length;\n if (bytesSinceFlush > bytesThreshold) {\n bytesSinceFlush = 0;\n stream.flush();\n }\n });\n return stream;\n}\n\ntype CompressOptions = {\n flushThreshold?: number;\n};\n\nexport function compress({ flushThreshold = 1024 * 4 }: CompressOptions = {}): Middleware {\n return async function compressMiddleware(ctx, next) {\n ctx.vary(\"Accept-Encoding\");\n\n await next();\n\n const encoding = accepts(ctx.req).encoding(encodingNames);\n if (!includes(encodingNames, encoding)) return;\n\n const compressed = flushEvery(encodings[encoding](), flushThreshold);\n\n ctx.set(\"Content-Encoding\", encoding);\n ctx.body = ctx.body instanceof Stream ? ctx.body.pipe(compressed) : compressed.end(ctx.body);\n };\n}\n"],"mappings":"AACA,OAAmB,UAAAA,MAAc,cACjC,OAAOC,MAAa,UACpB,OAAe,wBAAAC,EAAsB,iBAAAC,EAAe,cAAAC,MAAkB,YACtE,OAAS,YAAAC,MAAgB,2BAKzB,IAAMC,EAAY,CAChB,GAAIJ,EACJ,KAAME,EACN,QAASD,CACX,EAEMI,EAAgB,OAAO,KAAKD,CAAS,EAE3C,SAASE,EAA2CC,EAAgBC,EAAgC,CAClG,IAAIC,EAAkB,EACtB,OAAAF,EAAO,GAAG,OAASG,GAAS,CAC1BD,GAAmBC,EAAK,OACpBD,EAAkBD,IACpBC,EAAkB,EAClBF,EAAO,MAAM,EAEjB,CAAC,EACMA,CACT,CAMO,SAASI,EAAS,CAAE,eAAAC,EAAiB,KAAO,CAAE,EAAqB,CAAC,EAAe,CACxF,OAAO,eAAkCC,EAAKC,EAAM,CAClDD,EAAI,KAAK,iBAAiB,EAE1B,MAAMC,EAAK,EAEX,IAAMC,EAAWhB,EAAQc,EAAI,GAAG,EAAE,SAASR,CAAa,EACxD,GAAI,CAACF,EAASE,EAAeU,CAAQ,EAAG,OAExC,IAAMC,EAAaV,EAAWF,EAAUW,CAAQ,EAAE,EAAGH,CAAc,EAEnEC,EAAI,IAAI,mBAAoBE,CAAQ,EACpCF,EAAI,KAAOA,EAAI,gBAAgBf,EAASe,EAAI,KAAK,KAAKG,CAAU,EAAIA,EAAW,IAAIH,EAAI,IAAI,CAC7F,CACF","names":["Stream","accepts","createBrotliCompress","createDeflate","createGzip","includes","encodings","encodingNames","flushEvery","stream","bytesThreshold","bytesSinceFlush","data","compress","flushThreshold","ctx","next","encoding","compressed"]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@latticexyz/store-indexer",
3
- "version": "2.2.12-pending-logs-72bb2264a91a3266f5f15bbd754d32a1e2a62ccb",
3
+ "version": "2.2.12",
4
4
  "description": "Minimal Typescript indexer for Store",
5
5
  "repository": {
6
6
  "type": "git",
@@ -15,17 +15,18 @@
15
15
  "typesVersions": {
16
16
  "*": {
17
17
  "index": [
18
- "./dist/src/index.d.ts"
18
+ "./dist/index.d.ts"
19
19
  ]
20
20
  }
21
21
  },
22
22
  "bin": {
23
- "postgres-decoded-indexer": "./dist/bin/postgres-decoded-indexer.js",
24
- "postgres-frontend": "./dist/bin/postgres-frontend.js",
25
- "postgres-indexer": "./dist/bin/postgres-indexer.js",
26
- "sqlite-indexer": "./dist/bin/sqlite-indexer.js"
23
+ "postgres-decoded-indexer": "./bin/postgres-decoded-indexer.js",
24
+ "postgres-frontend": "./bin/postgres-frontend.js",
25
+ "postgres-indexer": "./bin/postgres-indexer.js",
26
+ "sqlite-indexer": "./bin/sqlite-indexer.js"
27
27
  },
28
28
  "files": [
29
+ "bin",
29
30
  "dist"
30
31
  ],
31
32
  "dependencies": {
@@ -50,11 +51,11 @@
50
51
  "trpc-koa-adapter": "^1.1.3",
51
52
  "viem": "2.21.19",
52
53
  "zod": "3.23.8",
53
- "@latticexyz/block-logs-stream": "2.2.12-pending-logs-72bb2264a91a3266f5f15bbd754d32a1e2a62ccb",
54
- "@latticexyz/common": "2.2.12-pending-logs-72bb2264a91a3266f5f15bbd754d32a1e2a62ccb",
55
- "@latticexyz/protocol-parser": "2.2.12-pending-logs-72bb2264a91a3266f5f15bbd754d32a1e2a62ccb",
56
- "@latticexyz/store": "2.2.12-pending-logs-72bb2264a91a3266f5f15bbd754d32a1e2a62ccb",
57
- "@latticexyz/store-sync": "2.2.12-pending-logs-72bb2264a91a3266f5f15bbd754d32a1e2a62ccb"
54
+ "@latticexyz/block-logs-stream": "2.2.12",
55
+ "@latticexyz/common": "2.2.12",
56
+ "@latticexyz/protocol-parser": "2.2.12",
57
+ "@latticexyz/store": "2.2.12",
58
+ "@latticexyz/store-sync": "2.2.12"
58
59
  },
59
60
  "devDependencies": {
60
61
  "@types/accepts": "^1.3.7",
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../bin/parseEnv.ts"],"sourcesContent":["import { Hex, isHex } from \"viem\";\nimport { z, ZodError, ZodTypeAny } from \"zod\";\n\nexport const frontendEnvSchema = z.object({\n HOST: z.string().default(\"0.0.0.0\"),\n PORT: z.coerce.number().positive().default(3001),\n});\n\nfunction isHexOrUndefined(input: unknown): input is Hex | undefined {\n return input === undefined || isHex(input);\n}\n\nexport const indexerEnvSchema = z.intersection(\n z.object({\n FOLLOW_BLOCK_TAG: z.enum([\"latest\", \"safe\", \"finalized\"]).default(\"safe\"),\n START_BLOCK: z.coerce.bigint().nonnegative().default(0n),\n MAX_BLOCK_RANGE: z.coerce.bigint().positive().default(1000n),\n POLLING_INTERVAL: z.coerce.number().positive().default(1000),\n STORE_ADDRESS: z\n .string()\n .optional()\n .transform((input) => (input === \"\" ? undefined : input))\n .refine(isHexOrUndefined),\n }),\n z.union([\n z.object({\n RPC_HTTP_URL: z.string(),\n RPC_WS_URL: z.string().optional(),\n }),\n z.object({\n RPC_HTTP_URL: z.string().optional(),\n RPC_WS_URL: z.string(),\n }),\n ]),\n);\n\nexport function parseEnv<TSchema extends ZodTypeAny>(envSchema: TSchema): z.infer<TSchema> {\n try {\n return envSchema.parse(process.env);\n } catch (error) {\n if (error instanceof ZodError) {\n const { ...invalidEnvVars } = error.format();\n console.error(`\\nMissing or invalid environment variables:\\n\\n ${Object.keys(invalidEnvVars).join(\"\\n \")}\\n`);\n process.exit(1);\n }\n throw error;\n }\n}\n"],"mappings":"AAAA,OAAc,SAAAA,MAAa,OAC3B,OAAS,KAAAC,EAAG,YAAAC,MAA4B,MAEjC,IAAMC,EAAoBF,EAAE,OAAO,CACxC,KAAMA,EAAE,OAAO,EAAE,QAAQ,SAAS,EAClC,KAAMA,EAAE,OAAO,OAAO,EAAE,SAAS,EAAE,QAAQ,IAAI,CACjD,CAAC,EAED,SAASG,EAAiBC,EAA0C,CAClE,OAAOA,IAAU,QAAaL,EAAMK,CAAK,CAC3C,CAEO,IAAMC,EAAmBL,EAAE,aAChCA,EAAE,OAAO,CACP,iBAAkBA,EAAE,KAAK,CAAC,SAAU,OAAQ,WAAW,CAAC,EAAE,QAAQ,MAAM,EACxE,YAAaA,EAAE,OAAO,OAAO,EAAE,YAAY,EAAE,QAAQ,EAAE,EACvD,gBAAiBA,EAAE,OAAO,OAAO,EAAE,SAAS,EAAE,QAAQ,KAAK,EAC3D,iBAAkBA,EAAE,OAAO,OAAO,EAAE,SAAS,EAAE,QAAQ,GAAI,EAC3D,cAAeA,EACZ,OAAO,EACP,SAAS,EACT,UAAWI,GAAWA,IAAU,GAAK,OAAYA,CAAM,EACvD,OAAOD,CAAgB,CAC5B,CAAC,EACDH,EAAE,MAAM,CACNA,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EACvB,WAAYA,EAAE,OAAO,EAAE,SAAS,CAClC,CAAC,EACDA,EAAE,OAAO,CACP,aAAcA,EAAE,OAAO,EAAE,SAAS,EAClC,WAAYA,EAAE,OAAO,CACvB,CAAC,CACH,CAAC,CACH,EAEO,SAASM,EAAqCC,EAAsC,CACzF,GAAI,CACF,OAAOA,EAAU,MAAM,QAAQ,GAAG,CACpC,OAASC,EAAP,CACA,GAAIA,aAAiBP,EAAU,CAC7B,GAAM,CAAE,GAAGQ,CAAe,EAAID,EAAM,OAAO,EAC3C,QAAQ,MAAM;AAAA;AAAA;AAAA,IAAoD,OAAO,KAAKC,CAAc,EAAE,KAAK;AAAA,GAAM;AAAA,CAAK,EAC9G,QAAQ,KAAK,CAAC,EAEhB,MAAMD,CACR,CACF","names":["isHex","z","ZodError","frontendEnvSchema","isHexOrUndefined","input","indexerEnvSchema","parseEnv","envSchema","error","invalidEnvVars"]}
@@ -1,2 +0,0 @@
1
- import*as e from"@sentry/node";import{ProfilingIntegration as c}from"@sentry/profiling-node";import{stripUrlQueryAndFragment as d}from"@sentry/utils";import p from"debug";import m from"koa-compose";function y(){return async function(t,a){try{await a()}catch(r){throw e.withScope(o=>{o.addEventProcessor(n=>e.addRequestDataToEvent(n,t.request)),e.captureException(r)}),r}}}function f(){return async function(t,a){await e.runWithAsyncContext(async()=>{e.getCurrentHub().configureScope(o=>o.addEventProcessor(n=>e.addRequestDataToEvent(n,t.request,{include:{user:!1}}))),await a()})}}function l(){return async function(t,a){let r=(t.method||"").toUpperCase(),o=t.url&&d(t.url),n;t.request.get("sentry-trace")&&(n=e.extractTraceparentData(t.request.get("sentry-trace")));let i=e.startTransaction({name:`${r} ${o}`,op:"http.server",...n});t.__sentry_transaction=i,e.getCurrentHub().configureScope(u=>{u.setSpan(i)}),t.res.on("finish",()=>{setImmediate(()=>{if(t._matchedRoute){let u=t.mountPath||"";i.setName(`${r} ${u}${t._matchedRoute}`)}i.setHttpStatus(t.status),i.finish()})}),await a()}}function q(s){return p("Initializing Sentry"),e.init({dsn:s,integrations:[...e.autoDiscoverNodePerformanceMonitoringIntegrations(),new c],tracesSampleRate:1,profilesSampleRate:1}),m([y(),f(),l()])}export{q as a};
2
- //# sourceMappingURL=chunk-VCBWGHIO.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/koa-middleware/sentry.ts"],"sourcesContent":["import * as Sentry from \"@sentry/node\";\nimport { ProfilingIntegration } from \"@sentry/profiling-node\";\nimport { stripUrlQueryAndFragment } from \"@sentry/utils\";\nimport debug from \"debug\";\nimport Koa from \"koa\";\nimport compose from \"koa-compose\";\n\nexport function errorHandler(): Koa.Middleware {\n return async function errorHandlerMiddleware(ctx, next) {\n try {\n await next();\n } catch (err) {\n Sentry.withScope((scope) => {\n scope.addEventProcessor((event) => {\n return Sentry.addRequestDataToEvent(event, ctx.request);\n });\n Sentry.captureException(err);\n });\n throw err;\n }\n };\n}\n\nexport function requestHandler(): Koa.Middleware {\n return async function requestHandlerMiddleware(ctx, next) {\n await Sentry.runWithAsyncContext(async () => {\n const hub = Sentry.getCurrentHub();\n hub.configureScope((scope) =>\n scope.addEventProcessor((event) =>\n Sentry.addRequestDataToEvent(event, ctx.request, {\n include: {\n user: false,\n },\n }),\n ),\n );\n await next();\n });\n };\n}\n\nexport function tracing(): Koa.Middleware {\n // creates a Sentry transaction per request\n return async function tracingMiddleware(ctx, next) {\n const reqMethod = (ctx.method || \"\").toUpperCase();\n const reqUrl = ctx.url && stripUrlQueryAndFragment(ctx.url);\n\n // Connect to trace of upstream app\n let traceparentData;\n if (ctx.request.get(\"sentry-trace\")) {\n traceparentData = Sentry.extractTraceparentData(ctx.request.get(\"sentry-trace\"));\n }\n\n const transaction = Sentry.startTransaction({\n name: `${reqMethod} ${reqUrl}`,\n op: \"http.server\",\n ...traceparentData,\n });\n\n ctx.__sentry_transaction = transaction;\n\n // We put the transaction on the scope so users can attach children to it\n Sentry.getCurrentHub().configureScope((scope) => {\n scope.setSpan(transaction);\n });\n\n ctx.res.on(\"finish\", () => {\n // Push `transaction.finish` to the next event loop so open spans have a chance to finish before the transaction closes\n setImmediate(() => {\n // If you're using koa router, set the matched route as transaction name\n if (ctx._matchedRoute) {\n const mountPath = ctx.mountPath || \"\";\n transaction.setName(`${reqMethod} ${mountPath}${ctx._matchedRoute}`);\n }\n\n transaction.setHttpStatus(ctx.status);\n transaction.finish();\n });\n });\n\n await next();\n };\n}\n\nexport function sentry(dsn: string): Koa.Middleware {\n debug(\"Initializing Sentry\");\n Sentry.init({\n dsn,\n integrations: [\n // Automatically instrument Node.js libraries and frameworks\n ...Sentry.autoDiscoverNodePerformanceMonitoringIntegrations(),\n new ProfilingIntegration(),\n ],\n // Performance Monitoring\n tracesSampleRate: 1.0,\n // Set sampling rate for profiling - this is relative to tracesSampleRate\n profilesSampleRate: 1.0,\n });\n\n return compose([errorHandler(), requestHandler(), 
tracing()]);\n}\n"],"mappings":"AAAA,UAAYA,MAAY,eACxB,OAAS,wBAAAC,MAA4B,yBACrC,OAAS,4BAAAC,MAAgC,gBACzC,OAAOC,MAAW,QAElB,OAAOC,MAAa,cAEb,SAASC,GAA+B,CAC7C,OAAO,eAAsCC,EAAKC,EAAM,CACtD,GAAI,CACF,MAAMA,EAAK,CACb,OAASC,EAAP,CACA,MAAO,YAAWC,GAAU,CAC1BA,EAAM,kBAAmBC,GACT,wBAAsBA,EAAOJ,EAAI,OAAO,CACvD,EACM,mBAAiBE,CAAG,CAC7B,CAAC,EACKA,CACR,CACF,CACF,CAEO,SAASG,GAAiC,CAC/C,OAAO,eAAwCL,EAAKC,EAAM,CACxD,MAAa,sBAAoB,SAAY,CACxB,gBAAc,EAC7B,eAAgBE,GAClBA,EAAM,kBAAmBC,GAChB,wBAAsBA,EAAOJ,EAAI,QAAS,CAC/C,QAAS,CACP,KAAM,EACR,CACF,CAAC,CACH,CACF,EACA,MAAMC,EAAK,CACb,CAAC,CACH,CACF,CAEO,SAASK,GAA0B,CAExC,OAAO,eAAiCN,EAAKC,EAAM,CACjD,IAAMM,GAAaP,EAAI,QAAU,IAAI,YAAY,EAC3CQ,EAASR,EAAI,KAAOJ,EAAyBI,EAAI,GAAG,EAGtDS,EACAT,EAAI,QAAQ,IAAI,cAAc,IAChCS,EAAyB,yBAAuBT,EAAI,QAAQ,IAAI,cAAc,CAAC,GAGjF,IAAMU,EAAqB,mBAAiB,CAC1C,KAAM,GAAGH,KAAaC,IACtB,GAAI,cACJ,GAAGC,CACL,CAAC,EAEDT,EAAI,qBAAuBU,EAGpB,gBAAc,EAAE,eAAgBP,GAAU,CAC/CA,EAAM,QAAQO,CAAW,CAC3B,CAAC,EAEDV,EAAI,IAAI,GAAG,SAAU,IAAM,CAEzB,aAAa,IAAM,CAEjB,GAAIA,EAAI,cAAe,CACrB,IAAMW,EAAYX,EAAI,WAAa,GACnCU,EAAY,QAAQ,GAAGH,KAAaI,IAAYX,EAAI,eAAe,EAGrEU,EAAY,cAAcV,EAAI,MAAM,EACpCU,EAAY,OAAO,CACrB,CAAC,CACH,CAAC,EAED,MAAMT,EAAK,CACb,CACF,CAEO,SAASW,EAAOC,EAA6B,CAClD,OAAAhB,EAAM,qBAAqB,EACpB,OAAK,CACV,IAAAgB,EACA,aAAc,CAEZ,GAAU,oDAAkD,EAC5D,IAAIlB,CACN,EAEA,iBAAkB,EAElB,mBAAoB,CACtB,CAAC,EAEMG,EAAQ,CAACC,EAAa,EAAGM,EAAe,EAAGC,EAAQ,CAAC,CAAC,CAC9D","names":["Sentry","ProfilingIntegration","stripUrlQueryAndFragment","debug","compose","errorHandler","ctx","next","err","scope","event","requestHandler","tracing","reqMethod","reqUrl","traceparentData","transaction","mountPath","sentry","dsn"]}
@@ -1,2 +0,0 @@
1
- import i from"debug";var a=i("mud:store-indexer"),m=i("mud:store-indexer");a.log=console.debug.bind(console);m.log=console.error.bind(console);import{Stream as p}from"node:stream";import l from"accepts";import{createBrotliCompress as f,createDeflate as b,createGzip as u}from"node:zlib";import{includes as g}from"@latticexyz/common/utils";var c={br:f,gzip:u,deflate:b},d=Object.keys(c);function y(o,s){let e=0;return o.on("data",r=>{e+=r.length,e>s&&(e=0,o.flush())}),o}function z({flushThreshold:o=1024*4}={}){return async function(e,r){e.vary("Accept-Encoding"),await r();let n=l(e.req).encoding(d);if(!g(d,n))return;let t=y(c[n](),o);e.set("Content-Encoding",n),e.body=e.body instanceof p?e.body.pipe(t):t.end(e.body)}}export{a,m as b,z as c};
2
- //# sourceMappingURL=chunk-ZS3IQEZ4.js.map
@@ -1 +0,0 @@
1
- {"version":3,"sources":["../src/debug.ts","../src/koa-middleware/compress.ts"],"sourcesContent":["import createDebug from \"debug\";\n\nexport const debug = createDebug(\"mud:store-indexer\");\nexport const error = createDebug(\"mud:store-indexer\");\n\n// Pipe debug output to stdout instead of stderr\ndebug.log = console.debug.bind(console);\n\n// Pipe error output to stderr\nerror.log = console.error.bind(console);\n","import { Middleware } from \"koa\";\nimport { Readable, Stream } from \"node:stream\";\nimport accepts from \"accepts\";\nimport { Zlib, createBrotliCompress, createDeflate, createGzip } from \"node:zlib\";\nimport { includes } from \"@latticexyz/common/utils\";\n\n// Loosely based on https://github.com/holic/koa-compress/blob/master/lib/index.js\n// with better handling of streams better with occasional flushing\n\nconst encodings = {\n br: createBrotliCompress,\n gzip: createGzip,\n deflate: createDeflate,\n} as const;\n\nconst encodingNames = Object.keys(encodings) as (keyof typeof encodings)[];\n\nfunction flushEvery<stream extends Zlib & Readable>(stream: stream, bytesThreshold: number): stream {\n let bytesSinceFlush = 0;\n stream.on(\"data\", (data) => {\n bytesSinceFlush += data.length;\n if (bytesSinceFlush > bytesThreshold) {\n bytesSinceFlush = 0;\n stream.flush();\n }\n });\n return stream;\n}\n\ntype CompressOptions = {\n flushThreshold?: number;\n};\n\nexport function compress({ flushThreshold = 1024 * 4 }: CompressOptions = {}): Middleware {\n return async function compressMiddleware(ctx, next) {\n ctx.vary(\"Accept-Encoding\");\n\n await next();\n\n const encoding = accepts(ctx.req).encoding(encodingNames);\n if (!includes(encodingNames, encoding)) return;\n\n const compressed = flushEvery(encodings[encoding](), flushThreshold);\n\n ctx.set(\"Content-Encoding\", encoding);\n ctx.body = ctx.body instanceof Stream ? ctx.body.pipe(compressed) : compressed.end(ctx.body);\n };\n}\n"],"mappings":"AAAA,OAAOA,MAAiB,QAEjB,IAAMC,EAAQD,EAAY,mBAAmB,EACvCE,EAAQF,EAAY,mBAAmB,EAGpDC,EAAM,IAAM,QAAQ,MAAM,KAAK,OAAO,EAGtCC,EAAM,IAAM,QAAQ,MAAM,KAAK,OAAO,ECRtC,OAAmB,UAAAC,MAAc,cACjC,OAAOC,MAAa,UACpB,OAAe,wBAAAC,EAAsB,iBAAAC,EAAe,cAAAC,MAAkB,YACtE,OAAS,YAAAC,MAAgB,2BAKzB,IAAMC,EAAY,CAChB,GAAIJ,EACJ,KAAME,EACN,QAASD,CACX,EAEMI,EAAgB,OAAO,KAAKD,CAAS,EAE3C,SAASE,EAA2CC,EAAgBC,EAAgC,CAClG,IAAIC,EAAkB,EACtB,OAAAF,EAAO,GAAG,OAASG,GAAS,CAC1BD,GAAmBC,EAAK,OACpBD,EAAkBD,IACpBC,EAAkB,EAClBF,EAAO,MAAM,EAEjB,CAAC,EACMA,CACT,CAMO,SAASI,EAAS,CAAE,eAAAC,EAAiB,KAAO,CAAE,EAAqB,CAAC,EAAe,CACxF,OAAO,eAAkCC,EAAKC,EAAM,CAClDD,EAAI,KAAK,iBAAiB,EAE1B,MAAMC,EAAK,EAEX,IAAMC,EAAWhB,EAAQc,EAAI,GAAG,EAAE,SAASR,CAAa,EACxD,GAAI,CAACF,EAASE,EAAeU,CAAQ,EAAG,OAExC,IAAMC,EAAaV,EAAWF,EAAUW,CAAQ,EAAE,EAAGH,CAAc,EAEnEC,EAAI,IAAI,mBAAoBE,CAAQ,EACpCF,EAAI,KAAOA,EAAI,gBAAgBf,EAASe,EAAI,KAAK,KAAKG,CAAU,EAAIA,EAAW,IAAIH,EAAI,IAAI,CAC7F,CACF","names":["createDebug","debug","error","Stream","accepts","createBrotliCompress","createDeflate","createGzip","includes","encodings","encodingNames","flushEvery","stream","bytesThreshold","bytesSinceFlush","data","compress","flushThreshold","ctx","next","encoding","compressed"]}