@sentio/runtime 2.62.0-rc.8 → 2.62.0-rc.9

This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -10,7 +10,7 @@ import {
  require_cjs,
  require_lib3 as require_lib,
  require_lib4 as require_lib2
- } from "./chunk-YBKSM3GO.js";
+ } from "./chunk-AHIIUVQL.js";
  import "./chunk-I5YHR3CE.js";
  import "./chunk-W3VN25ER.js";
  import {
@@ -1 +1 @@
- {"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import { DeepPartial, Empty, ProcessStreamRequest, ProcessStreamResponse, StartRequest } from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { ProcessorServiceImpl } from './service.js'\nimport { MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { Subject } from 'rxjs'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n .on('exit', () => {\n console.info('Worker thread exiting, threadId:', threadId)\n })\n\nlet service: ProcessorServiceImpl | undefined\n\nconst loader = async (options: any) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst emptyCallContext = <CallContext>{}\n\nasync function start(request: StartRequest, options: any): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n service = new ProcessorServiceImpl(() => loader(options), options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await service.start(request, emptyCallContext)\n started = true\n return {}\n}\n\nexport default async function ({\n processId,\n request: firstRequest,\n workerPort\n}: {\n processId: number\n request: ProcessStreamRequest\n workerPort: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options.logFormat === 'json', logLevel === 'debug' ? 
true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker', threadId, ' started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await service?.getConfig(configRequest, emptyCallContext)\n console.debug('worker', threadId, ' configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n console.error('Unhandled exception/rejection in previous request:', err)\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n const timeout = (options.workerTimeout || 0) * 1000 // convert to milliseconds\n const enablePartition = options.enablePartition || false\n await new Promise<void>((resolve, reject) => {\n const subject = new Subject<DeepPartial<ProcessStreamResponse>>()\n let timeoutId: NodeJS.Timeout | undefined = undefined\n subject.subscribe((resp: ProcessStreamResponse) => {\n console.debug('Worker', threadId, 'send response:', resp.result ? 'result' : 'dbResult')\n workerPort.postMessage(resp)\n // receive the response from the processor , close and resolve the promise\n if (resp.result) {\n if (timeoutId) clearTimeout(timeoutId)\n resolve()\n workerPort.close()\n }\n })\n workerPort.on('message', (msg: ProcessStreamRequest) => {\n const request = msg as ProcessStreamRequest\n console.debug('Worker', threadId, 'received request:', request.start ? 'start' : 'dbResult')\n service?.handleRequest(request, firstRequest.binding, subject)\n if (enablePartition && request.start && timeout > 0) {\n timeoutId = setTimeout(async () => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n console.debug('Worker', threadId, 'handle request: binding')\n service?.handleRequest(firstRequest, firstRequest.binding, subject)\n if (!enablePartition && timeout > 0) {\n timeoutId = setTimeout(() => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n}\n;import(\"node:process\").then((p) => 
p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;;;AACA,uBAAiD;AAGjD,qCAA2C;AAE3C,SAAsB,gBAAgB;AACtC,SAAS,eAAe;AAGxB,kBAAwB;AAExB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC,EACA,GAAG,QAAQ,MAAM;AAChB,UAAQ,KAAK,oCAAoC,QAAQ;AAC3D,CAAC;AAEH,IAAI;AAEJ,IAAM,SAAS,OAAO,YAAiB;AACrC,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAgC,CAAC;AAEvC,eAAe,MAAM,SAAuB,SAA8B;AACxE,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,cAAU,IAAI,qBAAqB,MAAM,OAAO,OAAO,GAAG,OAAO;AAAA,EACnE,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,QAAQ,MAAM,SAAS,gBAAgB;AAC7C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA,SAAS;AAAA,EACT;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,cAAc,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAE/F,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,UAAU,UAAU,gCAAgC,aAAa,mBAAmB,MAAM;AAAA,IAC1G;AAEA,QAAI,eAAe;AACjB,YAAM,SAAS,UAAU,eAAe,gBAAgB;AACxD,cAAQ,MAAM,UAAU,UAAU,aAAa;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,YAAQ,MAAM,sDAAsD,GAAG;AACvE,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,QAAM,WAAW,QAAQ,iBAAiB,KAAK;AAC/C,QAAM,kBAAkB,QAAQ,mBAAmB;AACnD,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,UAAM,UAAU,IAAI,oBAA4C;AAChE,QAAI,YAAwC;AAC5C,YAAQ,UAAU,CAAC,SAAgC;AACjD,cAAQ,MAAM,UAAU,UAAU,kBAAkB,KAAK,SAAS,WAAW,UAAU;AACvF,iBAAW,YAAY,IAAI;AAE3B,UAAI,KAAK,QAAQ;AACf,YAAI,UAAW,cAAa,SAAS;AACrC,gBAAQ;AACR,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,eAAW,GAAG,WAAW,CAAC,QAA8B;AACtD,YAAM,UAAU;AAChB,cAAQ,MAAM,UAAU,UAAU,qBAAqB,QAAQ,QAAQ,UAAU,UAAU;AAC3F,eAAS,cAAc,SAAS,aAAa,SAAS,OAAO;AAC7D,UAAI,mBAAmB,QAAQ,SAAS,UAAU,GAAG;AACnD,oBAAY,WAAW,YAAY;AACjC,iBAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,QACjF,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AACD,YAAQ,MAAM,UAAU,UAAU,yBAAyB;AAC3D,aAAS,cAAc,cAAc,aAAa,SAAS,OAAO;AAClE,QAAI,CAAC,mBAAmB,UAAU,GAAG;AACnC,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,MACjF,GAAG,OAAO;AAAA,IACZ;AAAA,EACF,CAAC;AACH;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
+ {"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import { DeepPartial, Empty, ProcessStreamRequest, ProcessStreamResponse, StartRequest } from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { ProcessorServiceImpl } from './service.js'\nimport { MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { Subject } from 'rxjs'\nimport { ProcessorRuntimeOptions } from 'processor-runner-program.js'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n .on('exit', () => {\n console.info('Worker thread exiting, threadId:', threadId)\n })\n\nlet service: ProcessorServiceImpl | undefined\n\nconst loader = async (options: ProcessorRuntimeOptions) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst emptyCallContext = <CallContext>{}\n\nasync function start(request: StartRequest, options: ProcessorRuntimeOptions): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n service = new ProcessorServiceImpl(() => loader(options), options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await service.start(request, emptyCallContext)\n started = true\n return {}\n}\n\nexport default async function ({\n processId,\n request: firstRequest,\n workerPort\n}: {\n processId: number\n request: ProcessStreamRequest\n workerPort: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options.logFormat === 'json', logLevel === 'debug' ? 
true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker', threadId, ' started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await service?.getConfig(configRequest, emptyCallContext)\n console.debug('worker', threadId, ' configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n console.error('Unhandled exception/rejection in previous request:', err)\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n const timeout = (options.workerTimeout || 0) * 1000 // convert to milliseconds\n const enablePartition = options.enablePartition || false\n await new Promise<void>((resolve, reject) => {\n const subject = new Subject<DeepPartial<ProcessStreamResponse>>()\n let timeoutId: NodeJS.Timeout | undefined = undefined\n subject.subscribe((resp: ProcessStreamResponse) => {\n console.debug('Worker', threadId, 'send response:', resp.result ? 'result' : 'dbResult')\n workerPort.postMessage(resp)\n // receive the response from the processor , close and resolve the promise\n if (resp.result) {\n if (timeoutId) clearTimeout(timeoutId)\n resolve()\n workerPort.close()\n }\n })\n workerPort.on('message', (msg: ProcessStreamRequest) => {\n const request = msg as ProcessStreamRequest\n console.debug('Worker', threadId, 'received request:', request.start ? 'start' : 'dbResult')\n service?.handleRequest(request, firstRequest.binding, subject)\n if (enablePartition && request.start && timeout > 0) {\n timeoutId = setTimeout(async () => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n console.debug('Worker', threadId, 'handle request: binding')\n service?.handleRequest(firstRequest, firstRequest.binding, subject)\n if (!enablePartition && timeout > 0) {\n timeoutId = setTimeout(() => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n}\n;import(\"node:process\").then((p) => 
p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;;;AACA,uBAAiD;AAGjD,qCAA2C;AAE3C,SAAsB,gBAAgB;AACtC,SAAS,eAAe;AAGxB,kBAAwB;AAGxB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC,EACA,GAAG,QAAQ,MAAM;AAChB,UAAQ,KAAK,oCAAoC,QAAQ;AAC3D,CAAC;AAEH,IAAI;AAEJ,IAAM,SAAS,OAAO,YAAqC;AACzD,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAgC,CAAC;AAEvC,eAAe,MAAM,SAAuB,SAAkD;AAC5F,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,cAAU,IAAI,qBAAqB,MAAM,OAAO,OAAO,GAAG,OAAO;AAAA,EACnE,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,QAAQ,MAAM,SAAS,gBAAgB;AAC7C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA,SAAS;AAAA,EACT;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,cAAc,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAE/F,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,UAAU,UAAU,gCAAgC,aAAa,mBAAmB,MAAM;AAAA,IAC1G;AAEA,QAAI,eAAe;AACjB,YAAM,SAAS,UAAU,eAAe,gBAAgB;AACxD,cAAQ,MAAM,UAAU,UAAU,aAAa;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,YAAQ,MAAM,sDAAsD,GAAG;AACvE,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,QAAM,WAAW,QAAQ,iBAAiB,KAAK;AAC/C,QAAM,kBAAkB,QAAQ,mBAAmB;AACnD,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,UAAM,UAAU,IAAI,oBAA4C;AAChE,QAAI,YAAwC;AAC5C,YAAQ,UAAU,CAAC,SAAgC;AACjD,cAAQ,MAAM,UAAU,UAAU,kBAAkB,KAAK,SAAS,WAAW,UAAU;AACvF,iBAAW,YAAY,IAAI;AAE3B,UAAI,KAAK,QAAQ;AACf,YAAI,UAAW,cAAa,SAAS;AACrC,gBAAQ;AACR,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,eAAW,GAAG,WAAW,CAAC,QAA8B;AACtD,YAAM,UAAU;AAChB,cAAQ,MAAM,UAAU,UAAU,qBAAqB,QAAQ,QAAQ,UAAU,UAAU;AAC3F,eAAS,cAAc,SAAS,aAAa,SAAS,OAAO;AAC7D,UAAI,mBAAmB,QAAQ,SAAS,UAAU,GAAG;AACnD,oBAAY,WAAW,YAAY;AACjC,iBAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,QACjF,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AACD,YAAQ,MAAM,UAAU,UAAU,yBAAyB;AAC3D,aAAS,cAAc,cAAc,aAAa,SAAS,OAAO;AAClE,QAAI,CAAC,mBAAmB,UAAU,GAAG;AACnC,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,MACjF,GAAG,OAAO;AAAA,IACZ;AAAA,EACF,CAAC;AACH;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@sentio/runtime",
- "version": "2.62.0-rc.8",
+ "version": "2.62.0-rc.9",
  "license": "Apache-2.0",
  "type": "module",
  "exports": {
@@ -18,6 +18,7 @@
  "piscina": "5.1.3"
  },
  "devDependencies": {
+ "@commander-js/extra-typings": "^14.0.0",
  "@types/fs-extra": "^11.0.4"
  },
  "engines": {
@@ -31,7 +32,7 @@
  "run": "tsx src/processor-runner.ts --log-format=json",
  "run-benchmark": "tsx src/decode-benchmark.ts",
  "start_js": "tsx ./lib/processor-runner.js $PWD/../../debug/dist/lib.js",
- "start_ts": "tsx src/processor-runner.ts --log-format=json $PWD/../../examples/x2y2/src/processor.ts",
+ "start_ts": "tsx src/processor-runner.ts --debug --chains-config chains-config.json $PWD/../../examples/x2y2/src/processor.ts",
  "test": "glob -c 'tsx --test' '**/*.test.ts'"
  }
  }
@@ -0,0 +1,57 @@
+ import { Command, InvalidArgumentError } from '@commander-js/extra-typings'
+
+ let workerNum = 1
+ try {
+ workerNum = parseInt(process.env['PROCESSOR_WORKER']?.trim() ?? '1')
+ } catch (e) {
+ console.error('Failed to parse worker number', e)
+ }
+
+ function myParseInt(value: string, dummyPrevious: number): number {
+ // parseInt takes a string and a radix
+ const parsedValue = parseInt(value, 10)
+ if (isNaN(parsedValue)) {
+ throw new InvalidArgumentError('Not a number.')
+ }
+ return parsedValue
+ }
+
+ export const program = new Command('processor-runner')
+ .allowUnknownOption()
+ .allowExcessArguments()
+ .name('processor-runner')
+ .description('Sentio Processor Runtime')
+ .argument('<target>', 'Path to the processor module to load')
+ .option('-p, --port <port>', 'Port to listen on', '4000')
+ .option('--concurrency <number>', 'Number of concurrent workers', myParseInt, 4)
+ .option('--batch-count <number>', 'Batch count for processing', myParseInt, 1)
+ .option('-c, --chains-config <path>', 'Path to chains configuration file', 'chains-config.json')
+ .option('--chainquery-server <url>', 'Chain query server URL')
+ .option('--pricefeed-server <url>', 'Price feed server URL')
+ .option('--log-format <format>', 'Log format (console|json)', 'console')
+ .option('--debug', 'Enable debug mode')
+ .option('--otlp-debug', 'Enable OTLP debug mode')
+ .option('--start-action-server', 'Start action server instead of processor server')
+ .option('--worker <number>', 'Number of worker threads', myParseInt, workerNum)
+ .option('--process-timeout <seconds>', 'Process timeout in seconds', myParseInt, 60)
+ .option(
+ '--worker-timeout <seconds>',
+ 'Worker timeout in seconds',
+ myParseInt,
+ parseInt(process.env['WORKER_TIMEOUT_SECONDS'] || '60')
+ )
+ .option(
+ '--enable-partition',
+ 'Enable binding data partition',
+ process.env['SENTIO_ENABLE_BINDING_DATA_PARTITION'] === 'true'
+ )
+
+ export type ProcessorRuntimeOptions = ReturnType<typeof program.opts> & { target: string }
+
+ export function getTestConfig(config?: Partial<ProcessorRuntimeOptions>): ProcessorRuntimeOptions {
+ return {
+ ...program.opts(),
+ target: './test-processor.test.js',
+ ...config
+ }
+ }
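The new file above centralizes the Commander CLI definition so the runner, the service classes, and the worker threads can share one typed options shape. A minimal consumer sketch under stated assumptions (the './' relative specifier and the camelCase keys chainsConfig and worker come from Commander's option-name mapping; none of this code is part of the package):

// Hypothetical consumer of the shared CLI program (illustration only).
import { program, ProcessorRuntimeOptions } from './processor-runner-program.js'

// Parse process.argv with the shared definition.
program.parse()

// Combine the parsed flags with the positional <target> argument,
// mirroring how the runner builds its options in this release.
const options: ProcessorRuntimeOptions = {
  ...program.opts(),
  target: program.args[program.args.length - 1]
}

console.debug('chains config:', options.chainsConfig, 'workers:', options.worker)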
@@ -3,7 +3,7 @@
  import fs from 'fs-extra'

  import { compressionAlgorithms } from '@grpc/grpc-js'
- import { Command, InvalidArgumentError } from 'commander'
+
  import { createServer } from 'nice-grpc'
  import { errorDetailsServerMiddleware } from 'nice-grpc-error-details'
  // import { registry as niceGrpcRegistry } from 'nice-grpc-prometheus'
@@ -21,76 +21,21 @@ import { setupLogger } from './logger.js'
  import { setupOTLP } from './otlp.js'
  import { ActionServer } from './action-server.js'
  import { ServiceManager } from './service-manager.js'
- import path from 'path'
  import { ProcessorV3Definition } from '@sentio/protos'
  import { ProcessorServiceImplV3 } from './service-v3.js'
- import { readFileSync } from 'fs'
- import { fileURLToPath } from 'url'
  import { dirname, join } from 'path'
+ import { program, ProcessorRuntimeOptions } from 'processor-runner-program.js'

- const __filename = fileURLToPath(import.meta.url)
- const __dirname = dirname(__filename)
- const packageJson = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf8'))
-
- // const mergedRegistry = Registry.merge([globalRegistry, niceGrpcRegistry])
+ program.parse()

- let workerNum = 1
- try {
- workerNum = parseInt(process.env['PROCESSOR_WORKER']?.trim() ?? '1')
- } catch (e) {
- console.error('Failed to parse worker number', e)
- }
-
- function myParseInt(value: string, dummyPrevious: unknown): number {
- // parseInt takes a string and a radix
- const parsedValue = parseInt(value, 10)
- if (isNaN(parsedValue)) {
- throw new InvalidArgumentError('Not a number.')
- }
- return parsedValue
+ const options: ProcessorRuntimeOptions = {
+ ...program.opts(),
+ target: program.args[program.args.length - 1]
  }

- // Create Commander.js program
- const program = new Command()
-
- program
- .allowUnknownOption()
- // .allowExcessArguments()
- .name('processor-runner')
- .description('Sentio Processor Runtime')
- .version(packageJson.version)
- .argument('<target>', 'Path to the processor module to load')
- .option('-p, --port <port>', 'Port to listen on', '4000')
- .option('--concurrency <number>', 'Number of concurrent workers', myParseInt, 4)
- .option('--batch-count <number>', 'Batch count for processing', myParseInt, 1)
- .option('-c, --chains-config <path>', 'Path to chains configuration file', 'chains-config.json')
- .option('--chainquery-server <url>', 'Chain query server URL', '')
- .option('--pricefeed-server <url>', 'Price feed server URL', '')
- .option('--log-format <format>', 'Log format (console|json)', 'console')
- .option('--debug', 'Enable debug mode', false)
- .option('--otlp-debug', 'Enable OTLP debug mode', false)
- .option('--start-action-server', 'Start action server instead of processor server', false)
- .option('--worker <number>', 'Number of worker threads', myParseInt, workerNum)
- .option('--process-timeout <seconds>', 'Process timeout in seconds', myParseInt, 60)
- .option(
- '--worker-timeout <seconds>',
- 'Worker timeout in seconds',
- myParseInt,
- parseInt(process.env['WORKER_TIMEOUT_SECONDS'] || '60')
- )
- .option(
- '--enable-partition',
- 'Enable binding data partition',
- process.env['SENTIO_ENABLE_BINDING_DATA_PARTITION'] === 'true'
- )
- .parse()
-
- const options = program.opts()
- options.target = program.processedArgs[0]
-
  const logLevel = process.env['LOG_LEVEL']?.toLowerCase()

- setupLogger(options.logFormat === 'json', logLevel === 'debug' ? true : options.debug)
+ setupLogger(options.logFormat === 'json', logLevel === 'debug' ? true : options.debug!)
  console.debug('Starting with', options.target)

  await setupOTLP(options.otlpDebug)
@@ -230,7 +175,7 @@ if (process.env['OOM_DUMP_MEMORY_SIZE_GB']) {
  console.log('Current Memory Usage', mem)
  // if memory usage is greater this size, dump heap and exit
  if (mem.heapTotal > memorySize * 1024 * 1024 * 1024 && !dumping) {
- const file = path.join(dir, `${Date.now()}.heapsnapshot`)
+ const file = join(dir, `${Date.now()}.heapsnapshot`)
  dumping = true
  await dumpHeap(file)
  // force exit and keep pod running
@@ -243,11 +188,11 @@ if (process.env['OOM_DUMP_MEMORY_SIZE_GB']) {
  async function dumpHeap(file: string): Promise<void> {
  console.log('Heap dumping to', file)
  const session = new Session()
- fs.mkdirSync(path.dirname(file), { recursive: true })
+ fs.mkdirSync(dirname(file), { recursive: true })
  const fd = fs.openSync(file, 'w')
  try {
  session.connect()
- session.on('HeapProfiler.addHeapSnapshotChunk', (m) => {
+ session.on('HeapProfiler.addHeapSnapshotChunk', (m: any) => {
  fs.writeSync(fd, m.params.chunk)
  })

@@ -16,6 +16,7 @@ import { Subject } from 'rxjs'
  import { MessageChannel } from 'node:worker_threads'
  import { ProcessorServiceImpl } from './service.js'
  import { TemplateInstanceState } from './state.js'
+ import { ProcessorRuntimeOptions } from 'processor-runner-program.js'
  ;(BigInt.prototype as any).toJSON = function () {
  return this.toString()
  }
@@ -26,7 +27,7 @@ export class ServiceManager extends ProcessorServiceImpl {

  constructor(
  loader: () => Promise<any>,
- readonly options: any,
+ readonly options: ProcessorRuntimeOptions,
  shutdownHandler?: () => void
  ) {
  super(loader, options, shutdownHandler)
@@ -123,7 +124,7 @@ export class ServiceManager extends ProcessorServiceImpl {

  if (this.enablePartition) {
  const concurrent = parseInt(process.env['PROCESS_CONCURRENCY'] || '0')
- if (this.options.worker < concurrent) {
+ if (this.options.worker! < concurrent) {
  console.warn(
  `When partition is enabled, the worker count must >= 'PROCESS_CONCURRENCY', will set worker count to ${concurrent})`
  )
package/src/service-v3.ts CHANGED
@@ -26,6 +26,7 @@ import { recordRuntimeInfo } from './service.js'
  import { DataBindingContext } from './db-context.js'
  import { TemplateInstanceState } from './state.js'
  import { freezeGlobalConfig } from './global-config.js'
+ import { ProcessorRuntimeOptions } from 'processor-runner-program.js'

  const { process_binding_count, process_binding_time, process_binding_error } = processMetrics

@@ -35,11 +36,11 @@ export class ProcessorServiceImplV3 implements ProcessorV3ServiceImplementation
  private readonly shutdownHandler?: () => void
  private started = false

- constructor(loader: () => Promise<any>, options?: any, shutdownHandler?: () => void) {
+ constructor(loader: () => Promise<any>, options?: ProcessorRuntimeOptions, shutdownHandler?: () => void) {
  this.loader = loader
  this.shutdownHandler = shutdownHandler

- this.enablePartition = options?.['enable-partition'] == true
+ this.enablePartition = options?.enablePartition == true
  }

  async start(request: StartRequest, context: CallContext): Promise<Empty> {
@@ -9,6 +9,7 @@ import { Piscina } from 'piscina'
  import { configureEndpoints } from './endpoints.js'
  import { setupLogger } from './logger.js'
  import { Subject } from 'rxjs'
+ import { ProcessorRuntimeOptions } from 'processor-runner-program.js'

  let started = false

@@ -34,7 +35,7 @@ process

  let service: ProcessorServiceImpl | undefined

- const loader = async (options: any) => {
+ const loader = async (options: ProcessorRuntimeOptions) => {
  if (options.target) {
  const m = await import(options.target)
  console.debug('Module loaded, path:', options.target, 'module:', m)
@@ -44,7 +45,7 @@ const loader = async (options: any) => {

  const emptyCallContext = <CallContext>{}

- async function start(request: StartRequest, options: any): Promise<Empty> {
+ async function start(request: StartRequest, options: ProcessorRuntimeOptions): Promise<Empty> {
  if (started) {
  return {}
  }
package/src/service.ts CHANGED
@@ -36,6 +36,7 @@ import { Provider } from 'ethers'
  import { decodeMulticallResult, encodeMulticallData, getMulticallAddress, Multicall3Call } from './multicall.js'

  import { processMetrics } from './metrics.js'
+ import { ProcessorRuntimeOptions } from 'processor-runner-program.js'

  const { process_binding_count, process_binding_time, process_binding_error } = processMetrics

@@ -58,7 +59,7 @@ export class ProcessorServiceImpl implements ProcessorServiceImplementation {
  private preparedData: PreparedData | undefined
  readonly enablePartition: boolean

- constructor(loader: () => Promise<any>, options?: any, shutdownHandler?: () => void) {
+ constructor(loader: () => Promise<any>, options?: ProcessorRuntimeOptions, shutdownHandler?: () => void) {
  this.loader = loader
  this.shutdownHandler = shutdownHandler

@@ -66,7 +67,7 @@ export class ProcessorServiceImpl implements ProcessorServiceImplementation {
  ? process.env['ENABLE_PREPROCESS'].toLowerCase() == 'true'
  : false

- this.enablePartition = options?.['enable-partition'] == true
+ this.enablePartition = options?.enablePartition == true
  }

  async getConfig(request: ProcessConfigRequest, context: CallContext): Promise<ProcessConfigResponse> {
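Throughout this release the kebab-case lookup options?.['enable-partition'] becomes the property access options?.enablePartition, because Commander's opts() exposes flags under camelCase keys and the new ProcessorRuntimeOptions type only declares those keys. An illustrative sketch (not from the package) of that mapping:

import { Command } from '@commander-js/extra-typings'

// '--enable-partition' surfaces as the camelCase key `enablePartition` on opts();
// there is no 'enable-partition' property, which is why the bracket access changed.
const demo = new Command()
  .option('--enable-partition', 'Enable binding data partition', false)
  .parse(['node', 'demo', '--enable-partition'])

console.log(demo.opts().enablePartition) // prints: true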