@sentio/runtime 2.62.0-rc.5 → 2.62.0-rc.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -10,7 +10,7 @@ import {
  require_cjs,
  require_lib3 as require_lib,
  require_lib4 as require_lib2
- } from "./chunk-RPV67F56.js";
+ } from "./chunk-YBKSM3GO.js";
  import "./chunk-I5YHR3CE.js";
  import "./chunk-W3VN25ER.js";
  import {
@@ -68,7 +68,7 @@ async function service_worker_default({
  const { startRequest, configRequest, options } = Piscina.workerData;
  if (!started) {
  const logLevel = process.env["LOG_LEVEL"]?.toUpperCase();
- setupLogger(options["log-format"] === "json", logLevel === "debug" ? true : options.debug, threadId);
+ setupLogger(options.logFormat === "json", logLevel === "debug" ? true : options.debug, threadId);
  configureEndpoints(options);
  if (startRequest) {
  await start(startRequest, options);
@@ -94,8 +94,8 @@ async function service_worker_default({
  ]
  );
  }
- const timeout = (options["worker-timeout"] || 0) * 1e3;
- const enablePartition = options["enable-partition"] || false;
+ const timeout = (options.workerTimeout || 0) * 1e3;
+ const enablePartition = options.enablePartition || false;
  await new Promise((resolve, reject) => {
  const subject = new import_rxjs.Subject();
  let timeoutId = void 0;
@@ -1 +1 @@
- {"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import { DeepPartial, Empty, ProcessStreamRequest, ProcessStreamResponse, StartRequest } from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { ProcessorServiceImpl } from './service.js'\nimport { MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { Subject } from 'rxjs'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n .on('exit', () => {\n console.info('Worker thread exiting, threadId:', threadId)\n })\n\nlet service: ProcessorServiceImpl | undefined\n\nconst loader = async (options: any) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst emptyCallContext = <CallContext>{}\n\nasync function start(request: StartRequest, options: any): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n service = new ProcessorServiceImpl(() => loader(options), options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await service.start(request, emptyCallContext)\n started = true\n return {}\n}\n\nexport default async function ({\n processId,\n request: firstRequest,\n workerPort\n}: {\n processId: number\n request: ProcessStreamRequest\n workerPort: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? 
true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker', threadId, ' started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await service?.getConfig(configRequest, emptyCallContext)\n console.debug('worker', threadId, ' configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n console.error('Unhandled exception/rejection in previous request:', err)\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n const timeout = (options['worker-timeout'] || 0) * 1000 // convert to milliseconds\n const enablePartition = options['enable-partition'] || false\n await new Promise<void>((resolve, reject) => {\n const subject = new Subject<DeepPartial<ProcessStreamResponse>>()\n let timeoutId: NodeJS.Timeout | undefined = undefined\n subject.subscribe((resp: ProcessStreamResponse) => {\n console.debug('Worker', threadId, 'send response:', resp.result ? 'result' : 'dbResult')\n workerPort.postMessage(resp)\n // receive the response from the processor , close and resolve the promise\n if (resp.result) {\n if (timeoutId) clearTimeout(timeoutId)\n resolve()\n workerPort.close()\n }\n })\n workerPort.on('message', (msg: ProcessStreamRequest) => {\n const request = msg as ProcessStreamRequest\n console.debug('Worker', threadId, 'received request:', request.start ? 'start' : 'dbResult')\n service?.handleRequest(request, firstRequest.binding, subject)\n if (enablePartition && request.start && timeout > 0) {\n timeoutId = setTimeout(async () => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n console.debug('Worker', threadId, 'handle request: binding')\n service?.handleRequest(firstRequest, firstRequest.binding, subject)\n if (!enablePartition && timeout > 0) {\n timeoutId = setTimeout(() => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n}\n;import(\"node:process\").then((p) => 
p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;;;AACA,uBAAiD;AAGjD,qCAA2C;AAE3C,SAAsB,gBAAgB;AACtC,SAAS,eAAe;AAGxB,kBAAwB;AAExB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC,EACA,GAAG,QAAQ,MAAM;AAChB,UAAQ,KAAK,oCAAoC,QAAQ;AAC3D,CAAC;AAEH,IAAI;AAEJ,IAAM,SAAS,OAAO,YAAiB;AACrC,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAgC,CAAC;AAEvC,eAAe,MAAM,SAAuB,SAA8B;AACxE,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,cAAU,IAAI,qBAAqB,MAAM,OAAO,OAAO,GAAG,OAAO;AAAA,EACnE,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,QAAQ,MAAM,SAAS,gBAAgB;AAC7C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA,SAAS;AAAA,EACT;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,YAAY,MAAM,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAEnG,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,UAAU,UAAU,gCAAgC,aAAa,mBAAmB,MAAM;AAAA,IAC1G;AAEA,QAAI,eAAe;AACjB,YAAM,SAAS,UAAU,eAAe,gBAAgB;AACxD,cAAQ,MAAM,UAAU,UAAU,aAAa;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,YAAQ,MAAM,sDAAsD,GAAG;AACvE,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,QAAM,WAAW,QAAQ,gBAAgB,KAAK,KAAK;AACnD,QAAM,kBAAkB,QAAQ,kBAAkB,KAAK;AACvD,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,UAAM,UAAU,IAAI,oBAA4C;AAChE,QAAI,YAAwC;AAC5C,YAAQ,UAAU,CAAC,SAAgC;AACjD,cAAQ,MAAM,UAAU,UAAU,kBAAkB,KAAK,SAAS,WAAW,UAAU;AACvF,iBAAW,YAAY,IAAI;AAE3B,UAAI,KAAK,QAAQ;AACf,YAAI,UAAW,cAAa,SAAS;AACrC,gBAAQ;AACR,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,eAAW,GAAG,WAAW,CAAC,QAA8B;AACtD,YAAM,UAAU;AAChB,cAAQ,MAAM,UAAU,UAAU,qBAAqB,QAAQ,QAAQ,UAAU,UAAU;AAC3F,eAAS,cAAc,SAAS,aAAa,SAAS,OAAO;AAC7D,UAAI,mBAAmB,QAAQ,SAAS,UAAU,GAAG;AACnD,oBAAY,WAAW,YAAY;AACjC,iBAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,QACjF,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AACD,YAAQ,MAAM,UAAU,UAAU,yBAAyB;AAC3D,aAAS,cAAc,cAAc,aAAa,SAAS,OAAO;AAClE,QAAI,CAAC,mBAAmB,UAAU,GAAG;AACnC,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,MACjF,GAAG,OAAO;AAAA,IACZ;AAAA,EACF,CAAC;AACH;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
+ {"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import { DeepPartial, Empty, ProcessStreamRequest, ProcessStreamResponse, StartRequest } from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { ProcessorServiceImpl } from './service.js'\nimport { MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { Subject } from 'rxjs'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n .on('exit', () => {\n console.info('Worker thread exiting, threadId:', threadId)\n })\n\nlet service: ProcessorServiceImpl | undefined\n\nconst loader = async (options: any) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst emptyCallContext = <CallContext>{}\n\nasync function start(request: StartRequest, options: any): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n service = new ProcessorServiceImpl(() => loader(options), options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await service.start(request, emptyCallContext)\n started = true\n return {}\n}\n\nexport default async function ({\n processId,\n request: firstRequest,\n workerPort\n}: {\n processId: number\n request: ProcessStreamRequest\n workerPort: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options.logFormat === 'json', logLevel === 'debug' ? 
true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker', threadId, ' started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await service?.getConfig(configRequest, emptyCallContext)\n console.debug('worker', threadId, ' configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n console.error('Unhandled exception/rejection in previous request:', err)\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n const timeout = (options.workerTimeout || 0) * 1000 // convert to milliseconds\n const enablePartition = options.enablePartition || false\n await new Promise<void>((resolve, reject) => {\n const subject = new Subject<DeepPartial<ProcessStreamResponse>>()\n let timeoutId: NodeJS.Timeout | undefined = undefined\n subject.subscribe((resp: ProcessStreamResponse) => {\n console.debug('Worker', threadId, 'send response:', resp.result ? 'result' : 'dbResult')\n workerPort.postMessage(resp)\n // receive the response from the processor , close and resolve the promise\n if (resp.result) {\n if (timeoutId) clearTimeout(timeoutId)\n resolve()\n workerPort.close()\n }\n })\n workerPort.on('message', (msg: ProcessStreamRequest) => {\n const request = msg as ProcessStreamRequest\n console.debug('Worker', threadId, 'received request:', request.start ? 'start' : 'dbResult')\n service?.handleRequest(request, firstRequest.binding, subject)\n if (enablePartition && request.start && timeout > 0) {\n timeoutId = setTimeout(async () => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n console.debug('Worker', threadId, 'handle request: binding')\n service?.handleRequest(firstRequest, firstRequest.binding, subject)\n if (!enablePartition && timeout > 0) {\n timeoutId = setTimeout(() => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n}\n;import(\"node:process\").then((p) => 
p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;;;AACA,uBAAiD;AAGjD,qCAA2C;AAE3C,SAAsB,gBAAgB;AACtC,SAAS,eAAe;AAGxB,kBAAwB;AAExB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC,EACA,GAAG,QAAQ,MAAM;AAChB,UAAQ,KAAK,oCAAoC,QAAQ;AAC3D,CAAC;AAEH,IAAI;AAEJ,IAAM,SAAS,OAAO,YAAiB;AACrC,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAgC,CAAC;AAEvC,eAAe,MAAM,SAAuB,SAA8B;AACxE,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,cAAU,IAAI,qBAAqB,MAAM,OAAO,OAAO,GAAG,OAAO;AAAA,EACnE,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,QAAQ,MAAM,SAAS,gBAAgB;AAC7C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA,SAAS;AAAA,EACT;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,cAAc,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAE/F,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,UAAU,UAAU,gCAAgC,aAAa,mBAAmB,MAAM;AAAA,IAC1G;AAEA,QAAI,eAAe;AACjB,YAAM,SAAS,UAAU,eAAe,gBAAgB;AACxD,cAAQ,MAAM,UAAU,UAAU,aAAa;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,YAAQ,MAAM,sDAAsD,GAAG;AACvE,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,QAAM,WAAW,QAAQ,iBAAiB,KAAK;AAC/C,QAAM,kBAAkB,QAAQ,mBAAmB;AACnD,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,UAAM,UAAU,IAAI,oBAA4C;AAChE,QAAI,YAAwC;AAC5C,YAAQ,UAAU,CAAC,SAAgC;AACjD,cAAQ,MAAM,UAAU,UAAU,kBAAkB,KAAK,SAAS,WAAW,UAAU;AACvF,iBAAW,YAAY,IAAI;AAE3B,UAAI,KAAK,QAAQ;AACf,YAAI,UAAW,cAAa,SAAS;AACrC,gBAAQ;AACR,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,eAAW,GAAG,WAAW,CAAC,QAA8B;AACtD,YAAM,UAAU;AAChB,cAAQ,MAAM,UAAU,UAAU,qBAAqB,QAAQ,QAAQ,UAAU,UAAU;AAC3F,eAAS,cAAc,SAAS,aAAa,SAAS,OAAO;AAC7D,UAAI,mBAAmB,QAAQ,SAAS,UAAU,GAAG;AACnD,oBAAY,WAAW,YAAY;AACjC,iBAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,QACjF,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AACD,YAAQ,MAAM,UAAU,UAAU,yBAAyB;AAC3D,aAAS,cAAc,cAAc,aAAa,SAAS,OAAO;AAClE,QAAI,CAAC,mBAAmB,UAAU,GAAG;AACnC,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,MACjF,GAAG,OAAO;AAAA,IACZ;AAAA,EACF,CAAC;AACH;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@sentio/runtime",
- "version": "2.62.0-rc.5",
+ "version": "2.62.0-rc.7",
  "license": "Apache-2.0",
  "type": "module",
  "exports": {
package/src/endpoints.ts CHANGED
@@ -15,7 +15,7 @@ export class Endpoints {
  }

  export function configureEndpoints(options: any) {
- const fullPath = path.resolve(options['chains-config'])
+ const fullPath = path.resolve(options.chainsConfig)
  const chainsConfig = fs.readJsonSync(fullPath)

  const concurrencyOverride = process.env['OVERRIDE_CONCURRENCY']
@@ -26,9 +26,9 @@ export function configureEndpoints(options: any) {
  : undefined

  Endpoints.INSTANCE.concurrency = concurrencyOverride ?? options.concurrency
- Endpoints.INSTANCE.batchCount = batchCountOverride ?? options['batch-count']
- Endpoints.INSTANCE.chainQueryAPI = options['chainquery-server']
- Endpoints.INSTANCE.priceFeedAPI = options['pricefeed-server']
+ Endpoints.INSTANCE.batchCount = batchCountOverride ?? options.batchCount
+ Endpoints.INSTANCE.chainQueryAPI = options.chainqueryServer
+ Endpoints.INSTANCE.priceFeedAPI = options.pricefeedServer

  for (const [id, config] of Object.entries(chainsConfig)) {
  const chainConfig = config as ChainConfig
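
Note: the recurring change across this release is that CLI options are now read as camelCase properties (options.chainsConfig, options.batchCount, options.chainqueryServer, ...) rather than kebab-case keys (options['chains-config'], ...). Assuming the options object comes from commander's program.opts(), this matches how commander exposes multi-word flags; a minimal sketch, not part of the package:

    import { Command } from 'commander'

    // commander stores multi-word flags camelCased on opts(),
    // so only the camelCase key is populated
    const program = new Command()
      .option('--chains-config <path>', 'Path to the chains config JSON')
      .option('--batch-count <n>', 'Batch count')

    program.parse(['--chains-config', './chains-config.json'], { from: 'user' })

    const opts = program.opts()
    console.log(opts.chainsConfig)     // './chains-config.json'
    console.log(opts['chains-config']) // undefined
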
@@ -24,6 +24,13 @@ import { ServiceManager } from './service-manager.js'
  import path from 'path'
  import { ProcessorV3Definition } from '@sentio/protos'
  import { ProcessorServiceImplV3 } from './service-v3.js'
+ import { readFileSync } from 'fs'
+ import { fileURLToPath } from 'url'
+ import { dirname, join } from 'path'
+
+ const __filename = fileURLToPath(import.meta.url)
+ const __dirname = dirname(__filename)
+ const packageJson = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf8'))

  // const mergedRegistry = Registry.merge([globalRegistry, niceGrpcRegistry])

@@ -47,9 +54,11 @@ function myParseInt(value: string, dummyPrevious: unknown): number {
  const program = new Command()

  program
+ .allowUnknownOption()
+ .allowExcessArguments()
  .name('processor-runner')
  .description('Sentio Processor Runtime')
- .version('2.0.0-development')
+ .version(packageJson.version)
  .option('--target <path>', 'Path to the processor module to load')
  .option('-p, --port <port>', 'Port to listen on', '4000')
  .option('--concurrency <number>', 'Number of concurrent workers', myParseInt, 4)
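
The imports and packageJson constant added in the previous hunk feed .version(packageJson.version) here, so processor-runner --version reports the published package version instead of the hardcoded '2.0.0-development'. A standalone sketch of the same ESM pattern, assuming commander is the CLI library in use:

    import { readFileSync } from 'fs'
    import { fileURLToPath } from 'url'
    import { dirname, join } from 'path'
    import { Command } from 'commander'

    // derive __dirname in an ES module, then read the package manifest one level up
    const __dirname = dirname(fileURLToPath(import.meta.url))
    const packageJson = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf8'))

    const program = new Command()
      .allowUnknownOption()         // tolerate flags this binary does not know about
      .allowExcessArguments()
      .name('processor-runner')
      .version(packageJson.version) // `--version` now prints the real package version

    program.parse()
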
@@ -89,10 +98,10 @@ program.parse()
  async function startServer(options: any): Promise<void> {
  const logLevel = process.env['LOG_LEVEL']?.toLowerCase()

- setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? true : options.debug)
+ setupLogger(options.logFormat === 'json', logLevel === 'debug' ? true : options.debug)
  console.debug('Starting with', options.target)

- await setupOTLP(options['otlp-debug'])
+ await setupOTLP(options.otlpDebug)

  Error.stackTraceLimit = 20

@@ -107,7 +116,7 @@ async function startServer(options: any): Promise<void> {
  console.debug('Module loaded', m)
  return m
  }
- if (options['start-action-server']) {
+ if (options.startActionServer) {
  server = new ActionServer(loader)
  server.listen(options.port)
  } else {
@@ -73,7 +73,7 @@ export default async function ({
  const { startRequest, configRequest, options } = Piscina.workerData
  if (!started) {
  const logLevel = process.env['LOG_LEVEL']?.toUpperCase()
- setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? true : options.debug, threadId)
+ setupLogger(options.logFormat === 'json', logLevel === 'debug' ? true : options.debug, threadId)

  configureEndpoints(options)

@@ -103,8 +103,8 @@ export default async function ({
  ]
  )
  }
- const timeout = (options['worker-timeout'] || 0) * 1000 // convert to milliseconds
- const enablePartition = options['enable-partition'] || false
+ const timeout = (options.workerTimeout || 0) * 1000 // convert to milliseconds
+ const enablePartition = options.enablePartition || false
  await new Promise<void>((resolve, reject) => {
  const subject = new Subject<DeepPartial<ProcessStreamResponse>>()
  let timeoutId: NodeJS.Timeout | undefined = undefined