@sentio/runtime 2.59.0-rc.41 → 2.59.0-rc.42

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -18,7 +18,7 @@ import {
 // src/service-worker.ts
 var import_nice_grpc = __toESM(require_lib(), 1);
 var import_nice_grpc_error_details = __toESM(require_lib2(), 1);
-import { BroadcastChannel, threadId } from "worker_threads";
+import { threadId } from "worker_threads";
 import { Piscina } from "piscina";
 var import_rxjs = __toESM(require_cjs(), 1);
 var started = false;
@@ -43,10 +43,6 @@ var loader = async (options) => {
     return m;
   }
 };
-var configureChannel = new BroadcastChannel("configure_channel");
-configureChannel.onmessage = (request) => {
-  service?.getConfig(request, emptyCallContext);
-};
 var emptyCallContext = {};
 async function start(request, options) {
   if (started) {
@@ -96,20 +92,37 @@ async function service_worker_default({
       ]
     );
   }
-  await new Promise((resolve) => {
+  const timeout = options["worker-timeout"] || 0;
+  const enablePartition = options["enable-partition"] || false;
+  await new Promise((resolve, reject) => {
     const subject = new import_rxjs.Subject();
+    let timeoutId = void 0;
     subject.subscribe((resp) => {
+      console.debug("Worker", threadId, "send response:", resp.result ? "result" : "dbResult");
       workerPort.postMessage(resp);
       if (resp.result) {
+        if (timeoutId) clearTimeout(timeoutId);
         resolve();
         workerPort.close();
       }
     });
     workerPort.on("message", (msg) => {
       const request = msg;
+      console.debug("Worker", threadId, "received request:", request.start ? "start" : "dbResult");
       service?.handleRequest(request, firstRequest.binding, subject);
+      if (enablePartition && request.start && timeout > 0) {
+        timeoutId = setTimeout(async () => {
+          reject(new import_nice_grpc_error_details.RichServerError(import_nice_grpc.Status.DEADLINE_EXCEEDED, "Worker timeout exceeded"));
+        }, timeout);
+      }
     });
+    console.debug("Worker", threadId, "handle request: binding");
     service?.handleRequest(firstRequest, firstRequest.binding, subject);
+    if (!enablePartition && timeout > 0) {
+      timeoutId = setTimeout(() => {
+        reject(new import_nice_grpc_error_details.RichServerError(import_nice_grpc.Status.DEADLINE_EXCEEDED, "Worker timeout exceeded"));
+      }, timeout);
+    }
   });
 }
 import("node:process").then((p) => p.stdout.write(""));
@@ -1 +1 @@
- {"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import {\n DeepPartial,\n Empty,\n ProcessConfigRequest,\n ProcessStreamRequest,\n ProcessStreamResponse,\n StartRequest\n} from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { ProcessorServiceImpl } from './service.js'\nimport { BroadcastChannel, MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { Subject } from 'rxjs'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n .on('exit', () => {\n console.info('Worker thread exiting, threadId:', threadId)\n })\n\nlet service: ProcessorServiceImpl | undefined\n\nconst loader = async (options: any) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst configureChannel = new BroadcastChannel('configure_channel')\nconfigureChannel.onmessage = (request: ProcessConfigRequest) => {\n service?.getConfig(request, emptyCallContext)\n}\n\nconst emptyCallContext = <CallContext>{}\n\nasync function start(request: StartRequest, options: any): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n service = new ProcessorServiceImpl(() => loader(options), options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await service.start(request, emptyCallContext)\n started = true\n return {}\n}\n\nexport default async function ({\n processId,\n request: firstRequest,\n workerPort\n}: {\n processId: number\n request: ProcessStreamRequest\n workerPort: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? 
true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker', threadId, ' started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await service?.getConfig(configRequest, emptyCallContext)\n console.debug('worker', threadId, ' configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n console.error('Unhandled exception/rejection in previous request:', err)\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n\n await new Promise<void>((resolve) => {\n const subject = new Subject<DeepPartial<ProcessStreamResponse>>()\n subject.subscribe((resp: ProcessStreamResponse) => {\n workerPort.postMessage(resp)\n // receive the response from the processor , close and resolve the promise\n if (resp.result) {\n resolve()\n workerPort.close()\n }\n })\n workerPort.on('message', (msg: ProcessStreamRequest) => {\n const request = msg as ProcessStreamRequest\n service?.handleRequest(request, firstRequest.binding, subject)\n })\n service?.handleRequest(firstRequest, firstRequest.binding, subject)\n })\n}\n;import(\"node:process\").then((p) => p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;AAQA,uBAAiD;AAGjD,qCAA2C;AAE3C,SAAS,kBAA+B,gBAAgB;AACxD,SAAS,eAAe;AAGxB,kBAAwB;AAExB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC,EACA,GAAG,QAAQ,MAAM;AAChB,UAAQ,KAAK,oCAAoC,QAAQ;AAC3D,CAAC;AAEH,IAAI;AAEJ,IAAM,SAAS,OAAO,YAAiB;AACrC,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAmB,IAAI,iBAAiB,mBAAmB;AACjE,iBAAiB,YAAY,CAAC,YAAkC;AAC9D,WAAS,UAAU,SAAS,gBAAgB;AAC9C;AAEA,IAAM,mBAAgC,CAAC;AAEvC,eAAe,MAAM,SAAuB,SAA8B;AACxE,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,cAAU,IAAI,qBAAqB,MAAM,OAAO,OAAO,GAAG,OAAO;AAAA,EACnE,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,QAAQ,MAAM,SAAS,gBAAgB;AAC7C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA,SAAS;AAAA,EACT;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,YAAY,MAAM,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAEnG,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,UAAU,UAAU,gCAAgC,aAAa,mBAAmB,MAAM;AAAA,IAC1G;AAEA,QAAI,eAAe;AACjB,YAAM,SAAS,UAAU,eAAe,gBAAgB;AACxD,cAAQ,MAAM,UAAU,UAAU,aAAa;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,YAAQ,MAAM,sDAAsD,GAAG;AACvE,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,QAAM,IAAI,QAAc,CAAC,YAAY;AACnC,UAAM,UAAU,IAAI,oBAA4C;AAChE,YAAQ,UAAU,CAAC,SAAgC;AACjD,iBAAW,YAAY,IAAI;AAE3B,UAAI,KAAK,QAAQ;AACf,gBAAQ;AACR,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,eAAW,GAAG,WAAW,CAAC,QAA8B;AACtD,YAAM,UAAU;AAChB,eAAS,cAAc,SAAS,aAAa,SAAS,OAAO;AAAA,IAC/D,CAAC;AACD,aAAS,cAAc,cAAc,aAAa,SAAS,OAAO;AAAA,EACpE,CAAC;AACH;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
+ {"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import { DeepPartial, Empty, ProcessStreamRequest, ProcessStreamResponse, StartRequest } from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { ProcessorServiceImpl } from './service.js'\nimport { MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { Subject } from 'rxjs'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n .on('exit', () => {\n console.info('Worker thread exiting, threadId:', threadId)\n })\n\nlet service: ProcessorServiceImpl | undefined\n\nconst loader = async (options: any) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst emptyCallContext = <CallContext>{}\n\nasync function start(request: StartRequest, options: any): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n service = new ProcessorServiceImpl(() => loader(options), options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await service.start(request, emptyCallContext)\n started = true\n return {}\n}\n\nexport default async function ({\n processId,\n request: firstRequest,\n workerPort\n}: {\n processId: number\n request: ProcessStreamRequest\n workerPort: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? 
true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker', threadId, ' started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await service?.getConfig(configRequest, emptyCallContext)\n console.debug('worker', threadId, ' configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n console.error('Unhandled exception/rejection in previous request:', err)\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n const timeout = options['worker-timeout'] || 0\n const enablePartition = options['enable-partition'] || false\n await new Promise<void>((resolve, reject) => {\n const subject = new Subject<DeepPartial<ProcessStreamResponse>>()\n let timeoutId: NodeJS.Timeout | undefined = undefined\n subject.subscribe((resp: ProcessStreamResponse) => {\n console.debug('Worker', threadId, 'send response:', resp.result ? 'result' : 'dbResult')\n workerPort.postMessage(resp)\n // receive the response from the processor , close and resolve the promise\n if (resp.result) {\n if (timeoutId) clearTimeout(timeoutId)\n resolve()\n workerPort.close()\n }\n })\n workerPort.on('message', (msg: ProcessStreamRequest) => {\n const request = msg as ProcessStreamRequest\n console.debug('Worker', threadId, 'received request:', request.start ? 'start' : 'dbResult')\n service?.handleRequest(request, firstRequest.binding, subject)\n if (enablePartition && request.start && timeout > 0) {\n timeoutId = setTimeout(async () => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n console.debug('Worker', threadId, 'handle request: binding')\n service?.handleRequest(firstRequest, firstRequest.binding, subject)\n if (!enablePartition && timeout > 0) {\n timeoutId = setTimeout(() => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n}\n;import(\"node:process\").then((p) => 
p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;AACA,uBAAiD;AAGjD,qCAA2C;AAE3C,SAAsB,gBAAgB;AACtC,SAAS,eAAe;AAGxB,kBAAwB;AAExB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC,EACA,GAAG,QAAQ,MAAM;AAChB,UAAQ,KAAK,oCAAoC,QAAQ;AAC3D,CAAC;AAEH,IAAI;AAEJ,IAAM,SAAS,OAAO,YAAiB;AACrC,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAgC,CAAC;AAEvC,eAAe,MAAM,SAAuB,SAA8B;AACxE,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,cAAU,IAAI,qBAAqB,MAAM,OAAO,OAAO,GAAG,OAAO;AAAA,EACnE,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,QAAQ,MAAM,SAAS,gBAAgB;AAC7C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA,SAAS;AAAA,EACT;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,YAAY,MAAM,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAEnG,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,UAAU,UAAU,gCAAgC,aAAa,mBAAmB,MAAM;AAAA,IAC1G;AAEA,QAAI,eAAe;AACjB,YAAM,SAAS,UAAU,eAAe,gBAAgB;AACxD,cAAQ,MAAM,UAAU,UAAU,aAAa;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,YAAQ,MAAM,sDAAsD,GAAG;AACvE,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,QAAM,UAAU,QAAQ,gBAAgB,KAAK;AAC7C,QAAM,kBAAkB,QAAQ,kBAAkB,KAAK;AACvD,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,UAAM,UAAU,IAAI,oBAA4C;AAChE,QAAI,YAAwC;AAC5C,YAAQ,UAAU,CAAC,SAAgC;AACjD,cAAQ,MAAM,UAAU,UAAU,kBAAkB,KAAK,SAAS,WAAW,UAAU;AACvF,iBAAW,YAAY,IAAI;AAE3B,UAAI,KAAK,QAAQ;AACf,YAAI,UAAW,cAAa,SAAS;AACrC,gBAAQ;AACR,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,eAAW,GAAG,WAAW,CAAC,QAA8B;AACtD,YAAM,UAAU;AAChB,cAAQ,MAAM,UAAU,UAAU,qBAAqB,QAAQ,QAAQ,UAAU,UAAU;AAC3F,eAAS,cAAc,SAAS,aAAa,SAAS,OAAO;AAC7D,UAAI,mBAAmB,QAAQ,SAAS,UAAU,GAAG;AACnD,oBAAY,WAAW,YAAY;AACjC,iBAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,QACjF,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AACD,YAAQ,MAAM,UAAU,UAAU,yBAAyB;AAC3D,aAAS,cAAc,cAAc,aAAa,SAAS,OAAO;AAClE,QAAI,CAAC,mBAAmB,UAAU,GAAG;AACnC,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,MACjF,GAAG,OAAO;AAAA,IACZ;AAAA,EACF,CAAC;AACH;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sentio/runtime",
-  "version": "2.59.0-rc.41",
+  "version": "2.59.0-rc.42",
   "license": "Apache-2.0",
   "type": "module",
   "exports": {
@@ -115,6 +115,17 @@ export class ServiceManager extends ProcessorServiceImpl {
     if (this.pool) {
       await this.pool.close()
     }
+
+    if (this.enablePartition) {
+      const concurrent = parseInt(process.env['PROCESS_CONCURRENCY'] || '0')
+      if (this.options.worker < concurrent) {
+        console.warn(
+          `When partition is enabled, the worker count must >= 'PROCESS_CONCURRENCY', will set worker count to ${concurrent})`
+        )
+        this.options.worker = concurrent
+      }
+    }
+
     console.info('Initializing worker pool with worker count:', this.options.worker)
     this.pool = new Piscina({
       maxThreads: this.options.worker,
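
Note on the ServiceManager hunk above: when partition is enabled, the configured worker count is now clamped up to at least the PROCESS_CONCURRENCY environment value before the Piscina pool is created. The TypeScript sketch below only illustrates the effect of that guard; the helper name effectiveWorkerCount is hypothetical and is not part of the package.

  // Hypothetical helper, for illustration only: compute the effective Piscina
  // worker count the way the new ServiceManager guard does.
  function effectiveWorkerCount(configuredWorkers: number, enablePartition: boolean): number {
    if (!enablePartition) return configuredWorkers
    // PROCESS_CONCURRENCY defaults to 0 when unset, so the clamp is a no-op in that case.
    const concurrent = parseInt(process.env['PROCESS_CONCURRENCY'] || '0')
    if (configuredWorkers < concurrent) {
      console.warn(`Partition is enabled; raising worker count from ${configuredWorkers} to ${concurrent}`)
      return concurrent
    }
    return configuredWorkers
  }

  // Example: with PROCESS_CONCURRENCY=8 and 4 configured workers, the pool would get 8 threads.
  const workers = effectiveWorkerCount(4, true)
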
@@ -1,17 +1,10 @@
-import {
-  DeepPartial,
-  Empty,
-  ProcessConfigRequest,
-  ProcessStreamRequest,
-  ProcessStreamResponse,
-  StartRequest
-} from '@sentio/protos'
+import { DeepPartial, Empty, ProcessStreamRequest, ProcessStreamResponse, StartRequest } from '@sentio/protos'
 import { CallContext, ServerError, Status } from 'nice-grpc'
 import { errorString } from './utils.js'
 import { freezeGlobalConfig } from './global-config.js'
 import { DebugInfo, RichServerError } from 'nice-grpc-error-details'
 import { ProcessorServiceImpl } from './service.js'
-import { BroadcastChannel, MessagePort, threadId } from 'worker_threads'
+import { MessagePort, threadId } from 'worker_threads'
 import { Piscina } from 'piscina'
 import { configureEndpoints } from './endpoints.js'
 import { setupLogger } from './logger.js'
@@ -49,11 +42,6 @@ const loader = async (options: any) => {
   }
 }
 
-const configureChannel = new BroadcastChannel('configure_channel')
-configureChannel.onmessage = (request: ProcessConfigRequest) => {
-  service?.getConfig(request, emptyCallContext)
-}
-
 const emptyCallContext = <CallContext>{}
 
 async function start(request: StartRequest, options: any): Promise<Empty> {
@@ -115,21 +103,37 @@ export default async function ({
       ]
     )
   }
-
-  await new Promise<void>((resolve) => {
+  const timeout = options['worker-timeout'] || 0
+  const enablePartition = options['enable-partition'] || false
+  await new Promise<void>((resolve, reject) => {
     const subject = new Subject<DeepPartial<ProcessStreamResponse>>()
+    let timeoutId: NodeJS.Timeout | undefined = undefined
     subject.subscribe((resp: ProcessStreamResponse) => {
+      console.debug('Worker', threadId, 'send response:', resp.result ? 'result' : 'dbResult')
       workerPort.postMessage(resp)
       // receive the response from the processor , close and resolve the promise
       if (resp.result) {
+        if (timeoutId) clearTimeout(timeoutId)
         resolve()
         workerPort.close()
       }
     })
     workerPort.on('message', (msg: ProcessStreamRequest) => {
       const request = msg as ProcessStreamRequest
+      console.debug('Worker', threadId, 'received request:', request.start ? 'start' : 'dbResult')
       service?.handleRequest(request, firstRequest.binding, subject)
+      if (enablePartition && request.start && timeout > 0) {
+        timeoutId = setTimeout(async () => {
+          reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))
+        }, timeout)
+      }
     })
+    console.debug('Worker', threadId, 'handle request: binding')
     service?.handleRequest(firstRequest, firstRequest.binding, subject)
+    if (!enablePartition && timeout > 0) {
+      timeoutId = setTimeout(() => {
+        reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))
+      }, timeout)
+    }
   })
 }
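
Note on the hunk above: the per-request promise now carries a deadline. When enable-partition is off, the timer starts as soon as the first binding is dispatched; when it is on, the timer starts only once a start request arrives over the worker port. In both cases a result response clears the timer, and expiry rejects with DEADLINE_EXCEEDED. The TypeScript sketch below shows the same clear-on-result / reject-on-expiry pattern in isolation; withDeadline is a hypothetical helper, not the package's API, and it uses a plain Error where the worker uses RichServerError.

  import { setTimeout as delay } from 'node:timers/promises'

  // Hypothetical helper, for illustration only: race a unit of work against a
  // configurable deadline, clearing the timer once a result arrives.
  async function withDeadline<T>(work: () => Promise<T>, timeoutMs: number): Promise<T> {
    return new Promise<T>((resolve, reject) => {
      const timer = timeoutMs > 0 ? setTimeout(() => reject(new Error('Worker timeout exceeded')), timeoutMs) : undefined
      work().then(
        (value) => {
          if (timer) clearTimeout(timer) // a result clears the deadline, as resp.result does above
          resolve(value)
        },
        (err) => {
          if (timer) clearTimeout(timer)
          reject(err)
        }
      )
    })
  }

  // Example: resolves if the work finishes within 5 seconds, otherwise rejects.
  withDeadline(() => delay(100).then(() => 'done'), 5000).then(console.log, console.error)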