@sentio/runtime 2.57.12 → 2.57.13-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/chunk-6XHWJ2VS.js +92 -0
- package/lib/chunk-6XHWJ2VS.js.map +1 -0
- package/lib/chunk-BPGFX5S5.js +10975 -0
- package/lib/chunk-BPGFX5S5.js.map +1 -0
- package/lib/chunk-NLEBXJPP.js +59726 -0
- package/lib/chunk-NLEBXJPP.js.map +1 -0
- package/lib/chunk-ZUTD563J.js +23006 -0
- package/lib/chunk-ZUTD563J.js.map +1 -0
- package/lib/index.d.ts +31 -9
- package/lib/index.js +70 -1
- package/lib/index.js.map +1 -1
- package/lib/processor-runner.d.ts +33 -0
- package/lib/processor-runner.js +26559 -62
- package/lib/processor-runner.js.map +1 -1
- package/lib/service-worker.d.ts +11 -0
- package/lib/service-worker.js +130 -0
- package/lib/service-worker.js.map +1 -0
- package/lib/test-processor.test.d.ts +2 -0
- package/lib/test-processor.test.js.map +1 -0
- package/package.json +1 -1
- package/src/db-context.ts +39 -18
- package/src/endpoints.ts +35 -0
- package/src/logger.ts +3 -2
- package/src/plugin.ts +3 -3
- package/src/processor-runner.ts +15 -36
- package/src/service-manager.ts +232 -0
- package/src/service-worker.ts +150 -0
- package/src/service.ts +2 -2
- package/src/state.ts +19 -0
- package/src/tsup.config.ts +3 -2
- package/lib/chunk-VDRKULG2.js +0 -131
- package/lib/chunk-VDRKULG2.js.map +0 -1
package/lib/service-worker.d.ts ADDED
@@ -0,0 +1,11 @@
+import * as _sentio_protos from '@sentio/protos';
+import { DataBinding } from '@sentio/protos';
+import { MessagePort } from 'worker_threads';
+
+declare function export_default({ request, processId, workerPort }: {
+    request: DataBinding;
+    processId: number;
+    workerPort?: MessagePort;
+}): Promise<_sentio_protos.ProcessResult>;
+
+export { export_default as default };
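
The new service-worker.d.ts above declares the worker entry point as a single default export. Restated as a standalone type alias for orientation (the alias name below is ours, not part of the package):

    import type * as protos from '@sentio/protos'
    import type { MessagePort } from 'worker_threads'

    // One Piscina task handler per DataBinding; resolves with the ProcessResult
    // produced by PluginManager.INSTANCE.processBinding inside the worker thread.
    type ServiceWorkerTask = (args: {
      request: protos.DataBinding
      processId: number
      workerPort?: MessagePort
    }) => Promise<protos.ProcessResult>
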
package/lib/service-worker.js ADDED
@@ -0,0 +1,130 @@
+import { createRequire as createRequireShim } from 'module'; const require = createRequireShim(import.meta.url);
+import {
+  setupLogger
+} from "./chunk-BPGFX5S5.js";
+import {
+  AbstractStoreContext,
+  configureEndpoints,
+  errorString,
+  freezeGlobalConfig,
+  recordRuntimeInfo,
+  require_lib3 as require_lib,
+  require_lib4 as require_lib2
+} from "./chunk-NLEBXJPP.js";
+import {
+  PluginManager,
+  ProcessConfigResponse,
+  __toESM
+} from "./chunk-ZUTD563J.js";
+
+// src/service-worker.ts
+var import_nice_grpc = __toESM(require_lib(), 1);
+var import_nice_grpc_error_details = __toESM(require_lib2(), 1);
+import { BroadcastChannel, threadId } from "worker_threads";
+import { Piscina } from "piscina";
+var started = false;
+var unhandled;
+process.on("uncaughtException", (err) => {
+  console.error("Uncaught Exception, please checking if await is properly used", err);
+  unhandled = err;
+}).on("unhandledRejection", (reason, p) => {
+  if (reason?.message.startsWith('invalid ENS name (disallowed character: "*"')) {
+    return;
+  }
+  console.error("Unhandled Rejection, please checking if await is properly", reason);
+  unhandled = reason;
+});
+async function getConfig(request, context) {
+  if (!started) {
+    throw new import_nice_grpc.ServerError(import_nice_grpc.Status.UNAVAILABLE, "Service Not started.");
+  }
+  const newConfig = ProcessConfigResponse.fromPartial({});
+  await PluginManager.INSTANCE.configure(newConfig);
+  return newConfig;
+}
+var loader = async (options) => {
+  if (options.target) {
+    const m = await import(options.target);
+    console.debug("Module loaded, path:", options.target, "module:", m);
+    return m;
+  }
+};
+var configureChannel = new BroadcastChannel("configure_channel");
+configureChannel.onmessage = (request) => {
+  getConfig(request);
+};
+async function start(request, options) {
+  if (started) {
+    return {};
+  }
+  freezeGlobalConfig();
+  try {
+    await loader(options);
+  } catch (e) {
+    throw new import_nice_grpc.ServerError(import_nice_grpc.Status.INVALID_ARGUMENT, "Failed to load processor: " + errorString(e));
+  }
+  await PluginManager.INSTANCE.start(request);
+  started = true;
+  return {};
+}
+async function service_worker_default({
+  request,
+  processId,
+  workerPort
+}) {
+  const { startRequest, configRequest, options } = Piscina.workerData;
+  if (!started) {
+    const logLevel = process.env["LOG_LEVEL"]?.toUpperCase();
+    setupLogger(options["log-format"] === "json", logLevel === "debug" ? true : options.debug, threadId);
+    configureEndpoints(options);
+    if (startRequest) {
+      await start(startRequest, options);
+      console.debug("worker started, template instance:", startRequest.templateInstances?.length);
+    }
+    if (configRequest) {
+      await getConfig(configRequest);
+      console.debug("worker configured");
+    }
+  }
+  if (unhandled) {
+    const err = unhandled;
+    unhandled = void 0;
+    throw new import_nice_grpc_error_details.RichServerError(
+      import_nice_grpc.Status.UNAVAILABLE,
+      "Unhandled exception/rejection in previous request: " + errorString(err),
+      [
+        import_nice_grpc_error_details.DebugInfo.fromPartial({
+          detail: err.message,
+          stackEntries: err.stack?.split("\n")
+        })
+      ]
+    );
+  }
+  const result = await PluginManager.INSTANCE.processBinding(
+    request,
+    void 0,
+    workerPort ? new WorkerStoreContext(workerPort, processId) : void 0
+  );
+  recordRuntimeInfo(result, request.handlerType);
+  return result;
+}
+var WorkerStoreContext = class extends AbstractStoreContext {
+  constructor(port, processId) {
+    super(processId);
+    this.port = port;
+    this.port.on("message", (resp) => {
+      this.result(resp);
+    });
+    this.port.on("close", () => {
+      this.close();
+    });
+  }
+  doSend(req) {
+    this.port.postMessage(req);
+  }
+};
+import("node:process").then((p) => p.stdout.write(""));
+export {
+  service_worker_default as default
+};
+//# sourceMappingURL=service-worker.js.map
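
service-worker.js reads { startRequest, configRequest, options } from Piscina.workerData and handles one { request, processId, workerPort } task per call. A host-side sketch of how such a worker could be driven; the pool setup, option values, and the dispatchBinding helper below are illustrative assumptions, not code from this package:

    import { Piscina } from 'piscina'
    import { MessageChannel } from 'node:worker_threads'
    import { fileURLToPath } from 'node:url'
    import type { DataBinding } from '@sentio/protos'

    // Assumed workerData shape, mirroring the destructuring in service-worker.js above.
    const pool = new Piscina({
      filename: fileURLToPath(new URL('./lib/service-worker.js', import.meta.url)),
      workerData: {
        startRequest: undefined,
        configRequest: undefined,
        options: { 'log-format': 'console', debug: false, 'chains-config': 'chains-config.json' }
      }
    })

    async function dispatchBinding(request: DataBinding, processId: number) {
      // One port stays with the host to answer dbRequest messages; the other is
      // transferred into the worker and wrapped by WorkerStoreContext there.
      const { port1, port2 } = new MessageChannel()
      port1.on('message', (dbRequest) => {
        // Serve the request, then reply with a DBResponse via port1.postMessage(...)
      })
      try {
        return await pool.run({ request, processId, workerPort: port2 }, { transferList: [port2] })
      } finally {
        port1.close()
      }
    }
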
package/lib/service-worker.js.map ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import {\n DataBinding,\n DBResponse,\n DeepPartial,\n Empty,\n ProcessConfigRequest,\n ProcessConfigResponse,\n ProcessStreamResponse,\n StartRequest\n} from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { PluginManager } from './plugin.js'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { recordRuntimeInfo } from './service.js'\nimport { BroadcastChannel, MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { AbstractStoreContext } from './db-context.js'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n\nasync function getConfig(request: ProcessConfigRequest, context?: CallContext): Promise<ProcessConfigResponse> {\n if (!started) {\n throw new ServerError(Status.UNAVAILABLE, 'Service Not started.')\n }\n\n const newConfig = ProcessConfigResponse.fromPartial({})\n await PluginManager.INSTANCE.configure(newConfig)\n return newConfig\n}\n\nconst loader = async (options: any) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst configureChannel = new BroadcastChannel('configure_channel')\nconfigureChannel.onmessage = (request: ProcessConfigRequest) => {\n getConfig(request)\n}\n\nasync function start(request: StartRequest, options: any): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n await loader(options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await PluginManager.INSTANCE.start(request)\n started = true\n return {}\n}\n\nexport default async function ({\n request,\n processId,\n workerPort\n}: {\n request: DataBinding\n processId: number\n workerPort?: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await getConfig(configRequest)\n console.debug('worker configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n\n const result = await PluginManager.INSTANCE.processBinding(\n request,\n undefined,\n workerPort ? 
new WorkerStoreContext(workerPort, processId) : undefined\n )\n recordRuntimeInfo(result, request.handlerType)\n return result\n}\n\nclass WorkerStoreContext extends AbstractStoreContext {\n constructor(\n readonly port: MessagePort,\n processId: number\n ) {\n super(processId)\n this.port.on('message', (resp: DBResponse) => {\n this.result(resp)\n })\n this.port.on('close', () => {\n this.close()\n })\n }\n\n doSend(req: DeepPartial<ProcessStreamResponse>): void {\n this.port.postMessage(req)\n }\n}\n;import(\"node:process\").then((p) => p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;;;AAUA,uBAAiD;AAIjD,qCAA2C;AAE3C,SAAS,kBAA+B,gBAAgB;AACxD,SAAS,eAAe;AAKxB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC;AAEH,eAAe,UAAU,SAA+B,SAAuD;AAC7G,MAAI,CAAC,SAAS;AACZ,UAAM,IAAI,6BAAY,wBAAO,aAAa,sBAAsB;AAAA,EAClE;AAEA,QAAM,YAAY,sBAAsB,YAAY,CAAC,CAAC;AACtD,QAAM,cAAc,SAAS,UAAU,SAAS;AAChD,SAAO;AACT;AAEA,IAAM,SAAS,OAAO,YAAiB;AACrC,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAmB,IAAI,iBAAiB,mBAAmB;AACjE,iBAAiB,YAAY,CAAC,YAAkC;AAC9D,YAAU,OAAO;AACnB;AAEA,eAAe,MAAM,SAAuB,SAA8B;AACxE,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,UAAM,OAAO,OAAO;AAAA,EACtB,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,cAAc,SAAS,MAAM,OAAO;AAC1C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA;AAAA,EACA;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,YAAY,MAAM,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAEnG,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,sCAAsC,aAAa,mBAAmB,MAAM;AAAA,IAC5F;AAEA,QAAI,eAAe;AACjB,YAAM,UAAU,aAAa;AAC7B,cAAQ,MAAM,mBAAmB;AAAA,IACnC;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AAEA,QAAM,SAAS,MAAM,cAAc,SAAS;AAAA,IAC1C;AAAA,IACA;AAAA,IACA,aAAa,IAAI,mBAAmB,YAAY,SAAS,IAAI;AAAA,EAC/D;AACA,oBAAkB,QAAQ,QAAQ,WAAW;AAC7C,SAAO;AACT;AAEA,IAAM,qBAAN,cAAiC,qBAAqB;AAAA,EACpD,YACW,MACT,WACA;AACA,UAAM,SAAS;AAHN;AAIT,SAAK,KAAK,GAAG,WAAW,CAAC,SAAqB;AAC5C,WAAK,OAAO,IAAI;AAAA,IAClB,CAAC;AACD,SAAK,KAAK,GAAG,SAAS,MAAM;AAC1B,WAAK,MAAM;AAAA,IACb,CAAC;AAAA,EACH;AAAA,EAEA,OAAO,KAA+C;AACpD,SAAK,KAAK,YAAY,GAAG;AAAA,EAC3B;AACF;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}

package/lib/test-processor.test.js.map ADDED
@@ -0,0 +1 @@
{"version":3,"sources":["../src/test-processor.test.ts"],"sourcesContent":["import { Plugin, PluginManager } from './plugin.js'\nimport { DataBinding, HandlerType, ProcessResult } from './gen/processor/protos/processor.js'\n\nclass TestPlugin extends Plugin {\n async processBinding(request: DataBinding): Promise<ProcessResult> {\n const dbContext = PluginManager.INSTANCE.dbContextLocalStorage.getStore()\n if (dbContext) {\n await dbContext.sendRequest({\n get: {\n entity: 'Test',\n id: '1'\n }\n })\n }\n\n return ProcessResult.fromPartial({\n states: {\n configUpdated: true\n }\n })\n }\n supportedHandlers = [HandlerType.UNKNOWN]\n}\n\nPluginManager.INSTANCE.plugins = []\nPluginManager.INSTANCE.register(new TestPlugin())\n;import(\"node:process\").then((p) => p.stdout.write(\"\"));"],"mappings":";;;;;;;;AAGA,IAAM,aAAN,cAAyB,OAAO;AAAA,EAC9B,MAAM,eAAe,SAA8C;AACjE,UAAM,YAAY,cAAc,SAAS,sBAAsB,SAAS;AACxE,QAAI,WAAW;AACb,YAAM,UAAU,YAAY;AAAA,QAC1B,KAAK;AAAA,UACH,QAAQ;AAAA,UACR,IAAI;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,cAAc,YAAY;AAAA,MAC/B,QAAQ;AAAA,QACN,eAAe;AAAA,MACjB;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EACA,oBAAoB,gBAAoB;AAC1C;AAEA,cAAc,SAAS,UAAU,CAAC;AAClC,cAAc,SAAS,SAAS,IAAI,WAAW,CAAC;AAC/C,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}

package/package.json CHANGED
package/src/db-context.ts CHANGED
@@ -9,6 +9,7 @@ import {
 } from '@sentio/protos'
 import * as process from 'node:process'
 import { dbMetrics } from './metrics.js'
+
 const {
   request_errors,
   unsolved_requests,
@@ -27,19 +28,26 @@ type RequestType = keyof Request
 
 export const timeoutError = new Error('timeout')
 
-export class StoreContext {
+export interface IStoreContext {
+  sendRequest(request: DeepPartial<Request>, timeoutSecs?: number): Promise<DBResponse>
+
+  result(dbResult: DBResponse): void
+
+  error(processId: number, e: any): void
+
+  close(): void
+}
+
+export abstract class AbstractStoreContext implements IStoreContext {
   private static opCounter = 0n
-  private defers = new Map<
+  protected defers = new Map<
     bigint,
     { resolve: (value: any) => void; reject: (reason?: any) => void; requestType?: RequestType }
   >()
   private statsInterval: NodeJS.Timeout | undefined
   private pendings: Promise<unknown>[] = []
 
-  constructor(
-    readonly subject: Subject<DeepPartial<ProcessStreamResponse>>,
-    readonly processId: number
-  ) {}
+  constructor(readonly processId: number) {}
 
   newPromise<T>(opId: bigint, requestType?: RequestType) {
     return new Promise<T>((resolve, reject) => {
@@ -48,6 +56,8 @@ export class StoreContext {
     })
   }
 
+  abstract doSend(resp: DeepPartial<ProcessStreamResponse>): void
+
   sendRequest(request: DeepPartial<Request>, timeoutSecs?: number): Promise<DBResponse> {
     if (STORE_BATCH_IDLE > 0 && STORE_BATCH_SIZE > 1 && request.upsert) {
       // batch upsert if possible
@@ -67,12 +77,11 @@ export class StoreContext {
       promises.push(timeoutPromise)
     }
 
-    this.subject.next({
+    this.doSend({
       dbRequest: {
        ...request,
        opId
-      },
-      processId: this.processId
+      }
    })
 
    send_counts[requestType]?.add(1)
@@ -80,7 +89,7 @@ export class StoreContext {
    if (requestType === 'upsert' && STORE_UPSERT_NO_WAIT) {
      this.pendings.push(promise)
      return Promise.resolve({
-        opId
+        opId
      } as DBResponse)
    }
 
@@ -131,10 +140,7 @@ export class StoreContext {
        error: e?.toString()
      }
    })
-    this.subject.next({
-      result: errorResult,
-      processId
-    })
+    this.doSend({ result: errorResult, processId })
  }
 
  close() {
@@ -188,7 +194,7 @@ export class StoreContext {
        opId,
        request: req,
        promise,
-        timer: timeout
+        timer: timeout
      }
 
      if (STORE_UPSERT_NO_WAIT) {
@@ -208,12 +214,11 @@ export class StoreContext {
    // console.debug('sending batch upsert', opId, 'batch size', request?.entity.length)
    clearTimeout(timer)
    this.upsertBatch = undefined
-    this.subject.next({
+    this.doSend({
      dbRequest: {
        upsert: request,
        opId
-      },
-      processId: this.processId
+      }
    })
    send_counts['upsert']?.add(1)
    batched_request_count.add(1)
@@ -225,3 +230,19 @@ export class StoreContext {
    await Promise.all(this.pendings)
  }
 }
+
+export class StoreContext extends AbstractStoreContext {
+  constructor(
+    readonly subject: Subject<DeepPartial<ProcessStreamResponse>>,
+    processId: number
+  ) {
+    super(processId)
+  }
+
+  doSend(resp: DeepPartial<ProcessStreamResponse>) {
+    this.subject.next({
+      ...resp,
+      processId: this.processId
+    })
+  }
+}
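
The refactor above splits the old StoreContext into an IStoreContext interface plus an AbstractStoreContext base, with the transport isolated behind doSend(). A minimal in-memory implementation, useful as a test double, might look like this; the class is illustrative, and the import path assumes the symbols are re-exported from the package root (otherwise import from the source file):

    import type { DBResponse, DeepPartial, ProcessStreamResponse } from '@sentio/protos'
    import { AbstractStoreContext } from '@sentio/runtime'

    // Answers every dbRequest immediately, exercising the same doSend()/result()
    // round trip used by StoreContext (gRPC stream Subject) and WorkerStoreContext
    // (MessagePort back to the main thread).
    class EchoStoreContext extends AbstractStoreContext {
      doSend(resp: DeepPartial<ProcessStreamResponse>): void {
        const opId = resp.dbRequest?.opId
        if (opId != null) {
          // Resolves the pending promise that sendRequest() registered for this opId.
          this.result({ opId } as unknown as DBResponse)
        }
      }
    }

    // Usage: new EchoStoreContext(1).sendRequest({ get: { entity: 'Test', id: '1' } })
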
package/src/endpoints.ts CHANGED
@@ -1,3 +1,7 @@
+import path from 'path'
+import fs from 'fs-extra'
+import { ChainConfig } from './chain-config.js'
+
 export class Endpoints {
   static INSTANCE: Endpoints = new Endpoints()
 
@@ -9,3 +13,34 @@ export class Endpoints {
 
   batchCount = 1
 }
+
+export function configureEndpoints(options: any) {
+  const fullPath = path.resolve(options['chains-config'])
+  const chainsConfig = fs.readJsonSync(fullPath)
+
+  const concurrencyOverride = process.env['OVERRIDE_CONCURRENCY']
+    ? parseInt(process.env['OVERRIDE_CONCURRENCY'])
+    : undefined
+  const batchCountOverride = process.env['OVERRIDE_BATCH_COUNT']
+    ? parseInt(process.env['OVERRIDE_BATCH_COUNT'])
+    : undefined
+
+  Endpoints.INSTANCE.concurrency = concurrencyOverride ?? options.concurrency
+  Endpoints.INSTANCE.batchCount = batchCountOverride ?? options['batch-count']
+  Endpoints.INSTANCE.chainQueryAPI = options['chainquery-server']
+  Endpoints.INSTANCE.priceFeedAPI = options['pricefeed-server']
+
+  for (const [id, config] of Object.entries(chainsConfig)) {
+    const chainConfig = config as ChainConfig
+    if (chainConfig.ChainServer) {
+      Endpoints.INSTANCE.chainServer.set(id, chainConfig.ChainServer)
+    } else {
+      const http = chainConfig.Https?.[0]
+      if (http) {
+        Endpoints.INSTANCE.chainServer.set(id, http)
+      } else {
+        console.error('not valid config for chain', id)
+      }
+    }
+  }
+}
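
configureEndpoints() now centralizes what processor-runner.ts used to do inline: it reads the JSON file named by options['chains-config'] and fills Endpoints.INSTANCE. A sketch of a config file and call; the file contents, URLs, and the assumption that configureEndpoints/Endpoints are exported from the package root are illustrative:

    import fs from 'fs-extra'
    import { configureEndpoints, Endpoints } from '@sentio/runtime'

    // Keys are chain IDs; values follow the ChainConfig shape read above,
    // where ChainServer takes precedence over the first Https entry.
    fs.writeJsonSync('/tmp/chains-config.json', {
      '1': { ChainServer: 'http://eth-chainserver:8545' },
      '56': { Https: ['https://bsc-rpc.example.com'] }
    })

    configureEndpoints({
      'chains-config': '/tmp/chains-config.json',
      concurrency: 4,
      'batch-count': 1,
      'chainquery-server': '',
      'pricefeed-server': ''
    })

    console.log(Endpoints.INSTANCE.chainServer.get('1')) // http://eth-chainserver:8545
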
package/src/logger.ts CHANGED
@@ -13,7 +13,7 @@ function stringify(obj: any): string {
   })
 }
 
-export function setupLogger(json: boolean, enableDebug: boolean) {
+export function setupLogger(json: boolean, enableDebug: boolean, workerId?: number) {
   const utilFormatter = {
     transform: (info: any) => {
       const stringRes = []
@@ -45,7 +45,8 @@ export function setupLogger(json: boolean, enableDebug: boolean) {
       format.timestamp({ format: 'YYYY-MM-DDTHH:mm:ss.SSSZ' }),
       utilFormatter,
       format.errors({ stack: true }),
-      json ? format.json() : format.simple()
+      json ? format.json() : format.simple(),
+      format.label({ label: workerId ? `worker #{workerId}` : '' })
     ),
     level: enableDebug ? 'debug' : 'info',
     transports: [new transports.Console()]
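
setupLogger() gains an optional workerId so each worker thread can tag its log lines; note that as published the label string is the literal 'worker #{workerId}' (no ${} interpolation). A call adapted from service-worker.js, assuming setupLogger is exported from the package root:

    import { threadId } from 'node:worker_threads'
    import { setupLogger } from '@sentio/runtime'

    // JSON output when --log-format json, debug level when requested,
    // and the worker thread id as the label source.
    const options = { 'log-format': 'json', debug: false }
    setupLogger(options['log-format'] === 'json', options.debug, threadId)
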
package/src/plugin.ts CHANGED
@@ -7,7 +7,7 @@ import {
   ProcessResult,
   StartRequest
 } from '@sentio/protos'
-import { StoreContext } from './db-context.js'
+import { IStoreContext, StoreContext } from './db-context.js'
 import { AsyncLocalStorage } from 'node:async_hooks'
 
 export abstract class Plugin {
@@ -48,7 +48,7 @@ export abstract class Plugin {
 export class PluginManager {
   static INSTANCE = new PluginManager()
 
-  dbContextLocalStorage = new AsyncLocalStorage<StoreContext | undefined>()
+  dbContextLocalStorage = new AsyncLocalStorage<IStoreContext | undefined>()
   plugins: Plugin[] = []
   typesToPlugin = new Map<HandlerType, Plugin>()
 
@@ -93,7 +93,7 @@ export class PluginManager {
   processBinding(
     request: DataBinding,
     preparedData: PreparedData | undefined,
-    dbContext?: StoreContext
+    dbContext?: IStoreContext
   ): Promise<ProcessResult> {
     const plugin = this.typesToPlugin.get(request.handlerType)
     if (!plugin) {
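
dbContextLocalStorage and processBinding() are now typed against IStoreContext, so a plugin sees the same API whether the active context is the gRPC-stream StoreContext or a worker-thread WorkerStoreContext. A plugin-side sketch, adapted from the bundled test-processor.test.js in this release; the entity name and handler type are placeholders:

    import { Plugin, PluginManager } from '@sentio/runtime'
    import { DataBinding, HandlerType, ProcessResult } from '@sentio/protos'

    class ExamplePlugin extends Plugin {
      supportedHandlers = [HandlerType.UNKNOWN]

      async processBinding(request: DataBinding): Promise<ProcessResult> {
        // Whatever IStoreContext implementation is active for this request.
        const dbContext = PluginManager.INSTANCE.dbContextLocalStorage.getStore()
        if (dbContext) {
          await dbContext.sendRequest({ get: { entity: 'Test', id: '1' } })
        }
        return ProcessResult.fromPartial({ states: { configUpdated: true } })
      }
    }

    PluginManager.INSTANCE.register(new ExamplePlugin())
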
package/src/processor-runner.ts CHANGED
@@ -1,6 +1,5 @@
 #!/usr/bin/env node
 
-import path from 'path'
 import fs from 'fs-extra'
 
 import { compressionAlgorithms } from '@grpc/grpc-js'
@@ -15,17 +14,17 @@ import { Session } from 'node:inspector/promises'
 
 import { ProcessorDefinition } from './gen/processor/protos/processor.js'
 import { ProcessorServiceImpl } from './service.js'
-import { Endpoints } from './endpoints.js'
+import { configureEndpoints } from './endpoints.js'
 import { FullProcessorServiceImpl } from './full-service.js'
-import { ChainConfig } from './chain-config.js'
 import { setupLogger } from './logger.js'
 
 import { setupOTLP } from './otlp.js'
 import { ActionServer } from './action-server.js'
+import { ServiceManager } from './service-manager.js'
 
 // const mergedRegistry = Registry.merge([globalRegistry, niceGrpcRegistry])
 
-const optionDefinitions = [
+export const optionDefinitions = [
   { name: 'target', type: String, defaultOption: true },
   { name: 'port', alias: 'p', type: String, defaultValue: '4000' },
   { name: 'concurrency', type: Number, defaultValue: 4 },
@@ -42,12 +41,13 @@ const optionDefinitions = [
   { name: 'log-format', type: String, defaultValue: 'console' },
   { name: 'debug', type: Boolean, defaultValue: false },
   { name: 'otlp-debug', type: Boolean, defaultValue: false },
-  { name: 'start-action-server', type: Boolean, defaultValue: false }
+  { name: 'start-action-server', type: Boolean, defaultValue: false },
+  { name: 'worker', type: Number, defaultValue: process.env['PROCESSOR_WORKER'] ?? 1 }
 ]
 
 const options = commandLineArgs(optionDefinitions, { partial: true })
 
-const logLevel = process.env['LOG_LEVEL']?.toUpperCase()
+const logLevel = process.env['LOG_LEVEL']?.toLowerCase()
 
 setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? true : options.debug)
 console.debug('Starting with', options.target)
@@ -56,39 +56,12 @@ await setupOTLP(options['otlp-debug'])
 
 Error.stackTraceLimit = 20
 
-const fullPath = path.resolve(options['chains-config'])
-const chainsConfig = fs.readJsonSync(fullPath)
-
-const concurrencyOverride = process.env['OVERRIDE_CONCURRENCY']
-  ? parseInt(process.env['OVERRIDE_CONCURRENCY'])
-  : undefined
-const batchCountOverride = process.env['OVERRIDE_BATCH_COUNT']
-  ? parseInt(process.env['OVERRIDE_BATCH_COUNT'])
-  : undefined
-
-Endpoints.INSTANCE.concurrency = concurrencyOverride ?? options.concurrency
-Endpoints.INSTANCE.batchCount = batchCountOverride ?? options['batch-count']
-Endpoints.INSTANCE.chainQueryAPI = options['chainquery-server']
-Endpoints.INSTANCE.priceFeedAPI = options['pricefeed-server']
-
-for (const [id, config] of Object.entries(chainsConfig)) {
-  const chainConfig = config as ChainConfig
-  if (chainConfig.ChainServer) {
-    Endpoints.INSTANCE.chainServer.set(id, chainConfig.ChainServer)
-  } else {
-    const http = chainConfig.Https?.[0]
-    if (http) {
-      Endpoints.INSTANCE.chainServer.set(id, http)
-    } else {
-      console.error('not valid config for chain', id)
-    }
-  }
-}
+configureEndpoints(options)
 
 console.debug('Starting Server', options)
 
 let server: any
-let baseService: ProcessorServiceImpl
+let baseService: ProcessorServiceImpl | ServiceManager
 const loader = async () => {
   const m = await import(options.target)
   console.debug('Module loaded', m)
@@ -106,7 +79,13 @@ if (options['start-action-server']) {
     // .use(prometheusServerMiddleware())
     .use(openTelemetryServerMiddleware())
     .use(errorDetailsServerMiddleware)
-  baseService = new ProcessorServiceImpl(loader, server.shutdown)
+
+  if (options.worker > 1) {
+    baseService = new ServiceManager(options, loader, server.shutdown)
+  } else {
+    baseService = new ProcessorServiceImpl(loader, server.shutdown)
+  }
+
   const service = new FullProcessorServiceImpl(baseService)
 
   server.add(ProcessorDefinition, service)