@sentio/runtime 2.59.0-rc.9 → 2.59.1-rc.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/{chunk-J23VJQSB.js → chunk-4UYFH4YB.js} +4387 -4321
- package/lib/{chunk-J23VJQSB.js.map → chunk-4UYFH4YB.js.map} +1 -1
- package/lib/{chunk-3T4PWGQU.js → chunk-LCS6SRJY.js} +1036 -122
- package/lib/chunk-LCS6SRJY.js.map +1 -0
- package/lib/{chunk-GD7EL7GH.js → chunk-U5UZYRQ4.js} +2 -2
- package/lib/index.d.ts +193 -3
- package/lib/index.js +4 -2
- package/lib/index.js.map +1 -1
- package/lib/processor-runner.js +116 -94
- package/lib/processor-runner.js.map +1 -1
- package/lib/service-worker.d.ts +5 -6
- package/lib/service-worker.js +49 -47
- package/lib/service-worker.js.map +1 -1
- package/lib/test-processor.test.js.map +1 -1
- package/package.json +1 -1
- package/src/db-context.ts +3 -5
- package/src/full-service.ts +59 -13
- package/src/gen/processor/protos/processor.ts +1077 -159
- package/src/gen/service/common/protos/common.ts +17 -0
- package/src/metrics.ts +8 -4
- package/src/plugin.ts +24 -0
- package/src/processor-runner.ts +10 -3
- package/src/service-manager.ts +72 -106
- package/src/service-worker.ts +49 -60
- package/src/service.ts +89 -46
- package/src/utils.ts +20 -4
- package/lib/chunk-3T4PWGQU.js.map +0 -1
- package/lib/{chunk-GD7EL7GH.js.map → chunk-U5UZYRQ4.js.map} +0 -0
package/lib/service-worker.d.ts
CHANGED
@@ -1,11 +1,10 @@
-import
-import { DataBinding } from '@sentio/protos';
+import { ProcessStreamRequest } from '@sentio/protos';
 import { MessagePort } from 'worker_threads';
 
-declare function export_default({
-    request: DataBinding;
+declare function export_default({ processId, request: firstRequest, workerPort }: {
     processId: number;
-
-
+    request: ProcessStreamRequest;
+    workerPort: MessagePort;
+}): Promise<void>;
 
 export { export_default as default };
package/lib/service-worker.js
CHANGED
@@ -1,27 +1,26 @@
 import { createRequire as createRequireShim } from 'module'; const require = createRequireShim(import.meta.url);
 import {
   setupLogger
-} from "./chunk-
+} from "./chunk-U5UZYRQ4.js";
 import {
-
+  ProcessorServiceImpl,
   configureEndpoints,
   errorString,
   freezeGlobalConfig,
-
+  require_cjs,
   require_lib3 as require_lib,
   require_lib4 as require_lib2
-} from "./chunk-
+} from "./chunk-4UYFH4YB.js";
 import {
-  PluginManager,
-  ProcessConfigResponse,
   __toESM
-} from "./chunk-
+} from "./chunk-LCS6SRJY.js";
 
 // src/service-worker.ts
 var import_nice_grpc = __toESM(require_lib(), 1);
 var import_nice_grpc_error_details = __toESM(require_lib2(), 1);
-import {
+import { threadId } from "worker_threads";
 import { Piscina } from "piscina";
+var import_rxjs = __toESM(require_cjs(), 1);
 var started = false;
 var unhandled;
 process.on("uncaughtException", (err) => {
@@ -33,15 +32,10 @@ process.on("uncaughtException", (err) => {
   }
   console.error("Unhandled Rejection, please checking if await is properly", reason);
   unhandled = reason;
+}).on("exit", () => {
+  console.info("Worker thread exiting, threadId:", threadId);
 });
-
-  if (!started) {
-    throw new import_nice_grpc.ServerError(import_nice_grpc.Status.UNAVAILABLE, "Service Not started.");
-  }
-  const newConfig = ProcessConfigResponse.fromPartial({});
-  await PluginManager.INSTANCE.configure(newConfig);
-  return newConfig;
-}
+var service;
 var loader = async (options) => {
   if (options.target) {
     const m = await import(options.target);
@@ -49,27 +43,24 @@ var loader = async (options) => {
     return m;
   }
 };
-var
-configureChannel.onmessage = (request) => {
-  getConfig(request);
-};
+var emptyCallContext = {};
 async function start(request, options) {
   if (started) {
     return {};
   }
   freezeGlobalConfig();
   try {
-
+    service = new ProcessorServiceImpl(() => loader(options), options);
   } catch (e) {
     throw new import_nice_grpc.ServerError(import_nice_grpc.Status.INVALID_ARGUMENT, "Failed to load processor: " + errorString(e));
   }
-  await
+  await service.start(request, emptyCallContext);
   started = true;
   return {};
 }
 async function service_worker_default({
-  request,
   processId,
+  request: firstRequest,
   workerPort
 }) {
   const { startRequest, configRequest, options } = Piscina.workerData;
@@ -79,16 +70,17 @@ async function service_worker_default({
     configureEndpoints(options);
     if (startRequest) {
       await start(startRequest, options);
-      console.debug("worker started, template instance:", startRequest.templateInstances?.length);
+      console.debug("worker", threadId, " started, template instance:", startRequest.templateInstances?.length);
    }
     if (configRequest) {
-      await getConfig(configRequest);
-      console.debug("worker configured");
+      await service?.getConfig(configRequest, emptyCallContext);
+      console.debug("worker", threadId, " configured");
     }
   }
   if (unhandled) {
     const err = unhandled;
     unhandled = void 0;
+    console.error("Unhandled exception/rejection in previous request:", err);
     throw new import_nice_grpc_error_details.RichServerError(
       import_nice_grpc.Status.UNAVAILABLE,
       "Unhandled exception/rejection in previous request: " + errorString(err),
@@ -100,29 +92,39 @@ async function service_worker_default({
       ]
     );
   }
-  const
-
-
-
-
-
-
-
-
-
-
-
-
-      this.result(resp);
+  const timeout = options["worker-timeout"] || 0;
+  const enablePartition = options["enable-partition"] || false;
+  await new Promise((resolve, reject) => {
+    const subject = new import_rxjs.Subject();
+    let timeoutId = void 0;
+    subject.subscribe((resp) => {
+      console.debug("Worker", threadId, "send response:", resp.result ? "result" : "dbResult");
+      workerPort.postMessage(resp);
+      if (resp.result) {
+        if (timeoutId) clearTimeout(timeoutId);
+        resolve();
+        workerPort.close();
+      }
     });
-
-
+    workerPort.on("message", (msg) => {
+      const request = msg;
+      console.debug("Worker", threadId, "received request:", request.start ? "start" : "dbResult");
+      service?.handleRequest(request, firstRequest.binding, subject);
+      if (enablePartition && request.start && timeout > 0) {
+        timeoutId = setTimeout(async () => {
+          reject(new import_nice_grpc_error_details.RichServerError(import_nice_grpc.Status.DEADLINE_EXCEEDED, "Worker timeout exceeded"));
+        }, timeout);
+      }
     });
-
-
-
-
-
+    console.debug("Worker", threadId, "handle request: binding");
+    service?.handleRequest(firstRequest, firstRequest.binding, subject);
+    if (!enablePartition && timeout > 0) {
+      timeoutId = setTimeout(() => {
+        reject(new import_nice_grpc_error_details.RichServerError(import_nice_grpc.Status.DEADLINE_EXCEEDED, "Worker timeout exceeded"));
+      }, timeout);
+    }
+  });
+}
 import("node:process").then((p) => p.stdout.write(""));
 export {
   service_worker_default as default
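The reworked worker no longer returns a single result per task: it streams ProcessStreamResponse messages back over the transferred MessagePort and only settles once a response carrying `result` arrives, optionally bounded by the `worker-timeout` option. Below is a minimal caller-side sketch of that contract, assuming a Piscina pool and a worker_threads MessageChannel; the pool setup, the `dbResult` field name, and the real wiring (which lives in service-manager.ts, changed in this release but not shown here) are assumptions, not part of this diff.

import { MessageChannel } from 'node:worker_threads'
import { Piscina } from 'piscina'

// Assumed pool setup: the worker reads startRequest/configRequest/options via Piscina.workerData.
const pool = new Piscina({
  filename: new URL('./service-worker.js', import.meta.url).href,
  workerData: { startRequest: undefined, configRequest: undefined, options: {} }
})

export async function runBinding(processId: number, firstRequest: any): Promise<void> {
  const { port1, port2 } = new MessageChannel()

  port1.on('message', (resp: any) => {
    // Non-terminal responses are requests from the processor (e.g. db reads);
    // answer them by posting a follow-up ProcessStreamRequest back to the worker.
    if (!resp.result) {
      port1.postMessage({ processId, dbResult: { /* ... */ } })
    }
    // A response that carries `result` is terminal: the worker resolves and closes the port.
  })

  // Transfer port2 so the worker can post responses back; run() resolves when the worker's promise does.
  await pool.run({ processId, request: firstRequest, workerPort: port2 }, { transferList: [port2] })
}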
package/lib/service-worker.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import {
+
{"version":3,"sources":["../src/service-worker.ts"],"sourcesContent":["import { DeepPartial, Empty, ProcessStreamRequest, ProcessStreamResponse, StartRequest } from '@sentio/protos'\nimport { CallContext, ServerError, Status } from 'nice-grpc'\nimport { errorString } from './utils.js'\nimport { freezeGlobalConfig } from './global-config.js'\nimport { DebugInfo, RichServerError } from 'nice-grpc-error-details'\nimport { ProcessorServiceImpl } from './service.js'\nimport { MessagePort, threadId } from 'worker_threads'\nimport { Piscina } from 'piscina'\nimport { configureEndpoints } from './endpoints.js'\nimport { setupLogger } from './logger.js'\nimport { Subject } from 'rxjs'\n\nlet started = false\n\nlet unhandled: Error | undefined\n\nprocess\n .on('uncaughtException', (err) => {\n console.error('Uncaught Exception, please checking if await is properly used', err)\n unhandled = err\n })\n .on('unhandledRejection', (reason, p) => {\n // @ts-ignore ignore invalid ens error\n if (reason?.message.startsWith('invalid ENS name (disallowed character: \"*\"')) {\n return\n }\n console.error('Unhandled Rejection, please checking if await is properly', reason)\n unhandled = reason as Error\n // shutdownServers(1)\n })\n .on('exit', () => {\n console.info('Worker thread exiting, threadId:', threadId)\n })\n\nlet service: ProcessorServiceImpl | undefined\n\nconst loader = async (options: any) => {\n if (options.target) {\n const m = await import(options.target)\n console.debug('Module loaded, path:', options.target, 'module:', m)\n return m\n }\n}\n\nconst emptyCallContext = <CallContext>{}\n\nasync function start(request: StartRequest, options: any): Promise<Empty> {\n if (started) {\n return {}\n }\n freezeGlobalConfig()\n\n try {\n service = new ProcessorServiceImpl(() => loader(options), options)\n } catch (e) {\n throw new ServerError(Status.INVALID_ARGUMENT, 'Failed to load processor: ' + errorString(e))\n }\n\n await service.start(request, emptyCallContext)\n started = true\n return {}\n}\n\nexport default async function ({\n processId,\n request: firstRequest,\n workerPort\n}: {\n processId: number\n request: ProcessStreamRequest\n workerPort: MessagePort\n}) {\n const { startRequest, configRequest, options } = Piscina.workerData\n if (!started) {\n const logLevel = process.env['LOG_LEVEL']?.toUpperCase()\n setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? 
true : options.debug, threadId)\n\n configureEndpoints(options)\n\n if (startRequest) {\n await start(startRequest, options)\n console.debug('worker', threadId, ' started, template instance:', startRequest.templateInstances?.length)\n }\n\n if (configRequest) {\n await service?.getConfig(configRequest, emptyCallContext)\n console.debug('worker', threadId, ' configured')\n }\n }\n\n if (unhandled) {\n const err = unhandled\n unhandled = undefined\n console.error('Unhandled exception/rejection in previous request:', err)\n throw new RichServerError(\n Status.UNAVAILABLE,\n 'Unhandled exception/rejection in previous request: ' + errorString(err),\n [\n DebugInfo.fromPartial({\n detail: err.message,\n stackEntries: err.stack?.split('\\n')\n })\n ]\n )\n }\n const timeout = options['worker-timeout'] || 0\n const enablePartition = options['enable-partition'] || false\n await new Promise<void>((resolve, reject) => {\n const subject = new Subject<DeepPartial<ProcessStreamResponse>>()\n let timeoutId: NodeJS.Timeout | undefined = undefined\n subject.subscribe((resp: ProcessStreamResponse) => {\n console.debug('Worker', threadId, 'send response:', resp.result ? 'result' : 'dbResult')\n workerPort.postMessage(resp)\n // receive the response from the processor , close and resolve the promise\n if (resp.result) {\n if (timeoutId) clearTimeout(timeoutId)\n resolve()\n workerPort.close()\n }\n })\n workerPort.on('message', (msg: ProcessStreamRequest) => {\n const request = msg as ProcessStreamRequest\n console.debug('Worker', threadId, 'received request:', request.start ? 'start' : 'dbResult')\n service?.handleRequest(request, firstRequest.binding, subject)\n if (enablePartition && request.start && timeout > 0) {\n timeoutId = setTimeout(async () => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n console.debug('Worker', threadId, 'handle request: binding')\n service?.handleRequest(firstRequest, firstRequest.binding, subject)\n if (!enablePartition && timeout > 0) {\n timeoutId = setTimeout(() => {\n reject(new RichServerError(Status.DEADLINE_EXCEEDED, 'Worker timeout exceeded'))\n }, timeout)\n }\n })\n}\n;import(\"node:process\").then((p) => 
p.stdout.write(\"\"));"],"mappings":";;;;;;;;;;;;;;;;;;AACA,uBAAiD;AAGjD,qCAA2C;AAE3C,SAAsB,gBAAgB;AACtC,SAAS,eAAe;AAGxB,kBAAwB;AAExB,IAAI,UAAU;AAEd,IAAI;AAEJ,QACG,GAAG,qBAAqB,CAAC,QAAQ;AAChC,UAAQ,MAAM,iEAAiE,GAAG;AAClF,cAAY;AACd,CAAC,EACA,GAAG,sBAAsB,CAAC,QAAQ,MAAM;AAEvC,MAAI,QAAQ,QAAQ,WAAW,6CAA6C,GAAG;AAC7E;AAAA,EACF;AACA,UAAQ,MAAM,6DAA6D,MAAM;AACjF,cAAY;AAEd,CAAC,EACA,GAAG,QAAQ,MAAM;AAChB,UAAQ,KAAK,oCAAoC,QAAQ;AAC3D,CAAC;AAEH,IAAI;AAEJ,IAAM,SAAS,OAAO,YAAiB;AACrC,MAAI,QAAQ,QAAQ;AAClB,UAAM,IAAI,MAAM,OAAO,QAAQ;AAC/B,YAAQ,MAAM,wBAAwB,QAAQ,QAAQ,WAAW,CAAC;AAClE,WAAO;AAAA,EACT;AACF;AAEA,IAAM,mBAAgC,CAAC;AAEvC,eAAe,MAAM,SAAuB,SAA8B;AACxE,MAAI,SAAS;AACX,WAAO,CAAC;AAAA,EACV;AACA,qBAAmB;AAEnB,MAAI;AACF,cAAU,IAAI,qBAAqB,MAAM,OAAO,OAAO,GAAG,OAAO;AAAA,EACnE,SAAS,GAAG;AACV,UAAM,IAAI,6BAAY,wBAAO,kBAAkB,+BAA+B,YAAY,CAAC,CAAC;AAAA,EAC9F;AAEA,QAAM,QAAQ,MAAM,SAAS,gBAAgB;AAC7C,YAAU;AACV,SAAO,CAAC;AACV;AAEA,eAAO,uBAAwB;AAAA,EAC7B;AAAA,EACA,SAAS;AAAA,EACT;AACF,GAIG;AACD,QAAM,EAAE,cAAc,eAAe,QAAQ,IAAI,QAAQ;AACzD,MAAI,CAAC,SAAS;AACZ,UAAM,WAAW,QAAQ,IAAI,WAAW,GAAG,YAAY;AACvD,gBAAY,QAAQ,YAAY,MAAM,QAAQ,aAAa,UAAU,OAAO,QAAQ,OAAO,QAAQ;AAEnG,uBAAmB,OAAO;AAE1B,QAAI,cAAc;AAChB,YAAM,MAAM,cAAc,OAAO;AACjC,cAAQ,MAAM,UAAU,UAAU,gCAAgC,aAAa,mBAAmB,MAAM;AAAA,IAC1G;AAEA,QAAI,eAAe;AACjB,YAAM,SAAS,UAAU,eAAe,gBAAgB;AACxD,cAAQ,MAAM,UAAU,UAAU,aAAa;AAAA,IACjD;AAAA,EACF;AAEA,MAAI,WAAW;AACb,UAAM,MAAM;AACZ,gBAAY;AACZ,YAAQ,MAAM,sDAAsD,GAAG;AACvE,UAAM,IAAI;AAAA,MACR,wBAAO;AAAA,MACP,wDAAwD,YAAY,GAAG;AAAA,MACvE;AAAA,QACE,yCAAU,YAAY;AAAA,UACpB,QAAQ,IAAI;AAAA,UACZ,cAAc,IAAI,OAAO,MAAM,IAAI;AAAA,QACrC,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACA,QAAM,UAAU,QAAQ,gBAAgB,KAAK;AAC7C,QAAM,kBAAkB,QAAQ,kBAAkB,KAAK;AACvD,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,UAAM,UAAU,IAAI,oBAA4C;AAChE,QAAI,YAAwC;AAC5C,YAAQ,UAAU,CAAC,SAAgC;AACjD,cAAQ,MAAM,UAAU,UAAU,kBAAkB,KAAK,SAAS,WAAW,UAAU;AACvF,iBAAW,YAAY,IAAI;AAE3B,UAAI,KAAK,QAAQ;AACf,YAAI,UAAW,cAAa,SAAS;AACrC,gBAAQ;AACR,mBAAW,MAAM;AAAA,MACnB;AAAA,IACF,CAAC;AACD,eAAW,GAAG,WAAW,CAAC,QAA8B;AACtD,YAAM,UAAU;AAChB,cAAQ,MAAM,UAAU,UAAU,qBAAqB,QAAQ,QAAQ,UAAU,UAAU;AAC3F,eAAS,cAAc,SAAS,aAAa,SAAS,OAAO;AAC7D,UAAI,mBAAmB,QAAQ,SAAS,UAAU,GAAG;AACnD,oBAAY,WAAW,YAAY;AACjC,iBAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,QACjF,GAAG,OAAO;AAAA,MACZ;AAAA,IACF,CAAC;AACD,YAAQ,MAAM,UAAU,UAAU,yBAAyB;AAC3D,aAAS,cAAc,cAAc,aAAa,SAAS,OAAO;AAClE,QAAI,CAAC,mBAAmB,UAAU,GAAG;AACnC,kBAAY,WAAW,MAAM;AAC3B,eAAO,IAAI,+CAAgB,wBAAO,mBAAmB,yBAAyB,CAAC;AAAA,MACjF,GAAG,OAAO;AAAA,IACZ;AAAA,EACF,CAAC;AACH;AACC,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
package/lib/test-processor.test.js.map
CHANGED
@@ -1 +1 @@
-
{"version":3,"sources":["../src/test-processor.test.ts"],"sourcesContent":["import { Plugin, PluginManager } from './plugin.js'\nimport { DataBinding, HandlerType, ProcessResult } from './gen/processor/protos/processor.js'\n\nclass TestPlugin extends Plugin {\n async processBinding(request: DataBinding): Promise<ProcessResult> {\n const dbContext = PluginManager.INSTANCE.dbContextLocalStorage.getStore()\n if (dbContext) {\n await dbContext.sendRequest({\n get: {\n entity: 'Test',\n id: '1'\n }\n })\n }\n\n return ProcessResult.fromPartial({\n states: {\n configUpdated: true\n }\n })\n }\n supportedHandlers = [HandlerType.UNKNOWN, HandlerType.ETH_LOG]\n}\n\nPluginManager.INSTANCE.plugins = []\nPluginManager.INSTANCE.register(new TestPlugin())\n;import(\"node:process\").then((p) => p.stdout.write(\"\"));"],"mappings":";;;;;;;;
+
{"version":3,"sources":["../src/test-processor.test.ts"],"sourcesContent":["import { Plugin, PluginManager } from './plugin.js'\nimport { DataBinding, HandlerType, ProcessResult } from './gen/processor/protos/processor.js'\nimport { ProcessStreamResponse_Partitions } from '@sentio/protos'\n\nclass TestPlugin extends Plugin {\n async processBinding(request: DataBinding): Promise<ProcessResult> {\n const dbContext = PluginManager.INSTANCE.dbContextLocalStorage.getStore()\n if (dbContext) {\n await dbContext.sendRequest({\n get: {\n entity: 'Test',\n id: '1'\n }\n })\n }\n\n return ProcessResult.fromPartial({\n states: {\n configUpdated: true\n }\n })\n }\n supportedHandlers = [HandlerType.UNKNOWN, HandlerType.ETH_LOG]\n\n async partition(request: DataBinding): Promise<ProcessStreamResponse_Partitions> {\n return {\n partitions: request.handlerIds.reduce(\n (acc, id) => ({\n ...acc,\n [id]: {\n userValue: 'test'\n }\n }),\n {}\n )\n }\n }\n}\n\nPluginManager.INSTANCE.plugins = []\nPluginManager.INSTANCE.register(new TestPlugin())\n;import(\"node:process\").then((p) => p.stdout.write(\"\"));"],"mappings":";;;;;;;;AAIA,IAAM,aAAN,cAAyB,OAAO;AAAA,EAC9B,MAAM,eAAe,SAA8C;AACjE,UAAM,YAAY,cAAc,SAAS,sBAAsB,SAAS;AACxE,QAAI,WAAW;AACb,YAAM,UAAU,YAAY;AAAA,QAC1B,KAAK;AAAA,UACH,QAAQ;AAAA,UACR,IAAI;AAAA,QACN;AAAA,MACF,CAAC;AAAA,IACH;AAEA,WAAO,cAAc,YAAY;AAAA,MAC/B,QAAQ;AAAA,QACN,eAAe;AAAA,MACjB;AAAA,IACF,CAAC;AAAA,EACH;AAAA,EACA,oBAAoB,iCAAyC;AAAA,EAE7D,MAAM,UAAU,SAAiE;AAC/E,WAAO;AAAA,MACL,YAAY,QAAQ,WAAW;AAAA,QAC7B,CAAC,KAAK,QAAQ;AAAA,UACZ,GAAG;AAAA,UACH,CAAC,EAAE,GAAG;AAAA,YACJ,WAAW;AAAA,UACb;AAAA,QACF;AAAA,QACA,CAAC;AAAA,MACH;AAAA,IACF;AAAA,EACF;AACF;AAEA,cAAc,SAAS,UAAU,CAAC;AAClC,cAAc,SAAS,SAAS,IAAI,WAAW,CAAC;AAC/C,OAAO,cAAc,EAAE,KAAK,CAAC,MAAM,EAAE,OAAO,MAAM,EAAE,CAAC;","names":[]}
package/package.json
CHANGED
package/src/db-context.ts
CHANGED
@@ -95,9 +95,6 @@ export abstract class AbstractStoreContext implements IStoreContext {
 
     return Promise.race(promises)
       .then((result: DBResponse) => {
-        if (timeoutSecs) {
-          console.debug('db request', requestType, 'op', opId, ' took', Date.now() - start, 'ms')
-        }
         request_times[requestType]?.add(Date.now() - start)
         return result
       })
@@ -134,10 +131,11 @@ export abstract class AbstractStoreContext implements IStoreContext {
   }
 
   error(processId: number, e: any) {
-
+    const stack = e.stack
+    console.error('process error', processId, e, stack)
     const errorResult = ProcessResult.create({
       states: {
-        error: e?.toString()
+        error: e?.toString() + (stack ? `\n${stack}` : '')
       }
     })
     this.doSend({ result: errorResult, processId })
package/src/full-service.ts
CHANGED
@@ -21,6 +21,7 @@ import os from 'os'
 import { GLOBAL_CONFIG } from './global-config.js'
 import { compareSemver, parseSemver, Semver } from './utils.js'
 import { LRUCache } from 'lru-cache'
+import { createHash } from 'crypto'
 
 const require = createRequire(import.meta.url)
 
@@ -35,6 +36,37 @@ const PROCESSED_MOVE_EVENT_TX_HANDLER = new LRUCache<string, boolean>({
   max: 10000
 })
 
+const enableTxCache = process.env.ENABLE_PARSE_CACHE === 'true'
+
+// Cache for parsed JSON data
+const PARSED_DATA_CACHE = new LRUCache<string, any>({
+  max: enableTxCache ? 5000 : 1
+})
+
+/**
+ * Gets parsed JSON data from a string, using a cache to avoid repeated parsing
+ * @param rawData The raw string data to parse
+ * @returns The parsed JSON object
+ */
+function getParsedData(rawData: string): any {
+  if (!enableTxCache) {
+    return JSON.parse(rawData)
+  }
+
+  // Create a digest of the raw data for cache key
+  const digest = createHash('md5').update(rawData).digest('hex')
+
+  // Check if we already have this data parsed
+  let parsedData = PARSED_DATA_CACHE.get(digest)
+  if (!parsedData) {
+    // Parse and cache the data
+    parsedData = JSON.parse(rawData)
+    PARSED_DATA_CACHE.set(digest, parsedData)
+  }
+
+  return parsedData
+}
+
 function locatePackageJson(pkgId: string) {
   const m = require.resolve(pkgId)
 
@@ -173,19 +205,32 @@ export class FullProcessorServiceImpl implements ProcessorServiceImplementation
         const ethLog = dataBinding.data?.ethLog
         if (ethLog?.log == null && ethLog?.rawLog) {
           ethLog.log = JSON.parse(ethLog.rawLog)
-
-
-
-
-
+
+          if (ethLog.rawTransaction) {
+            ethLog.transaction = getParsedData(ethLog.rawTransaction)
+          }
+          if (ethLog.rawBlock) {
+            ethLog.block = getParsedData(ethLog.rawBlock)
+          }
+          if (ethLog.rawTransactionReceipt) {
+            ethLog.transactionReceipt = getParsedData(ethLog.rawTransactionReceipt)
+          }
         }
         break
       case HandlerType.ETH_TRANSACTION:
         const ethTx = dataBinding.data?.ethTransaction
         if (ethTx?.transaction == null && ethTx?.rawTransaction) {
-          ethTx.transaction =
-
-
+          ethTx.transaction = getParsedData(ethTx.rawTransaction)
+          if (ethTx.rawBlock) {
+            ethTx.block = getParsedData(ethTx.rawBlock)
+          } else {
+            ethTx.block = undefined
+          }
+          if (ethTx.rawTransactionReceipt) {
+            ethTx.transactionReceipt = getParsedData(ethTx.rawTransactionReceipt)
+          } else {
+            ethTx.transactionReceipt = undefined
+          }
         }
         break
       case HandlerType.FUEL_TRANSACTION:
@@ -209,7 +254,8 @@ export class FullProcessorServiceImpl implements ProcessorServiceImplementation
         const aptEvent = dataBinding.data?.aptEvent
         if (aptEvent) {
           if (isBeforeMoveUseRawVersion && aptEvent.rawTransaction) {
-            const transaction =
+            const transaction = getParsedData(aptEvent.rawTransaction)
+
             const key = `${transaction.hash}-${dataBinding.handlerIds[0]}`
             if (PROCESSED_MOVE_EVENT_TX_HANDLER.has(key)) {
               console.debug('skip binding', key)
@@ -230,7 +276,7 @@ export class FullProcessorServiceImpl implements ProcessorServiceImplementation
         const aptCall = dataBinding.data?.aptCall
         if (aptCall) {
           if (isBeforeMoveUseRawVersion && aptCall.rawTransaction) {
-            aptCall.transaction =
+            aptCall.transaction = getParsedData(aptCall.rawTransaction)
           }
         }
         break
@@ -246,7 +292,8 @@ export class FullProcessorServiceImpl implements ProcessorServiceImplementation
         const suiEvent = dataBinding.data?.suiEvent
         if (suiEvent) {
           if (isBeforeMoveUseRawVersion && suiEvent.rawTransaction) {
-            const transaction =
+            const transaction = getParsedData(suiEvent.rawTransaction)
+
             const key = `${transaction.digest}-${dataBinding.handlerIds[0]}`
             if (PROCESSED_MOVE_EVENT_TX_HANDLER.has(key)) {
               console.debug('skip binding', key)
@@ -267,7 +314,7 @@ export class FullProcessorServiceImpl implements ProcessorServiceImplementation
         const suiCall = dataBinding.data?.suiCall
         if (suiCall) {
           if (isBeforeMoveUseRawVersion && suiCall.rawTransaction) {
-            suiCall.transaction =
+            suiCall.transaction = getParsedData(suiCall.rawTransaction)
           }
         }
         break
@@ -336,4 +383,3 @@ export class FullProcessorServiceImpl implements ProcessorServiceImplementation
   // d.data?.ethTrace?.trace?.transactionPosition
   // )
   // }
-  //