@sentio/runtime 2.62.0-rc.4 → 2.62.0-rc.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/{chunk-6SI5TBIR.js → chunk-RPV67F56.js} +43 -13
- package/lib/{chunk-6SI5TBIR.js.map → chunk-RPV67F56.js.map} +1 -1
- package/lib/index.d.ts +2 -2
- package/lib/index.js +1 -1
- package/lib/{processor-KRKdS8v-.d.ts → processor-HNY62jHs.d.ts} +1 -1
- package/lib/processor-runner.d.ts +0 -33
- package/lib/processor-runner.js +4168 -1422
- package/lib/processor-runner.js.map +1 -1
- package/lib/service-worker.js +1 -1
- package/lib/test-processor.test.d.ts +1 -1
- package/package.json +1 -3
- package/src/processor-runner.ts +194 -161
- package/src/service-manager.ts +2 -1
package/lib/service-worker.js
CHANGED
@@ -1,4 +1,4 @@
-import { P as Plugin,
+import { P as Plugin, D as DataBinding, a as ProcessResult, H as HandlerType } from './processor-HNY62jHs.js';
 import { ProcessStreamResponse_Partitions, InitResponse, ProcessConfigResponse } from '@sentio/protos';
 import 'rxjs';
 import 'node:async_hooks';
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@sentio/runtime",
-  "version": "2.62.0-rc.4",
+  "version": "2.62.0-rc.6",
   "license": "Apache-2.0",
   "type": "module",
   "exports": {
@@ -18,8 +18,6 @@
     "piscina": "5.1.3"
   },
   "devDependencies": {
-    "@types/command-line-args": "^5.2.3",
-    "@types/command-line-usage": "^5.0.4",
     "@types/fs-extra": "^11.0.4"
   },
   "engines": {
package/src/processor-runner.ts
CHANGED
@@ -3,7 +3,7 @@
 import fs from 'fs-extra'
 
 import { compressionAlgorithms } from '@grpc/grpc-js'
-import
+import { Command, InvalidArgumentError } from 'commander'
 import { createServer } from 'nice-grpc'
 import { errorDetailsServerMiddleware } from 'nice-grpc-error-details'
 // import { registry as niceGrpcRegistry } from 'nice-grpc-prometheus'
@@ -24,6 +24,13 @@ import { ServiceManager } from './service-manager.js'
 import path from 'path'
 import { ProcessorV3Definition } from '@sentio/protos'
 import { ProcessorServiceImplV3 } from './service-v3.js'
+import { readFileSync } from 'fs'
+import { fileURLToPath } from 'url'
+import { dirname, join } from 'path'
+
+const __filename = fileURLToPath(import.meta.url)
+const __dirname = dirname(__filename)
+const packageJson = JSON.parse(readFileSync(join(__dirname, '../package.json'), 'utf8'))
 
 // const mergedRegistry = Registry.merge([globalRegistry, niceGrpcRegistry])
 
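Note: this block exists because the runner is an ES module ("type": "module" in package.json), so the CommonJS __dirname/__filename globals are not available; the runtime derives them from import.meta.url in order to read its own package.json and feed the version string to the CLI. A minimal sketch of the same pattern, using only standard Node APIs (the '../package.json' relative path assumes the compiled file sits one directory below the package root, as lib/processor-runner.js does):

import { readFileSync } from 'fs'
import { fileURLToPath } from 'url'
import { dirname, join } from 'path'

// ESM replacement for the CommonJS __filename/__dirname globals.
const selfPath = fileURLToPath(import.meta.url)
const selfDir = dirname(selfPath)

// Read the package manifest relative to this module and pull out the version.
const pkg = JSON.parse(readFileSync(join(selfDir, '../package.json'), 'utf8')) as { version: string }
console.log(`runtime version ${pkg.version}`)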
@@ -33,189 +40,215 @@ try {
 } catch (e) {
   console.error('Failed to parse worker number', e)
 }
-
-
-
-
-
-
-  {
-    name: 'chains-config',
-    alias: 'c',
-    type: String,
-    defaultValue: 'chains-config.json'
-  },
-  { name: 'chainquery-server', type: String, defaultValue: '' },
-  { name: 'pricefeed-server', type: String, defaultValue: '' },
-  { name: 'log-format', type: String, defaultValue: 'console' },
-  { name: 'debug', type: Boolean, defaultValue: false },
-  { name: 'otlp-debug', type: Boolean, defaultValue: false },
-  { name: 'start-action-server', type: Boolean, defaultValue: false },
-  { name: 'worker', type: Number, defaultValue: workerNum },
-  { name: 'process-timeout', type: Number, defaultValue: 60 },
-  { name: 'worker-timeout', type: Number, defaultValue: parseInt(process.env['WORKER_TIMEOUT_SECONDS'] || '60') },
-  {
-    name: 'enable-partition',
-    type: Boolean,
-    defaultValue: process.env['SENTIO_ENABLE_BINDING_DATA_PARTITION'] === 'true'
+
+function myParseInt(value: string, dummyPrevious: unknown): number {
+  // parseInt takes a string and a radix
+  const parsedValue = parseInt(value, 10)
+  if (isNaN(parsedValue)) {
+    throw new InvalidArgumentError('Not a number.')
   }
-
+  return parsedValue
+}
 
-
+// Create Commander.js program
+const program = new Command()
 
-
+program
+  .allowUnknownOption()
+  .allowExcessArguments()
+  .name('processor-runner')
+  .description('Sentio Processor Runtime')
+  .version(packageJson.version)
+  .option('--target <path>', 'Path to the processor module to load')
+  .option('-p, --port <port>', 'Port to listen on', '4000')
+  .option('--concurrency <number>', 'Number of concurrent workers', myParseInt, 4)
+  .option('--batch-count <number>', 'Batch count for processing', myParseInt, 1)
+  .option('-c, --chains-config <path>', 'Path to chains configuration file', 'chains-config.json')
+  .option('--chainquery-server <url>', 'Chain query server URL', '')
+  .option('--pricefeed-server <url>', 'Price feed server URL', '')
+  .option('--log-format <format>', 'Log format (console|json)', 'console')
+  .option('--debug', 'Enable debug mode', false)
+  .option('--otlp-debug', 'Enable OTLP debug mode', false)
+  .option('--start-action-server', 'Start action server instead of processor server', false)
+  .option('--worker <number>', 'Number of worker threads', myParseInt, workerNum)
+  .option('--process-timeout <seconds>', 'Process timeout in seconds', myParseInt, 60)
+  .option(
+    '--worker-timeout <seconds>',
+    'Worker timeout in seconds',
+    myParseInt,
+    parseInt(process.env['WORKER_TIMEOUT_SECONDS'] || '60')
+  )
+  .option(
+    '--enable-partition',
+    'Enable binding data partition',
+    process.env['SENTIO_ENABLE_BINDING_DATA_PARTITION'] === 'true'
+  )
+  .action(async (options: any) => {
+    try {
+      await startServer(options)
+    } catch (error) {
+      console.error('Failed to start server:', error)
+      process.exit(1)
+    }
+  })
 
-
-
+// Parse arguments
+program.parse()
 
-
+async function startServer(options: any): Promise<void> {
+  const logLevel = process.env['LOG_LEVEL']?.toLowerCase()
 
-
+  setupLogger(options['log-format'] === 'json', logLevel === 'debug' ? true : options.debug)
+  console.debug('Starting with', options.target)
 
-
+  await setupOTLP(options['otlp-debug'])
 
-
+  Error.stackTraceLimit = 20
 
-
-let baseService: ProcessorServiceImpl | ServiceManager
-const loader = async () => {
-  const m = await import(options.target)
-  console.debug('Module loaded', m)
-  return m
-}
-if (options['start-action-server']) {
-  server = new ActionServer(loader)
-  server.listen(options.port)
-} else {
-  server = createServer({
-    'grpc.max_send_message_length': 768 * 1024 * 1024,
-    'grpc.max_receive_message_length': 768 * 1024 * 1024,
-    'grpc.default_compression_algorithm': compressionAlgorithms.gzip
-  })
-    // .use(prometheusServerMiddleware())
-    .use(openTelemetryServerMiddleware())
-    .use(errorDetailsServerMiddleware)
+  configureEndpoints(options)
 
-
-
-
-
+  console.debug('Starting Server', options)
+
+  let server: any
+  let baseService: ProcessorServiceImpl | ServiceManager
+  const loader = async () => {
+    const m = await import(options.target)
+    console.debug('Module loaded', m)
+    return m
   }
+  if (options['start-action-server']) {
+    server = new ActionServer(loader)
+    server.listen(options.port)
+  } else {
+    server = createServer({
+      'grpc.max_send_message_length': 768 * 1024 * 1024,
+      'grpc.max_receive_message_length': 768 * 1024 * 1024,
+      'grpc.default_compression_algorithm': compressionAlgorithms.gzip
+    })
+      // .use(prometheusServerMiddleware())
+      .use(openTelemetryServerMiddleware())
+      .use(errorDetailsServerMiddleware)
 
-
+    if (options.worker > 1) {
+      baseService = new ServiceManager(loader, options, server.shutdown)
+    } else {
+      baseService = new ProcessorServiceImpl(loader, options, server.shutdown)
+    }
 
-
-server.add(
-  ProcessorV3Definition,
-  new FullProcessorServiceV3Impl(new ProcessorServiceImplV3(loader, options, server.shutdown))
-)
-server.listen('0.0.0.0:' + options.port)
+    const service = new FullProcessorServiceImpl(baseService)
 
-
-
+    server.add(ProcessorDefinition, service)
+    server.add(
+      ProcessorV3Definition,
+      new FullProcessorServiceV3Impl(new ProcessorServiceImplV3(loader, options, server.shutdown))
+    )
+
+    server.listen('0.0.0.0:' + options.port)
+    console.log('Processor Server Started at:', options.port)
+  }
+  const metricsPort = 4040
 
-const
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const httpServer = http
+    .createServer(async function (req, res) {
+      if (req.url) {
+        const reqUrl = new URL(req.url, `http://${req.headers.host}`)
+        const queries = reqUrl.searchParams
+        switch (reqUrl.pathname) {
+          // case '/metrics':
+          //   const metrics = await mergedRegistry.metrics()
+          //   res.write(metrics)
+          //   break
+          case '/heap': {
+            try {
+              const file = '/tmp/' + Date.now() + '.heapsnapshot'
+              await dumpHeap(file)
+              // send the file
+              const readStream = fs.createReadStream(file)
+              res.writeHead(200, { 'Content-Type': 'application/json' })
+              readStream.pipe(res)
+              res.end()
+            } catch {
+              res.writeHead(500)
+              res.end()
+            }
+            break
           }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+          case '/profile': {
+            try {
+              const profileTime = parseInt(queries.get('t') || '1000', 10) || 1000
+              const session = new Session()
+              session.connect()
+
+              await session.post('Profiler.enable')
+              await session.post('Profiler.start')
+
+              await new Promise((resolve) => setTimeout(resolve, profileTime))
+              const { profile } = await session.post('Profiler.stop')
+
+              res.writeHead(200, { 'Content-Type': 'application/json' })
+              res.write(JSON.stringify(profile))
+              session.disconnect()
+            } catch {
+              res.writeHead(500)
+            }
+            break
           }
-
+          default:
+            res.writeHead(404)
         }
-
-
+      } else {
+        res.writeHead(404)
       }
-
-
-
-res.end()
-})
-.listen(metricsPort)
+      res.end()
+    })
+    .listen(metricsPort)
 
-console.log('Metric Server Started at:', metricsPort)
+  console.log('Metric Server Started at:', metricsPort)
 
-process
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  process
+    .on('SIGINT', function () {
+      shutdownServers(server, httpServer, 0)
+    })
+    .on('uncaughtException', (err) => {
+      console.error('Uncaught Exception, please checking if await is properly used', err)
+      if (baseService) {
+        baseService.unhandled = err
+      }
+      // shutdownServers(1)
+    })
+    .on('unhandledRejection', (reason, p) => {
+      // @ts-ignore ignore invalid ens error
+      if (reason?.message.startsWith('invalid ENS name (disallowed character: "*"')) {
+        return
+      }
+      console.error('Unhandled Rejection, please checking if await is properly', reason)
+      if (baseService) {
+        baseService.unhandled = reason as Error
+      }
+      // shutdownServers(1)
+    })
 
-if (process.env['OOM_DUMP_MEMORY_SIZE_GB']) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  if (process.env['OOM_DUMP_MEMORY_SIZE_GB']) {
+    let dumping = false
+    const memorySize = parseFloat(process.env['OOM_DUMP_MEMORY_SIZE_GB']!)
+    console.log('heap dumping is enabled, limit set to ', memorySize, 'gb')
+    const dir = process.env['OOM_DUMP_DIR'] || '/tmp'
+    setInterval(async () => {
+      const mem = process.memoryUsage()
+      console.log('Current Memory Usage', mem)
+      // if memory usage is greater this size, dump heap and exit
+      if (mem.heapTotal > memorySize * 1024 * 1024 * 1024 && !dumping) {
+        const file = path.join(dir, `${Date.now()}.heapsnapshot`)
+        dumping = true
+        await dumpHeap(file)
+        // force exit and keep pod running
+        process.exit(11)
+      }
+    }, 1000 * 60)
+  }
 }
 
-async function dumpHeap(file: string) {
+async function dumpHeap(file: string): Promise<void> {
   console.log('Heap dumping to', file)
   const session = new Session()
   fs.mkdirSync(path.dirname(file), { recursive: true })
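Note: the core of this hunk is the switch from command-line-args/command-line-usage to Commander, with a custom numeric parser (myParseInt) and the server startup moved from top-level code into an action callback (startServer). A trimmed-down sketch of the same wiring, assuming only the commander package; the option set is abbreviated and the handler body is illustrative, not the runtime's:

import { Command, InvalidArgumentError } from 'commander'

// Commander hands option values to the parser as strings; coerce and validate here.
function toInt(value: string, _previous: unknown): number {
  const parsed = parseInt(value, 10)
  if (isNaN(parsed)) {
    throw new InvalidArgumentError('Not a number.')
  }
  return parsed
}

const program = new Command()
  .name('processor-runner')
  .option('--target <path>', 'Path to the processor module to load')
  .option('-p, --port <port>', 'Port to listen on', '4000')
  .option('--worker <number>', 'Number of worker threads', toInt, 1)
  .action(async (options) => {
    // With no positional arguments declared, the first action argument is the options object.
    console.log(options.target, options.port, options.worker)
  })

// Parses process.argv and runs the action; unknown flags would be rejected
// unless allowUnknownOption() is set, as the diff does.
program.parse()

Invoked as, for example, node processor-runner.js --target ./dist/lib.js --worker 4. Because no parser is attached to --port it stays a string, which matches how the diff concatenates options.port into the listen address.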
@@ -234,7 +267,7 @@ async function dumpHeap(file: string) {
   }
 }
 
-function shutdownServers(exitCode: number) {
+function shutdownServers(server: any, httpServer: any, exitCode: number): void {
   server?.forceShutdown()
   console.log('RPC server shut down')
 
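Note: the /heap and /profile endpoints added above drive the V8 inspector from inside the process (dumpHeap plus the Profiler.* calls). A compact sketch of the CPU-profile half, assuming the promise-based Session from node:inspector/promises (Node 19+); the diff does not show which inspector import the runner actually uses, and the output path is illustrative:

import { Session } from 'node:inspector/promises'
import { writeFileSync } from 'node:fs'

// Profile the current process for `ms` milliseconds and write a .cpuprofile file.
async function cpuProfile(ms: number, outFile: string): Promise<void> {
  const session = new Session()
  session.connect()
  try {
    await session.post('Profiler.enable')
    await session.post('Profiler.start')
    await new Promise((resolve) => setTimeout(resolve, ms))
    const { profile } = await session.post('Profiler.stop')
    // The resulting JSON loads directly into Chrome DevTools or VS Code.
    writeFileSync(outFile, JSON.stringify(profile))
  } finally {
    session.disconnect()
  }
}

// e.g. await cpuProfile(5000, '/tmp/runner.cpuprofile')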
package/src/service-manager.ts
CHANGED
@@ -139,7 +139,8 @@ export class ServiceManager extends ProcessorServiceImpl {
       argv: process.argv,
       workerData: this.workerData
     })
-
+    // @ts-ignore - Piscina message handling for template instance sync
+    this.pool.on('message', (msg: any) => {
       if (msg.event == 'add_template_instance') {
         // sync the template state from worker to the main thread
         TemplateInstanceState.INSTANCE.addValue(msg.value)