@opra/kafka 1.0.0-beta.5 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/kafka-adapter.js +174 -134
- package/cjs/kafka-context.js +1 -0
- package/esm/kafka-adapter.js +174 -134
- package/esm/kafka-context.js +1 -0
- package/package.json +3 -3
- package/types/kafka-adapter.d.ts +27 -21
- package/types/kafka-context.d.ts +1 -0
package/cjs/kafka-adapter.js
CHANGED
@@ -10,6 +10,7 @@ const kafka_context_js_1 = require("./kafka-context.js");
 const request_parser_js_1 = require("./request-parser.js");
 const globalErrorTypes = ['unhandledRejection', 'uncaughtException'];
 const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
+const kGroupId = Symbol('kGroupId');
 /**
  *
  * @class KafkaAdapter
@@ -24,6 +25,7 @@ class KafkaAdapter extends core_1.PlatformAdapter {
         super(config.document, config);
         this._controllerInstances = new Map();
         this._consumers = new Map();
+        this._handlerArgs = [];
         this.protocol = 'rpc';
         this.platform = KafkaAdapter.PlatformName;
         if (!(config.document.api instanceof common_1.RpcApi && config.document.api.platform === KafkaAdapter.PlatformName)) {
@@ -56,24 +58,71 @@ class KafkaAdapter extends core_1.PlatformAdapter {
         /* istanbul ignore next */
         if (this._consumers.size > 0)
             return;
-
-
+        /* istanbul ignore next */
+        if (this._consumers.size > 0)
+            return;
+        await this._createAllConsumers();
+        /** Connect all consumers */
+        for (const consumer of this._consumers.values()) {
+            await consumer.connect().catch(e => {
+                this._emitError(e);
+                throw e;
+            });
+        }
+        /** Subscribe to channels */
+        for (const args of this._handlerArgs) {
+            const { consumer, operation, operationOptions } = args;
+            args.topics = Array.isArray(operation.channel) ? operation.channel : [operation.channel];
+            await consumer.subscribe({ topics: args.topics, fromBeginning: operationOptions.fromBeginning }).catch(e => {
+                this._emitError(e);
+                throw e;
+            });
+            this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
+        }
+        /** Start consumer listeners */
+        const topicMap = new Map();
+        for (const consumer of this._consumers.values()) {
+            const groupId = consumer[kGroupId];
+            await consumer
+                .run({
+                eachMessage: async (payload) => {
+                    await this.emitAsync('message', payload).catch(() => undefined);
+                    const { topic } = payload;
+                    const topicCacheKey = groupId + ':' + topic;
+                    let handlerArgsArray = topicMap.get(topicCacheKey);
+                    if (!handlerArgsArray) {
+                        handlerArgsArray = this._handlerArgs.filter(args => args.consumer === consumer &&
+                            args.topics.find(t => (t instanceof RegExp ? t.test(topic) : t === topic)));
+                        /* istanbul ignore next */
+                        if (!handlerArgsArray) {
+                            this._emitError(new Error(`Unhandled topic (${topic})`));
+                            return;
+                        }
+                        topicMap.set(topicCacheKey, handlerArgsArray);
+                    }
+                    /** Iterate and call all matching handlers */
+                    for (const args of handlerArgsArray) {
+                        try {
+                            await args.handler(payload);
+                        }
+                        catch (e) {
+                            this._emitError(e);
+                        }
+                    }
+                    await this.emitAsync('message-finish', payload);
+                },
+            })
+                .catch(e => {
+                this._emitError(e);
+                throw e;
+            });
+        }
     }
     /**
      * Closes all connections and stops the service
      */
     async close() {
-
-            if (controller.onShutdown) {
-                try {
-                    await controller.onShutdown.call(instance, controller);
-                }
-                catch (e) {
-                    this._emitError(e);
-                }
-            }
-        }
-        await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.consumer.disconnect()));
+        await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.disconnect()));
         this._consumers.clear();
         this._controllerInstances.clear();
     }
@@ -82,82 +131,98 @@ class KafkaAdapter extends core_1.PlatformAdapter {
         return controller && this._controllerInstances.get(controller);
     }
     /**
-     * Creates and initializes all consumers
      *
+     * @param controller
+     * @param instance
+     * @param operation
     * @protected
     */
-    async
-
-        if (this._consumers.size > 0)
+    async _getOperationOptions(controller, instance, operation) {
+        if (typeof instance[operation.name] !== 'function')
             return;
-
+        const proto = controller.ctor?.prototype || Object.getPrototypeOf(instance);
+        let operationOptions = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA, proto, operation.name);
+        const configResolver = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
+        if (configResolver) {
+            const cfg = await configResolver();
+            operationOptions = { ...operationOptions, ...cfg };
+        }
+        return operationOptions;
+    }
+    /**
+     *
+     * @protected
+     */
+    async _createAllConsumers() {
         for (const controller of this.document.rpcApi.controllers.values()) {
             let instance = controller.instance;
             if (!instance && controller.ctor)
                 instance = new controller.ctor();
             if (!instance)
                 continue;
+            this._controllerInstances.set(controller, instance);
+            /** Build HandlerData array */
             for (const operation of controller.operations.values()) {
-                await this.
+                const operationOptions = await this._getOperationOptions(controller, instance, operation);
+                if (!operationOptions)
+                    continue;
+                // const consumerConfig = this._getConsumerConfig(operationOptions);
+                const args = {
+                    consumer: null,
+                    controller,
+                    instance,
+                    operation,
+                    operationOptions,
+                    handler: null,
+                    topics: null,
+                };
+                this._createHandler(args);
+                this._handlerArgs.push(args);
             }
-
+        }
+        /** Initialize consumers */
+        for (const args of this._handlerArgs) {
+            await this._createConsumer(args);
         }
     }
     /**
-     * Creates and initializes a consumer for given operation
      *
+     * @param args
     * @protected
     */
-    async
-
-        return;
-        const proto = controller.ctor?.prototype || Object.getPrototypeOf(controller.instance);
-        // this._config.consumers
-        let operationOptions = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA, proto, operation.name);
-        const configResolver = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
-        if (configResolver) {
-            const cfg = await configResolver();
-            operationOptions = { ...operationOptions, ...cfg };
-        }
+    async _createConsumer(args) {
+        const { operationOptions } = args;
         const consumerConfig = {
             groupId: constants_js_1.KAFKA_DEFAULT_GROUP,
         };
-
-
+        let consumer;
+        if (typeof operationOptions.consumer === 'object') {
+            consumer = this._consumers.get(operationOptions.consumer.groupId);
+            if (consumer) {
                 throw new Error(`Operation consumer for groupId (${operationOptions.consumer.groupId}) already exists`);
             }
-            Object.assign(consumerConfig, operationOptions
+            Object.assign(consumerConfig, operationOptions.consumer);
         }
-        else if (operationOptions
+        else if (operationOptions.consumer) {
             const x = this._config.consumers?.[operationOptions.consumer];
             Object.assign(consumerConfig, { ...x, groupId: operationOptions.consumer });
         }
-
-
-
-
-
-
-
-
-        if (typeof controller.onInit === 'function')
-            controller.onInit.call(instance, controller);
-    }
-    async _start() {
-        const arr = Array.from(this._consumers.values());
-        if (!arr.length)
-            return;
-        /** Start first consumer to test if server is available */
-        await this._startConsumer(arr.shift());
-        /** if first connection is success than start all consumers at same time */
-        await Promise.allSettled(arr.map(x => this._startConsumer(x)));
+        consumer = this._consumers.get(consumerConfig.groupId);
+        /** Create consumers */
+        if (!consumer) {
+            consumer = this.kafka.consumer(consumerConfig);
+            consumer[kGroupId] = consumerConfig.groupId;
+            this._consumers.set(consumerConfig.groupId, consumer);
+        }
+        args.consumer = consumer;
     }
     /**
-     *
+     *
+     * @param args
     * @protected
     */
-
-        const {
+    _createHandler(args) {
+        const { controller, instance, operation } = args;
         /** Prepare parsers */
         const parseKey = request_parser_js_1.RequestParser.STRING;
         const parsePayload = request_parser_js_1.RequestParser.STRING;
@@ -171,81 +236,56 @@ class KafkaAdapter extends core_1.PlatformAdapter {
             this[core_1.kAssetCache].set(header, 'decode', decode);
         }
     });
-
-
-
-
-
-
-
-
-
-
-            });
-        }
-        else {
-            await consumer.subscribe({ topic: operation.channel, fromBeginning: options.fromBeginning }).catch(e => {
-                this._emitError(e);
-                throw e;
-            });
-        }
-        /** Run message listener */
-        await consumer
-            .run({
-            eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
-                const operationHandler = instance[operation.name];
-                let key;
-                let payload;
-                const headers = {};
-                try {
-                    /** Parse and decode `key` */
-                    if (message.key) {
-                        const s = parseKey(message.key);
-                        key = decodeKey(s);
-                    }
-                    /** Parse and decode `payload` */
-                    if (message.value != null) {
-                        const s = parsePayload(message.value);
-                        payload = decodePayload(s);
-                    }
-                    /** Parse and decode `headers` */
-                    if (message.headers) {
-                        for (const [k, v] of Object.entries(message.headers)) {
-                            const header = operation.findHeader(k);
-                            const decode = this[core_1.kAssetCache].get(header, 'decode') || valgen_1.vg.isAny();
-                            headers[k] = decode(Buffer.isBuffer(v) ? v.toString() : v);
-                        }
-                    }
+        args.handler = async ({ topic, partition, message, heartbeat, pause }) => {
+            const operationHandler = instance[operation.name];
+            let key;
+            let payload;
+            const headers = {};
+            try {
+                /** Parse and decode `key` */
+                if (message.key) {
+                    const s = parseKey(message.key);
+                    key = decodeKey(s);
                 }
-
-
-
+                /** Parse and decode `payload` */
+                if (message.value != null) {
+                    const s = parsePayload(message.value);
+                    payload = decodePayload(s);
                 }
-                /**
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                /** Parse and decode `headers` */
+                if (message.headers) {
+                    for (const [k, v] of Object.entries(message.headers)) {
+                        const header = operation.findHeader(k);
+                        const decode = this[core_1.kAssetCache].get(header, 'decode') || valgen_1.vg.isAny();
+                        headers[k] = decode(Buffer.isBuffer(v) ? v.toString() : v);
+                    }
+                }
+            }
+            catch (e) {
+                this._emitError(e);
+                return;
+            }
+            /** Create context */
+            const ctx = new kafka_context_js_1.KafkaContext({
+                adapter: this,
+                platform: this.platform,
+                controller,
+                controllerInstance: instance,
+                operation,
+                operationHandler,
+                topic,
+                partition,
+                payload,
+                key,
+                headers,
+                rawMessage: message,
+                heartbeat,
+                pause,
+            });
+            await this.emitAsync('before-execute', ctx);
+            const result = await operationHandler.call(instance, ctx);
+            await this.emitAsync('after-execute', ctx, result);
+        };
     }
     _emitError(e) {
         this._logger?.error(e);
@@ -254,7 +294,7 @@ class KafkaAdapter extends core_1.PlatformAdapter {
     }
     _createLogCreator(logger, logExtra) {
         return ({ namespace, level, log }) => {
-            const { message, ...extra } = log;
+            const { message, error, ...extra } = log;
             delete extra.namespace;
             delete extra.timestamp;
             delete extra.logger;
@@ -278,8 +318,8 @@ class KafkaAdapter extends core_1.PlatformAdapter {
         if (!fn)
             return;
         if (!logExtra)
-            return fn.call(logger, message);
-        return fn.call(logger, message, {
+            return fn.call(logger, error || message);
+        return fn.call(logger, error || message, {
             ...extra,
             namespace,
         });
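Note on the refactor above (it applies identically to the esm build below): subscription setup moves out of per-operation consumer creation and into start(). Consumers are now keyed by group id and shared across operations, and each incoming message is dispatched to every registered handler whose topics match, with matches cached per group id and topic. The following is a minimal standalone sketch of that dispatch model in TypeScript, using simplified hypothetical types rather than the actual @opra/kafka internals:

// Sketch only: mirrors the groupId-keyed dispatch added in start().
// The real adapter keeps this state in _handlerArgs and _consumers.
interface HandlerArgs {
  groupId: string;
  topics: (string | RegExp)[];
  handler: (payload: { topic: string }) => Promise<void>;
}

const handlerArgs: HandlerArgs[] = [];
const topicCache = new Map<string, HandlerArgs[]>(); // "groupId:topic" -> matching handlers

async function dispatch(groupId: string, payload: { topic: string }): Promise<void> {
  const cacheKey = `${groupId}:${payload.topic}`;
  let matches = topicCache.get(cacheKey);
  if (!matches) {
    // Filter once per groupId:topic pair; RegExp channels are tested, strings compared.
    matches = handlerArgs.filter(
      args =>
        args.groupId === groupId &&
        args.topics.some(t => (t instanceof RegExp ? t.test(payload.topic) : t === payload.topic)),
    );
    topicCache.set(cacheKey, matches);
  }
  for (const args of matches) {
    try {
      await args.handler(payload); // one failing handler does not stop the others
    } catch (e) {
      console.error(e); // the adapter forwards this to _emitError instead
    }
  }
}

Because the cache key includes the group id, two consumers subscribed to the same topic under different groups keep separate handler lists.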
package/cjs/kafka-context.js
CHANGED
@@ -18,6 +18,7 @@ class KafkaContext extends core_1.ExecutionContext {
         this.operationHandler = init.operationHandler;
         this.partition = init.partition;
         this.headers = init.headers || {};
+        this.topic = init.topic;
         this.key = init.key;
         this.payload = init.payload;
         this.heartbeat = init.heartbeat;
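The corresponding runtime change: KafkaContext is now constructed with, and exposes, the topic that delivered the message. A hypothetical handler reading it is sketched below; Ctx is a structural stand-in for KafkaContext carrying only the fields used here, so the snippet stays self-contained:

// Illustrative only; the real KafkaContext has many more members.
interface Ctx {
  topic: string; // newly assigned from init.topic in this release
  partition: number;
  key: any;
  payload: any;
  headers: Record<string, any>;
}

async function handleUserEvents(ctx: Ctx): Promise<void> {
  // With array or RegExp channels, one operation may receive several topics,
  // so branching on the delivering topic is now possible:
  if (ctx.topic === 'user.created') {
    console.log('created', ctx.key, ctx.payload);
  } else {
    console.log(`event from ${ctx.topic}@${ctx.partition}`, ctx.payload);
  }
}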
package/esm/kafka-adapter.js
CHANGED
@@ -7,6 +7,7 @@ import { KafkaContext } from './kafka-context.js';
 import { RequestParser } from './request-parser.js';
 const globalErrorTypes = ['unhandledRejection', 'uncaughtException'];
 const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
+const kGroupId = Symbol('kGroupId');
 /**
  *
  * @class KafkaAdapter
@@ -21,6 +22,7 @@ export class KafkaAdapter extends PlatformAdapter {
         super(config.document, config);
         this._controllerInstances = new Map();
         this._consumers = new Map();
+        this._handlerArgs = [];
         this.protocol = 'rpc';
         this.platform = KafkaAdapter.PlatformName;
         if (!(config.document.api instanceof RpcApi && config.document.api.platform === KafkaAdapter.PlatformName)) {
@@ -53,24 +55,71 @@ export class KafkaAdapter extends PlatformAdapter {
         /* istanbul ignore next */
         if (this._consumers.size > 0)
             return;
-
-
+        /* istanbul ignore next */
+        if (this._consumers.size > 0)
+            return;
+        await this._createAllConsumers();
+        /** Connect all consumers */
+        for (const consumer of this._consumers.values()) {
+            await consumer.connect().catch(e => {
+                this._emitError(e);
+                throw e;
+            });
+        }
+        /** Subscribe to channels */
+        for (const args of this._handlerArgs) {
+            const { consumer, operation, operationOptions } = args;
+            args.topics = Array.isArray(operation.channel) ? operation.channel : [operation.channel];
+            await consumer.subscribe({ topics: args.topics, fromBeginning: operationOptions.fromBeginning }).catch(e => {
+                this._emitError(e);
+                throw e;
+            });
+            this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
+        }
+        /** Start consumer listeners */
+        const topicMap = new Map();
+        for (const consumer of this._consumers.values()) {
+            const groupId = consumer[kGroupId];
+            await consumer
+                .run({
+                eachMessage: async (payload) => {
+                    await this.emitAsync('message', payload).catch(() => undefined);
+                    const { topic } = payload;
+                    const topicCacheKey = groupId + ':' + topic;
+                    let handlerArgsArray = topicMap.get(topicCacheKey);
+                    if (!handlerArgsArray) {
+                        handlerArgsArray = this._handlerArgs.filter(args => args.consumer === consumer &&
+                            args.topics.find(t => (t instanceof RegExp ? t.test(topic) : t === topic)));
+                        /* istanbul ignore next */
+                        if (!handlerArgsArray) {
+                            this._emitError(new Error(`Unhandled topic (${topic})`));
+                            return;
+                        }
+                        topicMap.set(topicCacheKey, handlerArgsArray);
+                    }
+                    /** Iterate and call all matching handlers */
+                    for (const args of handlerArgsArray) {
+                        try {
+                            await args.handler(payload);
+                        }
+                        catch (e) {
+                            this._emitError(e);
+                        }
+                    }
+                    await this.emitAsync('message-finish', payload);
+                },
+            })
+                .catch(e => {
+                this._emitError(e);
+                throw e;
+            });
+        }
     }
     /**
      * Closes all connections and stops the service
      */
     async close() {
-
-            if (controller.onShutdown) {
-                try {
-                    await controller.onShutdown.call(instance, controller);
-                }
-                catch (e) {
-                    this._emitError(e);
-                }
-            }
-        }
-        await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.consumer.disconnect()));
+        await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.disconnect()));
         this._consumers.clear();
         this._controllerInstances.clear();
     }
@@ -79,82 +128,98 @@ export class KafkaAdapter extends PlatformAdapter {
         return controller && this._controllerInstances.get(controller);
     }
     /**
-     * Creates and initializes all consumers
      *
+     * @param controller
+     * @param instance
+     * @param operation
     * @protected
     */
-    async
-
-        if (this._consumers.size > 0)
+    async _getOperationOptions(controller, instance, operation) {
+        if (typeof instance[operation.name] !== 'function')
            return;
-
+        const proto = controller.ctor?.prototype || Object.getPrototypeOf(instance);
+        let operationOptions = Reflect.getMetadata(KAFKA_OPERATION_METADATA, proto, operation.name);
+        const configResolver = Reflect.getMetadata(KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
+        if (configResolver) {
+            const cfg = await configResolver();
+            operationOptions = { ...operationOptions, ...cfg };
+        }
+        return operationOptions;
+    }
+    /**
+     *
+     * @protected
+     */
+    async _createAllConsumers() {
         for (const controller of this.document.rpcApi.controllers.values()) {
             let instance = controller.instance;
             if (!instance && controller.ctor)
                 instance = new controller.ctor();
             if (!instance)
                 continue;
+            this._controllerInstances.set(controller, instance);
+            /** Build HandlerData array */
             for (const operation of controller.operations.values()) {
-                await this.
+                const operationOptions = await this._getOperationOptions(controller, instance, operation);
+                if (!operationOptions)
+                    continue;
+                // const consumerConfig = this._getConsumerConfig(operationOptions);
+                const args = {
+                    consumer: null,
+                    controller,
+                    instance,
+                    operation,
+                    operationOptions,
+                    handler: null,
+                    topics: null,
+                };
+                this._createHandler(args);
+                this._handlerArgs.push(args);
             }
-
+        }
+        /** Initialize consumers */
+        for (const args of this._handlerArgs) {
+            await this._createConsumer(args);
        }
    }
    /**
-     * Creates and initializes a consumer for given operation
      *
+     * @param args
     * @protected
     */
-    async
-
-        return;
-        const proto = controller.ctor?.prototype || Object.getPrototypeOf(controller.instance);
-        // this._config.consumers
-        let operationOptions = Reflect.getMetadata(KAFKA_OPERATION_METADATA, proto, operation.name);
-        const configResolver = Reflect.getMetadata(KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
-        if (configResolver) {
-            const cfg = await configResolver();
-            operationOptions = { ...operationOptions, ...cfg };
-        }
+    async _createConsumer(args) {
+        const { operationOptions } = args;
         const consumerConfig = {
             groupId: KAFKA_DEFAULT_GROUP,
         };
-
-
+        let consumer;
+        if (typeof operationOptions.consumer === 'object') {
+            consumer = this._consumers.get(operationOptions.consumer.groupId);
+            if (consumer) {
                throw new Error(`Operation consumer for groupId (${operationOptions.consumer.groupId}) already exists`);
            }
-            Object.assign(consumerConfig, operationOptions
+            Object.assign(consumerConfig, operationOptions.consumer);
        }
-        else if (operationOptions
+        else if (operationOptions.consumer) {
            const x = this._config.consumers?.[operationOptions.consumer];
            Object.assign(consumerConfig, { ...x, groupId: operationOptions.consumer });
        }
-
-
-
-
-
-
-
-
-        if (typeof controller.onInit === 'function')
-            controller.onInit.call(instance, controller);
-    }
-    async _start() {
-        const arr = Array.from(this._consumers.values());
-        if (!arr.length)
-            return;
-        /** Start first consumer to test if server is available */
-        await this._startConsumer(arr.shift());
-        /** if first connection is success than start all consumers at same time */
-        await Promise.allSettled(arr.map(x => this._startConsumer(x)));
+        consumer = this._consumers.get(consumerConfig.groupId);
+        /** Create consumers */
+        if (!consumer) {
+            consumer = this.kafka.consumer(consumerConfig);
+            consumer[kGroupId] = consumerConfig.groupId;
+            this._consumers.set(consumerConfig.groupId, consumer);
+        }
+        args.consumer = consumer;
    }
    /**
-     *
+     *
+     * @param args
     * @protected
     */
-
-        const {
+    _createHandler(args) {
+        const { controller, instance, operation } = args;
        /** Prepare parsers */
        const parseKey = RequestParser.STRING;
        const parsePayload = RequestParser.STRING;
@@ -168,81 +233,56 @@ export class KafkaAdapter extends PlatformAdapter {
            this[kAssetCache].set(header, 'decode', decode);
        }
    });
-
-
-
-
-
-
-
-
-
-
-            });
-        }
-        else {
-            await consumer.subscribe({ topic: operation.channel, fromBeginning: options.fromBeginning }).catch(e => {
-                this._emitError(e);
-                throw e;
-            });
-        }
-        /** Run message listener */
-        await consumer
-            .run({
-            eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
-                const operationHandler = instance[operation.name];
-                let key;
-                let payload;
-                const headers = {};
-                try {
-                    /** Parse and decode `key` */
-                    if (message.key) {
-                        const s = parseKey(message.key);
-                        key = decodeKey(s);
-                    }
-                    /** Parse and decode `payload` */
-                    if (message.value != null) {
-                        const s = parsePayload(message.value);
-                        payload = decodePayload(s);
-                    }
-                    /** Parse and decode `headers` */
-                    if (message.headers) {
-                        for (const [k, v] of Object.entries(message.headers)) {
-                            const header = operation.findHeader(k);
-                            const decode = this[kAssetCache].get(header, 'decode') || vg.isAny();
-                            headers[k] = decode(Buffer.isBuffer(v) ? v.toString() : v);
-                        }
-                    }
+        args.handler = async ({ topic, partition, message, heartbeat, pause }) => {
+            const operationHandler = instance[operation.name];
+            let key;
+            let payload;
+            const headers = {};
+            try {
+                /** Parse and decode `key` */
+                if (message.key) {
+                    const s = parseKey(message.key);
+                    key = decodeKey(s);
                }
-
-
-
+                /** Parse and decode `payload` */
+                if (message.value != null) {
+                    const s = parsePayload(message.value);
+                    payload = decodePayload(s);
                }
-                /**
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+                /** Parse and decode `headers` */
+                if (message.headers) {
+                    for (const [k, v] of Object.entries(message.headers)) {
+                        const header = operation.findHeader(k);
+                        const decode = this[kAssetCache].get(header, 'decode') || vg.isAny();
+                        headers[k] = decode(Buffer.isBuffer(v) ? v.toString() : v);
+                    }
+                }
+            }
+            catch (e) {
+                this._emitError(e);
+                return;
+            }
+            /** Create context */
+            const ctx = new KafkaContext({
+                adapter: this,
+                platform: this.platform,
+                controller,
+                controllerInstance: instance,
+                operation,
+                operationHandler,
+                topic,
+                partition,
+                payload,
+                key,
+                headers,
+                rawMessage: message,
+                heartbeat,
+                pause,
+            });
+            await this.emitAsync('before-execute', ctx);
+            const result = await operationHandler.call(instance, ctx);
+            await this.emitAsync('after-execute', ctx, result);
+        };
    }
    _emitError(e) {
        this._logger?.error(e);
@@ -251,7 +291,7 @@ export class KafkaAdapter extends PlatformAdapter {
    }
    _createLogCreator(logger, logExtra) {
        return ({ namespace, level, log }) => {
-            const { message, ...extra } = log;
+            const { message, error, ...extra } = log;
            delete extra.namespace;
            delete extra.timestamp;
            delete extra.logger;
@@ -275,8 +315,8 @@ export class KafkaAdapter extends PlatformAdapter {
        if (!fn)
            return;
        if (!logExtra)
-            return fn.call(logger, message);
-        return fn.call(logger, message, {
+            return fn.call(logger, error || message);
+        return fn.call(logger, error || message, {
            ...extra,
            namespace,
        });
package/esm/kafka-context.js
CHANGED
@@ -15,6 +15,7 @@ export class KafkaContext extends ExecutionContext {
         this.operationHandler = init.operationHandler;
         this.partition = init.partition;
         this.headers = init.headers || {};
+        this.topic = init.topic;
         this.key = init.key;
         this.payload = init.payload;
         this.heartbeat = init.heartbeat;
package/package.json
CHANGED
@@ -1,12 +1,12 @@
 {
   "name": "@opra/kafka",
-  "version": "1.0.0-beta.5",
+  "version": "1.0.1",
   "description": "Opra Kafka package",
   "author": "Panates",
   "license": "MIT",
   "dependencies": {
-    "@opra/common": "^1.0.
-    "@opra/core": "^1.0.
+    "@opra/common": "^1.0.1",
+    "@opra/core": "^1.0.1",
     "node-events-async": "^1.0.0",
     "tslib": "^2.7.0",
     "valgen": "^5.10.0"
package/types/kafka-adapter.d.ts
CHANGED
@@ -1,6 +1,6 @@
 import { ApiDocument, OpraSchema, RpcApi, RpcController, RpcOperation } from '@opra/common';
 import { type ILogger, PlatformAdapter } from '@opra/core';
-import { type Consumer, ConsumerConfig, Kafka, type KafkaConfig } from 'kafkajs';
+import { type Consumer, ConsumerConfig, EachMessageHandler, Kafka, type KafkaConfig } from 'kafkajs';
 import type { StrictOmit } from 'ts-gems';
 import { KafkaContext } from './kafka-context.js';
 /**
@@ -34,6 +34,15 @@ export declare namespace KafkaAdapter {
         intercept(context: KafkaContext, next: NextCallback): Promise<any>;
     };
 }
+interface HandlerArguments {
+    consumer: Consumer;
+    controller: RpcController;
+    instance: any;
+    operation: RpcOperation;
+    operationOptions: KafkaAdapter.OperationOptions;
+    handler: EachMessageHandler;
+    topics: (string | RegExp)[];
+}
 /**
  *
  * @class KafkaAdapter
@@ -42,13 +51,8 @@ export declare class KafkaAdapter extends PlatformAdapter {
     static readonly PlatformName = "kafka";
     protected _config: KafkaAdapter.Config;
     protected _controllerInstances: Map<RpcController, any>;
-    protected _consumers: Map<string,
-
-        controller: RpcController;
-        instance: any;
-        operation: RpcOperation;
-        options: KafkaAdapter.OperationOptions;
-    }>;
+    protected _consumers: Map<string, Consumer>;
+    protected _handlerArgs: HandlerArguments[];
     protected _logger?: ILogger;
     readonly kafka: Kafka;
     readonly protocol: OpraSchema.Transport;
@@ -71,29 +75,30 @@ export declare class KafkaAdapter extends PlatformAdapter {
     close(): Promise<void>;
     getControllerInstance<T>(controllerPath: string): T | undefined;
     /**
-     *
+     *
+     * @param controller
+     * @param instance
+     * @param operation
+     * @protected
+     */
+    protected _getOperationOptions(controller: RpcController, instance: any, operation: RpcOperation): Promise<KafkaAdapter.OperationOptions | undefined>;
+    /**
      *
     * @protected
     */
-    protected
+    protected _createAllConsumers(): Promise<void>;
    /**
-     * Creates and initializes a consumer for given operation
      *
+     * @param args
     * @protected
     */
-    protected
-    protected _start(): Promise<void>;
+    protected _createConsumer(args: HandlerArguments): Promise<void>;
    /**
-     *
+     *
+     * @param args
     * @protected
     */
-    protected
-        consumer: Consumer;
-        controller: RpcController;
-        instance: any;
-        operation: RpcOperation;
-        options: KafkaAdapter.OperationOptions;
-    }): Promise<void>;
+    protected _createHandler(args: HandlerArguments): void;
     protected _emitError(e: any): void;
     protected _createLogCreator(logger: ILogger, logExtra?: boolean): ({ namespace, level, log }: {
         namespace: any;
@@ -101,3 +106,4 @@ export declare class KafkaAdapter extends PlatformAdapter {
     log: any;
 }) => any;
 }
+export {};
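The typings make the new consumer-sharing rule visible: _consumers now maps a group id to a single kafkajs Consumer, while per-operation state moves into HandlerArguments entries. Below is a sketch of the consumer-resolution behavior shown in _createConsumer above, using real kafkajs types; the default group value and helper names are made up for illustration:

import { Kafka, type Consumer, type ConsumerConfig } from 'kafkajs';

const KAFKA_DEFAULT_GROUP = 'opra-default'; // assumption: the actual constant value is not shown in the diff

function resolveConsumerConfig(
  operationConsumer: string | ConsumerConfig | undefined,
  namedConsumers: Record<string, Partial<ConsumerConfig>> = {},
): ConsumerConfig {
  const config: ConsumerConfig = { groupId: KAFKA_DEFAULT_GROUP };
  if (typeof operationConsumer === 'object') {
    Object.assign(config, operationConsumer); // inline config is used as-is (duplicates throw in the adapter)
  } else if (operationConsumer) {
    // A string names an entry in the adapter config's consumers and becomes the groupId.
    Object.assign(config, { ...namedConsumers[operationConsumer], groupId: operationConsumer });
  }
  return config;
}

// Operations resolving to the same groupId share one Consumer instance:
const kafka = new Kafka({ brokers: ['localhost:9092'] });
const consumers = new Map<string, Consumer>();
const cfg = resolveConsumerConfig('billing', { billing: { allowAutoTopicCreation: false } });
const consumer = consumers.get(cfg.groupId) ?? kafka.consumer(cfg);
consumers.set(cfg.groupId, consumer);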
package/types/kafka-context.d.ts
CHANGED
@@ -28,6 +28,7 @@ export declare class KafkaContext extends ExecutionContext implements AsyncEvent
     readonly controllerInstance?: any;
     readonly operation?: RpcOperation;
     readonly operationHandler?: Function;
+    readonly topic: string;
     readonly key: any;
     readonly payload: any;
     readonly partition: number;