@opra/kafka 1.20.0 → 1.22.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -14,6 +14,15 @@ const noOp = () => undefined;
  * @class KafkaAdapter
  */
 export class KafkaAdapter extends PlatformAdapter {
+    static PlatformName = 'kafka';
+    _config;
+    _controllerInstances = new Map();
+    _consumers = new Map();
+    _handlerArgs = [];
+    _status = 'idle';
+    transform = 'mq';
+    platform = KafkaAdapter.PlatformName;
+    interceptors;
     /**
      *
      * @param document
@@ -22,12 +31,6 @@ export class KafkaAdapter extends PlatformAdapter {
      */
     constructor(document, config) {
         super(config);
-        this._controllerInstances = new Map();
-        this._consumers = new Map();
-        this._handlerArgs = [];
-        this._status = 'idle';
-        this.transform = 'mq';
-        this.platform = KafkaAdapter.PlatformName;
         this._document = document;
         this._config = config;
         if (!(this.document.api instanceof MQApi &&
@@ -416,4 +419,3 @@ export class KafkaAdapter extends PlatformAdapter {
         };
     }
 }
-KafkaAdapter.PlatformName = 'kafka';
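
Net effect of the three hunks above: all per-instance state and the static PlatformName move from constructor assignments (and a post-class static assignment) into class-field declarations. A sketch of the consolidated 1.22.0 class shape, with member names taken from the diff and the constructor body abridged:

// Sketch only; every name below comes from the hunks above, bodies are elided.
export class KafkaAdapter extends PlatformAdapter {
  static PlatformName = 'kafka';
  _config;
  _controllerInstances = new Map();
  _consumers = new Map();
  _handlerArgs = [];
  _status = 'idle';
  transform = 'mq';
  platform = KafkaAdapter.PlatformName;
  interceptors;

  constructor(document, config) {
    super(config);
    this._document = document;
    this._config = config;
    // ... API check, interceptors, and signal traps unchanged
  }
}
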
@@ -4,6 +4,14 @@ import { ExecutionContext } from '@opra/core';
  * It extends the ExecutionContext and implements the AsyncEventEmitter.
  */
 export class KafkaContext extends ExecutionContext {
+    topic;
+    key;
+    payload;
+    partition;
+    headers;
+    rawMessage;
+    heartbeat;
+    pause;
     /**
      * Constructor
      * @param init the context options
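
These new declarations surface the message-scoped members on the class itself; they are populated per message by the adapter, whose deleted CJS build below constructs a KafkaContext with exactly these keys and then invokes the operation handler as operationHandler.call(instance, context). A hypothetical handler reading them (method and payload names are illustrative, not from the package):

// Illustrative only.
async onOrderCreated(context) {
  const { topic, partition, key, payload, headers } = context;
  await context.heartbeat(); // kafkajs heartbeat callback, passed through by the adapter
  console.log(`key ${key} on ${topic}#${partition}`, payload, headers);
}
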
package/package.json CHANGED
@@ -1,53 +1,37 @@
 {
   "name": "@opra/kafka",
-  "version": "1.20.0",
+  "version": "1.22.0",
   "description": "Opra Kafka adapter",
   "author": "Panates",
   "license": "MIT",
   "dependencies": {
-    "node-events-async": "^1.2.0",
+    "node-events-async": "^1.4.0",
     "tslib": "^2.8.1",
     "valgen": "^5.18.2"
   },
   "peerDependencies": {
-    "@opra/common": "^1.20.0",
-    "@opra/core": "^1.20.0",
+    "@opra/common": "^1.22.0",
+    "@opra/core": "^1.22.0",
     "kafkajs": ">=2.2.4 <3"
   },
-  "type": "module",
   "exports": {
     ".": {
-      "import": {
-        "types": "./types/index.d.ts",
-        "default": "./esm/index.js"
-      },
-      "require": {
-        "types": "./types/index.d.cts",
-        "default": "./cjs/index.js"
-      },
-      "default": "./esm/index.js"
+      "types": "./index.d.ts",
+      "default": "./index.js"
     },
     "./package.json": "./package.json"
   },
-  "main": "./cjs/index.js",
-  "module": "./esm/index.js",
-  "types": "./types/index.d.ts",
+  "type": "module",
+  "module": "./index.js",
+  "types": "./index.d.ts",
+  "engines": {
+    "node": ">=20.0"
+  },
   "repository": {
     "type": "git",
     "url": "git+https://github.com/panates/opra.git",
     "directory": "packages/kafka"
   },
-  "engines": {
-    "node": ">=16.0",
-    "npm": ">=7.0.0"
-  },
-  "files": [
-    "cjs/",
-    "esm/",
-    "types/",
-    "LICENSE",
-    "README.md"
-  ],
   "keywords": [
     "opra",
     "kafka",
package/cjs/augmentation/opra-common.augmentation.js DELETED
@@ -1,19 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-require("@opra/core");
-const common_1 = require("@opra/common");
-const constants_js_1 = require("../constants.js");
-/** Implementation **/
-common_1.classes.MQOperationDecoratorFactory.augment((decorator, decoratorChain) => {
-    decorator.Kafka = (config) => {
-        decoratorChain.push((_, target, propertyKey) => {
-            if (typeof config === 'function') {
-                Reflect.defineMetadata(constants_js_1.KAFKA_OPERATION_METADATA_RESOLVER, config, target, propertyKey);
-            }
-            else {
-                Reflect.defineMetadata(constants_js_1.KAFKA_OPERATION_METADATA, { ...config }, target, propertyKey);
-            }
-        });
-        return decorator;
-    };
-});
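
This file is only the CommonJS build of the decorator augmentation (its ESM counterpart stays in the package). It adds a .Kafka() step to @opra/common's MQOperationDecoratorFactory that stores either a plain config object (under KAFKA_OPERATION_METADATA) or a lazy resolver function (under KAFKA_OPERATION_METADATA_RESOLVER, awaited later by the adapter's _getOperationConfig). Roughly, in a TypeScript controller; note the base decorator name is a placeholder, not taken from this diff:

// Hypothetical usage; only .Kafka() and its two argument forms come from the code above.
class MailController {
  @MQOperation(/* ... */).Kafka({ consumer: { groupId: 'mail' } }) // object form
  sendMail(context) { /* ... */ }

  @MQOperation(/* ... */).Kafka(async () => ({ consumer: 'mail' })) // resolver form, awaited at setup
  sendMailLazy(context) { /* ... */ }
}
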
package/cjs/constants.js DELETED
@@ -1,6 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.KAFKA_OPERATION_METADATA_RESOLVER = exports.KAFKA_OPERATION_METADATA = exports.KAFKA_DEFAULT_GROUP = void 0;
-exports.KAFKA_DEFAULT_GROUP = 'default';
-exports.KAFKA_OPERATION_METADATA = 'KAFKA_OPERATION_METADATA';
-exports.KAFKA_OPERATION_METADATA_RESOLVER = 'KAFKA_OPERATION_METADATA_RESOLVER';
package/cjs/index.js DELETED
@@ -1,8 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const tslib_1 = require("tslib");
-require("./augmentation/opra-common.augmentation.js");
-tslib_1.__exportStar(require("./constants.js"), exports);
-tslib_1.__exportStar(require("./kafka-adapter.js"), exports);
-tslib_1.__exportStar(require("./kafka-context.js"), exports);
-tslib_1.__exportStar(require("./request-parser.js"), exports);
package/cjs/kafka-adapter.js DELETED
@@ -1,423 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.KafkaAdapter = void 0;
-const common_1 = require("@opra/common");
-const core_1 = require("@opra/core");
-const kafkajs_1 = require("kafkajs");
-const valgen_1 = require("valgen");
-const constants_js_1 = require("./constants.js");
-const kafka_context_js_1 = require("./kafka-context.js");
-const request_parser_js_1 = require("./request-parser.js");
-const globalErrorTypes = ['unhandledRejection', 'uncaughtException'];
-const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
-const kGroupId = Symbol('kGroupId');
-const noOp = () => undefined;
-/**
- *
- * @class KafkaAdapter
- */
-class KafkaAdapter extends core_1.PlatformAdapter {
-    /**
-     *
-     * @param document
-     * @param config
-     * @constructor
-     */
-    constructor(document, config) {
-        super(config);
-        this._controllerInstances = new Map();
-        this._consumers = new Map();
-        this._handlerArgs = [];
-        this._status = 'idle';
-        this.transform = 'mq';
-        this.platform = KafkaAdapter.PlatformName;
-        this._document = document;
-        this._config = config;
-        if (!(this.document.api instanceof common_1.MQApi &&
-            this.document.api.platform === KafkaAdapter.PlatformName)) {
-            throw new TypeError(`The document doesn't expose a Kafka Api`);
-        }
-        // this._config = config;
-        this.interceptors = [...(config.interceptors || [])];
-        globalErrorTypes.forEach(type => {
-            process.on(type, e => {
-                this._emitError(e);
-                return this.close();
-            });
-        });
-        signalTraps.forEach(type => {
-            process.once(type, () => this.close());
-        });
-    }
-    get api() {
-        return this.document.getMqApi();
-    }
-    get kafka() {
-        return this._kafka;
-    }
-    get scope() {
-        return this._config.scope;
-    }
-    get status() {
-        return this._status;
-    }
-    async initialize() {
-        if (this._kafka)
-            return;
-        this._kafka = new kafkajs_1.Kafka({
-            ...this._config.client,
-            logCreator: this.logger
-                ? () => this._createLogCreator(this.logger, this._config.logExtra)
-                : undefined,
-        });
-        await this._createAllConsumers();
-    }
-    /**
-     * Starts the service
-     */
-    async start() {
-        if (this.status !== 'idle')
-            return;
-        await this.initialize();
-        this._status = 'starting';
-        try {
-            /** Connect all consumers */
-            for (const consumer of this._consumers.values()) {
-                await consumer.connect().catch(e => {
-                    this._emitError(e);
-                    throw e;
-                });
-            }
-            /** Subscribe to channels */
-            for (const args of this._handlerArgs) {
-                const { consumer, operation, operationConfig } = args;
-                args.topics = Array.isArray(operation.channel)
-                    ? operation.channel
-                    : [operation.channel];
-                await consumer
-                    .subscribe({
-                    ...operationConfig.subscribe,
-                    topics: args.topics,
-                })
-                    .catch(e => {
-                    this._emitError(e);
-                    throw e;
-                });
-                this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
-            }
-            /** Start consumer listeners */
-            const topicMap = new Map();
-            for (const consumer of this._consumers.values()) {
-                const groupId = consumer[kGroupId];
-                await consumer
-                    .run({
-                    eachMessage: async (payload) => {
-                        await this.emitAsync('message', payload).catch(() => undefined);
-                        const { topic } = payload;
-                        const topicCacheKey = groupId + ':' + topic;
-                        let handlerArgsArray = topicMap.get(topicCacheKey);
-                        if (!handlerArgsArray) {
-                            handlerArgsArray = this._handlerArgs.filter(args => args.consumer === consumer &&
-                                args.topics.find(t => t instanceof RegExp ? t.test(topic) : t === topic));
-                            /* istanbul ignore next */
-                            if (!handlerArgsArray) {
-                                this._emitError(new Error(`Unhandled topic (${topic})`));
-                                return;
-                            }
-                            topicMap.set(topicCacheKey, handlerArgsArray);
-                        }
-                        /** Iterate and call all matching handlers */
-                        for (const args of handlerArgsArray) {
-                            try {
-                                await args.handler(payload);
-                            }
-                            catch (e) {
-                                this._emitError(e);
-                            }
-                        }
-                    },
-                })
-                    .catch(e => {
-                    this._emitError(e);
-                    throw e;
-                });
-            }
-            this._status = 'started';
-        }
-        catch (e) {
-            await this.close();
-            throw e;
-        }
-    }
-    /**
-     * Closes all connections and stops the service
-     */
-    async close() {
-        await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.disconnect()));
-        this._consumers.clear();
-        this._controllerInstances.clear();
-        this._status = 'idle';
-    }
-    getControllerInstance(controllerPath) {
-        const controller = this.api.findController(controllerPath);
-        return controller && this._controllerInstances.get(controller);
-    }
-    /**
-     *
-     * @param controller
-     * @param instance
-     * @param operation
-     * @protected
-     */
-    async _getOperationConfig(controller, instance, operation) {
-        if (typeof instance[operation.name] !== 'function')
-            return;
-        const proto = controller.ctor?.prototype || Object.getPrototypeOf(instance);
-        if (Reflect.hasMetadata(common_1.MQ_CONTROLLER_METADATA, proto, operation.name))
-            return;
-        const operationConfig = {
-            consumer: {
-                groupId: constants_js_1.KAFKA_DEFAULT_GROUP,
-            },
-            subscribe: {},
-        };
-        if (this._config.defaults) {
-            if (this._config.defaults.subscribe) {
-                Object.assign(operationConfig.subscribe, this._config.defaults.subscribe);
-            }
-            if (this._config.defaults.consumer) {
-                Object.assign(operationConfig.consumer, this._config.defaults.consumer);
-            }
-        }
-        let kafkaMetadata = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA, proto, operation.name);
-        if (!kafkaMetadata) {
-            const configResolver = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
-            if (configResolver) {
-                kafkaMetadata = await configResolver();
-            }
-        }
-        if (kafkaMetadata) {
-            if (kafkaMetadata.subscribe) {
-                Object.assign(operationConfig.subscribe, kafkaMetadata.subscribe);
-            }
-            if (kafkaMetadata.consumer) {
-                if (typeof kafkaMetadata.consumer === 'object') {
-                    Object.assign(operationConfig.consumer, kafkaMetadata.consumer);
-                    operationConfig.selfConsumer = true;
-                }
-                else {
-                    const x = this._config.consumers?.[kafkaMetadata.consumer];
-                    if (x) {
-                        operationConfig.consumer.groupId = kafkaMetadata.consumer;
-                        Object.assign(operationConfig.consumer, x);
-                    }
-                }
-            }
-        }
-        return operationConfig;
-    }
-    /**
-     *
-     * @protected
-     */
-    async _createAllConsumers() {
-        for (const controller of this.document.getMqApi().controllers.values()) {
-            let instance = controller.instance;
-            if (!instance && controller.ctor)
-                instance = new controller.ctor();
-            if (!instance)
-                continue;
-            this._controllerInstances.set(controller, instance);
-            /** Build HandlerData array */
-            for (const operation of controller.operations.values()) {
-                const operationConfig = await this._getOperationConfig(controller, instance, operation);
-                if (!operationConfig)
-                    continue;
-                const args = {
-                    consumer: null,
-                    controller,
-                    instance,
-                    operation,
-                    operationConfig,
-                    handler: null,
-                    topics: null,
-                };
-                this._createHandler(args);
-                this._handlerArgs.push(args);
-            }
-        }
-        /** Initialize consumers */
-        for (const args of this._handlerArgs) {
-            await this._createConsumer(args);
-        }
-    }
-    /**
-     *
-     * @param args
-     * @protected
-     */
-    async _createConsumer(args) {
-        const { operationConfig } = args;
-        let consumer = this._consumers.get(operationConfig.consumer.groupId);
-        if (consumer && operationConfig.selfConsumer) {
-            throw new Error(`Operation consumer for groupId (${operationConfig.consumer.groupId}) already exists`);
-        }
-        /** Create consumers */
-        if (!consumer) {
-            consumer = this.kafka.consumer(operationConfig.consumer);
-            consumer[kGroupId] = operationConfig.consumer.groupId;
-            this._consumers.set(operationConfig.consumer.groupId, consumer);
-        }
-        args.consumer = consumer;
-    }
-    /**
-     *
-     * @param args
-     * @protected
-     */
-    _createHandler(args) {
-        const { controller, instance, operation } = args;
-        /** Prepare parsers */
-        const parseKey = request_parser_js_1.RequestParser.STRING;
-        const parsePayload = request_parser_js_1.RequestParser.STRING;
-        /** Prepare decoders */
-        const decodeKey = operation.generateKeyCodec('decode', {
-            scope: this.scope,
-            ignoreReadonlyFields: true,
-        });
-        const decodePayload = operation.generateCodec('decode', {
-            scope: this.scope,
-            ignoreReadonlyFields: true,
-        });
-        operation.headers.forEach(header => {
-            let decode = this[core_1.kAssetCache].get(header, 'decode');
-            if (!decode) {
-                decode = header.generateCodec('decode', {
-                    scope: this.scope,
-                    ignoreReadonlyFields: true,
-                });
-                this[core_1.kAssetCache].set(header, 'decode', decode);
-            }
-        });
-        args.handler = async ({ topic, partition, message, heartbeat, pause }) => {
-            const operationHandler = instance[operation.name];
-            let key;
-            let payload;
-            const headers = {};
-            try {
-                /** Parse and decode `key` */
-                if (message.key) {
-                    const s = parseKey(message.key);
-                    key = decodeKey(s);
-                }
-                /** Parse and decode `payload` */
-                if (message.value != null) {
-                    const s = parsePayload(message.value);
-                    payload = decodePayload(s);
-                }
-                /** Parse and decode `headers` */
-                if (message.headers) {
-                    for (const [k, v] of Object.entries(message.headers)) {
-                        const header = operation.findHeader(k);
-                        const decode = this[core_1.kAssetCache].get(header, 'decode') || valgen_1.vg.isAny();
-                        headers[k] = decode(Buffer.isBuffer(v) ? v.toString() : v);
-                    }
-                }
-            }
-            catch (e) {
-                this._emitError(e);
-                return;
-            }
-            /** Create context */
-            const context = new kafka_context_js_1.KafkaContext({
-                __adapter: this,
-                __contDef: controller,
-                __controller: instance,
-                __oprDef: operation,
-                __handler: operationHandler,
-                topic,
-                partition,
-                payload,
-                key,
-                headers,
-                rawMessage: message,
-                heartbeat,
-                pause,
-            });
-            await this.emitAsync('execute', context);
-            try {
-                /** Call operation handler */
-                const result = await operationHandler.call(instance, context);
-                await this.emitAsync('finish', context, result);
-            }
-            catch (e) {
-                this._emitError(e, context);
-            }
-        };
-    }
-    _emitError(error, context) {
-        Promise.resolve()
-            .then(async () => {
-            const logger = this.logger;
-            if (context) {
-                if (!context.errors.length)
-                    context.errors.push(error);
-                context.errors = this._wrapExceptions(context.errors);
-                if (context.listenerCount('error')) {
-                    await context
-                        .emitAsync('error', context.errors[0], context)
-                        .catch(noOp);
-                }
-                if (logger?.error) {
-                    context.errors.forEach(err => logger.error(err));
-                }
-            }
-            else
-                logger?.error(error);
-            if (this.listenerCount('error'))
-                this._emitError(error);
-        })
-            .catch(noOp);
-    }
-    _wrapExceptions(exceptions) {
-        const wrappedErrors = exceptions.map(e => e instanceof common_1.OpraException ? e : new common_1.OpraException(e));
-        if (!wrappedErrors.length)
-            wrappedErrors.push(new common_1.OpraException('Internal Server Error'));
-        return wrappedErrors;
-    }
-    _createLogCreator(logger, logExtra) {
-        return ({ namespace, level, log }) => {
-            const { message, error, ...extra } = log;
-            delete extra.namespace;
-            delete extra.timestamp;
-            delete extra.logger;
-            let fn;
-            switch (level) {
-                case kafkajs_1.logLevel.ERROR:
-                    fn = logger.error || logger.info;
-                    break;
-                case kafkajs_1.logLevel.WARN:
-                    fn = logger.warn || logger.info;
-                    break;
-                case kafkajs_1.logLevel.DEBUG:
-                    fn = logger.debug;
-                    break;
-                case kafkajs_1.logLevel.NOTHING:
-                    break;
-                default:
-                    fn = logger.info;
-                    break;
-            }
-            if (!fn)
-                return;
-            if (!logExtra)
-                return fn.call(logger, error || message);
-            return fn.call(logger, error || message, {
-                ...extra,
-                namespace,
-            });
-        };
-    }
-}
-exports.KafkaAdapter = KafkaAdapter;
-KafkaAdapter.PlatformName = 'kafka';
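
For orientation, the public lifecycle recorded above (and kept in 1.22.0, per the class hunks at the top of this diff) is construct → start → close. A sketch with placeholder values; `document` must expose an MQApi whose platform is 'kafka', or the constructor throws a TypeError:

// `client` is spread into kafkajs's `new Kafka({...})`; `defaults.consumer` seeds every group config.
const adapter = new KafkaAdapter(document, {
  client: { clientId: 'my-app', brokers: ['localhost:9092'] },
  defaults: { consumer: { groupId: 'default' } },
});
await adapter.start(); // initialize(), connect consumers, subscribe, run eachMessage loops
// ...
await adapter.close(); // disconnect all consumers and return to 'idle'
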
package/cjs/kafka-context.js DELETED
@@ -1,41 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.KafkaContext = void 0;
-const core_1 = require("@opra/core");
-/**
- * KafkaContext class provides the context for handling Kafka messages.
- * It extends the ExecutionContext and implements the AsyncEventEmitter.
- */
-class KafkaContext extends core_1.ExecutionContext {
-    /**
-     * Constructor
-     * @param init the context options
-     */
-    constructor(init) {
-        super({
-            ...init,
-            __docNode: init.__oprDef?.node ||
-                init.__contDef?.node ||
-                init.__adapter.document.node,
-            transport: 'mq',
-            platform: 'kafka',
-        });
-        if (init.__contDef)
-            this.__contDef = init.__contDef;
-        if (init.__oprDef)
-            this.__oprDef = init.__oprDef;
-        if (init.__controller)
-            this.__controller = init.__controller;
-        if (init.__handler)
-            this.__handler = init.__handler;
-        this.partition = init.partition;
-        this.headers = init.headers || {};
-        this.topic = init.topic;
-        this.key = init.key;
-        this.payload = init.payload;
-        this.heartbeat = init.heartbeat;
-        this.pause = init.pause;
-        this.rawMessage = init.rawMessage;
-    }
-}
-exports.KafkaContext = KafkaContext;
package/cjs/package.json DELETED
@@ -1,3 +0,0 @@
-{
-  "type": "commonjs"
-}
package/cjs/parsers/binary.parser.js DELETED
@@ -1,7 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.binaryParser = void 0;
-const binaryParser = function (buffer) {
-    return buffer;
-};
-exports.binaryParser = binaryParser;
package/cjs/parsers/string.parser.js DELETED
@@ -1,7 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.stringParser = void 0;
-const stringParser = function (buffer) {
-    return buffer.toString();
-};
-exports.stringParser = stringParser;
package/cjs/request-parser.js DELETED
@@ -1,9 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.RequestParser = void 0;
-const binary_parser_js_1 = require("./parsers/binary.parser.js");
-const string_parser_js_1 = require("./parsers/string.parser.js");
-exports.RequestParser = {
-    BINARY: binary_parser_js_1.binaryParser,
-    STRING: string_parser_js_1.stringParser,
-};
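
RequestParser is just this pair of Buffer decoders; the adapter wires RequestParser.STRING for both key and payload (see _createHandler in the deleted kafka-adapter.js above), so raw message bytes are stringified before the valgen codecs run:

RequestParser.STRING(Buffer.from('42')); // -> '42'
RequestParser.BINARY(Buffer.from('42')); // -> <Buffer 34 32> (passthrough)
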
package/esm/package.json DELETED
@@ -1,3 +0,0 @@
-{
-  "type": "module"
-}
package/types/index.d.cts DELETED
@@ -1,5 +0,0 @@
-import './augmentation/opra-common.augmentation.js';
-export * from './constants.js';
-export * from './kafka-adapter.js';
-export * from './kafka-context.js';
-export * from './request-parser.js';