@opra/kafka 1.0.0-beta.3

This diff shows the content of a publicly released package version as published to its public registry. It is provided for informational purposes only.
package/LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2022 Panates
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
package/README.md ADDED
@@ -0,0 +1,3 @@
+ # @opra/kafka
+
+ Opra Kafka package.
package/cjs/augmentation/msg-operation.augmentation.js ADDED
@@ -0,0 +1,19 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ require("@opra/core");
+ const common_1 = require("@opra/common");
+ const constants_js_1 = require("../constants.js");
+ /** Implementation **/
+ common_1.classes.MsgOperationDecoratorFactory.augment((decorator, decoratorChain) => {
+   decorator.Kafka = (config) => {
+     decoratorChain.push((_, target, propertyKey) => {
+       if (typeof config === 'function') {
+         Reflect.defineMetadata(constants_js_1.KAFKA_OPERATION_METADATA_RESOLVER, config, target, propertyKey);
+       }
+       else {
+         Reflect.defineMetadata(constants_js_1.KAFKA_OPERATION_METADATA, { ...config }, target, propertyKey);
+       }
+     });
+     return decorator;
+   };
+ });
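This augmentation adds a `.Kafka()` modifier to `@opra/common`'s message-operation decorator: a plain object is stored under `KAFKA_OPERATION_METADATA`, while a function is stored under `KAFKA_OPERATION_METADATA_RESOLVER` and invoked later by the adapter. A minimal usage sketch follows — the exact `MsgOperation` decorator call shape is an assumption inferred from the typings later in this diff, and `OrderPayload` is a hypothetical class:

```ts
import 'reflect-metadata';
import '@opra/kafka'; // side effect: installs the .Kafka() augmentation
import { MsgOperation } from '@opra/common';
import type { KafkaContext } from '@opra/kafka';

// Hypothetical payload class, for illustration only.
class OrderPayload {}

export class OrdersController {
  @(MsgOperation(OrderPayload, { channel: 'orders' }).Kafka({
    groupId: 'orders-group', // becomes the kafkajs ConsumerConfig groupId
    fromBeginning: true,     // forwarded to consumer.subscribe()
  }))
  orderCreated(ctx: KafkaContext) {
    // invoked once per decoded message
  }
}
```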
package/cjs/constants.js ADDED
@@ -0,0 +1,6 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.KAFKA_OPERATION_METADATA_RESOLVER = exports.KAFKA_OPERATION_METADATA = exports.KAFKA_DEFAULT_GROUP = void 0;
+ exports.KAFKA_DEFAULT_GROUP = 'default';
+ exports.KAFKA_OPERATION_METADATA = 'KAFKA_OPERATION_METADATA';
+ exports.KAFKA_OPERATION_METADATA_RESOLVER = 'KAFKA_OPERATION_METADATA_RESOLVER';
package/cjs/index.js ADDED
@@ -0,0 +1,9 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const tslib_1 = require("tslib");
+ require("./augmentation/msg-operation.augmentation.js");
+ tslib_1.__exportStar(require("./constants.js"), exports);
+ tslib_1.__exportStar(require("./kafka-adapter.js"), exports);
+ tslib_1.__exportStar(require("./kafka-context.js"), exports);
+ tslib_1.__exportStar(require("./request-parser.js"), exports);
+ tslib_1.__exportStar(require("./types.js"), exports);
package/cjs/kafka-adapter.js ADDED
@@ -0,0 +1,276 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.KafkaAdapter = void 0;
+ const common_1 = require("@opra/common");
+ const core_1 = require("@opra/core");
+ const kafkajs_1 = require("kafkajs");
+ const valgen_1 = require("valgen");
+ const constants_js_1 = require("./constants.js");
+ const kafka_context_js_1 = require("./kafka-context.js");
+ const request_parser_js_1 = require("./request-parser.js");
+ const globalErrorTypes = ['unhandledRejection', 'uncaughtException'];
+ const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
+ /**
+  *
+  * @class KafkaAdapter
+  */
+ class KafkaAdapter extends core_1.PlatformAdapter {
+   /**
+    *
+    * @param init
+    * @constructor
+    */
+   constructor(init) {
+     super(init.document, init);
+     this._controllerInstances = new Map();
+     this._consumers = new Map();
+     this.protocol = 'msg';
+     this.platform = KafkaAdapter.PlatformName;
+     if (!(init.document.api instanceof common_1.MsgApi && init.document.api.platform === KafkaAdapter.PlatformName)) {
+       throw new TypeError(`The document doesn't expose a Kafka Api`);
+     }
+     this.interceptors = [...(init.interceptors || [])];
+     this.kafka = new kafkajs_1.Kafka({
+       ...init,
+       logCreator: init.logger ? () => this._createLogCreator(init.logger) : undefined,
+     });
+     this._logger = init.logger;
+     globalErrorTypes.forEach(type => {
+       process.on(type, e => {
+         this._emitError(e);
+         return this.close();
+       });
+     });
+     signalTraps.forEach(type => {
+       process.once(type, () => this.close());
+     });
+   }
+   get api() {
+     return this.document.msgApi;
+   }
+   /**
+    * Starts the service
+    */
+   async start() {
+     /* istanbul ignore next */
+     if (this._consumers.size > 0)
+       return;
+     await this._initConsumers();
+     return this._start();
+   }
+   /**
+    * Closes all connections and stops the service
+    */
+   async close() {
+     for (const [controller, instance] of this._controllerInstances.entries()) {
+       if (controller.onShutdown) {
+         try {
+           await controller.onShutdown.call(instance, controller);
+         }
+         catch (e) {
+           this._emitError(e);
+         }
+       }
+     }
+     await Promise.allSettled(Array.from(this._consumers.keys()).map(c => c.disconnect()));
+     this._consumers.clear();
+     this._controllerInstances.clear();
+   }
+   getControllerInstance(controllerPath) {
+     const controller = this.api.findController(controllerPath);
+     return controller && this._controllerInstances.get(controller);
+   }
+   /**
+    * Creates and initializes all consumers
+    *
+    * @protected
+    */
+   async _initConsumers() {
+     /* istanbul ignore next */
+     if (this._consumers.size > 0)
+       return;
+     /** Create consumers */
+     for (const controller of this.document.msgApi.controllers.values()) {
+       let instance = controller.instance;
+       if (!instance && controller.ctor)
+         instance = new controller.ctor();
+       if (!instance)
+         continue;
+       for (const operation of controller.operations.values()) {
+         await this._initConsumer(controller, instance, operation);
+       }
+       this._controllerInstances.set(controller, instance);
+     }
+   }
+   /**
+    * Creates and initializes a consumer for the given operation
+    *
+    * @protected
+    */
+   async _initConsumer(controller, instance, operation) {
+     if (typeof instance[operation.name] !== 'function')
+       return;
+     const proto = controller.ctor?.prototype || Object.getPrototypeOf(controller.instance);
+     let operationOptions = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA, proto, operation.name);
+     const configResolver = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
+     if (configResolver) {
+       const cfg = await configResolver();
+       operationOptions = { ...operationOptions, ...cfg };
+     }
+     const groupId = operationOptions?.groupId || constants_js_1.KAFKA_DEFAULT_GROUP;
+     const options = { ...operationOptions, groupId };
+     const consumer = this.kafka.consumer(options);
+     this._consumers.set(consumer, { consumer, controller, instance, operation, options });
+     if (typeof controller.onInit === 'function')
+       controller.onInit.call(instance, controller);
+   }
+   async _start() {
+     const arr = Array.from(this._consumers.values());
+     if (!arr.length)
+       return;
+     /** Start the first consumer to test whether the server is available */
+     await this._startConsumer(arr.shift());
+     /** If the first connection succeeds, start the remaining consumers concurrently */
+     await Promise.allSettled(arr.map(x => this._startConsumer(x)));
+   }
+   /**
+    * Starts a single consumer
+    * @protected
+    */
+   async _startConsumer(args) {
+     const { consumer, controller, instance, operation, options } = args;
+     /** Prepare parsers */
+     const parseKey = request_parser_js_1.RequestParser.STRING;
+     const parsePayload = request_parser_js_1.RequestParser.STRING;
+     /** Prepare decoders */
+     const decodeKey = operation.keyType?.generateCodec('decode', { ignoreWriteonlyFields: true }) || valgen_1.vg.isAny();
+     const decodePayload = operation.payloadType?.generateCodec('decode', { ignoreWriteonlyFields: true }) || valgen_1.vg.isAny();
+     operation.headers.forEach(header => {
+       let decode = this[core_1.kAssetCache].get(header, 'decode');
+       if (!decode) {
+         decode = header.type?.generateCodec('decode', { ignoreReadonlyFields: true }) || valgen_1.vg.isAny();
+         this[core_1.kAssetCache].set(header, 'decode', decode);
+       }
+     });
+     /** Connect to Kafka server */
+     await consumer.connect().catch(e => {
+       this._emitError(e);
+       throw e;
+     });
+     /** Subscribe to channels */
+     if (Array.isArray(operation.channel)) {
+       await consumer.subscribe({ topics: operation.channel, fromBeginning: options.fromBeginning }).catch(e => {
+         this._emitError(e);
+         throw e;
+       });
+     }
+     else {
+       await consumer.subscribe({ topic: operation.channel, fromBeginning: options.fromBeginning }).catch(e => {
+         this._emitError(e);
+         throw e;
+       });
+     }
+     /** Run message listener */
+     await consumer
+       .run({
+         eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
+           const operationHandler = instance[operation.name];
+           let key;
+           let payload;
+           const headers = {};
+           try {
+             /** Parse and decode `key` */
+             if (message.key) {
+               const s = parseKey(message.key);
+               key = decodeKey(s);
+             }
+             /** Parse and decode `payload` */
+             if (message.value != null) {
+               const s = parsePayload(message.value);
+               payload = decodePayload(s);
+             }
+             /** Parse and decode `headers` */
+             if (message.headers) {
+               for (const [k, v] of Object.entries(message.headers)) {
+                 const header = operation.findHeader(k);
+                 const decode = this[core_1.kAssetCache].get(header, 'decode') || valgen_1.vg.isAny();
+                 headers[k] = decode(Buffer.isBuffer(v) ? v.toString() : v);
+               }
+             }
+           }
+           catch (e) {
+             this._emitError(e);
+             return;
+           }
+           /** Create context */
+           const ctx = new kafka_context_js_1.KafkaContext({
+             adapter: this,
+             platform: this.platform,
+             controller,
+             controllerInstance: instance,
+             operation,
+             operationHandler,
+             topic,
+             partition,
+             payload,
+             key,
+             headers,
+             rawMessage: message,
+             heartbeat,
+             pause,
+           });
+           await this.emitAsync('createContext', ctx);
+           await operationHandler(ctx);
+         },
+       })
+       .catch(e => {
+         this._emitError(e);
+         throw e;
+       });
+   }
+   _emitError(e) {
+     this._logger?.error(e);
+     if (this.listenerCount('error'))
+       this.emit('error', e);
+   }
+   _createLogCreator(logger, logExtra) {
+     return ({ namespace, level, log }) => {
+       const { message, ...extra } = log;
+       delete extra.namespace;
+       delete extra.timestamp;
+       delete extra.logger;
+       let fn;
+       switch (level) {
+         case kafkajs_1.logLevel.ERROR:
+           fn = logger.error || logger.info;
+           break;
+         case kafkajs_1.logLevel.WARN:
+           fn = logger.warn || logger.info;
+           break;
+         case kafkajs_1.logLevel.DEBUG:
+           fn = logger.debug;
+           break;
+         case kafkajs_1.logLevel.NOTHING:
+           break;
+         default:
+           fn = logger.info;
+           break;
+       }
+       if (!fn)
+         return;
+       if (!logExtra)
+         return fn(message);
+       return fn(message, {
+         ...extra,
+         namespace,
+       });
+     };
+   }
+ }
+ exports.KafkaAdapter = KafkaAdapter;
+ /**
+  * @namespace KafkaAdapter
+  */
+ (function (KafkaAdapter) {
+   KafkaAdapter.PlatformName = 'kafka';
+ })(KafkaAdapter || (exports.KafkaAdapter = KafkaAdapter = {}));
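Taken together, `KafkaAdapter` wires an Opra `ApiDocument` to kafkajs: the constructor spreads its `init` object into the kafkajs `Kafka` config, `_initConsumers` builds one consumer per decorated operation, and `start()` connects, subscribes, and runs them. A minimal bootstrap sketch — how the document is constructed is outside this diff and assumed here:

```ts
import { KafkaAdapter } from '@opra/kafka';

// An ApiDocument exposing a Msg API with platform 'kafka' (construction not shown).
declare const document: import('@opra/common').ApiDocument;

const adapter = new KafkaAdapter({
  document,
  clientId: 'my-service',      // spread into the kafkajs Kafka config
  brokers: ['localhost:9092'], // likewise a kafkajs KafkaConfig field
  logger: console,             // activates the custom logCreator wiring
});

await adapter.start(); // builds consumers, connects, subscribes, runs them
// SIGTERM/SIGINT/SIGUSR2 handlers that call adapter.close() are installed internally.
```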
package/cjs/kafka-context.js ADDED
@@ -0,0 +1,28 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.KafkaContext = void 0;
+ const core_1 = require("@opra/core");
+ class KafkaContext extends core_1.ExecutionContext {
+   constructor(init) {
+     super({ ...init, document: init.adapter.document, protocol: 'msg' });
+     this.adapter = init.adapter;
+     this.platform = init.adapter.platform;
+     this.protocol = 'msg';
+     if (init.controller)
+       this.controller = init.controller;
+     if (init.controllerInstance)
+       this.controllerInstance = init.controllerInstance;
+     if (init.operation)
+       this.operation = init.operation;
+     if (init.operationHandler)
+       this.operationHandler = init.operationHandler;
+     this.partition = init.partition;
+     this.headers = init.headers || {};
+     this.key = init.key;
+     this.payload = init.payload;
+     this.heartbeat = init.heartbeat;
+     this.pause = init.pause;
+     this.rawMessage = init.rawMessage;
+   }
+ }
+ exports.KafkaContext = KafkaContext;
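Each decoded message reaches the operation handler wrapped in this context. A handler body might read it like the following sketch (field names match the class above; the controller wiring is omitted):

```ts
import type { KafkaContext } from '@opra/kafka';

// Sketch of an operation handler; the adapter invokes it as
// `operationHandler(ctx)` after decoding key, payload, and headers.
async function orderCreated(ctx: KafkaContext) {
  const { key, payload, headers, partition, rawMessage } = ctx;
  console.log(`message key=${key} partition=${partition}`, payload, headers);
  await ctx.heartbeat(); // forwarded kafkajs heartbeat: keeps the session alive
  void rawMessage;       // the raw kafkajs KafkaMessage stays available untouched
}
```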
package/cjs/package.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "type": "commonjs"
+ }
package/cjs/parsers/binary.parser.js ADDED
@@ -0,0 +1,7 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.binaryParser = void 0;
+ const binaryParser = function (buffer) {
+   return buffer;
+ };
+ exports.binaryParser = binaryParser;
package/cjs/parsers/string.parser.js ADDED
@@ -0,0 +1,7 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.stringParser = void 0;
+ const stringParser = function (buffer) {
+   return buffer.toString();
+ };
+ exports.stringParser = stringParser;
package/cjs/request-parser.js ADDED
@@ -0,0 +1,9 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.RequestParser = void 0;
+ const binary_parser_js_1 = require("./parsers/binary.parser.js");
+ const string_parser_js_1 = require("./parsers/string.parser.js");
+ exports.RequestParser = {
+   BINARY: binary_parser_js_1.binaryParser,
+   STRING: string_parser_js_1.stringParser,
+ };
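`RequestParser` is just a lookup of `Buffer`-to-value functions; the adapter currently hard-codes `RequestParser.STRING` for both key and payload before handing the string to the valgen decoder. A quick illustration:

```ts
import { RequestParser } from '@opra/kafka';

const buf = Buffer.from('{"orderId":"42"}');
RequestParser.STRING(buf); // => '{"orderId":"42"}' (utf-8 string)
RequestParser.BINARY(buf); // => the same Buffer, returned as-is
```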
package/cjs/types.js ADDED
@@ -0,0 +1,2 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
package/esm/augmentation/msg-operation.augmentation.js ADDED
@@ -0,0 +1,17 @@
+ import '@opra/core';
+ import { classes } from '@opra/common';
+ import { KAFKA_OPERATION_METADATA, KAFKA_OPERATION_METADATA_RESOLVER } from '../constants.js';
+ /** Implementation **/
+ classes.MsgOperationDecoratorFactory.augment((decorator, decoratorChain) => {
+   decorator.Kafka = (config) => {
+     decoratorChain.push((_, target, propertyKey) => {
+       if (typeof config === 'function') {
+         Reflect.defineMetadata(KAFKA_OPERATION_METADATA_RESOLVER, config, target, propertyKey);
+       }
+       else {
+         Reflect.defineMetadata(KAFKA_OPERATION_METADATA, { ...config }, target, propertyKey);
+       }
+     });
+     return decorator;
+   };
+ });
package/esm/constants.js ADDED
@@ -0,0 +1,3 @@
+ export const KAFKA_DEFAULT_GROUP = 'default';
+ export const KAFKA_OPERATION_METADATA = 'KAFKA_OPERATION_METADATA';
+ export const KAFKA_OPERATION_METADATA_RESOLVER = 'KAFKA_OPERATION_METADATA_RESOLVER';
package/esm/index.js ADDED
@@ -0,0 +1,6 @@
+ import './augmentation/msg-operation.augmentation.js';
+ export * from './constants.js';
+ export * from './kafka-adapter.js';
+ export * from './kafka-context.js';
+ export * from './request-parser.js';
+ export * from './types.js';
package/esm/kafka-adapter.js ADDED
@@ -0,0 +1,272 @@
+ import { MsgApi } from '@opra/common';
+ import { kAssetCache, PlatformAdapter } from '@opra/core';
+ import { Kafka, logLevel } from 'kafkajs';
+ import { vg } from 'valgen';
+ import { KAFKA_DEFAULT_GROUP, KAFKA_OPERATION_METADATA, KAFKA_OPERATION_METADATA_RESOLVER } from './constants.js';
+ import { KafkaContext } from './kafka-context.js';
+ import { RequestParser } from './request-parser.js';
+ const globalErrorTypes = ['unhandledRejection', 'uncaughtException'];
+ const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
+ /**
+  *
+  * @class KafkaAdapter
+  */
+ export class KafkaAdapter extends PlatformAdapter {
+   /**
+    *
+    * @param init
+    * @constructor
+    */
+   constructor(init) {
+     super(init.document, init);
+     this._controllerInstances = new Map();
+     this._consumers = new Map();
+     this.protocol = 'msg';
+     this.platform = KafkaAdapter.PlatformName;
+     if (!(init.document.api instanceof MsgApi && init.document.api.platform === KafkaAdapter.PlatformName)) {
+       throw new TypeError(`The document doesn't expose a Kafka Api`);
+     }
+     this.interceptors = [...(init.interceptors || [])];
+     this.kafka = new Kafka({
+       ...init,
+       logCreator: init.logger ? () => this._createLogCreator(init.logger) : undefined,
+     });
+     this._logger = init.logger;
+     globalErrorTypes.forEach(type => {
+       process.on(type, e => {
+         this._emitError(e);
+         return this.close();
+       });
+     });
+     signalTraps.forEach(type => {
+       process.once(type, () => this.close());
+     });
+   }
+   get api() {
+     return this.document.msgApi;
+   }
+   /**
+    * Starts the service
+    */
+   async start() {
+     /* istanbul ignore next */
+     if (this._consumers.size > 0)
+       return;
+     await this._initConsumers();
+     return this._start();
+   }
+   /**
+    * Closes all connections and stops the service
+    */
+   async close() {
+     for (const [controller, instance] of this._controllerInstances.entries()) {
+       if (controller.onShutdown) {
+         try {
+           await controller.onShutdown.call(instance, controller);
+         }
+         catch (e) {
+           this._emitError(e);
+         }
+       }
+     }
+     await Promise.allSettled(Array.from(this._consumers.keys()).map(c => c.disconnect()));
+     this._consumers.clear();
+     this._controllerInstances.clear();
+   }
+   getControllerInstance(controllerPath) {
+     const controller = this.api.findController(controllerPath);
+     return controller && this._controllerInstances.get(controller);
+   }
+   /**
+    * Creates and initializes all consumers
+    *
+    * @protected
+    */
+   async _initConsumers() {
+     /* istanbul ignore next */
+     if (this._consumers.size > 0)
+       return;
+     /** Create consumers */
+     for (const controller of this.document.msgApi.controllers.values()) {
+       let instance = controller.instance;
+       if (!instance && controller.ctor)
+         instance = new controller.ctor();
+       if (!instance)
+         continue;
+       for (const operation of controller.operations.values()) {
+         await this._initConsumer(controller, instance, operation);
+       }
+       this._controllerInstances.set(controller, instance);
+     }
+   }
+   /**
+    * Creates and initializes a consumer for the given operation
+    *
+    * @protected
+    */
+   async _initConsumer(controller, instance, operation) {
+     if (typeof instance[operation.name] !== 'function')
+       return;
+     const proto = controller.ctor?.prototype || Object.getPrototypeOf(controller.instance);
+     let operationOptions = Reflect.getMetadata(KAFKA_OPERATION_METADATA, proto, operation.name);
+     const configResolver = Reflect.getMetadata(KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
+     if (configResolver) {
+       const cfg = await configResolver();
+       operationOptions = { ...operationOptions, ...cfg };
+     }
+     const groupId = operationOptions?.groupId || KAFKA_DEFAULT_GROUP;
+     const options = { ...operationOptions, groupId };
+     const consumer = this.kafka.consumer(options);
+     this._consumers.set(consumer, { consumer, controller, instance, operation, options });
+     if (typeof controller.onInit === 'function')
+       controller.onInit.call(instance, controller);
+   }
+   async _start() {
+     const arr = Array.from(this._consumers.values());
+     if (!arr.length)
+       return;
+     /** Start the first consumer to test whether the server is available */
+     await this._startConsumer(arr.shift());
+     /** If the first connection succeeds, start the remaining consumers concurrently */
+     await Promise.allSettled(arr.map(x => this._startConsumer(x)));
+   }
+   /**
+    * Starts a single consumer
+    * @protected
+    */
+   async _startConsumer(args) {
+     const { consumer, controller, instance, operation, options } = args;
+     /** Prepare parsers */
+     const parseKey = RequestParser.STRING;
+     const parsePayload = RequestParser.STRING;
+     /** Prepare decoders */
+     const decodeKey = operation.keyType?.generateCodec('decode', { ignoreWriteonlyFields: true }) || vg.isAny();
+     const decodePayload = operation.payloadType?.generateCodec('decode', { ignoreWriteonlyFields: true }) || vg.isAny();
+     operation.headers.forEach(header => {
+       let decode = this[kAssetCache].get(header, 'decode');
+       if (!decode) {
+         decode = header.type?.generateCodec('decode', { ignoreReadonlyFields: true }) || vg.isAny();
+         this[kAssetCache].set(header, 'decode', decode);
+       }
+     });
+     /** Connect to Kafka server */
+     await consumer.connect().catch(e => {
+       this._emitError(e);
+       throw e;
+     });
+     /** Subscribe to channels */
+     if (Array.isArray(operation.channel)) {
+       await consumer.subscribe({ topics: operation.channel, fromBeginning: options.fromBeginning }).catch(e => {
+         this._emitError(e);
+         throw e;
+       });
+     }
+     else {
+       await consumer.subscribe({ topic: operation.channel, fromBeginning: options.fromBeginning }).catch(e => {
+         this._emitError(e);
+         throw e;
+       });
+     }
+     /** Run message listener */
+     await consumer
+       .run({
+         eachMessage: async ({ topic, partition, message, heartbeat, pause }) => {
+           const operationHandler = instance[operation.name];
+           let key;
+           let payload;
+           const headers = {};
+           try {
+             /** Parse and decode `key` */
+             if (message.key) {
+               const s = parseKey(message.key);
+               key = decodeKey(s);
+             }
+             /** Parse and decode `payload` */
+             if (message.value != null) {
+               const s = parsePayload(message.value);
+               payload = decodePayload(s);
+             }
+             /** Parse and decode `headers` */
+             if (message.headers) {
+               for (const [k, v] of Object.entries(message.headers)) {
+                 const header = operation.findHeader(k);
+                 const decode = this[kAssetCache].get(header, 'decode') || vg.isAny();
+                 headers[k] = decode(Buffer.isBuffer(v) ? v.toString() : v);
+               }
+             }
+           }
+           catch (e) {
+             this._emitError(e);
+             return;
+           }
+           /** Create context */
+           const ctx = new KafkaContext({
+             adapter: this,
+             platform: this.platform,
+             controller,
+             controllerInstance: instance,
+             operation,
+             operationHandler,
+             topic,
+             partition,
+             payload,
+             key,
+             headers,
+             rawMessage: message,
+             heartbeat,
+             pause,
+           });
+           await this.emitAsync('createContext', ctx);
+           await operationHandler(ctx);
+         },
+       })
+       .catch(e => {
+         this._emitError(e);
+         throw e;
+       });
+   }
+   _emitError(e) {
+     this._logger?.error(e);
+     if (this.listenerCount('error'))
+       this.emit('error', e);
+   }
+   _createLogCreator(logger, logExtra) {
+     return ({ namespace, level, log }) => {
+       const { message, ...extra } = log;
+       delete extra.namespace;
+       delete extra.timestamp;
+       delete extra.logger;
+       let fn;
+       switch (level) {
+         case logLevel.ERROR:
+           fn = logger.error || logger.info;
+           break;
+         case logLevel.WARN:
+           fn = logger.warn || logger.info;
+           break;
+         case logLevel.DEBUG:
+           fn = logger.debug;
+           break;
+         case logLevel.NOTHING:
+           break;
+         default:
+           fn = logger.info;
+           break;
+       }
+       if (!fn)
+         return;
+       if (!logExtra)
+         return fn(message);
+       return fn(message, {
+         ...extra,
+         namespace,
+       });
+     };
+   }
+ }
+ /**
+  * @namespace KafkaAdapter
+  */
+ (function (KafkaAdapter) {
+   KafkaAdapter.PlatformName = 'kafka';
+ })(KafkaAdapter || (KafkaAdapter = {}));
package/esm/kafka-context.js ADDED
@@ -0,0 +1,24 @@
+ import { ExecutionContext } from '@opra/core';
+ export class KafkaContext extends ExecutionContext {
+   constructor(init) {
+     super({ ...init, document: init.adapter.document, protocol: 'msg' });
+     this.adapter = init.adapter;
+     this.platform = init.adapter.platform;
+     this.protocol = 'msg';
+     if (init.controller)
+       this.controller = init.controller;
+     if (init.controllerInstance)
+       this.controllerInstance = init.controllerInstance;
+     if (init.operation)
+       this.operation = init.operation;
+     if (init.operationHandler)
+       this.operationHandler = init.operationHandler;
+     this.partition = init.partition;
+     this.headers = init.headers || {};
+     this.key = init.key;
+     this.payload = init.payload;
+     this.heartbeat = init.heartbeat;
+     this.pause = init.pause;
+     this.rawMessage = init.rawMessage;
+   }
+ }
package/esm/package.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "type": "module"
+ }
package/esm/parsers/binary.parser.js ADDED
@@ -0,0 +1,3 @@
+ export const binaryParser = function (buffer) {
+   return buffer;
+ };
package/esm/parsers/string.parser.js ADDED
@@ -0,0 +1,3 @@
+ export const stringParser = function (buffer) {
+   return buffer.toString();
+ };
package/esm/request-parser.js ADDED
@@ -0,0 +1,6 @@
+ import { binaryParser } from './parsers/binary.parser.js';
+ import { stringParser } from './parsers/string.parser.js';
+ export const RequestParser = {
+   BINARY: binaryParser,
+   STRING: stringParser,
+ };
package/esm/types.js ADDED
@@ -0,0 +1 @@
+ export {};
package/package.json ADDED
@@ -0,0 +1,56 @@
+ {
+   "name": "@opra/kafka",
+   "version": "1.0.0-beta.3",
+   "description": "Opra Kafka package",
+   "author": "Panates",
+   "license": "MIT",
+   "dependencies": {
+     "@opra/common": "^1.0.0-beta.3",
+     "@opra/core": "^1.0.0-beta.3",
+     "node-events-async": "^1.0.0",
+     "tslib": "^2.7.0",
+     "valgen": "^5.10.0"
+   },
+   "peerDependencies": {
+     "kafkajs": "^2.2.4"
+   },
+   "type": "module",
+   "exports": {
+     ".": {
+       "import": {
+         "types": "./types/index.d.ts",
+         "default": "./esm/index.js"
+       },
+       "require": {
+         "types": "./types/index.d.cts",
+         "default": "./cjs/index.js"
+       },
+       "default": "./esm/index.js"
+     },
+     "./package.json": "./package.json"
+   },
+   "main": "./cjs/index.js",
+   "module": "./esm/index.js",
+   "types": "./types/index.d.ts",
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/panates/opra.git",
+     "directory": "packages/kafka"
+   },
+   "engines": {
+     "node": ">=16.0",
+     "npm": ">=7.0.0"
+   },
+   "files": [
+     "cjs/",
+     "esm/",
+     "types/",
+     "LICENSE",
+     "README.md"
+   ],
+   "keywords": [
+     "opra",
+     "kafka",
+     "consumer"
+   ]
+ }
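The `exports` map ships dual builds: `import` resolves to `esm/`, `require` to `cjs/`, with separate `.d.ts`/`.d.cts` typings. kafkajs is a peer dependency, so consumers install it alongside (e.g. `npm install @opra/kafka kafkajs`):

```ts
// ESM consumers resolve to esm/index.js with types/index.d.ts;
// CJS consumers (require) resolve to cjs/index.js with types/index.d.cts.
import { KafkaAdapter, KAFKA_DEFAULT_GROUP } from '@opra/kafka';

console.log(KafkaAdapter.PlatformName, KAFKA_DEFAULT_GROUP); // 'kafka' 'default'
```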
package/types/augmentation/msg-operation.augmentation.d.ts ADDED
@@ -0,0 +1,7 @@
+ import '@opra/core';
+ import { KafkaOperationOptions } from '../types.js';
+ declare module '@opra/common' {
+   interface MsgOperationDecorator {
+     Kafka(config: KafkaOperationOptions | (() => KafkaOperationOptions | Promise<KafkaOperationOptions>)): this;
+   }
+ }
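The typing shows that `Kafka()` also accepts a (possibly async) resolver, which `_initConsumer` awaits and merges over any static options — useful when the group id is only known from the environment at startup. A sketch of that form, with the decorator call shape assumed as before:

```ts
import type { KafkaOperationOptions } from '@opra/kafka';

// Resolver form: stored under KAFKA_OPERATION_METADATA_RESOLVER and awaited
// once per operation while the adapter initializes its consumer.
const resolveKafkaOptions = async (): Promise<KafkaOperationOptions> => ({
  groupId: process.env.KAFKA_GROUP_ID ?? 'default',
  fromBeginning: process.env.KAFKA_FROM_BEGINNING === 'true',
});

// On an operation: `.Kafka(resolveKafkaOptions)` instead of an options literal.
```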
package/types/constants.d.ts ADDED
@@ -0,0 +1,3 @@
+ export declare const KAFKA_DEFAULT_GROUP = "default";
+ export declare const KAFKA_OPERATION_METADATA = "KAFKA_OPERATION_METADATA";
+ export declare const KAFKA_OPERATION_METADATA_RESOLVER = "KAFKA_OPERATION_METADATA_RESOLVER";
package/types/index.d.cts ADDED
@@ -0,0 +1,6 @@
+ import './augmentation/msg-operation.augmentation.js';
+ export * from './constants.js';
+ export * from './kafka-adapter.js';
+ export * from './kafka-context.js';
+ export * from './request-parser.js';
+ export * from './types.js';
package/types/index.d.ts ADDED
@@ -0,0 +1,6 @@
+ import './augmentation/msg-operation.augmentation.js';
+ export * from './constants.js';
+ export * from './kafka-adapter.js';
+ export * from './kafka-context.js';
+ export * from './request-parser.js';
+ export * from './types.js';
package/types/kafka-adapter.d.ts ADDED
@@ -0,0 +1,94 @@
+ import { ApiDocument, MsgApi, MsgController, MsgOperation, OpraSchema } from '@opra/common';
+ import { type ILogger, PlatformAdapter } from '@opra/core';
+ import { type Consumer, Kafka, type KafkaConfig } from 'kafkajs';
+ import type { StrictOmit } from 'ts-gems';
+ import { KafkaContext } from './kafka-context.js';
+ import type { KafkaOperationOptions } from './types.js';
+ /**
+  *
+  * @class KafkaAdapter
+  */
+ export declare class KafkaAdapter extends PlatformAdapter {
+   protected _controllerInstances: Map<MsgController, any>;
+   protected _consumers: Map<Consumer, {
+     consumer: Consumer;
+     controller: MsgController;
+     instance: any;
+     operation: MsgOperation;
+     options: KafkaOperationOptions;
+   }>;
+   protected _logger?: ILogger;
+   readonly kafka: Kafka;
+   readonly protocol: OpraSchema.Transport;
+   readonly platform = "kafka";
+   interceptors: (KafkaAdapter.InterceptorFunction | KafkaAdapter.IKafkaInterceptor)[];
+   /**
+    *
+    * @param init
+    * @constructor
+    */
+   constructor(init: KafkaAdapter.InitArguments);
+   get api(): MsgApi;
+   /**
+    * Starts the service
+    */
+   start(): Promise<void>;
+   /**
+    * Closes all connections and stops the service
+    */
+   close(): Promise<void>;
+   getControllerInstance<T>(controllerPath: string): T | undefined;
+   /**
+    * Creates and initializes all consumers
+    *
+    * @protected
+    */
+   protected _initConsumers(): Promise<void>;
+   /**
+    * Creates and initializes a consumer for the given operation
+    *
+    * @protected
+    */
+   protected _initConsumer(controller: MsgController, instance: any, operation: MsgOperation): Promise<void>;
+   protected _start(): Promise<void>;
+   /**
+    * Starts a single consumer
+    * @protected
+    */
+   protected _startConsumer(args: {
+     consumer: Consumer;
+     controller: MsgController;
+     instance: any;
+     operation: MsgOperation;
+     options: KafkaOperationOptions;
+   }): Promise<void>;
+   protected _emitError(e: any): void;
+   protected _createLogCreator(logger: ILogger, logExtra?: boolean): ({ namespace, level, log }: {
+     namespace: any;
+     level: any;
+     log: any;
+   }) => any;
+ }
+ /**
+  * @namespace KafkaAdapter
+  */
+ export declare namespace KafkaAdapter {
+   const PlatformName = "kafka";
+   type NextCallback = () => Promise<void>;
+   interface InitArguments extends StrictOmit<KafkaConfig, 'logCreator' | 'logLevel'>, PlatformAdapter.Options {
+     document: ApiDocument;
+     interceptors?: (InterceptorFunction | IKafkaInterceptor)[];
+     logger?: ILogger;
+     logExtra?: boolean;
+   }
+   /**
+    * @type InterceptorFunction
+    */
+   type InterceptorFunction = IKafkaInterceptor['intercept'];
+   /**
+    * @interface IKafkaInterceptor
+    */
+   type IKafkaInterceptor = {
+     intercept(context: KafkaContext, next: NextCallback): Promise<void>;
+   };
+ }
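`IKafkaInterceptor` follows the usual middleware shape: receive the context plus a `next` callback and await it to continue the chain. A minimal sketch — note that in this version the `interceptors` array is populated by the constructor, but `_startConsumer` does not appear to invoke it around the handler yet:

```ts
import type { KafkaAdapter, KafkaContext } from '@opra/kafka';

const logging: KafkaAdapter.IKafkaInterceptor = {
  async intercept(context: KafkaContext, next: KafkaAdapter.NextCallback) {
    const started = Date.now();
    await next(); // continue the chain / run the operation handler
    console.log(`handled ${context.operation?.name} in ${Date.now() - started} ms`);
  },
};

// Passed at construction and kept on `adapter.interceptors`:
// new KafkaAdapter({ document, brokers, interceptors: [logging] });
```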
package/types/kafka-context.d.ts ADDED
@@ -0,0 +1,39 @@
+ import { MsgController, MsgOperation, OpraSchema } from '@opra/common';
+ import { ExecutionContext } from '@opra/core';
+ import type { KafkaMessage } from 'kafkajs';
+ import type { AsyncEventEmitter } from 'node-events-async';
+ import type { KafkaAdapter } from './kafka-adapter.js';
+ export declare namespace KafkaContext {
+   interface Initiator extends Omit<ExecutionContext.Initiator, 'document' | 'protocol'> {
+     adapter: KafkaAdapter;
+     controller?: MsgController;
+     controllerInstance?: any;
+     operation?: MsgOperation;
+     operationHandler?: Function;
+     topic: string;
+     partition: number;
+     key: any;
+     payload: any;
+     headers: Record<string, any>;
+     rawMessage: KafkaMessage;
+     heartbeat(): Promise<void>;
+     pause(): () => void;
+   }
+ }
+ export declare class KafkaContext extends ExecutionContext implements AsyncEventEmitter {
+   readonly protocol: OpraSchema.Transport;
+   readonly platform: string;
+   readonly adapter: KafkaAdapter;
+   readonly controller?: MsgController;
+   readonly controllerInstance?: any;
+   readonly operation?: MsgOperation;
+   readonly operationHandler?: Function;
+   readonly key: any;
+   readonly payload: any;
+   readonly partition: number;
+   readonly headers: Record<string, any>;
+   readonly rawMessage: KafkaMessage;
+   readonly heartbeat: () => Promise<void>;
+   readonly pause: () => void;
+   constructor(init: KafkaContext.Initiator);
+ }
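`heartbeat` and `pause` are the flow-control hooks kafkajs passes to `eachMessage`, forwarded onto the context unchanged. For long-running work they might be used as in the sketch below; note the `Initiator` types `pause()` as returning a resume function (matching kafkajs) while the class property is typed `() => void`, hence the cast:

```ts
import type { KafkaContext } from '@opra/kafka';

// Hypothetical helper, not part of the package.
declare function doExpensiveWork(payload: any): Promise<void>;

async function slowHandler(ctx: KafkaContext) {
  await ctx.heartbeat(); // avoid a session timeout / rebalance during long work
  // kafkajs's eachMessage `pause` returns a resume callback:
  const resume = (ctx.pause as unknown as () => () => void)();
  try {
    await doExpensiveWork(ctx.payload);
  } finally {
    resume(); // resume consumption of the paused topic/partition
  }
}
```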
package/types/parsers/binary.parser.d.ts ADDED
@@ -0,0 +1,2 @@
+ import type { RequestParseFunction } from '../types.js';
+ export declare const binaryParser: RequestParseFunction;
package/types/parsers/string.parser.d.ts ADDED
@@ -0,0 +1,2 @@
+ import type { RequestParseFunction } from '../types.js';
+ export declare const stringParser: RequestParseFunction;
package/types/request-parser.d.ts ADDED
@@ -0,0 +1,2 @@
+ import type { RequestParseFunction } from './types.js';
+ export declare const RequestParser: Record<string, RequestParseFunction>;
package/types/types.d.ts ADDED
@@ -0,0 +1,8 @@
+ import type { MsgOperationResponse } from '@opra/common';
+ import { ConsumerConfig } from 'kafkajs';
+ export type RequestParseFunction = (buffer: Buffer) => any;
+ export interface KafkaOperationOptions extends ConsumerConfig {
+   fromBeginning?: boolean;
+ }
+ export interface KafkaOperationResponseOptions extends MsgOperationResponse.Options {
+ }
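`KafkaOperationOptions` is kafkajs's `ConsumerConfig` plus a `fromBeginning` flag, which the adapter splits off and passes to `consumer.subscribe()` rather than to the consumer constructor. For example:

```ts
import type { KafkaOperationOptions } from '@opra/kafka';

const options: KafkaOperationOptions = {
  groupId: 'orders-group', // ConsumerConfig: passed to kafka.consumer(options)
  sessionTimeout: 30_000,  // any other kafkajs ConsumerConfig field passes through
  fromBeginning: true,     // read by the adapter and given to consumer.subscribe()
};
```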