@opra/kafka 1.0.4 → 1.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/kafka-adapter.js +56 -23
- package/esm/kafka-adapter.js +57 -24
- package/package.json +4 -4
- package/types/kafka-adapter.d.ts +8 -7
package/cjs/kafka-adapter.js
CHANGED
|
@@ -11,6 +11,7 @@ const request_parser_js_1 = require("./request-parser.js");
|
|
|
11
11
|
const globalErrorTypes = ['unhandledRejection', 'uncaughtException'];
|
|
12
12
|
const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
|
|
13
13
|
const kGroupId = Symbol('kGroupId');
|
|
14
|
+
const noOp = () => undefined;
|
|
14
15
|
/**
|
|
15
16
|
*
|
|
16
17
|
* @class KafkaAdapter
|
|
@@ -22,22 +23,15 @@ class KafkaAdapter extends core_1.PlatformAdapter {
|
|
|
22
23
|
* @constructor
|
|
23
24
|
*/
|
|
24
25
|
constructor(config) {
|
|
25
|
-
super(config
|
|
26
|
+
super(config);
|
|
26
27
|
this._controllerInstances = new Map();
|
|
27
28
|
this._consumers = new Map();
|
|
28
29
|
this._handlerArgs = [];
|
|
30
|
+
this._started = false;
|
|
29
31
|
this.protocol = 'rpc';
|
|
30
32
|
this.platform = KafkaAdapter.PlatformName;
|
|
31
|
-
if (!(config.document.api instanceof common_1.RpcApi && config.document.api.platform === KafkaAdapter.PlatformName)) {
|
|
32
|
-
throw new TypeError(`The document doesn't expose a Kafka Api`);
|
|
33
|
-
}
|
|
34
33
|
this._config = config;
|
|
35
34
|
this.interceptors = [...(config.interceptors || [])];
|
|
36
|
-
this.kafka = new kafkajs_1.Kafka({
|
|
37
|
-
...config.client,
|
|
38
|
-
logCreator: config.logger ? () => this._createLogCreator(config.logger, config.logExtra) : undefined,
|
|
39
|
-
});
|
|
40
|
-
this._logger = config.logger;
|
|
41
35
|
globalErrorTypes.forEach(type => {
|
|
42
36
|
process.on(type, e => {
|
|
43
37
|
this._emitError(e);
|
|
@@ -51,17 +45,29 @@ class KafkaAdapter extends core_1.PlatformAdapter {
|
|
|
51
45
|
get api() {
|
|
52
46
|
return this.document.rpcApi;
|
|
53
47
|
}
|
|
48
|
+
get kafka() {
|
|
49
|
+
return this._kafka;
|
|
50
|
+
}
|
|
51
|
+
async initialize(document) {
|
|
52
|
+
if (this._document)
|
|
53
|
+
throw new TypeError(`${this.constructor.name} already initialized.`);
|
|
54
|
+
if (!(document.api instanceof common_1.RpcApi && document.api.platform === KafkaAdapter.PlatformName)) {
|
|
55
|
+
throw new TypeError(`The document doesn't expose a Kafka Api`);
|
|
56
|
+
}
|
|
57
|
+
this._document = document;
|
|
58
|
+
this._kafka = new kafkajs_1.Kafka({
|
|
59
|
+
...this._config.client,
|
|
60
|
+
logCreator: this.logger ? () => this._createLogCreator(this.logger, this._config.logExtra) : undefined,
|
|
61
|
+
});
|
|
62
|
+
await this._createAllConsumers();
|
|
63
|
+
}
|
|
54
64
|
/**
|
|
55
65
|
* Starts the service
|
|
56
66
|
*/
|
|
57
67
|
async start() {
|
|
58
|
-
|
|
59
|
-
if (this._consumers.size > 0)
|
|
60
|
-
return;
|
|
61
|
-
/* istanbul ignore next */
|
|
62
|
-
if (this._consumers.size > 0)
|
|
68
|
+
if (this._started)
|
|
63
69
|
return;
|
|
64
|
-
|
|
70
|
+
this._started = true;
|
|
65
71
|
/** Connect all consumers */
|
|
66
72
|
for (const consumer of this._consumers.values()) {
|
|
67
73
|
await consumer.connect().catch(e => {
|
|
@@ -293,7 +299,7 @@ class KafkaAdapter extends core_1.PlatformAdapter {
|
|
|
293
299
|
return;
|
|
294
300
|
}
|
|
295
301
|
/** Create context */
|
|
296
|
-
const
|
|
302
|
+
const context = new kafka_context_js_1.KafkaContext({
|
|
297
303
|
adapter: this,
|
|
298
304
|
platform: this.platform,
|
|
299
305
|
controller,
|
|
@@ -309,15 +315,42 @@ class KafkaAdapter extends core_1.PlatformAdapter {
|
|
|
309
315
|
heartbeat,
|
|
310
316
|
pause,
|
|
311
317
|
});
|
|
312
|
-
await this.emitAsync('before-execute',
|
|
313
|
-
|
|
314
|
-
|
|
318
|
+
await this.emitAsync('before-execute', context);
|
|
319
|
+
try {
|
|
320
|
+
/** Call operation handler */
|
|
321
|
+
const result = await operationHandler.call(instance, context);
|
|
322
|
+
await this.emitAsync('after-execute', context, result);
|
|
323
|
+
}
|
|
324
|
+
catch (e) {
|
|
325
|
+
this._emitError(e, context);
|
|
326
|
+
}
|
|
315
327
|
};
|
|
316
328
|
}
|
|
317
|
-
_emitError(e) {
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
this.
|
|
329
|
+
_emitError(e, context) {
|
|
330
|
+
Promise.resolve()
|
|
331
|
+
.then(async () => {
|
|
332
|
+
const logger = this.logger;
|
|
333
|
+
if (context) {
|
|
334
|
+
context.errors = this._wrapExceptions(context.errors);
|
|
335
|
+
if (context.listenerCount('error')) {
|
|
336
|
+
await this.emitAsync('error', context.errors[0], context);
|
|
337
|
+
}
|
|
338
|
+
if (logger?.error) {
|
|
339
|
+
context.errors.forEach(err => logger.error(err, context));
|
|
340
|
+
}
|
|
341
|
+
return;
|
|
342
|
+
}
|
|
343
|
+
this.logger?.error(e);
|
|
344
|
+
if (this.listenerCount('error'))
|
|
345
|
+
this.emit('error', e);
|
|
346
|
+
})
|
|
347
|
+
.catch(noOp);
|
|
348
|
+
}
|
|
349
|
+
_wrapExceptions(exceptions) {
|
|
350
|
+
const wrappedErrors = exceptions.map(e => (e instanceof common_1.OpraException ? e : new common_1.OpraException(e)));
|
|
351
|
+
if (!wrappedErrors.length)
|
|
352
|
+
wrappedErrors.push(new common_1.OpraException('Internal Server Error'));
|
|
353
|
+
return wrappedErrors;
|
|
321
354
|
}
|
|
322
355
|
_createLogCreator(logger, logExtra) {
|
|
323
356
|
return ({ namespace, level, log }) => {
|
package/esm/kafka-adapter.js
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { RPC_CONTROLLER_METADATA, RpcApi } from '@opra/common';
|
|
1
|
+
import { OpraException, RPC_CONTROLLER_METADATA, RpcApi, } from '@opra/common';
|
|
2
2
|
import { kAssetCache, PlatformAdapter } from '@opra/core';
|
|
3
3
|
import { Kafka, logLevel } from 'kafkajs';
|
|
4
4
|
import { vg } from 'valgen';
|
|
@@ -8,6 +8,7 @@ import { RequestParser } from './request-parser.js';
|
|
|
8
8
|
const globalErrorTypes = ['unhandledRejection', 'uncaughtException'];
|
|
9
9
|
const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
|
|
10
10
|
const kGroupId = Symbol('kGroupId');
|
|
11
|
+
const noOp = () => undefined;
|
|
11
12
|
/**
|
|
12
13
|
*
|
|
13
14
|
* @class KafkaAdapter
|
|
@@ -19,22 +20,15 @@ export class KafkaAdapter extends PlatformAdapter {
|
|
|
19
20
|
* @constructor
|
|
20
21
|
*/
|
|
21
22
|
constructor(config) {
|
|
22
|
-
super(config
|
|
23
|
+
super(config);
|
|
23
24
|
this._controllerInstances = new Map();
|
|
24
25
|
this._consumers = new Map();
|
|
25
26
|
this._handlerArgs = [];
|
|
27
|
+
this._started = false;
|
|
26
28
|
this.protocol = 'rpc';
|
|
27
29
|
this.platform = KafkaAdapter.PlatformName;
|
|
28
|
-
if (!(config.document.api instanceof RpcApi && config.document.api.platform === KafkaAdapter.PlatformName)) {
|
|
29
|
-
throw new TypeError(`The document doesn't expose a Kafka Api`);
|
|
30
|
-
}
|
|
31
30
|
this._config = config;
|
|
32
31
|
this.interceptors = [...(config.interceptors || [])];
|
|
33
|
-
this.kafka = new Kafka({
|
|
34
|
-
...config.client,
|
|
35
|
-
logCreator: config.logger ? () => this._createLogCreator(config.logger, config.logExtra) : undefined,
|
|
36
|
-
});
|
|
37
|
-
this._logger = config.logger;
|
|
38
32
|
globalErrorTypes.forEach(type => {
|
|
39
33
|
process.on(type, e => {
|
|
40
34
|
this._emitError(e);
|
|
@@ -48,17 +42,29 @@ export class KafkaAdapter extends PlatformAdapter {
|
|
|
48
42
|
get api() {
|
|
49
43
|
return this.document.rpcApi;
|
|
50
44
|
}
|
|
45
|
+
get kafka() {
|
|
46
|
+
return this._kafka;
|
|
47
|
+
}
|
|
48
|
+
async initialize(document) {
|
|
49
|
+
if (this._document)
|
|
50
|
+
throw new TypeError(`${this.constructor.name} already initialized.`);
|
|
51
|
+
if (!(document.api instanceof RpcApi && document.api.platform === KafkaAdapter.PlatformName)) {
|
|
52
|
+
throw new TypeError(`The document doesn't expose a Kafka Api`);
|
|
53
|
+
}
|
|
54
|
+
this._document = document;
|
|
55
|
+
this._kafka = new Kafka({
|
|
56
|
+
...this._config.client,
|
|
57
|
+
logCreator: this.logger ? () => this._createLogCreator(this.logger, this._config.logExtra) : undefined,
|
|
58
|
+
});
|
|
59
|
+
await this._createAllConsumers();
|
|
60
|
+
}
|
|
51
61
|
/**
|
|
52
62
|
* Starts the service
|
|
53
63
|
*/
|
|
54
64
|
async start() {
|
|
55
|
-
|
|
56
|
-
if (this._consumers.size > 0)
|
|
57
|
-
return;
|
|
58
|
-
/* istanbul ignore next */
|
|
59
|
-
if (this._consumers.size > 0)
|
|
65
|
+
if (this._started)
|
|
60
66
|
return;
|
|
61
|
-
|
|
67
|
+
this._started = true;
|
|
62
68
|
/** Connect all consumers */
|
|
63
69
|
for (const consumer of this._consumers.values()) {
|
|
64
70
|
await consumer.connect().catch(e => {
|
|
@@ -290,7 +296,7 @@ export class KafkaAdapter extends PlatformAdapter {
|
|
|
290
296
|
return;
|
|
291
297
|
}
|
|
292
298
|
/** Create context */
|
|
293
|
-
const
|
|
299
|
+
const context = new KafkaContext({
|
|
294
300
|
adapter: this,
|
|
295
301
|
platform: this.platform,
|
|
296
302
|
controller,
|
|
@@ -306,15 +312,42 @@ export class KafkaAdapter extends PlatformAdapter {
|
|
|
306
312
|
heartbeat,
|
|
307
313
|
pause,
|
|
308
314
|
});
|
|
309
|
-
await this.emitAsync('before-execute',
|
|
310
|
-
|
|
311
|
-
|
|
315
|
+
await this.emitAsync('before-execute', context);
|
|
316
|
+
try {
|
|
317
|
+
/** Call operation handler */
|
|
318
|
+
const result = await operationHandler.call(instance, context);
|
|
319
|
+
await this.emitAsync('after-execute', context, result);
|
|
320
|
+
}
|
|
321
|
+
catch (e) {
|
|
322
|
+
this._emitError(e, context);
|
|
323
|
+
}
|
|
312
324
|
};
|
|
313
325
|
}
|
|
314
|
-
_emitError(e) {
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
this.
|
|
326
|
+
_emitError(e, context) {
|
|
327
|
+
Promise.resolve()
|
|
328
|
+
.then(async () => {
|
|
329
|
+
const logger = this.logger;
|
|
330
|
+
if (context) {
|
|
331
|
+
context.errors = this._wrapExceptions(context.errors);
|
|
332
|
+
if (context.listenerCount('error')) {
|
|
333
|
+
await this.emitAsync('error', context.errors[0], context);
|
|
334
|
+
}
|
|
335
|
+
if (logger?.error) {
|
|
336
|
+
context.errors.forEach(err => logger.error(err, context));
|
|
337
|
+
}
|
|
338
|
+
return;
|
|
339
|
+
}
|
|
340
|
+
this.logger?.error(e);
|
|
341
|
+
if (this.listenerCount('error'))
|
|
342
|
+
this.emit('error', e);
|
|
343
|
+
})
|
|
344
|
+
.catch(noOp);
|
|
345
|
+
}
|
|
346
|
+
_wrapExceptions(exceptions) {
|
|
347
|
+
const wrappedErrors = exceptions.map(e => (e instanceof OpraException ? e : new OpraException(e)));
|
|
348
|
+
if (!wrappedErrors.length)
|
|
349
|
+
wrappedErrors.push(new OpraException('Internal Server Error'));
|
|
350
|
+
return wrappedErrors;
|
|
318
351
|
}
|
|
319
352
|
_createLogCreator(logger, logExtra) {
|
|
320
353
|
return ({ namespace, level, log }) => {
|
package/package.json
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@opra/kafka",
|
|
3
|
-
"version": "1.0.4",
|
|
3
|
+
"version": "1.0.5",
|
|
4
4
|
"description": "Opra Kafka package",
|
|
5
5
|
"author": "Panates",
|
|
6
6
|
"license": "MIT",
|
|
7
7
|
"dependencies": {
|
|
8
|
-
"@opra/common": "^1.0.4",
|
|
9
|
-
"@opra/core": "^1.0.4",
|
|
8
|
+
"@opra/common": "^1.0.5",
|
|
9
|
+
"@opra/core": "^1.0.5",
|
|
10
10
|
"node-events-async": "^1.0.0",
|
|
11
|
-
"tslib": "^2.
|
|
11
|
+
"tslib": "^2.8.0",
|
|
12
12
|
"valgen": "^5.10.0"
|
|
13
13
|
},
|
|
14
14
|
"peerDependencies": {
|
package/types/kafka-adapter.d.ts
CHANGED
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { ApiDocument, OpraSchema, RpcApi, RpcController, RpcOperation } from '@opra/common';
|
|
1
|
+
import { ApiDocument, OpraException, OpraSchema, RpcApi, RpcController, RpcOperation } from '@opra/common';
|
|
2
2
|
import { type ILogger, PlatformAdapter } from '@opra/core';
|
|
3
3
|
import { type Consumer, ConsumerConfig, EachMessageHandler, Kafka, type KafkaConfig } from 'kafkajs';
|
|
4
4
|
import type { StrictOmit } from 'ts-gems';
|
|
@@ -17,9 +17,7 @@ export declare namespace KafkaAdapter {
|
|
|
17
17
|
fromBeginning?: boolean;
|
|
18
18
|
};
|
|
19
19
|
};
|
|
20
|
-
document: ApiDocument;
|
|
21
20
|
interceptors?: (InterceptorFunction | IKafkaInterceptor)[];
|
|
22
|
-
logger?: ILogger;
|
|
23
21
|
logExtra?: boolean;
|
|
24
22
|
}
|
|
25
23
|
interface OperationOptions {
|
|
@@ -66,11 +64,11 @@ export declare class KafkaAdapter extends PlatformAdapter {
|
|
|
66
64
|
protected _controllerInstances: Map<RpcController, any>;
|
|
67
65
|
protected _consumers: Map<string, Consumer>;
|
|
68
66
|
protected _handlerArgs: HandlerArguments[];
|
|
69
|
-
protected
|
|
70
|
-
|
|
67
|
+
protected _started: boolean;
|
|
68
|
+
protected _kafka: Kafka;
|
|
71
69
|
readonly protocol: OpraSchema.Transport;
|
|
72
70
|
readonly platform = "kafka";
|
|
73
|
-
interceptors: (KafkaAdapter.InterceptorFunction | KafkaAdapter.IKafkaInterceptor)[];
|
|
71
|
+
readonly interceptors: (KafkaAdapter.InterceptorFunction | KafkaAdapter.IKafkaInterceptor)[];
|
|
74
72
|
/**
|
|
75
73
|
*
|
|
76
74
|
* @param config
|
|
@@ -78,6 +76,8 @@ export declare class KafkaAdapter extends PlatformAdapter {
|
|
|
78
76
|
*/
|
|
79
77
|
constructor(config: KafkaAdapter.Config);
|
|
80
78
|
get api(): RpcApi;
|
|
79
|
+
get kafka(): Kafka;
|
|
80
|
+
initialize(document: ApiDocument): Promise<void>;
|
|
81
81
|
/**
|
|
82
82
|
* Starts the service
|
|
83
83
|
*/
|
|
@@ -112,7 +112,8 @@ export declare class KafkaAdapter extends PlatformAdapter {
|
|
|
112
112
|
* @protected
|
|
113
113
|
*/
|
|
114
114
|
protected _createHandler(args: HandlerArguments): void;
|
|
115
|
-
protected _emitError(e: any): void;
|
|
115
|
+
protected _emitError(e: any, context?: KafkaContext): void;
|
|
116
|
+
protected _wrapExceptions(exceptions: any[]): OpraException[];
|
|
116
117
|
protected _createLogCreator(logger: ILogger, logExtra?: boolean): ({ namespace, level, log }: {
|
|
117
118
|
namespace: any;
|
|
118
119
|
level: any;
|