@opra/kafka 1.0.0-beta.3 → 1.0.0-beta.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/cjs/augmentation/{msg-operation.augmentation.js → opra-common.augmentation.js} +1 -1
- package/cjs/index.js +1 -2
- package/cjs/kafka-adapter.js +36 -22
- package/cjs/kafka-context.js +2 -2
- package/esm/augmentation/{msg-operation.augmentation.js → opra-common.augmentation.js} +1 -1
- package/esm/index.js +1 -2
- package/esm/kafka-adapter.js +37 -23
- package/esm/kafka-context.js +2 -2
- package/package.json +3 -3
- package/types/augmentation/opra-common.augmentation.d.ts +7 -0
- package/types/index.d.cts +1 -2
- package/types/index.d.ts +1 -2
- package/types/kafka-adapter.d.ts +47 -38
- package/types/kafka-context.d.ts +5 -5
- package/types/parsers/binary.parser.d.ts +1 -1
- package/types/parsers/string.parser.d.ts +1 -1
- package/types/request-parser.d.ts +1 -1
- package/cjs/types.js +0 -2
- package/esm/types.js +0 -1
- package/types/augmentation/msg-operation.augmentation.d.ts +0 -7
- package/types/types.d.ts +0 -8

package/cjs/augmentation/{msg-operation.augmentation.js → opra-common.augmentation.js}
CHANGED

@@ -4,7 +4,7 @@ require("@opra/core");
 const common_1 = require("@opra/common");
 const constants_js_1 = require("../constants.js");
 /** Implementation **/
-common_1.classes.
+common_1.classes.RpcOperationDecoratorFactory.augment((decorator, decoratorChain) => {
     decorator.Kafka = (config) => {
         decoratorChain.push((_, target, propertyKey) => {
             if (typeof config === 'function') {

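The renamed augmentation now hooks RpcOperationDecoratorFactory (previously the Msg* factory) and adds a .Kafka() method to the operation decorator chain. The body is truncated in this diff; the sketch below is a hypothetical TypeScript reconstruction of what the pushed callback likely does, assuming the KAFKA_OPERATION_METADATA / KAFKA_OPERATION_METADATA_RESOLVER keys imported above and the Reflect.getMetadata reads visible later in kafka-adapter.js.

import 'reflect-metadata';
import { classes } from '@opra/common';
import { KAFKA_OPERATION_METADATA, KAFKA_OPERATION_METADATA_RESOLVER } from '../constants.js';

// Hypothetical reconstruction (not the published source): a function argument is
// stored as a lazy options resolver, anything else as static operation metadata.
classes.RpcOperationDecoratorFactory.augment((decorator: any, decoratorChain: Function[]) => {
  decorator.Kafka = (config: unknown) => {
    decoratorChain.push((_: unknown, target: Object, propertyKey: string | symbol) => {
      if (typeof config === 'function') {
        Reflect.defineMetadata(KAFKA_OPERATION_METADATA_RESOLVER, config, target, propertyKey);
      } else {
        Reflect.defineMetadata(KAFKA_OPERATION_METADATA, config, target, propertyKey);
      }
    });
    return decorator;
  };
});
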
package/cjs/index.js
CHANGED
@@ -1,9 +1,8 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 const tslib_1 = require("tslib");
-require("./augmentation/
+require("./augmentation/opra-common.augmentation.js");
 tslib_1.__exportStar(require("./constants.js"), exports);
 tslib_1.__exportStar(require("./kafka-adapter.js"), exports);
 tslib_1.__exportStar(require("./kafka-context.js"), exports);
 tslib_1.__exportStar(require("./request-parser.js"), exports);
-tslib_1.__exportStar(require("./types.js"), exports);

package/cjs/kafka-adapter.js
CHANGED
@@ -17,24 +17,25 @@ const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
 class KafkaAdapter extends core_1.PlatformAdapter {
     /**
      *
-     * @param
+     * @param config
      * @constructor
      */
-    constructor(
-    super(
+    constructor(config) {
+        super(config.document, config);
         this._controllerInstances = new Map();
         this._consumers = new Map();
-        this.protocol = '
+        this.protocol = 'rpc';
         this.platform = KafkaAdapter.PlatformName;
-        if (!(
+        if (!(config.document.api instanceof common_1.RpcApi && config.document.api.platform === KafkaAdapter.PlatformName)) {
             throw new TypeError(`The document doesn't expose a Kafka Api`);
         }
-        this.
+        this._config = config;
+        this.interceptors = [...(config.interceptors || [])];
         this.kafka = new kafkajs_1.Kafka({
-            ...
-            logCreator:
+            ...config.client,
+            logCreator: config.logger ? () => this._createLogCreator(config.logger, config.logExtra) : undefined,
         });
-        this._logger =
+        this._logger = config.logger;
         globalErrorTypes.forEach(type => {
             process.on(type, e => {
                 this._emitError(e);
@@ -46,7 +47,7 @@ class KafkaAdapter extends core_1.PlatformAdapter {
         });
     }
     get api() {
-        return this.document.
+        return this.document.rpcApi;
     }
     /**
      * Starts the service
@@ -72,7 +73,7 @@ class KafkaAdapter extends core_1.PlatformAdapter {
                 }
             }
         }
-        await Promise.allSettled(Array.from(this._consumers.
+        await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.consumer.disconnect()));
         this._consumers.clear();
         this._controllerInstances.clear();
     }
@@ -90,7 +91,7 @@ class KafkaAdapter extends core_1.PlatformAdapter {
         if (this._consumers.size > 0)
             return;
         /** Create consumers */
-        for (const controller of this.document.
+        for (const controller of this.document.rpcApi.controllers.values()) {
             let instance = controller.instance;
             if (!instance && controller.ctor)
                 instance = new controller.ctor();
@@ -111,16 +112,34 @@ class KafkaAdapter extends core_1.PlatformAdapter {
         if (typeof instance[operation.name] !== 'function')
             return;
         const proto = controller.ctor?.prototype || Object.getPrototypeOf(controller.instance);
+        // this._config.consumers
         let operationOptions = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA, proto, operation.name);
         const configResolver = Reflect.getMetadata(constants_js_1.KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
         if (configResolver) {
             const cfg = await configResolver();
             operationOptions = { ...operationOptions, ...cfg };
         }
-        const
-
-
-
+        const consumerConfig = {
+            groupId: constants_js_1.KAFKA_DEFAULT_GROUP,
+        };
+        if (typeof operationOptions?.consumer === 'object') {
+            if (this._consumers.has(operationOptions.consumer.groupId)) {
+                throw new Error(`Operation consumer for groupId (${operationOptions.consumer.groupId}) already exists`);
+            }
+            Object.assign(consumerConfig, operationOptions?.consumer);
+        }
+        else if (operationOptions?.consumer) {
+            const x = this._config.consumers?.[operationOptions.consumer];
+            Object.assign(consumerConfig, { ...x, groupId: operationOptions.consumer });
+        }
+        const consumer = this.kafka.consumer(consumerConfig);
+        this._consumers.set(consumerConfig.groupId, {
+            consumer,
+            controller,
+            instance,
+            operation,
+            options: { ...operationOptions },
+        });
         if (typeof controller.onInit === 'function')
             controller.onInit.call(instance, controller);
     }
@@ -268,9 +287,4 @@ class KafkaAdapter extends core_1.PlatformAdapter {
     }
 }
 exports.KafkaAdapter = KafkaAdapter;
-
- * @namespace KafkaAdapter
- */
-(function (KafkaAdapter) {
-    KafkaAdapter.PlatformName = 'kafka';
-})(KafkaAdapter || (exports.KafkaAdapter = KafkaAdapter = {}));
+KafkaAdapter.PlatformName = 'kafka';

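The largest functional change is the consumer set-up block added to _initConsumer above: each operation's Kafka options are resolved into a kafkajs ConsumerConfig and registered in the adapter's _consumers map keyed by groupId. A minimal TypeScript sketch of that resolution rule follows; it is an illustrative helper mirroring the added logic, not part of the package API, and the duplicate-groupId check that throws is omitted.

import type { ConsumerConfig } from 'kafkajs';

// An inline ConsumerConfig wins, a string is treated as a groupId and merged with the
// matching entry from KafkaAdapter.Config.consumers, anything else falls back to the
// default group.
function resolveConsumerConfig(
  operationConsumer: string | ConsumerConfig | undefined,
  sharedConsumers: Record<string, Omit<ConsumerConfig, 'groupId'>> | undefined,
  defaultGroupId: string,
): ConsumerConfig {
  const consumerConfig: ConsumerConfig = { groupId: defaultGroupId };
  if (typeof operationConsumer === 'object') {
    Object.assign(consumerConfig, operationConsumer);
  } else if (operationConsumer) {
    Object.assign(consumerConfig, { ...sharedConsumers?.[operationConsumer], groupId: operationConsumer });
  }
  return consumerConfig;
}
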
package/cjs/kafka-context.js
CHANGED
@@ -4,10 +4,10 @@ exports.KafkaContext = void 0;
 const core_1 = require("@opra/core");
 class KafkaContext extends core_1.ExecutionContext {
     constructor(init) {
-        super({ ...init, document: init.adapter.document, protocol: '
+        super({ ...init, document: init.adapter.document, protocol: 'rpc' });
         this.adapter = init.adapter;
         this.platform = init.adapter.platform;
-        this.protocol = '
+        this.protocol = 'rpc';
         if (init.controller)
             this.controller = init.controller;
         if (init.controllerInstance)

package/esm/augmentation/{msg-operation.augmentation.js → opra-common.augmentation.js}
CHANGED

@@ -2,7 +2,7 @@ import '@opra/core';
 import { classes } from '@opra/common';
 import { KAFKA_OPERATION_METADATA, KAFKA_OPERATION_METADATA_RESOLVER } from '../constants.js';
 /** Implementation **/
-classes.
+classes.RpcOperationDecoratorFactory.augment((decorator, decoratorChain) => {
     decorator.Kafka = (config) => {
         decoratorChain.push((_, target, propertyKey) => {
             if (typeof config === 'function') {

package/esm/index.js
CHANGED
@@ -1,6 +1,5 @@
-import './augmentation/
+import './augmentation/opra-common.augmentation.js';
 export * from './constants.js';
 export * from './kafka-adapter.js';
 export * from './kafka-context.js';
 export * from './request-parser.js';
-export * from './types.js';

package/esm/kafka-adapter.js
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { RpcApi } from '@opra/common';
 import { kAssetCache, PlatformAdapter } from '@opra/core';
 import { Kafka, logLevel } from 'kafkajs';
 import { vg } from 'valgen';
@@ -14,24 +14,25 @@ const signalTraps = ['SIGTERM', 'SIGINT', 'SIGUSR2'];
 export class KafkaAdapter extends PlatformAdapter {
     /**
      *
-     * @param
+     * @param config
      * @constructor
      */
-    constructor(
-    super(
+    constructor(config) {
+        super(config.document, config);
         this._controllerInstances = new Map();
         this._consumers = new Map();
-        this.protocol = '
+        this.protocol = 'rpc';
         this.platform = KafkaAdapter.PlatformName;
-        if (!(
+        if (!(config.document.api instanceof RpcApi && config.document.api.platform === KafkaAdapter.PlatformName)) {
             throw new TypeError(`The document doesn't expose a Kafka Api`);
         }
-        this.
+        this._config = config;
+        this.interceptors = [...(config.interceptors || [])];
         this.kafka = new Kafka({
-            ...
-            logCreator:
+            ...config.client,
+            logCreator: config.logger ? () => this._createLogCreator(config.logger, config.logExtra) : undefined,
         });
-        this._logger =
+        this._logger = config.logger;
         globalErrorTypes.forEach(type => {
             process.on(type, e => {
                 this._emitError(e);
@@ -43,7 +44,7 @@ export class KafkaAdapter extends PlatformAdapter {
         });
     }
     get api() {
-        return this.document.
+        return this.document.rpcApi;
     }
     /**
      * Starts the service
@@ -69,7 +70,7 @@ export class KafkaAdapter extends PlatformAdapter {
                 }
             }
         }
-        await Promise.allSettled(Array.from(this._consumers.
+        await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.consumer.disconnect()));
         this._consumers.clear();
         this._controllerInstances.clear();
     }
@@ -87,7 +88,7 @@ export class KafkaAdapter extends PlatformAdapter {
         if (this._consumers.size > 0)
             return;
         /** Create consumers */
-        for (const controller of this.document.
+        for (const controller of this.document.rpcApi.controllers.values()) {
             let instance = controller.instance;
             if (!instance && controller.ctor)
                 instance = new controller.ctor();
@@ -108,16 +109,34 @@ export class KafkaAdapter extends PlatformAdapter {
         if (typeof instance[operation.name] !== 'function')
             return;
         const proto = controller.ctor?.prototype || Object.getPrototypeOf(controller.instance);
+        // this._config.consumers
         let operationOptions = Reflect.getMetadata(KAFKA_OPERATION_METADATA, proto, operation.name);
         const configResolver = Reflect.getMetadata(KAFKA_OPERATION_METADATA_RESOLVER, proto, operation.name);
         if (configResolver) {
             const cfg = await configResolver();
             operationOptions = { ...operationOptions, ...cfg };
         }
-        const
-
-
-
+        const consumerConfig = {
+            groupId: KAFKA_DEFAULT_GROUP,
+        };
+        if (typeof operationOptions?.consumer === 'object') {
+            if (this._consumers.has(operationOptions.consumer.groupId)) {
+                throw new Error(`Operation consumer for groupId (${operationOptions.consumer.groupId}) already exists`);
+            }
+            Object.assign(consumerConfig, operationOptions?.consumer);
+        }
+        else if (operationOptions?.consumer) {
+            const x = this._config.consumers?.[operationOptions.consumer];
+            Object.assign(consumerConfig, { ...x, groupId: operationOptions.consumer });
+        }
+        const consumer = this.kafka.consumer(consumerConfig);
+        this._consumers.set(consumerConfig.groupId, {
+            consumer,
+            controller,
+            instance,
+            operation,
+            options: { ...operationOptions },
+        });
         if (typeof controller.onInit === 'function')
             controller.onInit.call(instance, controller);
     }
@@ -264,9 +283,4 @@ export class KafkaAdapter extends PlatformAdapter {
         };
     }
 }
-
- * @namespace KafkaAdapter
- */
-(function (KafkaAdapter) {
-    KafkaAdapter.PlatformName = 'kafka';
-})(KafkaAdapter || (KafkaAdapter = {}));
+KafkaAdapter.PlatformName = 'kafka';

package/esm/kafka-context.js
CHANGED
@@ -1,10 +1,10 @@
 import { ExecutionContext } from '@opra/core';
 export class KafkaContext extends ExecutionContext {
     constructor(init) {
-        super({ ...init, document: init.adapter.document, protocol: '
+        super({ ...init, document: init.adapter.document, protocol: 'rpc' });
         this.adapter = init.adapter;
         this.platform = init.adapter.platform;
-        this.protocol = '
+        this.protocol = 'rpc';
         if (init.controller)
             this.controller = init.controller;
         if (init.controllerInstance)

package/package.json
CHANGED
@@ -1,12 +1,12 @@
 {
   "name": "@opra/kafka",
-  "version": "1.0.0-beta.
+  "version": "1.0.0-beta.4",
   "description": "Opra Kafka package",
   "author": "Panates",
   "license": "MIT",
   "dependencies": {
-    "@opra/common": "^1.0.0-beta.
-    "@opra/core": "^1.0.0-beta.
+    "@opra/common": "^1.0.0-beta.4",
+    "@opra/core": "^1.0.0-beta.4",
     "node-events-async": "^1.0.0",
     "tslib": "^2.7.0",
     "valgen": "^5.10.0"

package/types/augmentation/opra-common.augmentation.d.ts
ADDED

@@ -0,0 +1,7 @@
+import '@opra/core';
+import { KafkaAdapter } from '../kafka-adapter.js';
+declare module '@opra/common' {
+    interface RpcOperationDecorator {
+        Kafka(config: KafkaAdapter.OperationOptions | (() => KafkaAdapter.OperationOptions | Promise<KafkaAdapter.OperationOptions>)): this;
+    }
+}

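This new declaration file augments @opra/common so RPC operation decorators gain a .Kafka() chain method accepting either a static KafkaAdapter.OperationOptions object or a (possibly async) resolver for one. A hedged sketch of the two accepted argument shapes; the controller/decorator wiring itself is only hinted at in a comment because its exact signatures are not part of this diff.

import type { KafkaAdapter } from '@opra/kafka';

// Static options: `consumer` as a string names a shared entry in KafkaAdapter.Config.consumers.
const staticOptions: KafkaAdapter.OperationOptions = {
  consumer: 'orders-group',
  fromBeginning: true,
};

// Lazy options: a resolver may build the options asynchronously (note `consumer`
// may also be a full kafkajs ConsumerConfig instead of a groupId string).
const lazyOptions = async (): Promise<KafkaAdapter.OperationOptions> => ({
  consumer: { groupId: 'orders-archive' },
});

// In a controller these would be passed through the augmented decorator chain,
// e.g. RpcOperation(...).Kafka(staticOptions) — decorator usage assumed, not shown in this diff.
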
package/types/index.d.cts
CHANGED
@@ -1,6 +1,5 @@
-import './augmentation/
+import './augmentation/opra-common.augmentation.js';
 export * from './constants.js';
 export * from './kafka-adapter.js';
 export * from './kafka-context.js';
 export * from './request-parser.js';
-export * from './types.js';

package/types/index.d.ts
CHANGED
@@ -1,6 +1,5 @@
-import './augmentation/
+import './augmentation/opra-common.augmentation.js';
 export * from './constants.js';
 export * from './kafka-adapter.js';
 export * from './kafka-context.js';
 export * from './request-parser.js';
-export * from './types.js';

package/types/kafka-adapter.d.ts
CHANGED
@@ -1,21 +1,53 @@
-import { ApiDocument,
+import { ApiDocument, OpraSchema, RpcApi, RpcController, RpcOperation } from '@opra/common';
 import { type ILogger, PlatformAdapter } from '@opra/core';
-import { type Consumer, Kafka, type KafkaConfig } from 'kafkajs';
+import { type Consumer, ConsumerConfig, Kafka, type KafkaConfig } from 'kafkajs';
 import type { StrictOmit } from 'ts-gems';
 import { KafkaContext } from './kafka-context.js';
-
+/**
+ * @namespace KafkaAdapter
+ */
+export declare namespace KafkaAdapter {
+    type NextCallback = () => Promise<any>;
+    interface Config extends PlatformAdapter.Options {
+        client: StrictOmit<KafkaConfig, 'logCreator' | 'logLevel'>;
+        consumers?: Record<string, StrictOmit<ConsumerConfig, 'groupId'>>;
+        document: ApiDocument;
+        interceptors?: (InterceptorFunction | IKafkaInterceptor)[];
+        logger?: ILogger;
+        logExtra?: boolean;
+    }
+    interface OperationOptions {
+        /**
+         * groupId or ConsumerConfig
+         */
+        consumer?: string | ConsumerConfig;
+        fromBeginning?: boolean;
+    }
+    /**
+     * @type InterceptorFunction
+     */
+    type InterceptorFunction = IKafkaInterceptor['intercept'];
+    /**
+     * @interface IKafkaInterceptor
+     */
+    type IKafkaInterceptor = {
+        intercept(context: KafkaContext, next: NextCallback): Promise<any>;
+    };
+}
 /**
  *
  * @class KafkaAdapter
  */
 export declare class KafkaAdapter extends PlatformAdapter {
-
-    protected
+    static readonly PlatformName = "kafka";
+    protected _config: KafkaAdapter.Config;
+    protected _controllerInstances: Map<RpcController, any>;
+    protected _consumers: Map<string, {
         consumer: Consumer;
-        controller:
+        controller: RpcController;
         instance: any;
-        operation:
-        options:
+        operation: RpcOperation;
+        options: KafkaAdapter.OperationOptions;
     }>;
     protected _logger?: ILogger;
     readonly kafka: Kafka;
@@ -24,11 +56,11 @@ export declare class KafkaAdapter extends PlatformAdapter {
     interceptors: (KafkaAdapter.InterceptorFunction | KafkaAdapter.IKafkaInterceptor)[];
     /**
      *
-     * @param
+     * @param config
      * @constructor
      */
-    constructor(
-    get api():
+    constructor(config: KafkaAdapter.Config);
+    get api(): RpcApi;
     /**
      * Starts the service
      */
@@ -49,7 +81,7 @@ export declare class KafkaAdapter extends PlatformAdapter {
     *
     * @protected
     */
-    protected _initConsumer(controller:
+    protected _initConsumer(controller: RpcController, instance: any, operation: RpcOperation): Promise<void>;
    protected _start(): Promise<void>;
    /**
     * Starts all consumers
@@ -57,10 +89,10 @@ export declare class KafkaAdapter extends PlatformAdapter {
     */
    protected _startConsumer(args: {
        consumer: Consumer;
-        controller:
+        controller: RpcController;
        instance: any;
-        operation:
-        options:
+        operation: RpcOperation;
+        options: KafkaAdapter.OperationOptions;
    }): Promise<void>;
    protected _emitError(e: any): void;
    protected _createLogCreator(logger: ILogger, logExtra?: boolean): ({ namespace, level, log }: {
@@ -69,26 +101,3 @@ export declare class KafkaAdapter extends PlatformAdapter {
        log: any;
    }) => any;
 }
-/**
- * @namespace KafkaAdapter
- */
-export declare namespace KafkaAdapter {
-    const PlatformName = "kafka";
-    type NextCallback = () => Promise<void>;
-    interface InitArguments extends StrictOmit<KafkaConfig, 'logCreator' | 'logLevel'>, PlatformAdapter.Options {
-        document: ApiDocument;
-        interceptors?: (InterceptorFunction | IKafkaInterceptor)[];
-        logger?: ILogger;
-        logExtra?: boolean;
-    }
-    /**
-     * @type InterceptorFunction
-     */
-    type InterceptorFunction = IKafkaInterceptor['intercept'];
-    /**
-     * @interface IKafkaInterceptor
-     */
-    type IKafkaInterceptor = {
-        intercept(context: KafkaContext, next: NextCallback): Promise<void>;
-    };
-}

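The flat InitArguments interface (which extended kafkajs' KafkaConfig directly) is replaced by KafkaAdapter.Config: kafkajs client options now live under `client`, and shared consumer settings can be declared once in a `consumers` map keyed by groupId. A hedged construction sketch based only on the declarations above; the broker address and group name are illustrative, and `document` is assumed to be an already-built ApiDocument exposing a Kafka RpcApi.

import type { ApiDocument } from '@opra/common';
import { KafkaAdapter } from '@opra/kafka';

function createAdapter(document: ApiDocument): KafkaAdapter {
  return new KafkaAdapter({
    document,                                  // must expose an RpcApi with platform 'kafka'
    client: { brokers: ['localhost:9092'] },   // KafkaConfig minus logCreator/logLevel
    consumers: {
      // Shared ConsumerConfig (without groupId), referenced by name from .Kafka({ consumer: 'orders-group' })
      'orders-group': { sessionTimeout: 30000 },
    },
    interceptors: [
      async (context, next) => {
        // runs around each handled message
        return next();
      },
    ],
  });
}
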
package/types/kafka-context.d.ts
CHANGED
@@ -1,4 +1,4 @@
-import {
+import { OpraSchema, RpcController, RpcOperation } from '@opra/common';
 import { ExecutionContext } from '@opra/core';
 import type { KafkaMessage } from 'kafkajs';
 import type { AsyncEventEmitter } from 'node-events-async';
@@ -6,9 +6,9 @@ import type { KafkaAdapter } from './kafka-adapter.js';
 export declare namespace KafkaContext {
     interface Initiator extends Omit<ExecutionContext.Initiator, 'document' | 'protocol'> {
         adapter: KafkaAdapter;
-        controller?:
+        controller?: RpcController;
         controllerInstance?: any;
-        operation?:
+        operation?: RpcOperation;
         operationHandler?: Function;
         topic: string;
         partition: number;
@@ -24,9 +24,9 @@ export declare class KafkaContext extends ExecutionContext implements AsyncEvent
     readonly protocol: OpraSchema.Transport;
     readonly platform: string;
     readonly adapter: KafkaAdapter;
-    readonly controller?:
+    readonly controller?: RpcController;
     readonly controllerInstance?: any;
-    readonly operation?:
+    readonly operation?: RpcOperation;
     readonly operationHandler?: Function;
     readonly key: any;
     readonly payload: any;

package/types/parsers/binary.parser.d.ts
CHANGED

@@ -1,2 +1,2 @@
-import type { RequestParseFunction } from '../
+import type { RequestParseFunction } from '../request-parser.js';
 export declare const binaryParser: RequestParseFunction;

package/types/parsers/string.parser.d.ts
CHANGED

@@ -1,2 +1,2 @@
-import type { RequestParseFunction } from '../
+import type { RequestParseFunction } from '../request-parser.js';
 export declare const stringParser: RequestParseFunction;

package/types/request-parser.d.ts
CHANGED

@@ -1,2 +1,2 @@
-
+export type RequestParseFunction = (buffer: Buffer) => any;
 export declare const RequestParser: Record<string, RequestParseFunction>;

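RequestParseFunction now lives in request-parser.d.ts itself rather than the removed types.d.ts. A minimal sketch of a parser matching that signature; registering it on the exported RequestParser record under a content-type-like key is an assumption, since the diff only reveals the record's type.

import { RequestParser, type RequestParseFunction } from '@opra/kafka';

// A parser receives the raw Kafka message buffer and returns any decoded value.
const jsonParser: RequestParseFunction = (buffer: Buffer) => JSON.parse(buffer.toString('utf-8'));

// Assumption: entries appear to be keyed by a content-type-like string.
RequestParser['application/json'] = jsonParser;
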
package/cjs/types.js
DELETED
package/esm/types.js
DELETED
@@ -1 +0,0 @@
-export {};

package/types/types.d.ts
DELETED
@@ -1,8 +0,0 @@
-import type { MsgOperationResponse } from '@opra/common';
-import { ConsumerConfig } from 'kafkajs';
-export type RequestParseFunction = (buffer: Buffer) => any;
-export interface KafkaOperationOptions extends ConsumerConfig {
-    fromBeginning?: boolean;
-}
-export interface KafkaOperationResponseOptions extends MsgOperationResponse.Options {
-}