@opra/kafka 1.4.1 → 1.4.2

This diff shows the content of publicly released package versions as published to a supported registry and is provided for informational purposes only.
@@ -19,18 +19,24 @@ const noOp = () => undefined;
  class KafkaAdapter extends core_1.PlatformAdapter {
  /**
  *
+ * @param document
  * @param config
  * @constructor
  */
- constructor(config) {
+ constructor(document, config) {
  super(config);
  this._controllerInstances = new Map();
  this._consumers = new Map();
  this._handlerArgs = [];
- this._started = false;
+ this._status = 'idle';
  this.protocol = 'rpc';
  this.platform = KafkaAdapter.PlatformName;
+ this._document = document;
  this._config = config;
+ if (!(this.document.api instanceof common_1.RpcApi && this.document.api.platform === KafkaAdapter.PlatformName)) {
+ throw new TypeError(`The document doesn't expose a Kafka Api`);
+ }
+ // this._config = config;
  this.interceptors = [...(config.interceptors || [])];
  globalErrorTypes.forEach(type => {
  process.on(type, e => {
@@ -48,13 +54,12 @@ class KafkaAdapter extends core_1.PlatformAdapter {
  get kafka() {
  return this._kafka;
  }
- async initialize(document) {
- if (this._document)
- throw new TypeError(`${this.constructor.name} already initialized.`);
- if (!(document.api instanceof common_1.RpcApi && document.api.platform === KafkaAdapter.PlatformName)) {
- throw new TypeError(`The document doesn't expose a Kafka Api`);
- }
- this._document = document;
+ get status() {
+ return this._status;
+ }
+ async initialize() {
+ if (this._kafka)
+ return;
  this._kafka = new kafkajs_1.Kafka({
  ...this._config.client,
  logCreator: this.logger ? () => this._createLogCreator(this.logger, this._config.logExtra) : undefined,
@@ -65,68 +70,76 @@ class KafkaAdapter extends core_1.PlatformAdapter {
  * Starts the service
  */
  async start() {
- if (this._started)
+ if (this.status !== 'idle')
  return;
- this._started = true;
- /** Connect all consumers */
- for (const consumer of this._consumers.values()) {
- await consumer.connect().catch(e => {
- this._emitError(e);
- throw e;
- });
- }
- /** Subscribe to channels */
- for (const args of this._handlerArgs) {
- const { consumer, operation, operationConfig } = args;
- args.topics = Array.isArray(operation.channel) ? operation.channel : [operation.channel];
- await consumer
- .subscribe({
- ...operationConfig.subscribe,
- topics: args.topics,
- })
- .catch(e => {
- this._emitError(e);
- throw e;
- });
- this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
- }
- /** Start consumer listeners */
- const topicMap = new Map();
- for (const consumer of this._consumers.values()) {
- const groupId = consumer[kGroupId];
- await consumer
- .run({
- eachMessage: async (payload) => {
- await this.emitAsync('message', payload).catch(() => undefined);
- const { topic } = payload;
- const topicCacheKey = groupId + ':' + topic;
- let handlerArgsArray = topicMap.get(topicCacheKey);
- if (!handlerArgsArray) {
- handlerArgsArray = this._handlerArgs.filter(args => args.consumer === consumer &&
- args.topics.find(t => (t instanceof RegExp ? t.test(topic) : t === topic)));
- /* istanbul ignore next */
+ await this.initialize();
+ this._status = 'starting';
+ try {
+ /** Connect all consumers */
+ for (const consumer of this._consumers.values()) {
+ await consumer.connect().catch(e => {
+ this._emitError(e);
+ throw e;
+ });
+ }
+ /** Subscribe to channels */
+ for (const args of this._handlerArgs) {
+ const { consumer, operation, operationConfig } = args;
+ args.topics = Array.isArray(operation.channel) ? operation.channel : [operation.channel];
+ await consumer
+ .subscribe({
+ ...operationConfig.subscribe,
+ topics: args.topics,
+ })
+ .catch(e => {
+ this._emitError(e);
+ throw e;
+ });
+ this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
+ }
+ /** Start consumer listeners */
+ const topicMap = new Map();
+ for (const consumer of this._consumers.values()) {
+ const groupId = consumer[kGroupId];
+ await consumer
+ .run({
+ eachMessage: async (payload) => {
+ await this.emitAsync('message', payload).catch(() => undefined);
+ const { topic } = payload;
+ const topicCacheKey = groupId + ':' + topic;
+ let handlerArgsArray = topicMap.get(topicCacheKey);
  if (!handlerArgsArray) {
- this._emitError(new Error(`Unhandled topic (${topic})`));
- return;
- }
- topicMap.set(topicCacheKey, handlerArgsArray);
- }
- /** Iterate and call all matching handlers */
- for (const args of handlerArgsArray) {
- try {
- await args.handler(payload);
+ handlerArgsArray = this._handlerArgs.filter(args => args.consumer === consumer &&
+ args.topics.find(t => (t instanceof RegExp ? t.test(topic) : t === topic)));
+ /* istanbul ignore next */
+ if (!handlerArgsArray) {
+ this._emitError(new Error(`Unhandled topic (${topic})`));
+ return;
+ }
+ topicMap.set(topicCacheKey, handlerArgsArray);
  }
- catch (e) {
- this._emitError(e);
+ /** Iterate and call all matching handlers */
+ for (const args of handlerArgsArray) {
+ try {
+ await args.handler(payload);
+ }
+ catch (e) {
+ this._emitError(e);
+ }
  }
- }
- await this.emitAsync('message-finish', payload);
- },
- })
- .catch(e => {
- this._emitError(e);
- throw e;
- });
+ await this.emitAsync('message-finish', payload);
+ },
+ })
+ .catch(e => {
+ this._emitError(e);
+ throw e;
+ });
+ }
+ this._status = 'started';
+ }
+ catch (e) {
+ await this.close();
+ throw e;
  }
  }
  /**
@@ -136,6 +149,7 @@ class KafkaAdapter extends core_1.PlatformAdapter {
  await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.disconnect()));
  this._consumers.clear();
  this._controllerInstances.clear();
+ this._status = 'idle';
  }
  getControllerInstance(controllerPath) {
  const controller = this.api.findController(controllerPath);
@@ -16,18 +16,24 @@ const noOp = () => undefined;
  export class KafkaAdapter extends PlatformAdapter {
  /**
  *
+ * @param document
  * @param config
  * @constructor
  */
- constructor(config) {
+ constructor(document, config) {
  super(config);
  this._controllerInstances = new Map();
  this._consumers = new Map();
  this._handlerArgs = [];
- this._started = false;
+ this._status = 'idle';
  this.protocol = 'rpc';
  this.platform = KafkaAdapter.PlatformName;
+ this._document = document;
  this._config = config;
+ if (!(this.document.api instanceof RpcApi && this.document.api.platform === KafkaAdapter.PlatformName)) {
+ throw new TypeError(`The document doesn't expose a Kafka Api`);
+ }
+ // this._config = config;
  this.interceptors = [...(config.interceptors || [])];
  globalErrorTypes.forEach(type => {
  process.on(type, e => {
@@ -45,13 +51,12 @@ export class KafkaAdapter extends PlatformAdapter {
  get kafka() {
  return this._kafka;
  }
- async initialize(document) {
- if (this._document)
- throw new TypeError(`${this.constructor.name} already initialized.`);
- if (!(document.api instanceof RpcApi && document.api.platform === KafkaAdapter.PlatformName)) {
- throw new TypeError(`The document doesn't expose a Kafka Api`);
- }
- this._document = document;
+ get status() {
+ return this._status;
+ }
+ async initialize() {
+ if (this._kafka)
+ return;
  this._kafka = new Kafka({
  ...this._config.client,
  logCreator: this.logger ? () => this._createLogCreator(this.logger, this._config.logExtra) : undefined,
@@ -62,68 +67,76 @@ export class KafkaAdapter extends PlatformAdapter {
  * Starts the service
  */
  async start() {
- if (this._started)
+ if (this.status !== 'idle')
  return;
- this._started = true;
- /** Connect all consumers */
- for (const consumer of this._consumers.values()) {
- await consumer.connect().catch(e => {
- this._emitError(e);
- throw e;
- });
- }
- /** Subscribe to channels */
- for (const args of this._handlerArgs) {
- const { consumer, operation, operationConfig } = args;
- args.topics = Array.isArray(operation.channel) ? operation.channel : [operation.channel];
- await consumer
- .subscribe({
- ...operationConfig.subscribe,
- topics: args.topics,
- })
- .catch(e => {
- this._emitError(e);
- throw e;
- });
- this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
- }
- /** Start consumer listeners */
- const topicMap = new Map();
- for (const consumer of this._consumers.values()) {
- const groupId = consumer[kGroupId];
- await consumer
- .run({
- eachMessage: async (payload) => {
- await this.emitAsync('message', payload).catch(() => undefined);
- const { topic } = payload;
- const topicCacheKey = groupId + ':' + topic;
- let handlerArgsArray = topicMap.get(topicCacheKey);
- if (!handlerArgsArray) {
- handlerArgsArray = this._handlerArgs.filter(args => args.consumer === consumer &&
- args.topics.find(t => (t instanceof RegExp ? t.test(topic) : t === topic)));
- /* istanbul ignore next */
+ await this.initialize();
+ this._status = 'starting';
+ try {
+ /** Connect all consumers */
+ for (const consumer of this._consumers.values()) {
+ await consumer.connect().catch(e => {
+ this._emitError(e);
+ throw e;
+ });
+ }
+ /** Subscribe to channels */
+ for (const args of this._handlerArgs) {
+ const { consumer, operation, operationConfig } = args;
+ args.topics = Array.isArray(operation.channel) ? operation.channel : [operation.channel];
+ await consumer
+ .subscribe({
+ ...operationConfig.subscribe,
+ topics: args.topics,
+ })
+ .catch(e => {
+ this._emitError(e);
+ throw e;
+ });
+ this.logger?.info?.(`Subscribed to topic${args.topics.length > 1 ? 's' : ''} "${args.topics}"`);
+ }
+ /** Start consumer listeners */
+ const topicMap = new Map();
+ for (const consumer of this._consumers.values()) {
+ const groupId = consumer[kGroupId];
+ await consumer
+ .run({
+ eachMessage: async (payload) => {
+ await this.emitAsync('message', payload).catch(() => undefined);
+ const { topic } = payload;
+ const topicCacheKey = groupId + ':' + topic;
+ let handlerArgsArray = topicMap.get(topicCacheKey);
  if (!handlerArgsArray) {
- this._emitError(new Error(`Unhandled topic (${topic})`));
- return;
- }
- topicMap.set(topicCacheKey, handlerArgsArray);
- }
- /** Iterate and call all matching handlers */
- for (const args of handlerArgsArray) {
- try {
- await args.handler(payload);
+ handlerArgsArray = this._handlerArgs.filter(args => args.consumer === consumer &&
+ args.topics.find(t => (t instanceof RegExp ? t.test(topic) : t === topic)));
+ /* istanbul ignore next */
+ if (!handlerArgsArray) {
+ this._emitError(new Error(`Unhandled topic (${topic})`));
+ return;
+ }
+ topicMap.set(topicCacheKey, handlerArgsArray);
  }
- catch (e) {
- this._emitError(e);
+ /** Iterate and call all matching handlers */
+ for (const args of handlerArgsArray) {
+ try {
+ await args.handler(payload);
+ }
+ catch (e) {
+ this._emitError(e);
+ }
  }
- }
- await this.emitAsync('message-finish', payload);
- },
- })
- .catch(e => {
- this._emitError(e);
- throw e;
- });
+ await this.emitAsync('message-finish', payload);
+ },
+ })
+ .catch(e => {
+ this._emitError(e);
+ throw e;
+ });
+ }
+ this._status = 'started';
+ }
+ catch (e) {
+ await this.close();
+ throw e;
  }
  }
  /**
@@ -133,6 +146,7 @@ export class KafkaAdapter extends PlatformAdapter {
  await Promise.allSettled(Array.from(this._consumers.values()).map(c => c.disconnect()));
  this._consumers.clear();
  this._controllerInstances.clear();
+ this._status = 'idle';
  }
  getControllerInstance(controllerPath) {
  const controller = this.api.findController(controllerPath);
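
In 1.4.1 the ApiDocument was passed to initialize(document) after construction; in 1.4.2 it becomes the first constructor argument, initialize() takes no arguments and returns early once the Kafka client exists, and start() calls initialize() itself while tracking the adapter state in a status field. A minimal migration sketch of that change, assuming KafkaAdapter is the package's root export, that ApiDocument can be imported from @opra/common, and using a placeholder broker address; the surrounding names are illustrative, not taken from the package:

import type { ApiDocument } from '@opra/common';
import { KafkaAdapter } from '@opra/kafka';

async function run(document: ApiDocument) {
  // 1.4.1 flow (for comparison):
  //   const adapter = new KafkaAdapter(config);
  //   await adapter.initialize(document);
  //   await adapter.start();

  // 1.4.2 flow: the document moves to the constructor, which also validates
  // that the document exposes a Kafka Api and throws a TypeError otherwise.
  const adapter = new KafkaAdapter(document, {
    client: { brokers: ['localhost:9092'] }, // placeholder broker
  });
  await adapter.start(); // initializes lazily; status: 'idle' -> 'starting' -> 'started'
  await adapter.close(); // disconnects consumers and resets status to 'idle'
}
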
package/package.json CHANGED
@@ -1,12 +1,12 @@
  {
  "name": "@opra/kafka",
- "version": "1.4.1",
+ "version": "1.4.2",
  "description": "Opra Kafka package",
  "author": "Panates",
  "license": "MIT",
  "dependencies": {
- "@opra/common": "^1.4.1",
- "@opra/core": "^1.4.1",
+ "@opra/common": "^1.4.2",
+ "@opra/core": "^1.4.2",
  "node-events-async": "^1.0.0",
  "tslib": "^2.8.1",
  "valgen": "^5.12.0"
@@ -3,43 +3,6 @@ import { type ILogger, PlatformAdapter } from '@opra/core';
  import { type Consumer, ConsumerConfig, EachMessageHandler, Kafka, type KafkaConfig } from 'kafkajs';
  import type { StrictOmit } from 'ts-gems';
  import { KafkaContext } from './kafka-context.js';
- /**
- * @namespace KafkaAdapter
- */
- export declare namespace KafkaAdapter {
- type NextCallback = () => Promise<any>;
- interface Config extends PlatformAdapter.Options {
- client: StrictOmit<KafkaConfig, 'logCreator' | 'logLevel'>;
- consumers?: Record<string, StrictOmit<ConsumerConfig, 'groupId'>>;
- defaults?: {
- consumer?: ConsumerConfig;
- subscribe?: {
- fromBeginning?: boolean;
- };
- };
- interceptors?: (InterceptorFunction | IKafkaInterceptor)[];
- logExtra?: boolean;
- }
- interface OperationOptions {
- /**
- * groupId or ConsumerConfig
- */
- consumer?: string | ConsumerConfig;
- subscribe?: {
- fromBeginning?: boolean;
- };
- }
- /**
- * @type InterceptorFunction
- */
- type InterceptorFunction = IKafkaInterceptor['intercept'];
- /**
- * @interface IKafkaInterceptor
- */
- type IKafkaInterceptor = {
- intercept(context: KafkaContext, next: NextCallback): Promise<any>;
- };
- }
  export interface OperationConfig {
  consumer: ConsumerConfig;
  selfConsumer?: boolean;
@@ -64,20 +27,23 @@ export declare class KafkaAdapter extends PlatformAdapter {
  protected _controllerInstances: Map<RpcController, any>;
  protected _consumers: Map<string, Consumer>;
  protected _handlerArgs: HandlerArguments[];
- protected _started: boolean;
  protected _kafka: Kafka;
+ protected _status: KafkaAdapter.Status;
+ protected _starting?: boolean;
  readonly protocol: OpraSchema.Transport;
  readonly platform = "kafka";
  readonly interceptors: (KafkaAdapter.InterceptorFunction | KafkaAdapter.IKafkaInterceptor)[];
  /**
  *
+ * @param document
  * @param config
  * @constructor
  */
- constructor(config: KafkaAdapter.Config);
+ constructor(document: ApiDocument, config: KafkaAdapter.Config);
  get api(): RpcApi;
  get kafka(): Kafka;
- initialize(document: ApiDocument): Promise<void>;
+ get status(): KafkaAdapter.Status;
+ initialize(): Promise<void>;
  /**
  * Starts the service
  */
@@ -120,4 +86,42 @@ export declare class KafkaAdapter extends PlatformAdapter {
  log: any;
  }) => any;
  }
+ /**
+ * @namespace KafkaAdapter
+ */
+ export declare namespace KafkaAdapter {
+ type NextCallback = () => Promise<any>;
+ type Status = 'idle' | 'starting' | 'started';
+ interface Config extends PlatformAdapter.Options {
+ client: StrictOmit<KafkaConfig, 'logCreator' | 'logLevel'>;
+ consumers?: Record<string, StrictOmit<ConsumerConfig, 'groupId'>>;
+ defaults?: {
+ consumer?: ConsumerConfig;
+ subscribe?: {
+ fromBeginning?: boolean;
+ };
+ };
+ interceptors?: (InterceptorFunction | IKafkaInterceptor)[];
+ logExtra?: boolean;
+ }
+ interface OperationOptions {
+ /**
+ * groupId or ConsumerConfig
+ */
+ consumer?: string | ConsumerConfig;
+ subscribe?: {
+ fromBeginning?: boolean;
+ };
+ }
+ /**
+ * @type InterceptorFunction
+ */
+ type InterceptorFunction = IKafkaInterceptor['intercept'];
+ /**
+ * @interface IKafkaInterceptor
+ */
+ type IKafkaInterceptor = {
+ intercept(context: KafkaContext, next: NextCallback): Promise<any>;
+ };
+ }
  export {};
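
The declaration file moves the KafkaAdapter namespace below the class and adds a Status type ('idle' | 'starting' | 'started') backing the new status getter. A short sketch of how that getter can be consumed, for example as a readiness check; the helper function names here are illustrative and not part of the package:

import { KafkaAdapter } from '@opra/kafka';

// Report readiness based on the adapter's lifecycle state.
export function isReady(adapter: KafkaAdapter): boolean {
  const status: KafkaAdapter.Status = adapter.status; // 'idle' | 'starting' | 'started'
  return status === 'started';
}

// start() returns immediately unless status is 'idle', so repeated calls are safe;
// if startup fails it closes the adapter (resetting status to 'idle') and rethrows.
export async function startOnce(adapter: KafkaAdapter): Promise<void> {
  if (adapter.status === 'idle') await adapter.start();
}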