@backstage/plugin-events-backend-module-kafka 0.0.0-nightly-20251216024726 → 0.0.0-nightly-20260108025012

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md CHANGED
@@ -1,6 +1,20 @@
  # @backstage/plugin-events-backend-module-kafka

- ## 0.0.0-nightly-20251216024726
+ ## 0.0.0-nightly-20260108025012
+
+ ### Minor Changes
+
+ - ef5bbd8: Add support for Kafka offset configuration (`fromBeginning`) and `autoCommit`
+
+ ### Patch Changes
+
+ - Updated dependencies
+ - @backstage/backend-plugin-api@1.6.0
+ - @backstage/config@1.3.6
+ - @backstage/types@1.2.2
+ - @backstage/plugin-events-node@0.4.18
+
+ ## 0.2.0

  ### Minor Changes

@@ -12,10 +26,8 @@
  ### Patch Changes

  - Updated dependencies
- - @backstage/plugin-events-node@0.0.0-nightly-20251216024726
- - @backstage/backend-plugin-api@0.0.0-nightly-20251216024726
- - @backstage/config@1.3.6
- - @backstage/types@1.2.2
+ - @backstage/plugin-events-node@0.4.18
+ - @backstage/backend-plugin-api@1.6.0

  ## 0.1.6-next.1

package/README.md CHANGED
@@ -33,6 +33,10 @@ events:
  topics: # (Required) The Kafka topics to subscribe to.
  - topic1
  groupId: your-group-id # (Required) The GroupId to be used by the topic consumers.
+ # Optional offset management settings (these can be omitted to use defaults):
+ # fromBeginning: false # Start from earliest offset when no committed offset exists. Default: not set (latest)
+ # autoCommit: true # Enable auto-commit. Default: true (for backward compatibility)
+ # pauseOnError: false # Pause consumer on error. Default: false (for backward compatibility)
  ```

  ### KafkaPublishingEventConsumer Configuration
@@ -57,6 +61,77 @@ events:

  For a complete list of all available fields that can be configured, refer to the [config.d.ts file](./config.d.ts).

+ ### Offset Management
+
+ The plugin supports configurable offset management to control message delivery semantics:
+
+ #### Auto Commit (Default - Backward Compatible)
+
+ By default (`autoCommit: true` or not specified), Kafka automatically commits offsets at regular intervals. This is the original behavior and ensures backward compatibility.
+
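In kafkajs terms, this default maps onto the `autoCommit` option of `consumer.run()`. A minimal illustrative sketch, not part of the package: broker address, client id, and topic names are placeholders, and `autoCommitInterval` is a kafkajs option shown only to illustrate the interval-based commits, not something this module sets.

```ts
import { Kafka } from 'kafkajs';

// Illustrative only: what "auto-commit at regular intervals" means in kafkajs.
// Broker, client id, group id, and topic names are placeholders.
const consumer = new Kafka({ clientId: 'example', brokers: ['localhost:9092'] })
  .consumer({ groupId: 'your-group-id' });

await consumer.connect();
await consumer.subscribe({ topics: ['topic1'] });
await consumer.run({
  autoCommit: true, // kafkajs default; matches the plugin's default behaviour
  autoCommitInterval: 5000, // kafkajs option, shown only to illustrate periodic commits
  eachMessage: async ({ message }) => {
    // Process the message; offsets are committed by kafkajs, not by this handler.
    console.log(message.value?.toString());
  },
});
```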
+ #### Manual Commit (Opt-in for Reliability)
+
+ When you explicitly set `autoCommit: false`, the plugin will:
+
+ 1. Start consuming from the last committed offset for the consumer group
+ 2. Process each message by publishing it to the Backstage events system
+ 3. Only commit the offset after successful processing
+ 4. If processing fails, pause the consumer and do not commit the offset
+
+ **Example configuration for manual commit:**
+
+ ```yaml
+ kafka:
+ topics:
+ - topic1
+ groupId: my-group
+ autoCommit: false # Enable manual commit
+ ```
+
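A simplified kafkajs sketch of the at-least-once pattern described in the steps above, with placeholder names (`handleEvent` stands in for publishing to the Backstage events service); the module's actual handler, including its error handling, appears in the compiled source further down in this diff.

```ts
import { Kafka } from 'kafkajs';

// Simplified at-least-once sketch: process first, commit only on success.
const consumer = new Kafka({ clientId: 'example', brokers: ['localhost:9092'] })
  .consumer({ groupId: 'my-group' });

await consumer.connect();
await consumer.subscribe({ topics: ['topic1'] });
await consumer.run({
  autoCommit: false, // disable kafkajs's periodic commits
  eachMessage: async ({ topic, partition, message, heartbeat }) => {
    await handleEvent(JSON.parse(message.value?.toString() ?? 'null'));
    // commitOffsets expects the offset of the *next* message to consume
    await consumer.commitOffsets([
      { topic, partition, offset: (parseInt(message.offset, 10) + 1).toString() },
    ]);
    await heartbeat();
  },
});

async function handleEvent(payload: unknown): Promise<void> {
  // Placeholder for events.publish({ topic, eventPayload: payload, ... }).
}
```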
+ #### Error Handling
+
+ The `pauseOnError` option controls how the consumer behaves when message processing fails:
+
+ **Skip Failed Messages (Default - Backward Compatible)**
+
+ By default (`pauseOnError: false` or not specified), the consumer will skip failed messages and continue processing:
+
+ - The consumer logs the error but continues processing subsequent messages
+ - If `autoCommit: false`, the offset is still committed to skip the failed message
+ - If `autoCommit: true`, Kafka's auto-commit handles the offset
+ - Recommended when occasional message failures are acceptable and should not block processing
+
+ **Pause on Error (Opt-in)**
+
+ When you explicitly set `pauseOnError: true`, the consumer will pause when an error occurs during message processing:
+
+ - The consumer pauses and stops processing new messages
+ - The failed message offset is not committed
+ - The error is re-thrown and logged
+ - Recommended when you want to investigate and fix issues before continuing
+
+ **Example configuration to pause on error:**
+
+ ```yaml
+ kafka:
+ topics:
+ - topic1
+ groupId: my-group
+ autoCommit: false
+ pauseOnError: true # Pause consumer when a message fails
+ ```
+
+ **Note:** When using the default behavior (`pauseOnError: false`) with `autoCommit: false`, failed messages will have their offsets committed, meaning they will be skipped and not reprocessed. Use this configuration carefully based on your application's requirements.
+
+ #### Starting Position
+
+ The `fromBeginning` option controls where the consumer starts when no committed offset exists:
+
+ - `fromBeginning: true` - Start from the earliest available message
+ - `fromBeginning: false` (default) - Start from the latest message (only new messages)
+
+ Once the consumer group has committed an offset, it will always resume from that position, regardless of the `fromBeginning` setting.
+
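For reference, `fromBeginning` is forwarded to kafkajs's `consumer.subscribe()` call (the config reader below places it in the subscribe options); a minimal sketch with placeholder names:

```ts
import { Kafka } from 'kafkajs';

const consumer = new Kafka({ clientId: 'example', brokers: ['localhost:9092'] })
  .consumer({ groupId: 'my-group' });

await consumer.connect();
// Only consulted when the group has no committed offset for these topics yet;
// afterwards the group always resumes from its committed position.
await consumer.subscribe({ topics: ['topic1'], fromBeginning: true });
```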
  ### Optional SSL Configuration

  If your Kafka cluster requires SSL, you can configure it for both `kafkaConsumingEventPublisher` and `kafkaPublishingEventConsumer` instances:
package/config.d.ts CHANGED
@@ -200,6 +200,29 @@ export interface Config {
  * Default: 5000
  */
  maxWaitTime?: HumanDuration | string;
+
+ /**
+ * (Optional) If true, the consumer group will start from the earliest offset when no committed offset is found.
+ * If false or not specified, it will start from the latest offset.
+ * Default: undefined (start from latest)
+ */
+ fromBeginning?: boolean;
+
+ /**
+ * (Optional) Enable auto-commit of offsets.
+ * When true (default), offsets are automatically committed at regular intervals (at-most-once delivery).
+ * When false, offsets are only committed after successful message processing (at-least-once delivery).
+ * Default: true (auto-commit enabled for backward compatibility)
+ */
+ autoCommit?: boolean;
+
+ /**
+ * (Optional) When true, the consumer will pause on error and stop processing messages.
+ * When false (default), the consumer will skip failed messages and continue processing.
+ * Note: When pauseOnError is false and autoCommit is also false, failed messages will still have their offsets committed.
+ * Default: false (skip errors for backward compatibility)
+ */
+ pauseOnError?: boolean;
  };
  }>;
  }
@@ -374,6 +397,29 @@ export interface Config {
  * Default: 5000
  */
  maxWaitTime?: HumanDuration | string;
+
+ /**
+ * (Optional) If true, the consumer group will start from the earliest offset when no committed offset is found.
+ * If false or not specified, it will start from the latest offset.
+ * Default: undefined (start from latest)
+ */
+ fromBeginning?: boolean;
+
+ /**
+ * (Optional) Enable auto-commit of offsets.
+ * When true (default), offsets are automatically committed at regular intervals (at-most-once delivery).
+ * When false, offsets are only committed after successful message processing (at-least-once delivery).
+ * Default: true (auto-commit enabled for backward compatibility)
+ */
+ autoCommit?: boolean;
+
+ /**
+ * (Optional) When true, the consumer will pause on error and stop processing messages.
+ * When false (default), the consumer will skip failed messages and continue processing.
+ * Note: When pauseOnError is false and autoCommit is also false, failed messages will still have their offsets committed.
+ * Default: false (skip errors for backward compatibility)
+ */
+ pauseOnError?: boolean;
  };
  }>;
  };
@@ -42,12 +42,49 @@ class KafkaConsumingEventPublisher {
  await consumer.connect();
  await consumer.subscribe(config.consumerSubscribeTopics);
  await consumer.run({
- eachMessage: async ({ message }) => {
- this.events.publish({
- topic: config.backstageTopic,
- eventPayload: JSON.parse(message.value?.toString()),
- metadata: kafkaTransformers.convertHeadersToMetadata(message.headers)
- });
+ autoCommit: config.autoCommit,
+ eachMessage: async ({
+ topic,
+ partition,
+ message,
+ heartbeat,
+ pause
+ }) => {
+ try {
+ await this.events.publish({
+ topic: config.backstageTopic,
+ eventPayload: JSON.parse(message.value?.toString()),
+ metadata: kafkaTransformers.convertHeadersToMetadata(message.headers)
+ });
+ if (!config.autoCommit) {
+ await consumer.commitOffsets([
+ {
+ topic,
+ partition,
+ offset: (parseInt(message.offset, 10) + 1).toString()
+ }
+ ]);
+ }
+ await heartbeat();
+ } catch (error) {
+ consumerLogger.error(
+ `Failed to process message at offset ${message.offset} on partition ${partition} of topic ${topic}`,
+ error
+ );
+ if (config.pauseOnError) {
+ pause();
+ throw error;
+ }
+ if (!config.autoCommit) {
+ await consumer.commitOffsets([
+ {
+ topic,
+ partition,
+ offset: (parseInt(message.offset, 10) + 1).toString()
+ }
+ ]);
+ }
+ }
  }
  });
  } catch (error) {
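One detail worth noting in the handler above: kafkajs's `commitOffsets()` expects the offset of the next message the group should read, which is why both the success path and the skip-on-error path commit `message.offset + 1`. A small illustrative helper (the `nextOffset` name is not part of the package):

```ts
// kafkajs commitOffsets() takes the next offset to consume, so the last
// processed (or deliberately skipped) message's offset is incremented by one.
function nextOffset(lastHandled: string): string {
  return (parseInt(lastHandled, 10) + 1).toString();
}

// nextOffset('41') === '42': committing '42' means offset 41 will not be
// re-delivered to this consumer group after a restart or rebalance.
```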
@@ -1 +1 @@
- {"version":3,"file":"KafkaConsumingEventPublisher.cjs.js","sources":["../../src/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { EventsService } from '@backstage/plugin-events-node';\nimport { Consumer, Kafka } from 'kafkajs';\nimport {\n KafkaConsumerConfig,\n KafkaConsumingEventPublisherConfig,\n readConsumerConfig,\n} from './config';\nimport { Config } from '@backstage/config';\nimport { loggerServiceAdapter } from '../utils/LoggerServiceAdapter';\nimport { convertHeadersToMetadata } from '../utils/kafkaTransformers';\n\ntype KafkaConsumer = {\n consumer: Consumer;\n config: KafkaConsumerConfig;\n};\n\n/**\n * This class subscribes to Kafka topics and publishes events received to the registered subscriber.\n * The message payload will be used as the event payload and passed to the subscribers.\n */\nexport class KafkaConsumingEventPublisher {\n private readonly kafkaConsumers: KafkaConsumer[];\n private readonly logger: LoggerService;\n\n static fromConfig(env: {\n config: Config;\n events: EventsService;\n logger: LoggerService;\n }): KafkaConsumingEventPublisher[] {\n const configs = readConsumerConfig(env.config, env.logger);\n\n return configs.map(\n kafkaConfig =>\n new KafkaConsumingEventPublisher(env.logger, env.events, kafkaConfig),\n );\n }\n\n private constructor(\n logger: LoggerService,\n private readonly events: EventsService,\n config: KafkaConsumingEventPublisherConfig,\n ) {\n this.logger = logger.child({\n class: KafkaConsumingEventPublisher.prototype.constructor.name,\n instance: config.instance,\n });\n\n const kafka = new Kafka({\n ...config.kafkaConfig,\n logCreator: loggerServiceAdapter(this.logger),\n });\n\n this.kafkaConsumers = config.kafkaConsumerConfigs.map(consumerConfig => ({\n consumer: kafka.consumer(consumerConfig.consumerConfig),\n config: consumerConfig,\n }));\n }\n\n async start(): Promise<void> {\n await Promise.all(\n this.kafkaConsumers.map(async ({ consumer, config }) => {\n const consumerLogger = this.logger.child({\n id: `events.kafka.publisher:${config.backstageTopic}`,\n groupId: config.consumerConfig.groupId,\n kafkaTopics: config.consumerSubscribeTopics.topics.toString(),\n backstageTopic: config.backstageTopic,\n });\n try {\n await consumer.connect();\n await consumer.subscribe(config.consumerSubscribeTopics);\n\n await consumer.run({\n eachMessage: async ({ message }) => {\n this.events.publish({\n topic: config.backstageTopic,\n eventPayload: JSON.parse(message.value?.toString()!),\n metadata: convertHeadersToMetadata(message.headers),\n });\n },\n });\n } catch (error: any) {\n consumerLogger.error('Kafka consumer connection failed', error);\n }\n }),\n );\n }\n\n async shutdown(): Promise<void> {\n await Promise.all(\n this.kafkaConsumers.map(({ consumer }) => consumer.disconnect()),\n );\n 
}\n}\n"],"names":["Kafka","loggerServiceAdapter","readConsumerConfig","convertHeadersToMetadata"],"mappings":";;;;;;;AAoCO,MAAM,4BAAA,CAA6B;AAAA,EAiBhC,WAAA,CACN,MAAA,EACiB,MAAA,EACjB,MAAA,EACA;AAFiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAGjB,IAAA,IAAA,CAAK,MAAA,GAAS,OAAO,KAAA,CAAM;AAAA,MACzB,KAAA,EAAO,4BAAA,CAA6B,SAAA,CAAU,WAAA,CAAY,IAAA;AAAA,MAC1D,UAAU,MAAA,CAAO;AAAA,KAClB,CAAA;AAED,IAAA,MAAM,KAAA,GAAQ,IAAIA,aAAA,CAAM;AAAA,MACtB,GAAG,MAAA,CAAO,WAAA;AAAA,MACV,UAAA,EAAYC,yCAAA,CAAqB,IAAA,CAAK,MAAM;AAAA,KAC7C,CAAA;AAED,IAAA,IAAA,CAAK,cAAA,GAAiB,MAAA,CAAO,oBAAA,CAAqB,GAAA,CAAI,CAAA,cAAA,MAAmB;AAAA,MACvE,QAAA,EAAU,KAAA,CAAM,QAAA,CAAS,cAAA,CAAe,cAAc,CAAA;AAAA,MACtD,MAAA,EAAQ;AAAA,KACV,CAAE,CAAA;AAAA,EACJ;AAAA,EAnCiB,cAAA;AAAA,EACA,MAAA;AAAA,EAEjB,OAAO,WAAW,GAAA,EAIiB;AACjC,IAAA,MAAM,OAAA,GAAUC,yBAAA,CAAmB,GAAA,CAAI,MAAA,EAAQ,IAAI,MAAM,CAAA;AAEzD,IAAA,OAAO,OAAA,CAAQ,GAAA;AAAA,MACb,iBACE,IAAI,4BAAA,CAA6B,IAAI,MAAA,EAAQ,GAAA,CAAI,QAAQ,WAAW;AAAA,KACxE;AAAA,EACF;AAAA,EAuBA,MAAM,KAAA,GAAuB;AAC3B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,KAAK,cAAA,CAAe,GAAA,CAAI,OAAO,EAAE,QAAA,EAAU,QAAO,KAAM;AACtD,QAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,MAAA,CAAO,KAAA,CAAM;AAAA,UACvC,EAAA,EAAI,CAAA,uBAAA,EAA0B,MAAA,CAAO,cAAc,CAAA,CAAA;AAAA,UACnD,OAAA,EAAS,OAAO,cAAA,CAAe,OAAA;AAAA,UAC/B,WAAA,EAAa,MAAA,CAAO,uBAAA,CAAwB,MAAA,CAAO,QAAA,EAAS;AAAA,UAC5D,gBAAgB,MAAA,CAAO;AAAA,SACxB,CAAA;AACD,QAAA,IAAI;AACF,UAAA,MAAM,SAAS,OAAA,EAAQ;AACvB,UAAA,MAAM,QAAA,CAAS,SAAA,CAAU,MAAA,CAAO,uBAAuB,CAAA;AAEvD,UAAA,MAAM,SAAS,GAAA,CAAI;AAAA,YACjB,WAAA,EAAa,OAAO,EAAE,OAAA,EAAQ,KAAM;AAClC,cAAA,IAAA,CAAK,OAAO,OAAA,CAAQ;AAAA,gBAClB,OAAO,MAAA,CAAO,cAAA;AAAA,gBACd,cAAc,IAAA,CAAK,KAAA,CAAM,OAAA,CAAQ,KAAA,EAAO,UAAW,CAAA;AAAA,gBACnD,QAAA,EAAUC,0CAAA,CAAyB,OAAA,CAAQ,OAAO;AAAA,eACnD,CAAA;AAAA,YACH;AAAA,WACD,CAAA;AAAA,QACH,SAAS,KAAA,EAAY;AACnB,UAAA,cAAA,CAAe,KAAA,CAAM,oCAAoC,KAAK,CAAA;AAAA,QAChE;AAAA,MACF,CAAC;AAAA,KACH;AAAA,EACF;AAAA,EAEA,MAAM,QAAA,GAA0B;AAC9B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,IAAA,CAAK,eAAe,GAAA,CAAI,CAAC,EAAE,QAAA,EAAS,KAAM,QAAA,CAAS,UAAA,EAAY;AAAA,KACjE;AAAA,EACF;AACF;;;;"}
+ {"version":3,"file":"KafkaConsumingEventPublisher.cjs.js","sources":["../../src/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { EventsService } from '@backstage/plugin-events-node';\nimport { Consumer, Kafka } from 'kafkajs';\nimport {\n KafkaConsumerConfig,\n KafkaConsumingEventPublisherConfig,\n readConsumerConfig,\n} from './config';\nimport { Config } from '@backstage/config';\nimport { loggerServiceAdapter } from '../utils/LoggerServiceAdapter';\nimport { convertHeadersToMetadata } from '../utils/kafkaTransformers';\n\ntype KafkaConsumer = {\n consumer: Consumer;\n config: KafkaConsumerConfig;\n};\n\n/**\n * This class subscribes to Kafka topics and publishes events received to the registered subscriber.\n * The message payload will be used as the event payload and passed to the subscribers.\n */\nexport class KafkaConsumingEventPublisher {\n private readonly kafkaConsumers: KafkaConsumer[];\n private readonly logger: LoggerService;\n\n static fromConfig(env: {\n config: Config;\n events: EventsService;\n logger: LoggerService;\n }): KafkaConsumingEventPublisher[] {\n const configs = readConsumerConfig(env.config, env.logger);\n\n return configs.map(\n kafkaConfig =>\n new KafkaConsumingEventPublisher(env.logger, env.events, kafkaConfig),\n );\n }\n\n private constructor(\n logger: LoggerService,\n private readonly events: EventsService,\n config: KafkaConsumingEventPublisherConfig,\n ) {\n this.logger = logger.child({\n class: KafkaConsumingEventPublisher.prototype.constructor.name,\n instance: config.instance,\n });\n\n const kafka = new Kafka({\n ...config.kafkaConfig,\n logCreator: loggerServiceAdapter(this.logger),\n });\n\n this.kafkaConsumers = config.kafkaConsumerConfigs.map(consumerConfig => ({\n consumer: kafka.consumer(consumerConfig.consumerConfig),\n config: consumerConfig,\n }));\n }\n\n async start(): Promise<void> {\n await Promise.all(\n this.kafkaConsumers.map(async ({ consumer, config }) => {\n const consumerLogger = this.logger.child({\n id: `events.kafka.publisher:${config.backstageTopic}`,\n groupId: config.consumerConfig.groupId,\n kafkaTopics: config.consumerSubscribeTopics.topics.toString(),\n backstageTopic: config.backstageTopic,\n });\n try {\n await consumer.connect();\n await consumer.subscribe(config.consumerSubscribeTopics);\n\n await consumer.run({\n autoCommit: config.autoCommit,\n eachMessage: async ({\n topic,\n partition,\n message,\n heartbeat,\n pause,\n }) => {\n try {\n await this.events.publish({\n topic: config.backstageTopic,\n eventPayload: JSON.parse(message.value?.toString()!),\n metadata: convertHeadersToMetadata(message.headers),\n });\n\n // Only commit offset manually if autoCommit is disabled\n if (!config.autoCommit) {\n await consumer.commitOffsets([\n {\n topic,\n partition,\n offset: (parseInt(message.offset, 10) + 
1).toString(),\n },\n ]);\n }\n\n await heartbeat();\n } catch (error: any) {\n consumerLogger.error(\n `Failed to process message at offset ${message.offset} on partition ${partition} of topic ${topic}`,\n error,\n );\n\n if (config.pauseOnError) {\n pause();\n throw error;\n }\n\n // Skip the failed message by committing its offset if autoCommit is disabled\n if (!config.autoCommit) {\n await consumer.commitOffsets([\n {\n topic,\n partition,\n offset: (parseInt(message.offset, 10) + 1).toString(),\n },\n ]);\n }\n }\n },\n });\n } catch (error: any) {\n consumerLogger.error('Kafka consumer connection failed', error);\n }\n }),\n );\n }\n\n async shutdown(): Promise<void> {\n await Promise.all(\n this.kafkaConsumers.map(({ consumer }) => consumer.disconnect()),\n );\n }\n}\n"],"names":["Kafka","loggerServiceAdapter","readConsumerConfig","convertHeadersToMetadata"],"mappings":";;;;;;;AAoCO,MAAM,4BAAA,CAA6B;AAAA,EAiBhC,WAAA,CACN,MAAA,EACiB,MAAA,EACjB,MAAA,EACA;AAFiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAGjB,IAAA,IAAA,CAAK,MAAA,GAAS,OAAO,KAAA,CAAM;AAAA,MACzB,KAAA,EAAO,4BAAA,CAA6B,SAAA,CAAU,WAAA,CAAY,IAAA;AAAA,MAC1D,UAAU,MAAA,CAAO;AAAA,KAClB,CAAA;AAED,IAAA,MAAM,KAAA,GAAQ,IAAIA,aAAA,CAAM;AAAA,MACtB,GAAG,MAAA,CAAO,WAAA;AAAA,MACV,UAAA,EAAYC,yCAAA,CAAqB,IAAA,CAAK,MAAM;AAAA,KAC7C,CAAA;AAED,IAAA,IAAA,CAAK,cAAA,GAAiB,MAAA,CAAO,oBAAA,CAAqB,GAAA,CAAI,CAAA,cAAA,MAAmB;AAAA,MACvE,QAAA,EAAU,KAAA,CAAM,QAAA,CAAS,cAAA,CAAe,cAAc,CAAA;AAAA,MACtD,MAAA,EAAQ;AAAA,KACV,CAAE,CAAA;AAAA,EACJ;AAAA,EAnCiB,cAAA;AAAA,EACA,MAAA;AAAA,EAEjB,OAAO,WAAW,GAAA,EAIiB;AACjC,IAAA,MAAM,OAAA,GAAUC,yBAAA,CAAmB,GAAA,CAAI,MAAA,EAAQ,IAAI,MAAM,CAAA;AAEzD,IAAA,OAAO,OAAA,CAAQ,GAAA;AAAA,MACb,iBACE,IAAI,4BAAA,CAA6B,IAAI,MAAA,EAAQ,GAAA,CAAI,QAAQ,WAAW;AAAA,KACxE;AAAA,EACF;AAAA,EAuBA,MAAM,KAAA,GAAuB;AAC3B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,KAAK,cAAA,CAAe,GAAA,CAAI,OAAO,EAAE,QAAA,EAAU,QAAO,KAAM;AACtD,QAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,MAAA,CAAO,KAAA,CAAM;AAAA,UACvC,EAAA,EAAI,CAAA,uBAAA,EAA0B,MAAA,CAAO,cAAc,CAAA,CAAA;AAAA,UACnD,OAAA,EAAS,OAAO,cAAA,CAAe,OAAA;AAAA,UAC/B,WAAA,EAAa,MAAA,CAAO,uBAAA,CAAwB,MAAA,CAAO,QAAA,EAAS;AAAA,UAC5D,gBAAgB,MAAA,CAAO;AAAA,SACxB,CAAA;AACD,QAAA,IAAI;AACF,UAAA,MAAM,SAAS,OAAA,EAAQ;AACvB,UAAA,MAAM,QAAA,CAAS,SAAA,CAAU,MAAA,CAAO,uBAAuB,CAAA;AAEvD,UAAA,MAAM,SAAS,GAAA,CAAI;AAAA,YACjB,YAAY,MAAA,CAAO,UAAA;AAAA,YACnB,aAAa,OAAO;AAAA,cAClB,KAAA;AAAA,cACA,SAAA;AAAA,cACA,OAAA;AAAA,cACA,SAAA;AAAA,cACA;AAAA,aACF,KAAM;AACJ,cAAA,IAAI;AACF,gBAAA,MAAM,IAAA,CAAK,OAAO,OAAA,CAAQ;AAAA,kBACxB,OAAO,MAAA,CAAO,cAAA;AAAA,kBACd,cAAc,IAAA,CAAK,KAAA,CAAM,OAAA,CAAQ,KAAA,EAAO,UAAW,CAAA;AAAA,kBACnD,QAAA,EAAUC,0CAAA,CAAyB,OAAA,CAAQ,OAAO;AAAA,iBACnD,CAAA;AAGD,gBAAA,IAAI,CAAC,OAAO,UAAA,EAAY;AACtB,kBAAA,MAAM,SAAS,aAAA,CAAc;AAAA,oBAC3B;AAAA,sBACE,KAAA;AAAA,sBACA,SAAA;AAAA,sBACA,SAAS,QAAA,CAAS,OAAA,CAAQ,QAAQ,EAAE,CAAA,GAAI,GAAG,QAAA;AAAS;AACtD,mBACD,CAAA;AAAA,gBACH;AAEA,gBAAA,MAAM,SAAA,EAAU;AAAA,cAClB,SAAS,KAAA,EAAY;AACnB,gBAAA,cAAA,CAAe,KAAA;AAAA,kBACb,uCAAuC,OAAA,CAAQ,MAAM,CAAA,cAAA,EAAiB,SAAS,aAAa,KAAK,CAAA,CAAA;AAAA,kBACjG;AAAA,iBACF;AAEA,gBAAA,IAAI,OAAO,YAAA,EAAc;AACvB,kBAAA,KAAA,EAAM;AACN,kBAAA,MAAM,KAAA;AAAA,gBACR;AAGA,gBAAA,IAAI,CAAC,OAAO,UAAA,EAAY;AACtB,kBAAA,MAAM,SAAS,aAAA,CAAc;AAAA,oBAC3B;AAAA,sBACE,KAAA;AAAA,sBACA,SAAA;AAAA,sBACA,SAAS,QAAA,CAAS,OAAA,CAAQ,QAAQ,EAAE,CAAA,GAAI,GAAG,QAAA;AAAS;AACtD,mBACD,CAAA;AAAA,gBACH;AAAA,cACF;AAAA,YACF;AAAA,WACD,CAAA;AAAA,QACH,SAAS,KAAA,EAAY;AACnB,UAAA,cAAA,CAAe,KAAA,CAAM,oCAAoC,KAAK,CAAA;AAAA,QAChE;AAAA,MACF,CAAC;AAAA,KACH;AAAA,EACF;AAAA,EAEA,MAAM,QAAA,GAA0B;AAC9B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,IAAA,CAAK,eAAe,GAAA,CAAI,
CAAC,EAAE,QAAA,EAAS,KAAM,QAAA,CAAS,UAAA,EAAY;AAAA,KACjE;AAAA,EACF;AACF;;;;"}
@@ -39,8 +39,15 @@ const processSinglePublisher = (instanceName, publisherConfig) => {
  )
  },
  consumerSubscribeTopics: {
- topics: topicConfig.getStringArray("kafka.topics")
- }
+ topics: topicConfig.getStringArray("kafka.topics"),
+ fromBeginning: topicConfig.getOptionalBoolean(
+ "kafka.fromBeginning"
+ )
+ },
+ // Default autoCommit to true to match KafkaJS default and ensure consistency
+ // between KafkaJS's auto-commit behavior and our manual commit logic
+ autoCommit: topicConfig.getOptionalBoolean("kafka.autoCommit") ?? true,
+ pauseOnError: topicConfig.getOptionalBoolean("kafka.pauseOnError") ?? false
  };
  })
  };
@@ -1 +1 @@
- {"version":3,"file":"config.cjs.js","sources":["../../src/KafkaConsumingEventPublisher/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Config } from '@backstage/config';\nimport { ConsumerConfig, ConsumerSubscribeTopics, KafkaConfig } from 'kafkajs';\nimport {\n readKafkaConfig,\n readOptionalHumanDurationInMs,\n} from '../utils/config';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\nexport interface KafkaConsumerConfig {\n backstageTopic: string;\n consumerConfig: ConsumerConfig;\n consumerSubscribeTopics: ConsumerSubscribeTopics;\n}\n\nexport interface KafkaConsumingEventPublisherConfig {\n instance: string;\n kafkaConfig: KafkaConfig;\n kafkaConsumerConfigs: KafkaConsumerConfig[];\n}\n\nconst CONFIG_PREFIX_PUBLISHER =\n 'events.modules.kafka.kafkaConsumingEventPublisher';\n\nconst processSinglePublisher = (\n instanceName: string,\n publisherConfig: Config,\n): KafkaConsumingEventPublisherConfig => {\n return {\n instance: instanceName,\n kafkaConfig: readKafkaConfig(publisherConfig),\n kafkaConsumerConfigs: publisherConfig\n .getConfigArray('topics')\n .map(topicConfig => {\n return {\n backstageTopic: topicConfig.getString('topic'),\n consumerConfig: {\n groupId: topicConfig.getString('kafka.groupId'),\n sessionTimeout: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.sessionTimeout',\n ),\n rebalanceTimeout: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.rebalanceTimeout',\n ),\n heartbeatInterval: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.heartbeatInterval',\n ),\n metadataMaxAge: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.metadataMaxAge',\n ),\n maxBytesPerPartition: topicConfig.getOptionalNumber(\n 'kafka.maxBytesPerPartition',\n ),\n minBytes: topicConfig.getOptionalNumber('kafka.minBytes'),\n maxBytes: topicConfig.getOptionalNumber('kafka.maxBytes'),\n maxWaitTimeInMs: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.maxWaitTime',\n ),\n },\n consumerSubscribeTopics: {\n topics: topicConfig.getStringArray('kafka.topics'),\n },\n };\n }),\n };\n};\n\nexport const readConsumerConfig = (\n config: Config,\n logger: LoggerService,\n): KafkaConsumingEventPublisherConfig[] => {\n const publishersConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);\n\n // Check for legacy single publisher format\n if (publishersConfig?.getOptionalString('clientId')) {\n logger.warn(\n 'Legacy single config format detected at events.modules.kafka.kafkaConsumingEventPublisher.',\n );\n return [\n processSinglePublisher(\n 'default', // use `default` as instance name for legacy single config\n publishersConfig,\n ),\n ];\n }\n\n return (\n publishersConfig\n ?.keys()\n ?.map(publisherKey =>\n processSinglePublisher(\n publisherKey,\n publishersConfig.getConfig(publisherKey),\n ),\n ) ?? 
[]\n );\n};\n"],"names":["readKafkaConfig","readOptionalHumanDurationInMs"],"mappings":";;;;AAmCA,MAAM,uBAAA,GACJ,mDAAA;AAEF,MAAM,sBAAA,GAAyB,CAC7B,YAAA,EACA,eAAA,KACuC;AACvC,EAAA,OAAO;AAAA,IACL,QAAA,EAAU,YAAA;AAAA,IACV,WAAA,EAAaA,uBAAgB,eAAe,CAAA;AAAA,IAC5C,sBAAsB,eAAA,CACnB,cAAA,CAAe,QAAQ,CAAA,CACvB,IAAI,CAAA,WAAA,KAAe;AAClB,MAAA,OAAO;AAAA,QACL,cAAA,EAAgB,WAAA,CAAY,SAAA,CAAU,OAAO,CAAA;AAAA,QAC7C,cAAA,EAAgB;AAAA,UACd,OAAA,EAAS,WAAA,CAAY,SAAA,CAAU,eAAe,CAAA;AAAA,UAC9C,cAAA,EAAgBC,oCAAA;AAAA,YACd,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,gBAAA,EAAkBA,oCAAA;AAAA,YAChB,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,iBAAA,EAAmBA,oCAAA;AAAA,YACjB,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,cAAA,EAAgBA,oCAAA;AAAA,YACd,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,sBAAsB,WAAA,CAAY,iBAAA;AAAA,YAChC;AAAA,WACF;AAAA,UACA,QAAA,EAAU,WAAA,CAAY,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,UACxD,QAAA,EAAU,WAAA,CAAY,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,UACxD,eAAA,EAAiBA,oCAAA;AAAA,YACf,WAAA;AAAA,YACA;AAAA;AACF,SACF;AAAA,QACA,uBAAA,EAAyB;AAAA,UACvB,MAAA,EAAQ,WAAA,CAAY,cAAA,CAAe,cAAc;AAAA;AACnD,OACF;AAAA,IACF,CAAC;AAAA,GACL;AACF,CAAA;AAEO,MAAM,kBAAA,GAAqB,CAChC,MAAA,EACA,MAAA,KACyC;AACzC,EAAA,MAAM,gBAAA,GAAmB,MAAA,CAAO,iBAAA,CAAkB,uBAAuB,CAAA;AAGzE,EAAA,IAAI,gBAAA,EAAkB,iBAAA,CAAkB,UAAU,CAAA,EAAG;AACnD,IAAA,MAAA,CAAO,IAAA;AAAA,MACL;AAAA,KACF;AACA,IAAA,OAAO;AAAA,MACL,sBAAA;AAAA,QACE,SAAA;AAAA;AAAA,QACA;AAAA;AACF,KACF;AAAA,EACF;AAEA,EAAA,OACE,gBAAA,EACI,MAAK,EACL,GAAA;AAAA,IAAI,CAAA,YAAA,KACJ,sBAAA;AAAA,MACE,YAAA;AAAA,MACA,gBAAA,CAAiB,UAAU,YAAY;AAAA;AACzC,OACG,EAAC;AAEZ;;;;"}
+ {"version":3,"file":"config.cjs.js","sources":["../../src/KafkaConsumingEventPublisher/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Config } from '@backstage/config';\nimport { ConsumerConfig, ConsumerSubscribeTopics, KafkaConfig } from 'kafkajs';\nimport {\n readKafkaConfig,\n readOptionalHumanDurationInMs,\n} from '../utils/config';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\nexport interface KafkaConsumerConfig {\n backstageTopic: string;\n consumerConfig: ConsumerConfig;\n consumerSubscribeTopics: ConsumerSubscribeTopics;\n autoCommit: boolean;\n pauseOnError: boolean;\n}\n\nexport interface KafkaConsumingEventPublisherConfig {\n instance: string;\n kafkaConfig: KafkaConfig;\n kafkaConsumerConfigs: KafkaConsumerConfig[];\n}\n\nconst CONFIG_PREFIX_PUBLISHER =\n 'events.modules.kafka.kafkaConsumingEventPublisher';\n\nconst processSinglePublisher = (\n instanceName: string,\n publisherConfig: Config,\n): KafkaConsumingEventPublisherConfig => {\n return {\n instance: instanceName,\n kafkaConfig: readKafkaConfig(publisherConfig),\n kafkaConsumerConfigs: publisherConfig\n .getConfigArray('topics')\n .map(topicConfig => {\n return {\n backstageTopic: topicConfig.getString('topic'),\n consumerConfig: {\n groupId: topicConfig.getString('kafka.groupId'),\n sessionTimeout: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.sessionTimeout',\n ),\n rebalanceTimeout: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.rebalanceTimeout',\n ),\n heartbeatInterval: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.heartbeatInterval',\n ),\n metadataMaxAge: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.metadataMaxAge',\n ),\n maxBytesPerPartition: topicConfig.getOptionalNumber(\n 'kafka.maxBytesPerPartition',\n ),\n minBytes: topicConfig.getOptionalNumber('kafka.minBytes'),\n maxBytes: topicConfig.getOptionalNumber('kafka.maxBytes'),\n maxWaitTimeInMs: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.maxWaitTime',\n ),\n },\n consumerSubscribeTopics: {\n topics: topicConfig.getStringArray('kafka.topics'),\n fromBeginning: topicConfig.getOptionalBoolean(\n 'kafka.fromBeginning',\n ),\n },\n // Default autoCommit to true to match KafkaJS default and ensure consistency\n // between KafkaJS's auto-commit behavior and our manual commit logic\n autoCommit:\n topicConfig.getOptionalBoolean('kafka.autoCommit') ?? true,\n pauseOnError:\n topicConfig.getOptionalBoolean('kafka.pauseOnError') ?? 
false,\n };\n }),\n };\n};\n\nexport const readConsumerConfig = (\n config: Config,\n logger: LoggerService,\n): KafkaConsumingEventPublisherConfig[] => {\n const publishersConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);\n\n // Check for legacy single publisher format\n if (publishersConfig?.getOptionalString('clientId')) {\n logger.warn(\n 'Legacy single config format detected at events.modules.kafka.kafkaConsumingEventPublisher.',\n );\n return [\n processSinglePublisher(\n 'default', // use `default` as instance name for legacy single config\n publishersConfig,\n ),\n ];\n }\n\n return (\n publishersConfig\n ?.keys()\n ?.map(publisherKey =>\n processSinglePublisher(\n publisherKey,\n publishersConfig.getConfig(publisherKey),\n ),\n ) ?? []\n );\n};\n"],"names":["readKafkaConfig","readOptionalHumanDurationInMs"],"mappings":";;;;AAqCA,MAAM,uBAAA,GACJ,mDAAA;AAEF,MAAM,sBAAA,GAAyB,CAC7B,YAAA,EACA,eAAA,KACuC;AACvC,EAAA,OAAO;AAAA,IACL,QAAA,EAAU,YAAA;AAAA,IACV,WAAA,EAAaA,uBAAgB,eAAe,CAAA;AAAA,IAC5C,sBAAsB,eAAA,CACnB,cAAA,CAAe,QAAQ,CAAA,CACvB,IAAI,CAAA,WAAA,KAAe;AAClB,MAAA,OAAO;AAAA,QACL,cAAA,EAAgB,WAAA,CAAY,SAAA,CAAU,OAAO,CAAA;AAAA,QAC7C,cAAA,EAAgB;AAAA,UACd,OAAA,EAAS,WAAA,CAAY,SAAA,CAAU,eAAe,CAAA;AAAA,UAC9C,cAAA,EAAgBC,oCAAA;AAAA,YACd,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,gBAAA,EAAkBA,oCAAA;AAAA,YAChB,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,iBAAA,EAAmBA,oCAAA;AAAA,YACjB,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,cAAA,EAAgBA,oCAAA;AAAA,YACd,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,sBAAsB,WAAA,CAAY,iBAAA;AAAA,YAChC;AAAA,WACF;AAAA,UACA,QAAA,EAAU,WAAA,CAAY,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,UACxD,QAAA,EAAU,WAAA,CAAY,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,UACxD,eAAA,EAAiBA,oCAAA;AAAA,YACf,WAAA;AAAA,YACA;AAAA;AACF,SACF;AAAA,QACA,uBAAA,EAAyB;AAAA,UACvB,MAAA,EAAQ,WAAA,CAAY,cAAA,CAAe,cAAc,CAAA;AAAA,UACjD,eAAe,WAAA,CAAY,kBAAA;AAAA,YACzB;AAAA;AACF,SACF;AAAA;AAAA;AAAA,QAGA,UAAA,EACE,WAAA,CAAY,kBAAA,CAAmB,kBAAkB,CAAA,IAAK,IAAA;AAAA,QACxD,YAAA,EACE,WAAA,CAAY,kBAAA,CAAmB,oBAAoB,CAAA,IAAK;AAAA,OAC5D;AAAA,IACF,CAAC;AAAA,GACL;AACF,CAAA;AAEO,MAAM,kBAAA,GAAqB,CAChC,MAAA,EACA,MAAA,KACyC;AACzC,EAAA,MAAM,gBAAA,GAAmB,MAAA,CAAO,iBAAA,CAAkB,uBAAuB,CAAA;AAGzE,EAAA,IAAI,gBAAA,EAAkB,iBAAA,CAAkB,UAAU,CAAA,EAAG;AACnD,IAAA,MAAA,CAAO,IAAA;AAAA,MACL;AAAA,KACF;AACA,IAAA,OAAO;AAAA,MACL,sBAAA;AAAA,QACE,SAAA;AAAA;AAAA,QACA;AAAA;AACF,KACF;AAAA,EACF;AAEA,EAAA,OACE,gBAAA,EACI,MAAK,EACL,GAAA;AAAA,IAAI,CAAA,YAAA,KACJ,sBAAA;AAAA,MACE,YAAA;AAAA,MACA,gBAAA,CAAiB,UAAU,YAAY;AAAA;AACzC,OACG,EAAC;AAEZ;;;;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@backstage/plugin-events-backend-module-kafka",
- "version": "0.0.0-nightly-20251216024726",
+ "version": "0.0.0-nightly-20260108025012",
  "description": "The kafka backend module for the events plugin.",
  "backstage": {
  "role": "backend-plugin-module",
@@ -38,16 +38,16 @@
  "test": "backstage-cli package test"
  },
  "dependencies": {
- "@backstage/backend-plugin-api": "0.0.0-nightly-20251216024726",
+ "@backstage/backend-plugin-api": "1.6.0",
  "@backstage/config": "1.3.6",
- "@backstage/plugin-events-node": "0.0.0-nightly-20251216024726",
+ "@backstage/plugin-events-node": "0.4.18",
  "@backstage/types": "1.2.2",
  "kafkajs": "^2.2.4"
  },
  "devDependencies": {
- "@backstage/backend-test-utils": "0.0.0-nightly-20251216024726",
- "@backstage/cli": "0.0.0-nightly-20251216024726",
- "@backstage/plugin-events-backend-test-utils": "0.0.0-nightly-20251216024726"
+ "@backstage/backend-test-utils": "0.0.0-nightly-20260108025012",
+ "@backstage/cli": "0.0.0-nightly-20260108025012",
+ "@backstage/plugin-events-backend-test-utils": "0.1.51"
  },
  "configSchema": "config.d.ts",
  "typesVersions": {