@backstage/plugin-events-backend-module-kafka 0.0.0-nightly-20250612024034

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/CHANGELOG.md ADDED
@@ -0,0 +1,33 @@
+ # @backstage/plugin-events-backend-module-kafka
+
+ ## 0.0.0-nightly-20250612024034
+
+ ### Minor Changes
+
+ - b034b9d: Adds a new module `kafka` for plugin-events-backend
+
+   The module introduces the `KafkaConsumerClient`, which creates a Kafka client used to establish consumer connections. It also provides the `KafkaConsumingEventPublisher`, a consumer that subscribes to configured Kafka topics and publishes received messages to the Event Service.
+
+ ### Patch Changes
+
+ - Updated dependencies
+   - @backstage/backend-plugin-api@0.0.0-nightly-20250612024034
+   - @backstage/config@1.3.2
+   - @backstage/types@1.2.1
+   - @backstage/plugin-events-node@0.0.0-nightly-20250612024034
+
+ ## 0.1.0-next.0
+
+ ### Minor Changes
+
+ - b034b9d: Adds a new module `kafka` for plugin-events-backend
+
+   The module introduces the `KafkaConsumerClient`, which creates a Kafka client used to establish consumer connections. It also provides the `KafkaConsumingEventPublisher`, a consumer that subscribes to configured Kafka topics and publishes received messages to the Event Service.
+
+ ### Patch Changes
+
+ - Updated dependencies
+   - @backstage/backend-plugin-api@1.4.0-next.1
+   - @backstage/config@1.3.2
+   - @backstage/types@1.2.1
+   - @backstage/plugin-events-node@0.4.12-next.1
package/README.md ADDED
@@ -0,0 +1,78 @@
+ # @backstage/plugin-events-backend-module-kafka
+
+ Welcome to the `events-backend-module-kafka` backend module!
+
+ This package is a module for the `events-backend` backend plugin and extends the events system with a `KafkaConsumingEventPublisher`.
+
+ This event publisher allows you to receive events from Kafka topics and publishes them to the `EventsService` implementation in use.
+
+ ## Configuration
+
+ To subscribe to Kafka topics, you need to configure the following values:
+
+ ```yaml
+ events:
+   modules:
+     kafka:
+       kafkaConsumingEventPublisher:
+         clientId: your-client-id # (Required) Client ID used by Backstage to identify itself when connecting to the Kafka cluster.
+         brokers: # (Required) List of brokers in the Kafka cluster to connect to.
+           - broker1
+           - broker2
+         topics:
+           - topic: 'backstage.topic' # (Required) Replace with the actual topic name as expected by subscribers
+             kafka:
+               topics: # (Required) The Kafka topics to subscribe to.
+                 - topic1
+               groupId: your-group-id # (Required) The GroupId to be used by the topic consumers.
+ ```
+
+ For a complete list of all available fields that can be configured, refer to the [config.d.ts file](./config.d.ts).
+
+ ### Optional SSL Configuration
+
+ If your Kafka cluster requires SSL, you can configure it as follows:
+
+ ```yaml
+ events:
+   modules:
+     kafka:
+       kafkaConsumingEventPublisher:
+         ssl:
+           rejectUnauthorized: true # (Optional) If true, the server certificate is verified against the list of supplied CAs.
+           ca: [path/to/ca-cert] # (Optional) Array of trusted certificates in PEM format.
+           key: path/to/client-key # (Optional) Private key in PEM format.
+           cert: path/to/client-cert # (Optional) Public x509 certificate in PEM format.
+ ```
+
+ ### Optional SASL Authentication Configuration
+
+ If your Kafka cluster requires `SASL` authentication, you can configure it as follows:
+
+ ```yaml
+ events:
+   modules:
+     kafka:
+       kafkaConsumingEventPublisher:
+         sasl:
+           mechanism: 'plain' # SASL mechanism ('plain', 'scram-sha-256' or 'scram-sha-512')
+           username: your-username # SASL username
+           password: your-password # SASL password
+ ```
+
+ The `SSL` and `SASL` settings are both optional and can be combined for enhanced security.
+
+ ## Installation
+
+ 1. Install this module.
+ 2. Add your configuration.
+
+ ```bash
+ # From your Backstage root directory
+ yarn --cwd packages/backend add @backstage/plugin-events-backend-module-kafka
+ ```
+
+ ```typescript
+ // packages/backend/src/index.ts
+ backend.add(import('@backstage/plugin-events-backend-module-kafka'));
+ ```
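Once the module is running, every Kafka message received is forwarded to the events system under the configured Backstage topic. As a minimal sketch (not part of the published package, and assuming the `backstage.topic` name from the configuration example above), another backend module could consume those events like this:

```typescript
import { createBackendModule } from '@backstage/backend-plugin-api';
import { eventsServiceRef } from '@backstage/plugin-events-node';

// Hypothetical subscriber module; plugin and module ids are placeholders.
export const exampleKafkaEventSubscriber = createBackendModule({
  pluginId: 'example',
  moduleId: 'kafka-event-subscriber',
  register(env) {
    env.registerInit({
      deps: { events: eventsServiceRef },
      async init({ events }) {
        await events.subscribe({
          id: 'example.kafka-event-subscriber',
          topics: ['backstage.topic'],
          onEvent: async params => {
            // eventPayload is the JSON-parsed Kafka message value,
            // metadata carries the Kafka message headers.
            console.log(`Received event on ${params.topic}`, params.eventPayload);
          },
        });
      },
    });
  },
});
```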
package/config.d.ts ADDED
@@ -0,0 +1,205 @@
+ /*
+  * Copyright 2025 The Backstage Authors
+  *
+  * Licensed under the Apache License, Version 2.0 (the "License");
+  * you may not use this file except in compliance with the License.
+  * You may obtain a copy of the License at
+  *
+  * http://www.apache.org/licenses/LICENSE-2.0
+  *
+  * Unless required by applicable law or agreed to in writing, software
+  * distributed under the License is distributed on an "AS IS" BASIS,
+  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  * See the License for the specific language governing permissions and
+  * limitations under the License.
+  */
+
+ import { HumanDuration } from '@backstage/types';
+
+ export interface Config {
+   events?: {
+     modules?: {
+       /**
+        * events-backend-module-kafka plugin configuration.
+        */
+       kafka?: {
+         /**
+          * Configuration for KafkaConsumingEventPublisher
+          */
+         kafkaConsumingEventPublisher?: {
+           /**
+            * (Required) Client ID used by Backstage to identify itself when connecting to the Kafka cluster.
+            */
+           clientId: string;
+           /**
+            * (Required) List of brokers in the Kafka cluster to connect to.
+            */
+           brokers: string[];
+           /**
+            * Optional SSL connection parameters to connect to the cluster. Passed directly to Node tls.connect.
+            * See https://nodejs.org/dist/latest-v8.x/docs/api/tls.html#tls_tls_createsecurecontext_options
+            */
+           ssl?:
+             | {
+                 ca?: string[];
+                 /** @visibility secret */
+                 key?: string;
+                 cert?: string;
+                 rejectUnauthorized?: boolean;
+               }
+             | boolean;
+           /**
+            * Optional SASL connection parameters.
+            */
+           sasl?: {
+             mechanism: 'plain' | 'scram-sha-256' | 'scram-sha-512';
+             username: string;
+             /** @visibility secret */
+             password: string;
+           };
+
+           /**
+            * Optional retry connection parameters.
+            */
+           retry?: {
+             /**
+              * (Optional) Maximum wait time for a retry.
+              * Default: 30000 ms.
+              */
+             maxRetryTime?: HumanDuration | string;
+
+             /**
+              * (Optional) Initial value used to calculate the retry (this is still randomized following the randomization factor).
+              * Default: 300 ms.
+              */
+             initialRetryTime?: HumanDuration | string;
+
+             /**
+              * (Optional) Randomization factor.
+              * Default: 0.2.
+              */
+             factor?: number;
+
+             /**
+              * (Optional) Exponential factor.
+              * Default: 2.
+              */
+             multiplier?: number;
+
+             /**
+              * (Optional) Max number of retries per call.
+              * Default: 5.
+              */
+             retries?: number;
+           };
+
+           /**
+            * (Optional) Timeout for authentication requests.
+            * Default: 10000 ms.
+            */
+           authenticationTimeout?: HumanDuration | string;
+
+           /**
+            * (Optional) Time to wait for a successful connection.
+            * Default: 1000 ms.
+            */
+           connectionTimeout?: HumanDuration | string;
+
+           /**
+            * (Optional) Time to wait for a successful request.
+            * Default: 30000 ms.
+            */
+           requestTimeout?: HumanDuration | string;
+
+           /**
+            * (Optional) The request timeout can be disabled by setting enforceRequestTimeout to false.
+            * Default: true
+            */
+           enforceRequestTimeout?: boolean;
+
+           /**
+            * Contains an object per Backstage topic for which Kafka topics
+            * should be used as a source of events.
+            */
+           topics: Array<{
+             /**
+              * (Required) The Backstage topic to publish to
+              */
+             topic: string;
+             /**
+              * (Required) KafkaConsumer-related configuration.
+              */
+             kafka: {
+               /**
+                * (Required) The Kafka topics to subscribe to
+                */
+               topics: string[];
+               /**
+                * (Required) The GroupId to be used by the topic consumers
+                */
+               groupId: string;
+
+               /**
+                * (Optional) Timeout used to detect failures.
+                * The consumer sends periodic heartbeats to indicate its liveness to the broker.
+                * If no heartbeats are received by the broker before the expiration of this session timeout,
+                * then the broker will remove this consumer from the group and initiate a rebalance.
+                * Default: 30000 ms.
+                */
+               sessionTimeout?: HumanDuration | string;
+
+               /**
+                * (Optional) The maximum time that the coordinator will wait for each member to rejoin when rebalancing the group.
+                * Default: 60000 ms.
+                */
+               rebalanceTimeout?: HumanDuration | string;
+
+               /**
+                * (Optional) The expected time between heartbeats to the consumer coordinator.
+                * Heartbeats are used to ensure that the consumer's session stays active.
+                * The value must be set lower than the session timeout.
+                * Default: 3000 ms.
+                */
+               heartbeatInterval?: HumanDuration | string;
+
+               /**
+                * (Optional) The period of time after which we force a refresh of metadata,
+                * even if we haven't seen any partition leadership changes, to proactively discover any new brokers or partitions.
+                * Default: 300000 ms (5 minutes).
+                */
+               metadataMaxAge?: HumanDuration | string;
+
+               /**
+                * (Optional) The maximum amount of data per partition the server will return.
+                * This size must be at least as large as the maximum message size the server allows,
+                * or else it is possible for the producer to send messages larger than the consumer can fetch.
+                * If that happens, the consumer can get stuck trying to fetch a large message on a certain partition.
+                * Default: 1048576 (1MB)
+                */
+               maxBytesPerPartition?: number;
+
+               /**
+                * (Optional) Minimum amount of data the server should return for a fetch request; otherwise, wait up to maxWaitTime for more data to accumulate.
+                * Default: 1
+                */
+               minBytes?: number;
+
+               /**
+                * (Optional) Maximum amount of bytes to accumulate in the response. Supported by Kafka >= 0.10.1.0.
+                * Default: 10485760 (10MB)
+                */
+               maxBytes?: number;
+
+               /**
+                * (Optional) The maximum amount of time the server will block before answering the fetch request
+                * if there isn't sufficient data to immediately satisfy the requirement given by minBytes.
+                * Default: 5000 ms.
+                */
+               maxWaitTime?: HumanDuration | string;
+             };
+           }>;
+         };
+       };
+     };
+   };
+ }
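All duration-typed fields above are declared as `HumanDuration | string`, so they can be given either as an object such as `{ seconds: 30 }` or as a string. A sketch of the overall configuration shape, expressed with `ConfigReader` from `@backstage/config` as one might do in a test (all values are illustrative, not defaults):

```typescript
import { ConfigReader } from '@backstage/config';

// Illustrative configuration matching the schema above; values are examples only.
const config = new ConfigReader({
  events: {
    modules: {
      kafka: {
        kafkaConsumingEventPublisher: {
          clientId: 'backstage-events',
          brokers: ['broker1:9092', 'broker2:9092'],
          retry: {
            initialRetryTime: { milliseconds: 300 },
            retries: 5,
          },
          topics: [
            {
              topic: 'backstage.topic',
              kafka: {
                topics: ['topic1'],
                groupId: 'backstage-consumer-group',
                sessionTimeout: { seconds: 30 },
                maxBytesPerPartition: 1048576,
              },
            },
          ],
        },
      },
    },
  },
});

console.log(
  config.getString('events.modules.kafka.kafkaConsumingEventPublisher.clientId'),
);
```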
@@ -0,0 +1,10 @@
+ 'use strict';
+
+ Object.defineProperty(exports, '__esModule', { value: true });
+
+ var eventsModuleKafkaConsumingEventPublisher = require('./service/eventsModuleKafkaConsumingEventPublisher.cjs.js');
+
+
+
+ exports.default = eventsModuleKafkaConsumingEventPublisher.eventsModuleKafkaConsumingEventPublisher;
+ //# sourceMappingURL=index.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"index.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}
@@ -0,0 +1,10 @@
+ import * as _backstage_backend_plugin_api from '@backstage/backend-plugin-api';
+
+ /**
+  * Kafka module for the Events plugin.
+  *
+  * @public
+  */
+ declare const eventsModuleKafkaConsumingEventPublisher: _backstage_backend_plugin_api.BackendFeature;
+
+ export { eventsModuleKafkaConsumingEventPublisher as default };
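The declaration above shows that the package's default export is a `BackendFeature`. Besides the dynamic-import form used in the README, a sketch of adding it via a static import (the backend setup shown is the standard `@backstage/backend-defaults` wiring, not something this package ships):

```typescript
// packages/backend/src/index.ts (sketch)
import { createBackend } from '@backstage/backend-defaults';
import eventsModuleKafkaConsumingEventPublisher from '@backstage/plugin-events-backend-module-kafka';

const backend = createBackend();
// The default export is a BackendFeature, so it can be passed to backend.add directly.
backend.add(eventsModuleKafkaConsumingEventPublisher);
backend.start();
```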
@@ -0,0 +1,44 @@
+ 'use strict';
+
+ var kafkajs = require('kafkajs');
+ var config = require('./config.cjs.js');
+ var KafkaConsumingEventPublisher = require('./KafkaConsumingEventPublisher.cjs.js');
+ var LoggerServiceAdapter = require('./LoggerServiceAdapter.cjs.js');
+
+ class KafkaConsumerClient {
+   kafka;
+   consumers;
+   static fromConfig(options) {
+     const kafkaConfig = config.readConfig(options.config);
+     if (!kafkaConfig) {
+       options.logger.info(
+         "Kafka consumer not configured, skipping initialization"
+       );
+       return void 0;
+     }
+     return new KafkaConsumerClient(options.logger, options.events, kafkaConfig);
+   }
+   constructor(logger, events, config) {
+     this.kafka = new kafkajs.Kafka({
+       ...config.kafkaConfig,
+       logCreator: LoggerServiceAdapter.loggerServiceAdapter(logger)
+     });
+     this.consumers = config.kafkaConsumerConfigs.map(
+       (consumerConfig) => KafkaConsumingEventPublisher.KafkaConsumingEventPublisher.fromConfig({
+         kafkaClient: this.kafka,
+         config: consumerConfig,
+         logger,
+         events
+       })
+     );
+   }
+   async start() {
+     this.consumers.map(async (consumer) => await consumer.start());
+   }
+   async shutdown() {
+     this.consumers.map(async (consumer) => await consumer.shutdown());
+   }
+ }
+
+ exports.KafkaConsumerClient = KafkaConsumerClient;
+ //# sourceMappingURL=KafkaConsumerClient.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"KafkaConsumerClient.cjs.js","sources":["../../src/publisher/KafkaConsumerClient.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { Config } from '@backstage/config';\nimport { EventsService } from '@backstage/plugin-events-node';\nimport { Kafka } from 'kafkajs';\nimport { KafkaEventSourceConfig, readConfig } from './config';\nimport { KafkaConsumingEventPublisher } from './KafkaConsumingEventPublisher';\nimport { loggerServiceAdapter } from './LoggerServiceAdapter';\n\n/**\n * KafkaConsumerClient\n *\n * This class creates the Kafka client that will be used to create the KafkaConsumingEventPublisher\n *\n * @public\n */\nexport class KafkaConsumerClient {\n private readonly kafka: Kafka;\n private readonly consumers: KafkaConsumingEventPublisher[];\n\n static fromConfig(options: {\n config: Config;\n events: EventsService;\n logger: LoggerService;\n }): KafkaConsumerClient | undefined {\n const kafkaConfig = readConfig(options.config);\n\n if (!kafkaConfig) {\n options.logger.info(\n 'Kafka consumer not configured, skipping initialization',\n );\n return undefined;\n }\n\n return new KafkaConsumerClient(options.logger, options.events, kafkaConfig);\n }\n\n private constructor(\n logger: LoggerService,\n events: EventsService,\n config: KafkaEventSourceConfig,\n ) {\n this.kafka = new Kafka({\n ...config.kafkaConfig,\n logCreator: loggerServiceAdapter(logger),\n });\n\n this.consumers = config.kafkaConsumerConfigs.map(consumerConfig =>\n KafkaConsumingEventPublisher.fromConfig({\n kafkaClient: this.kafka,\n config: consumerConfig,\n logger,\n events,\n }),\n );\n }\n\n async start(): Promise<void> {\n this.consumers.map(async consumer => await consumer.start());\n }\n\n async shutdown(): Promise<void> {\n this.consumers.map(async consumer => await consumer.shutdown());\n }\n}\n"],"names":["readConfig","Kafka","loggerServiceAdapter","KafkaConsumingEventPublisher"],"mappings":";;;;;;;AA8BO,MAAM,mBAAoB,CAAA;AAAA,EACd,KAAA;AAAA,EACA,SAAA;AAAA,EAEjB,OAAO,WAAW,OAIkB,EAAA;AAClC,IAAM,MAAA,WAAA,GAAcA,iBAAW,CAAA,OAAA,CAAQ,MAAM,CAAA;AAE7C,IAAA,IAAI,CAAC,WAAa,EAAA;AAChB,MAAA,OAAA,CAAQ,MAAO,CAAA,IAAA;AAAA,QACb;AAAA,OACF;AACA,MAAO,OAAA,KAAA,CAAA;AAAA;AAGT,IAAA,OAAO,IAAI,mBAAoB,CAAA,OAAA,CAAQ,MAAQ,EAAA,OAAA,CAAQ,QAAQ,WAAW,CAAA;AAAA;AAC5E,EAEQ,WAAA,CACN,MACA,EAAA,MAAA,EACA,MACA,EAAA;AACA,IAAK,IAAA,CAAA,KAAA,GAAQ,IAAIC,aAAM,CAAA;AAAA,MACrB,GAAG,MAAO,CAAA,WAAA;AAAA,MACV,UAAA,EAAYC,0CAAqB,MAAM;AAAA,KACxC,CAAA;AAED,IAAK,IAAA,CAAA,SAAA,GAAY,OAAO,oBAAqB,CAAA,GAAA;AAAA,MAAI,CAAA,cAAA,KAC/CC,0DAA6B,UAAW,CAAA;AAAA,QACtC,aAAa,IAAK,CAAA,KAAA;AAAA,QAClB,MAAQ,EAAA,cAAA;AAAA,QACR,MAAA;AAAA,QACA;AAAA,OACD;AAAA,KACH;AAAA;AACF,EAEA,MAAM,KAAuB,GAAA;AAC3B,IAAA,IAAA,CAAK,UAAU,GAAI,CAAA,OAAM,aAAY,MAAM,QAAA,CAAS,OAAO,CAAA;AAAA;AAC7D,EAEA,MAAM,QAA0B,GAAA;AAC9B,IAAA,IAAA,CAAK,UAAU,GAAI,CAAA,OAAM,aAAY,MAAM,QAAA,CAAS,UAAU,CAAA;AAAA;AAElE;;;;"}
@@ -0,0 +1,62 @@
+ 'use strict';
+
+ class KafkaConsumingEventPublisher {
+   constructor(kafkaClient, logger, events, config) {
+     this.events = events;
+     this.kafkaConsumer = kafkaClient.consumer(config.consumerConfig);
+     this.consumerSubscribeTopics = config.consumerSubscribeTopics;
+     this.backstageTopic = config.backstageTopic;
+     const id = `events.kafka.publisher:${this.backstageTopic}`;
+     this.logger = logger.child({
+       class: KafkaConsumingEventPublisher.prototype.constructor.name,
+       groupId: config.consumerConfig.groupId,
+       kafkaTopics: config.consumerSubscribeTopics.topics.toString(),
+       backstageTopic: config.backstageTopic,
+       taskId: id
+     });
+   }
+   kafkaConsumer;
+   consumerSubscribeTopics;
+   backstageTopic;
+   logger;
+   static fromConfig(env) {
+     return new KafkaConsumingEventPublisher(
+       env.kafkaClient,
+       env.logger,
+       env.events,
+       env.config
+     );
+   }
+   async start() {
+     try {
+       await this.kafkaConsumer.connect();
+       await this.kafkaConsumer.subscribe(this.consumerSubscribeTopics);
+       await this.kafkaConsumer.run({
+         eachMessage: async ({ message }) => {
+           this.events.publish({
+             topic: this.backstageTopic,
+             eventPayload: JSON.parse(message.value?.toString()),
+             metadata: this.convertHeadersToMetadata(message.headers)
+           });
+         }
+       });
+     } catch (error) {
+       this.logger.error("Kafka consumer connection failed ", error);
+     }
+   }
+   async shutdown() {
+     await this.kafkaConsumer.disconnect();
+   }
+   convertHeadersToMetadata = (headers) => {
+     if (!headers) return void 0;
+     const metadata = {};
+     Object.entries(headers).forEach(([key, value]) => {
+       if (Array.isArray(value)) metadata[key] = value.map((v) => v.toString());
+       else metadata[key] = value?.toString();
+     });
+     return metadata;
+   };
+ }
+
+ exports.KafkaConsumingEventPublisher = KafkaConsumingEventPublisher;
+ //# sourceMappingURL=KafkaConsumingEventPublisher.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"KafkaConsumingEventPublisher.cjs.js","sources":["../../src/publisher/KafkaConsumingEventPublisher.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { EventParams, EventsService } from '@backstage/plugin-events-node';\nimport { Consumer, ConsumerSubscribeTopics, IHeaders, Kafka } from 'kafkajs';\nimport { KafkaConsumerConfig } from './config';\n\ntype EventMetadata = EventParams['metadata'];\n\n/**\n *\n * This class subscribes to Kafka topics and publishes events received to the registered subscriber.\n * The message payload will be used as the event payload and passed to the subscribers.\n *\n * @public\n */\nexport class KafkaConsumingEventPublisher {\n private readonly kafkaConsumer: Consumer;\n private readonly consumerSubscribeTopics: ConsumerSubscribeTopics;\n private readonly backstageTopic: string;\n private readonly logger: LoggerService;\n\n static fromConfig(env: {\n kafkaClient: Kafka;\n config: KafkaConsumerConfig;\n events: EventsService;\n logger: LoggerService;\n }): KafkaConsumingEventPublisher {\n return new KafkaConsumingEventPublisher(\n env.kafkaClient,\n env.logger,\n env.events,\n env.config,\n );\n }\n\n private constructor(\n kafkaClient: Kafka,\n logger: LoggerService,\n private readonly events: EventsService,\n config: KafkaConsumerConfig,\n ) {\n this.kafkaConsumer = kafkaClient.consumer(config.consumerConfig);\n this.consumerSubscribeTopics = config.consumerSubscribeTopics;\n this.backstageTopic = config.backstageTopic;\n const id = `events.kafka.publisher:${this.backstageTopic}`;\n this.logger = logger.child({\n class: KafkaConsumingEventPublisher.prototype.constructor.name,\n groupId: config.consumerConfig.groupId,\n kafkaTopics: config.consumerSubscribeTopics.topics.toString(),\n backstageTopic: config.backstageTopic,\n taskId: id,\n });\n }\n\n async start(): Promise<void> {\n try {\n await this.kafkaConsumer.connect();\n\n await this.kafkaConsumer.subscribe(this.consumerSubscribeTopics);\n\n await this.kafkaConsumer.run({\n eachMessage: async ({ message }) => {\n this.events.publish({\n topic: this.backstageTopic,\n eventPayload: JSON.parse(message.value?.toString()!),\n metadata: this.convertHeadersToMetadata(message.headers),\n });\n },\n });\n } catch (error: any) {\n this.logger.error('Kafka consumer connection failed ', error);\n }\n }\n\n async shutdown(): Promise<void> {\n await this.kafkaConsumer.disconnect();\n }\n\n private convertHeadersToMetadata = (\n headers: IHeaders | undefined,\n ): EventParams['metadata'] => {\n if (!headers) return undefined;\n\n const metadata: EventMetadata = {};\n\n Object.entries(headers).forEach(([key, value]) => {\n // If value is an array use toString() on all values converting any Buffer types to valid strings\n if (Array.isArray(value)) metadata[key] = value.map(v => v.toString());\n // Always return the values using toString() to catch 
all Buffer types that should be converted to strings\n else metadata[key] = value?.toString();\n });\n\n return metadata;\n };\n}\n"],"names":[],"mappings":";;AA6BO,MAAM,4BAA6B,CAAA;AAAA,EAoBhC,WACN,CAAA,WAAA,EACA,MACiB,EAAA,MAAA,EACjB,MACA,EAAA;AAFiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAGjB,IAAA,IAAA,CAAK,aAAgB,GAAA,WAAA,CAAY,QAAS,CAAA,MAAA,CAAO,cAAc,CAAA;AAC/D,IAAA,IAAA,CAAK,0BAA0B,MAAO,CAAA,uBAAA;AACtC,IAAA,IAAA,CAAK,iBAAiB,MAAO,CAAA,cAAA;AAC7B,IAAM,MAAA,EAAA,GAAK,CAA0B,uBAAA,EAAA,IAAA,CAAK,cAAc,CAAA,CAAA;AACxD,IAAK,IAAA,CAAA,MAAA,GAAS,OAAO,KAAM,CAAA;AAAA,MACzB,KAAA,EAAO,4BAA6B,CAAA,SAAA,CAAU,WAAY,CAAA,IAAA;AAAA,MAC1D,OAAA,EAAS,OAAO,cAAe,CAAA,OAAA;AAAA,MAC/B,WAAa,EAAA,MAAA,CAAO,uBAAwB,CAAA,MAAA,CAAO,QAAS,EAAA;AAAA,MAC5D,gBAAgB,MAAO,CAAA,cAAA;AAAA,MACvB,MAAQ,EAAA;AAAA,KACT,CAAA;AAAA;AACH,EApCiB,aAAA;AAAA,EACA,uBAAA;AAAA,EACA,cAAA;AAAA,EACA,MAAA;AAAA,EAEjB,OAAO,WAAW,GAKe,EAAA;AAC/B,IAAA,OAAO,IAAI,4BAAA;AAAA,MACT,GAAI,CAAA,WAAA;AAAA,MACJ,GAAI,CAAA,MAAA;AAAA,MACJ,GAAI,CAAA,MAAA;AAAA,MACJ,GAAI,CAAA;AAAA,KACN;AAAA;AACF,EAqBA,MAAM,KAAuB,GAAA;AAC3B,IAAI,IAAA;AACF,MAAM,MAAA,IAAA,CAAK,cAAc,OAAQ,EAAA;AAEjC,MAAA,MAAM,IAAK,CAAA,aAAA,CAAc,SAAU,CAAA,IAAA,CAAK,uBAAuB,CAAA;AAE/D,MAAM,MAAA,IAAA,CAAK,cAAc,GAAI,CAAA;AAAA,QAC3B,WAAa,EAAA,OAAO,EAAE,OAAA,EAAc,KAAA;AAClC,UAAA,IAAA,CAAK,OAAO,OAAQ,CAAA;AAAA,YAClB,OAAO,IAAK,CAAA,cAAA;AAAA,YACZ,cAAc,IAAK,CAAA,KAAA,CAAM,OAAQ,CAAA,KAAA,EAAO,UAAW,CAAA;AAAA,YACnD,QAAU,EAAA,IAAA,CAAK,wBAAyB,CAAA,OAAA,CAAQ,OAAO;AAAA,WACxD,CAAA;AAAA;AACH,OACD,CAAA;AAAA,aACM,KAAY,EAAA;AACnB,MAAK,IAAA,CAAA,MAAA,CAAO,KAAM,CAAA,mCAAA,EAAqC,KAAK,CAAA;AAAA;AAC9D;AACF,EAEA,MAAM,QAA0B,GAAA;AAC9B,IAAM,MAAA,IAAA,CAAK,cAAc,UAAW,EAAA;AAAA;AACtC,EAEQ,wBAAA,GAA2B,CACjC,OAC4B,KAAA;AAC5B,IAAI,IAAA,CAAC,SAAgB,OAAA,KAAA,CAAA;AAErB,IAAA,MAAM,WAA0B,EAAC;AAEjC,IAAO,MAAA,CAAA,OAAA,CAAQ,OAAO,CAAE,CAAA,OAAA,CAAQ,CAAC,CAAC,GAAA,EAAK,KAAK,CAAM,KAAA;AAEhD,MAAA,IAAI,KAAM,CAAA,OAAA,CAAQ,KAAK,CAAA,EAAY,QAAA,CAAA,GAAG,CAAI,GAAA,KAAA,CAAM,GAAI,CAAA,CAAA,CAAA,KAAK,CAAE,CAAA,QAAA,EAAU,CAAA;AAAA,WAEvD,QAAA,CAAA,GAAG,CAAI,GAAA,KAAA,EAAO,QAAS,EAAA;AAAA,KACtC,CAAA;AAED,IAAO,OAAA,QAAA;AAAA,GACT;AACF;;;;"}
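As the `eachMessage` handler above shows, the published event's payload comes from `JSON.parse` of the Kafka message value, and the message headers become the event metadata. A producer therefore has to send JSON-encoded values; a minimal kafkajs sketch (broker address, topic name, and payload are illustrative, not part of this package):

```typescript
import { Kafka } from 'kafkajs';

async function sendExampleEvent() {
  const kafka = new Kafka({ clientId: 'example-producer', brokers: ['broker1:9092'] });
  const producer = kafka.producer();

  await producer.connect();
  await producer.send({
    topic: 'topic1',
    messages: [
      {
        // The value must be valid JSON, since the consumer runs it through JSON.parse().
        value: JSON.stringify({ entity: 'component:default/example', action: 'updated' }),
        // Headers are converted to strings and forwarded as event metadata.
        headers: { 'x-event-source': 'example-system' },
      },
    ],
  });
  await producer.disconnect();
}
```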
@@ -0,0 +1,28 @@
+ 'use strict';
+
+ var kafkajs = require('kafkajs');
+
+ const loggerServiceAdapter = (loggerService) => (_level) => {
+   return (entry) => {
+     const { namespace, level, log } = entry;
+     const { message, ...extra } = log;
+     const logMethods = {
+       [kafkajs.logLevel.ERROR]: loggerService.error,
+       [kafkajs.logLevel.WARN]: loggerService.warn,
+       [kafkajs.logLevel.INFO]: loggerService.info,
+       [kafkajs.logLevel.DEBUG]: loggerService.debug,
+       [kafkajs.logLevel.NOTHING]: () => {
+       }
+     };
+     logMethods[level].call(
+       loggerService,
+       `Kafka ${namespace} ${log.message}`,
+       {
+         ...extra
+       }
+     );
+   };
+ };
+
+ exports.loggerServiceAdapter = loggerServiceAdapter;
+ //# sourceMappingURL=LoggerServiceAdapter.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"LoggerServiceAdapter.cjs.js","sources":["../../src/publisher/LoggerServiceAdapter.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { LogEntry, logLevel } from 'kafkajs';\n\nexport const loggerServiceAdapter =\n (loggerService: LoggerService) => (_level: logLevel) => {\n return (entry: LogEntry) => {\n const { namespace, level, log } = entry;\n const { message, ...extra } = log;\n\n const logMethods: Record<\n logLevel,\n (message: string, meta?: object) => void\n > = {\n [logLevel.ERROR]: loggerService.error,\n [logLevel.WARN]: loggerService.warn,\n [logLevel.INFO]: loggerService.info,\n [logLevel.DEBUG]: loggerService.debug,\n [logLevel.NOTHING]: () => {},\n };\n\n // Use loggerService method that matches the level\n logMethods[level].call(\n loggerService,\n `Kafka ${namespace} ${log.message}`,\n {\n ...extra,\n },\n );\n };\n };\n"],"names":["logLevel"],"mappings":";;;;AAkBO,MAAM,oBACX,GAAA,CAAC,aAAiC,KAAA,CAAC,MAAqB,KAAA;AACtD,EAAA,OAAO,CAAC,KAAoB,KAAA;AAC1B,IAAA,MAAM,EAAE,SAAA,EAAW,KAAO,EAAA,GAAA,EAAQ,GAAA,KAAA;AAClC,IAAA,MAAM,EAAE,OAAA,EAAS,GAAG,KAAA,EAAU,GAAA,GAAA;AAE9B,IAAA,MAAM,UAGF,GAAA;AAAA,MACF,CAACA,gBAAA,CAAS,KAAK,GAAG,aAAc,CAAA,KAAA;AAAA,MAChC,CAACA,gBAAA,CAAS,IAAI,GAAG,aAAc,CAAA,IAAA;AAAA,MAC/B,CAACA,gBAAA,CAAS,IAAI,GAAG,aAAc,CAAA,IAAA;AAAA,MAC/B,CAACA,gBAAA,CAAS,KAAK,GAAG,aAAc,CAAA,KAAA;AAAA,MAChC,CAACA,gBAAA,CAAS,OAAO,GAAG,MAAM;AAAA;AAAC,KAC7B;AAGA,IAAA,UAAA,CAAW,KAAK,CAAE,CAAA,IAAA;AAAA,MAChB,aAAA;AAAA,MACA,CAAS,MAAA,EAAA,SAAS,CAAI,CAAA,EAAA,GAAA,CAAI,OAAO,CAAA,CAAA;AAAA,MACjC;AAAA,QACE,GAAG;AAAA;AACL,KACF;AAAA,GACF;AACF;;;;"}
@@ -0,0 +1,102 @@
+ 'use strict';
+
+ var config = require('@backstage/config');
+ var luxon = require('luxon');
+
+ const CONFIG_PREFIX_PUBLISHER = "events.modules.kafka.kafkaConsumingEventPublisher";
+ const readOptionalHumanDurationInMs = (config$1, key) => {
+   const humanDuration = config$1.has(key) ? config.readDurationFromConfig(config$1, { key }) : void 0;
+   if (!humanDuration) return void 0;
+   return luxon.Duration.fromObject(humanDuration).as("milliseconds");
+ };
+ const readConfig = (config) => {
+   const kafkaConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);
+   if (!kafkaConfig) {
+     return void 0;
+   }
+   const clientId = kafkaConfig.getString("clientId");
+   const brokers = kafkaConfig.getStringArray("brokers");
+   const authenticationTimeout = readOptionalHumanDurationInMs(
+     kafkaConfig,
+     "authenticationTimeout"
+   );
+   const connectionTimeout = readOptionalHumanDurationInMs(
+     kafkaConfig,
+     "connectionTimeout"
+   );
+   const requestTimeout = readOptionalHumanDurationInMs(
+     kafkaConfig,
+     "requestTimeout"
+   );
+   const enforceRequestTimeout = kafkaConfig.getOptionalBoolean(
+     "enforceRequestTimeout"
+   );
+   const ssl = kafkaConfig.getOptional("ssl");
+   const sasl = kafkaConfig.getOptional("sasl");
+   const retry = {
+     maxRetryTime: readOptionalHumanDurationInMs(
+       kafkaConfig,
+       "retry.maxRetryTime"
+     ),
+     initialRetryTime: readOptionalHumanDurationInMs(
+       kafkaConfig,
+       "retry.initialRetryTime"
+     ),
+     factor: kafkaConfig.getOptionalNumber("retry.factor"),
+     multiplier: kafkaConfig.getOptionalNumber("retry.multiplier"),
+     retries: kafkaConfig.getOptionalNumber("retry.retries")
+   };
+   const kafkaConsumerConfigs = kafkaConfig.getConfigArray("topics").map((topic) => {
+     return {
+       backstageTopic: topic.getString("topic"),
+       consumerConfig: {
+         groupId: topic.getString("kafka.groupId"),
+         sessionTimeout: readOptionalHumanDurationInMs(
+           topic,
+           "kafka.sessionTimeout"
+         ),
+         rebalanceTimeout: readOptionalHumanDurationInMs(
+           topic,
+           "kafka.rebalanceTimeout"
+         ),
+         heartbeatInterval: readOptionalHumanDurationInMs(
+           topic,
+           "kafka.heartbeatInterval"
+         ),
+         metadataMaxAge: readOptionalHumanDurationInMs(
+           topic,
+           "kafka.metadataMaxAge"
+         ),
+         maxBytesPerPartition: topic.getOptionalNumber(
+           "kafka.maxBytesPerPartition"
+         ),
+         minBytes: topic.getOptionalNumber("kafka.minBytes"),
+         maxBytes: topic.getOptionalNumber("kafka.maxBytes"),
+         maxWaitTimeInMs: readOptionalHumanDurationInMs(
+           topic,
+           "kafka.maxWaitTime"
+         )
+       },
+       consumerSubscribeTopics: {
+         topics: topic.getStringArray("kafka.topics")
+       }
+     };
+   });
+   return {
+     kafkaConfig: {
+       clientId,
+       brokers,
+       ssl,
+       sasl,
+       authenticationTimeout,
+       connectionTimeout,
+       requestTimeout,
+       enforceRequestTimeout,
+       retry
+     },
+     kafkaConsumerConfigs
+   };
+ };
+
+ exports.readConfig = readConfig;
+ //# sourceMappingURL=config.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"config.cjs.js","sources":["../../src/publisher/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Config, readDurationFromConfig } from '@backstage/config';\nimport { ConsumerConfig, ConsumerSubscribeTopics, KafkaConfig } from 'kafkajs';\nimport { Duration } from 'luxon';\n\n/**\n * @public\n */\nexport interface KafkaConsumerConfig {\n backstageTopic: string;\n consumerConfig: ConsumerConfig;\n consumerSubscribeTopics: ConsumerSubscribeTopics;\n}\n\n/**\n * @public\n */\nexport interface KafkaEventSourceConfig {\n kafkaConfig: KafkaConfig;\n kafkaConsumerConfigs: KafkaConsumerConfig[];\n}\n\nconst CONFIG_PREFIX_PUBLISHER =\n 'events.modules.kafka.kafkaConsumingEventPublisher';\n\n/**\n * Reads an optional HumanDuration from the config and returns the value in milliseconds if the key is defined.\n *\n * @param config - The configuration object to read from.\n * @param key - The key to look up in the configuration.\n * @returns The duration in milliseconds, or undefined if the key is not defined.\n */\nconst readOptionalHumanDurationInMs = (\n config: Config,\n key: string,\n): number | undefined => {\n const humanDuration = config.has(key)\n ? 
readDurationFromConfig(config, { key })\n : undefined;\n\n if (!humanDuration) return undefined;\n\n return Duration.fromObject(humanDuration).as('milliseconds');\n};\n\nexport const readConfig = (\n config: Config,\n): KafkaEventSourceConfig | undefined => {\n const kafkaConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);\n\n if (!kafkaConfig) {\n return undefined;\n }\n\n const clientId = kafkaConfig.getString('clientId');\n const brokers = kafkaConfig.getStringArray('brokers');\n\n const authenticationTimeout = readOptionalHumanDurationInMs(\n kafkaConfig,\n 'authenticationTimeout',\n );\n\n const connectionTimeout = readOptionalHumanDurationInMs(\n kafkaConfig,\n 'connectionTimeout',\n );\n const requestTimeout = readOptionalHumanDurationInMs(\n kafkaConfig,\n 'requestTimeout',\n );\n const enforceRequestTimeout = kafkaConfig.getOptionalBoolean(\n 'enforceRequestTimeout',\n );\n\n const ssl = kafkaConfig.getOptional('ssl') as KafkaConfig['ssl'];\n const sasl = kafkaConfig.getOptional('sasl') as KafkaConfig['sasl'];\n\n const retry: KafkaConfig['retry'] = {\n maxRetryTime: readOptionalHumanDurationInMs(\n kafkaConfig,\n 'retry.maxRetryTime',\n ),\n initialRetryTime: readOptionalHumanDurationInMs(\n kafkaConfig,\n 'retry.initialRetryTime',\n ),\n factor: kafkaConfig.getOptionalNumber('retry.factor'),\n multiplier: kafkaConfig.getOptionalNumber('retry.multiplier'),\n retries: kafkaConfig.getOptionalNumber('retry.retries'),\n };\n\n const kafkaConsumerConfigs: KafkaConsumerConfig[] = kafkaConfig\n .getConfigArray('topics')\n .map(topic => {\n return {\n backstageTopic: topic.getString('topic'),\n consumerConfig: {\n groupId: topic.getString('kafka.groupId'),\n sessionTimeout: readOptionalHumanDurationInMs(\n topic,\n 'kafka.sessionTimeout',\n ),\n rebalanceTimeout: readOptionalHumanDurationInMs(\n topic,\n 'kafka.rebalanceTimeout',\n ),\n heartbeatInterval: readOptionalHumanDurationInMs(\n topic,\n 'kafka.heartbeatInterval',\n ),\n metadataMaxAge: readOptionalHumanDurationInMs(\n topic,\n 'kafka.metadataMaxAge',\n ),\n maxBytesPerPartition: topic.getOptionalNumber(\n 'kafka.maxBytesPerPartition',\n ),\n minBytes: topic.getOptionalNumber('kafka.minBytes'),\n maxBytes: topic.getOptionalNumber('kafka.maxBytes'),\n maxWaitTimeInMs: readOptionalHumanDurationInMs(\n topic,\n 'kafka.maxWaitTime',\n ),\n },\n consumerSubscribeTopics: {\n topics: topic.getStringArray('kafka.topics'),\n },\n };\n });\n\n return {\n kafkaConfig: {\n clientId,\n brokers,\n ssl,\n sasl,\n authenticationTimeout,\n connectionTimeout,\n requestTimeout,\n enforceRequestTimeout,\n retry,\n },\n kafkaConsumerConfigs,\n 
};\n};\n"],"names":["config","readDurationFromConfig","Duration"],"mappings":";;;;;AAoCA,MAAM,uBACJ,GAAA,mDAAA;AASF,MAAM,6BAAA,GAAgC,CACpCA,QAAA,EACA,GACuB,KAAA;AACvB,EAAM,MAAA,aAAA,GAAgBA,QAAO,CAAA,GAAA,CAAI,GAAG,CAAA,GAChCC,8BAAuBD,QAAQ,EAAA,EAAE,GAAI,EAAC,CACtC,GAAA,KAAA,CAAA;AAEJ,EAAI,IAAA,CAAC,eAAsB,OAAA,KAAA,CAAA;AAE3B,EAAA,OAAOE,cAAS,CAAA,UAAA,CAAW,aAAa,CAAA,CAAE,GAAG,cAAc,CAAA;AAC7D,CAAA;AAEa,MAAA,UAAA,GAAa,CACxB,MACuC,KAAA;AACvC,EAAM,MAAA,WAAA,GAAc,MAAO,CAAA,iBAAA,CAAkB,uBAAuB,CAAA;AAEpE,EAAA,IAAI,CAAC,WAAa,EAAA;AAChB,IAAO,OAAA,KAAA,CAAA;AAAA;AAGT,EAAM,MAAA,QAAA,GAAW,WAAY,CAAA,SAAA,CAAU,UAAU,CAAA;AACjD,EAAM,MAAA,OAAA,GAAU,WAAY,CAAA,cAAA,CAAe,SAAS,CAAA;AAEpD,EAAA,MAAM,qBAAwB,GAAA,6BAAA;AAAA,IAC5B,WAAA;AAAA,IACA;AAAA,GACF;AAEA,EAAA,MAAM,iBAAoB,GAAA,6BAAA;AAAA,IACxB,WAAA;AAAA,IACA;AAAA,GACF;AACA,EAAA,MAAM,cAAiB,GAAA,6BAAA;AAAA,IACrB,WAAA;AAAA,IACA;AAAA,GACF;AACA,EAAA,MAAM,wBAAwB,WAAY,CAAA,kBAAA;AAAA,IACxC;AAAA,GACF;AAEA,EAAM,MAAA,GAAA,GAAM,WAAY,CAAA,WAAA,CAAY,KAAK,CAAA;AACzC,EAAM,MAAA,IAAA,GAAO,WAAY,CAAA,WAAA,CAAY,MAAM,CAAA;AAE3C,EAAA,MAAM,KAA8B,GAAA;AAAA,IAClC,YAAc,EAAA,6BAAA;AAAA,MACZ,WAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,gBAAkB,EAAA,6BAAA;AAAA,MAChB,WAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,MAAA,EAAQ,WAAY,CAAA,iBAAA,CAAkB,cAAc,CAAA;AAAA,IACpD,UAAA,EAAY,WAAY,CAAA,iBAAA,CAAkB,kBAAkB,CAAA;AAAA,IAC5D,OAAA,EAAS,WAAY,CAAA,iBAAA,CAAkB,eAAe;AAAA,GACxD;AAEA,EAAA,MAAM,uBAA8C,WACjD,CAAA,cAAA,CAAe,QAAQ,CAAA,CACvB,IAAI,CAAS,KAAA,KAAA;AACZ,IAAO,OAAA;AAAA,MACL,cAAA,EAAgB,KAAM,CAAA,SAAA,CAAU,OAAO,CAAA;AAAA,MACvC,cAAgB,EAAA;AAAA,QACd,OAAA,EAAS,KAAM,CAAA,SAAA,CAAU,eAAe,CAAA;AAAA,QACxC,cAAgB,EAAA,6BAAA;AAAA,UACd,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,gBAAkB,EAAA,6BAAA;AAAA,UAChB,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,iBAAmB,EAAA,6BAAA;AAAA,UACjB,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,cAAgB,EAAA,6BAAA;AAAA,UACd,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,sBAAsB,KAAM,CAAA,iBAAA;AAAA,UAC1B;AAAA,SACF;AAAA,QACA,QAAA,EAAU,KAAM,CAAA,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,QAClD,QAAA,EAAU,KAAM,CAAA,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,QAClD,eAAiB,EAAA,6BAAA;AAAA,UACf,KAAA;AAAA,UACA;AAAA;AACF,OACF;AAAA,MACA,uBAAyB,EAAA;AAAA,QACvB,MAAA,EAAQ,KAAM,CAAA,cAAA,CAAe,cAAc;AAAA;AAC7C,KACF;AAAA,GACD,CAAA;AAEH,EAAO,OAAA;AAAA,IACL,WAAa,EAAA;AAAA,MACX,QAAA;AAAA,MACA,OAAA;AAAA,MACA,GAAA;AAAA,MACA,IAAA;AAAA,MACA,qBAAA;AAAA,MACA,iBAAA;AAAA,MACA,cAAA;AAAA,MACA,qBAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA;AAAA,GACF;AACF;;;;"}
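`readOptionalHumanDurationInMs` in the file above normalizes every duration-typed setting to plain milliseconds before it is handed to kafkajs. The conversion uses luxon; roughly:

```typescript
import { Duration } from 'luxon';

// A HumanDuration object such as { minutes: 1, seconds: 30 } becomes 90000 ms.
const ms = Duration.fromObject({ minutes: 1, seconds: 30 }).as('milliseconds');
console.log(ms); // 90000
```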
@@ -0,0 +1,35 @@
+ 'use strict';
+
+ var backendPluginApi = require('@backstage/backend-plugin-api');
+ var KafkaConsumerClient = require('../publisher/KafkaConsumerClient.cjs.js');
+ var pluginEventsNode = require('@backstage/plugin-events-node');
+
+ const eventsModuleKafkaConsumingEventPublisher = backendPluginApi.createBackendModule({
+   pluginId: "events",
+   moduleId: "kafka-consuming-event-publisher",
+   register(env) {
+     env.registerInit({
+       deps: {
+         config: backendPluginApi.coreServices.rootConfig,
+         events: pluginEventsNode.eventsServiceRef,
+         logger: backendPluginApi.coreServices.logger,
+         lifecycle: backendPluginApi.coreServices.lifecycle
+       },
+       async init({ config, logger, events, lifecycle }) {
+         const kafka = KafkaConsumerClient.KafkaConsumerClient.fromConfig({
+           config,
+           events,
+           logger
+         });
+         if (!kafka) {
+           return;
+         }
+         await kafka.start();
+         lifecycle.addShutdownHook(async () => await kafka.shutdown());
+       }
+     });
+   }
+ });
+
+ exports.eventsModuleKafkaConsumingEventPublisher = eventsModuleKafkaConsumingEventPublisher;
+ //# sourceMappingURL=eventsModuleKafkaConsumingEventPublisher.cjs.js.map
@@ -0,0 +1 @@
+ {"version":3,"file":"eventsModuleKafkaConsumingEventPublisher.cjs.js","sources":["../../src/service/eventsModuleKafkaConsumingEventPublisher.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n coreServices,\n createBackendModule,\n} from '@backstage/backend-plugin-api';\nimport { KafkaConsumerClient } from '../publisher/KafkaConsumerClient';\nimport { eventsServiceRef } from '@backstage/plugin-events-node';\n\n/**\n * Kafka module for the Events plugin.\n *\n * @public\n */\nexport const eventsModuleKafkaConsumingEventPublisher = createBackendModule({\n pluginId: 'events',\n moduleId: 'kafka-consuming-event-publisher',\n register(env) {\n env.registerInit({\n deps: {\n config: coreServices.rootConfig,\n events: eventsServiceRef,\n logger: coreServices.logger,\n lifecycle: coreServices.lifecycle,\n },\n async init({ config, logger, events, lifecycle }) {\n const kafka = KafkaConsumerClient.fromConfig({\n config,\n events,\n logger,\n });\n\n if (!kafka) {\n return;\n }\n\n await kafka.start();\n\n lifecycle.addShutdownHook(async () => await kafka.shutdown());\n },\n });\n },\n});\n"],"names":["createBackendModule","coreServices","eventsServiceRef","KafkaConsumerClient"],"mappings":";;;;;;AA2BO,MAAM,2CAA2CA,oCAAoB,CAAA;AAAA,EAC1E,QAAU,EAAA,QAAA;AAAA,EACV,QAAU,EAAA,iCAAA;AAAA,EACV,SAAS,GAAK,EAAA;AACZ,IAAA,GAAA,CAAI,YAAa,CAAA;AAAA,MACf,IAAM,EAAA;AAAA,QACJ,QAAQC,6BAAa,CAAA,UAAA;AAAA,QACrB,MAAQ,EAAAC,iCAAA;AAAA,QACR,QAAQD,6BAAa,CAAA,MAAA;AAAA,QACrB,WAAWA,6BAAa,CAAA;AAAA,OAC1B;AAAA,MACA,MAAM,IAAK,CAAA,EAAE,QAAQ,MAAQ,EAAA,MAAA,EAAQ,WAAa,EAAA;AAChD,QAAM,MAAA,KAAA,GAAQE,wCAAoB,UAAW,CAAA;AAAA,UAC3C,MAAA;AAAA,UACA,MAAA;AAAA,UACA;AAAA,SACD,CAAA;AAED,QAAA,IAAI,CAAC,KAAO,EAAA;AACV,UAAA;AAAA;AAGF,QAAA,MAAM,MAAM,KAAM,EAAA;AAElB,QAAA,SAAA,CAAU,eAAgB,CAAA,YAAY,MAAM,KAAA,CAAM,UAAU,CAAA;AAAA;AAC9D,KACD,CAAA;AAAA;AAEL,CAAC;;;;"}
package/package.json ADDED
@@ -0,0 +1,61 @@
+ {
+   "name": "@backstage/plugin-events-backend-module-kafka",
+   "version": "0.0.0-nightly-20250612024034",
+   "description": "The kafka backend module for the events plugin.",
+   "backstage": {
+     "role": "backend-plugin-module",
+     "pluginId": "events",
+     "pluginPackage": "@backstage/plugin-events-backend",
+     "features": {
+       ".": "@backstage/BackendFeature"
+     }
+   },
+   "publishConfig": {
+     "access": "public",
+     "main": "dist/index.cjs.js",
+     "types": "dist/index.d.ts"
+   },
+   "homepage": "https://backstage.io",
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/backstage/backstage",
+     "directory": "plugins/events-backend-module-kafka"
+   },
+   "license": "Apache-2.0",
+   "main": "dist/index.cjs.js",
+   "types": "dist/index.d.ts",
+   "files": [
+     "dist",
+     "config.d.ts"
+   ],
+   "scripts": {
+     "build": "backstage-cli package build",
+     "clean": "backstage-cli package clean",
+     "lint": "backstage-cli package lint",
+     "prepack": "backstage-cli package prepack",
+     "postpack": "backstage-cli package postpack",
+     "start": "backstage-cli package start",
+     "test": "backstage-cli package test"
+   },
+   "dependencies": {
+     "@backstage/backend-plugin-api": "0.0.0-nightly-20250612024034",
+     "@backstage/config": "1.3.2",
+     "@backstage/plugin-events-node": "0.0.0-nightly-20250612024034",
+     "@backstage/types": "1.2.1",
+     "kafkajs": "^2.2.4",
+     "luxon": "^3.0.0"
+   },
+   "devDependencies": {
+     "@backstage/backend-test-utils": "0.0.0-nightly-20250612024034",
+     "@backstage/cli": "0.0.0-nightly-20250612024034",
+     "@backstage/plugin-events-backend-test-utils": "0.0.0-nightly-20250612024034"
+   },
+   "configSchema": "config.d.ts",
+   "typesVersions": {
+     "*": {
+       "package.json": [
+         "package.json"
+       ]
+     }
+   }
+ }