@backstage/plugin-events-backend-module-kafka 0.1.6-next.0 → 0.2.0

This diff compares the contents of two publicly released versions of the package, as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (33)
  1. package/CHANGELOG.md +25 -0
  2. package/README.md +67 -25
  3. package/config.d.ts +504 -136
  4. package/dist/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.cjs.js +67 -0
  5. package/dist/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.cjs.js.map +1 -0
  6. package/dist/KafkaConsumingEventPublisher/config.cjs.js +71 -0
  7. package/dist/KafkaConsumingEventPublisher/config.cjs.js.map +1 -0
  8. package/dist/{service/eventsModuleKafkaConsumingEventPublisher.cjs.js → KafkaConsumingEventPublisher/module.cjs.js} +10 -9
  9. package/dist/KafkaConsumingEventPublisher/module.cjs.js.map +1 -0
  10. package/dist/KafkaPublishingEventConsumer/KafkaPublishingEventConsumer.cjs.js +73 -0
  11. package/dist/KafkaPublishingEventConsumer/KafkaPublishingEventConsumer.cjs.js.map +1 -0
  12. package/dist/KafkaPublishingEventConsumer/config.cjs.js +44 -0
  13. package/dist/KafkaPublishingEventConsumer/config.cjs.js.map +1 -0
  14. package/dist/KafkaPublishingEventConsumer/module.cjs.js +36 -0
  15. package/dist/KafkaPublishingEventConsumer/module.cjs.js.map +1 -0
  16. package/dist/index.cjs.js +10 -3
  17. package/dist/index.cjs.js.map +1 -1
  18. package/dist/index.d.ts +7 -4
  19. package/dist/utils/LoggerServiceAdapter.cjs.js.map +1 -0
  20. package/dist/utils/config.cjs.js +46 -0
  21. package/dist/utils/config.cjs.js.map +1 -0
  22. package/dist/utils/kafkaTransformers.cjs.js +24 -0
  23. package/dist/utils/kafkaTransformers.cjs.js.map +1 -0
  24. package/package.json +8 -8
  25. package/dist/publisher/KafkaConsumerClient.cjs.js +0 -44
  26. package/dist/publisher/KafkaConsumerClient.cjs.js.map +0 -1
  27. package/dist/publisher/KafkaConsumingEventPublisher.cjs.js +0 -63
  28. package/dist/publisher/KafkaConsumingEventPublisher.cjs.js.map +0 -1
  29. package/dist/publisher/LoggerServiceAdapter.cjs.js.map +0 -1
  30. package/dist/publisher/config.cjs.js +0 -102
  31. package/dist/publisher/config.cjs.js.map +0 -1
  32. package/dist/service/eventsModuleKafkaConsumingEventPublisher.cjs.js.map +0 -1
  33. package/dist/{publisher → utils}/LoggerServiceAdapter.cjs.js +0 -0
package/dist/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.cjs.js ADDED
@@ -0,0 +1,67 @@
+ 'use strict';
+
+ var kafkajs = require('kafkajs');
+ var config = require('./config.cjs.js');
+ var LoggerServiceAdapter = require('../utils/LoggerServiceAdapter.cjs.js');
+ var kafkaTransformers = require('../utils/kafkaTransformers.cjs.js');
+
+ class KafkaConsumingEventPublisher {
+   constructor(logger, events, config) {
+     this.events = events;
+     this.logger = logger.child({
+       class: KafkaConsumingEventPublisher.prototype.constructor.name,
+       instance: config.instance
+     });
+     const kafka = new kafkajs.Kafka({
+       ...config.kafkaConfig,
+       logCreator: LoggerServiceAdapter.loggerServiceAdapter(this.logger)
+     });
+     this.kafkaConsumers = config.kafkaConsumerConfigs.map((consumerConfig) => ({
+       consumer: kafka.consumer(consumerConfig.consumerConfig),
+       config: consumerConfig
+     }));
+   }
+   kafkaConsumers;
+   logger;
+   static fromConfig(env) {
+     const configs = config.readConsumerConfig(env.config, env.logger);
+     return configs.map(
+       (kafkaConfig) => new KafkaConsumingEventPublisher(env.logger, env.events, kafkaConfig)
+     );
+   }
+   async start() {
+     await Promise.all(
+       this.kafkaConsumers.map(async ({ consumer, config }) => {
+         const consumerLogger = this.logger.child({
+           id: `events.kafka.publisher:${config.backstageTopic}`,
+           groupId: config.consumerConfig.groupId,
+           kafkaTopics: config.consumerSubscribeTopics.topics.toString(),
+           backstageTopic: config.backstageTopic
+         });
+         try {
+           await consumer.connect();
+           await consumer.subscribe(config.consumerSubscribeTopics);
+           await consumer.run({
+             eachMessage: async ({ message }) => {
+               this.events.publish({
+                 topic: config.backstageTopic,
+                 eventPayload: JSON.parse(message.value?.toString()),
+                 metadata: kafkaTransformers.convertHeadersToMetadata(message.headers)
+               });
+             }
+           });
+         } catch (error) {
+           consumerLogger.error("Kafka consumer connection failed", error);
+         }
+       })
+     );
+   }
+   async shutdown() {
+     await Promise.all(
+       this.kafkaConsumers.map(({ consumer }) => consumer.disconnect())
+     );
+   }
+ }
+
+ exports.KafkaConsumingEventPublisher = KafkaConsumingEventPublisher;
+ //# sourceMappingURL=KafkaConsumingEventPublisher.cjs.js.map
package/dist/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"KafkaConsumingEventPublisher.cjs.js","sources":["../../src/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { EventsService } from '@backstage/plugin-events-node';\nimport { Consumer, Kafka } from 'kafkajs';\nimport {\n KafkaConsumerConfig,\n KafkaConsumingEventPublisherConfig,\n readConsumerConfig,\n} from './config';\nimport { Config } from '@backstage/config';\nimport { loggerServiceAdapter } from '../utils/LoggerServiceAdapter';\nimport { convertHeadersToMetadata } from '../utils/kafkaTransformers';\n\ntype KafkaConsumer = {\n consumer: Consumer;\n config: KafkaConsumerConfig;\n};\n\n/**\n * This class subscribes to Kafka topics and publishes events received to the registered subscriber.\n * The message payload will be used as the event payload and passed to the subscribers.\n */\nexport class KafkaConsumingEventPublisher {\n private readonly kafkaConsumers: KafkaConsumer[];\n private readonly logger: LoggerService;\n\n static fromConfig(env: {\n config: Config;\n events: EventsService;\n logger: LoggerService;\n }): KafkaConsumingEventPublisher[] {\n const configs = readConsumerConfig(env.config, env.logger);\n\n return configs.map(\n kafkaConfig =>\n new KafkaConsumingEventPublisher(env.logger, env.events, kafkaConfig),\n );\n }\n\n private constructor(\n logger: LoggerService,\n private readonly events: EventsService,\n config: KafkaConsumingEventPublisherConfig,\n ) {\n this.logger = logger.child({\n class: KafkaConsumingEventPublisher.prototype.constructor.name,\n instance: config.instance,\n });\n\n const kafka = new Kafka({\n ...config.kafkaConfig,\n logCreator: loggerServiceAdapter(this.logger),\n });\n\n this.kafkaConsumers = config.kafkaConsumerConfigs.map(consumerConfig => ({\n consumer: kafka.consumer(consumerConfig.consumerConfig),\n config: consumerConfig,\n }));\n }\n\n async start(): Promise<void> {\n await Promise.all(\n this.kafkaConsumers.map(async ({ consumer, config }) => {\n const consumerLogger = this.logger.child({\n id: `events.kafka.publisher:${config.backstageTopic}`,\n groupId: config.consumerConfig.groupId,\n kafkaTopics: config.consumerSubscribeTopics.topics.toString(),\n backstageTopic: config.backstageTopic,\n });\n try {\n await consumer.connect();\n await consumer.subscribe(config.consumerSubscribeTopics);\n\n await consumer.run({\n eachMessage: async ({ message }) => {\n this.events.publish({\n topic: config.backstageTopic,\n eventPayload: JSON.parse(message.value?.toString()!),\n metadata: convertHeadersToMetadata(message.headers),\n });\n },\n });\n } catch (error: any) {\n consumerLogger.error('Kafka consumer connection failed', error);\n }\n }),\n );\n }\n\n async shutdown(): Promise<void> {\n await Promise.all(\n this.kafkaConsumers.map(({ consumer }) => consumer.disconnect()),\n );\n 
}\n}\n"],"names":["Kafka","loggerServiceAdapter","readConsumerConfig","convertHeadersToMetadata"],"mappings":";;;;;;;AAoCO,MAAM,4BAAA,CAA6B;AAAA,EAiBhC,WAAA,CACN,MAAA,EACiB,MAAA,EACjB,MAAA,EACA;AAFiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAGjB,IAAA,IAAA,CAAK,MAAA,GAAS,OAAO,KAAA,CAAM;AAAA,MACzB,KAAA,EAAO,4BAAA,CAA6B,SAAA,CAAU,WAAA,CAAY,IAAA;AAAA,MAC1D,UAAU,MAAA,CAAO;AAAA,KAClB,CAAA;AAED,IAAA,MAAM,KAAA,GAAQ,IAAIA,aAAA,CAAM;AAAA,MACtB,GAAG,MAAA,CAAO,WAAA;AAAA,MACV,UAAA,EAAYC,yCAAA,CAAqB,IAAA,CAAK,MAAM;AAAA,KAC7C,CAAA;AAED,IAAA,IAAA,CAAK,cAAA,GAAiB,MAAA,CAAO,oBAAA,CAAqB,GAAA,CAAI,CAAA,cAAA,MAAmB;AAAA,MACvE,QAAA,EAAU,KAAA,CAAM,QAAA,CAAS,cAAA,CAAe,cAAc,CAAA;AAAA,MACtD,MAAA,EAAQ;AAAA,KACV,CAAE,CAAA;AAAA,EACJ;AAAA,EAnCiB,cAAA;AAAA,EACA,MAAA;AAAA,EAEjB,OAAO,WAAW,GAAA,EAIiB;AACjC,IAAA,MAAM,OAAA,GAAUC,yBAAA,CAAmB,GAAA,CAAI,MAAA,EAAQ,IAAI,MAAM,CAAA;AAEzD,IAAA,OAAO,OAAA,CAAQ,GAAA;AAAA,MACb,iBACE,IAAI,4BAAA,CAA6B,IAAI,MAAA,EAAQ,GAAA,CAAI,QAAQ,WAAW;AAAA,KACxE;AAAA,EACF;AAAA,EAuBA,MAAM,KAAA,GAAuB;AAC3B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,KAAK,cAAA,CAAe,GAAA,CAAI,OAAO,EAAE,QAAA,EAAU,QAAO,KAAM;AACtD,QAAA,MAAM,cAAA,GAAiB,IAAA,CAAK,MAAA,CAAO,KAAA,CAAM;AAAA,UACvC,EAAA,EAAI,CAAA,uBAAA,EAA0B,MAAA,CAAO,cAAc,CAAA,CAAA;AAAA,UACnD,OAAA,EAAS,OAAO,cAAA,CAAe,OAAA;AAAA,UAC/B,WAAA,EAAa,MAAA,CAAO,uBAAA,CAAwB,MAAA,CAAO,QAAA,EAAS;AAAA,UAC5D,gBAAgB,MAAA,CAAO;AAAA,SACxB,CAAA;AACD,QAAA,IAAI;AACF,UAAA,MAAM,SAAS,OAAA,EAAQ;AACvB,UAAA,MAAM,QAAA,CAAS,SAAA,CAAU,MAAA,CAAO,uBAAuB,CAAA;AAEvD,UAAA,MAAM,SAAS,GAAA,CAAI;AAAA,YACjB,WAAA,EAAa,OAAO,EAAE,OAAA,EAAQ,KAAM;AAClC,cAAA,IAAA,CAAK,OAAO,OAAA,CAAQ;AAAA,gBAClB,OAAO,MAAA,CAAO,cAAA;AAAA,gBACd,cAAc,IAAA,CAAK,KAAA,CAAM,OAAA,CAAQ,KAAA,EAAO,UAAW,CAAA;AAAA,gBACnD,QAAA,EAAUC,0CAAA,CAAyB,OAAA,CAAQ,OAAO;AAAA,eACnD,CAAA;AAAA,YACH;AAAA,WACD,CAAA;AAAA,QACH,SAAS,KAAA,EAAY;AACnB,UAAA,cAAA,CAAe,KAAA,CAAM,oCAAoC,KAAK,CAAA;AAAA,QAChE;AAAA,MACF,CAAC;AAAA,KACH;AAAA,EACF;AAAA,EAEA,MAAM,QAAA,GAA0B;AAC9B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,IAAA,CAAK,eAAe,GAAA,CAAI,CAAC,EAAE,QAAA,EAAS,KAAM,QAAA,CAAS,UAAA,EAAY;AAAA,KACjE;AAAA,EACF;AACF;;;;"}
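For orientation, the sketch below shows how a downstream module could consume the events this class forwards onto the Backstage events bus. The module id `example-subscriber` and the topic name `my-kafka-events` are illustrative, not part of this package; the `subscribe` call itself matches the `EventsService` API used in the diff above.

```ts
import {
  coreServices,
  createBackendModule,
} from '@backstage/backend-plugin-api';
import { eventsServiceRef } from '@backstage/plugin-events-node';

// Hypothetical subscriber for events that KafkaConsumingEventPublisher
// forwards onto the Backstage events bus.
export const exampleSubscriberModule = createBackendModule({
  pluginId: 'events',
  moduleId: 'example-subscriber',
  register(env) {
    env.registerInit({
      deps: { events: eventsServiceRef, logger: coreServices.logger },
      async init({ events, logger }) {
        await events.subscribe({
          id: 'example-subscriber',
          topics: ['my-kafka-events'], // a configured backstageTopic
          onEvent: async params => {
            // eventPayload is the JSON-parsed Kafka message value;
            // metadata carries the Kafka headers converted to strings.
            logger.info(`received event on topic ${params.topic}`);
          },
        });
      },
    });
  },
});
```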
package/dist/KafkaConsumingEventPublisher/config.cjs.js ADDED
@@ -0,0 +1,71 @@
+ 'use strict';
+
+ var config = require('../utils/config.cjs.js');
+
+ const CONFIG_PREFIX_PUBLISHER = "events.modules.kafka.kafkaConsumingEventPublisher";
+ const processSinglePublisher = (instanceName, publisherConfig) => {
+   return {
+     instance: instanceName,
+     kafkaConfig: config.readKafkaConfig(publisherConfig),
+     kafkaConsumerConfigs: publisherConfig.getConfigArray("topics").map((topicConfig) => {
+       return {
+         backstageTopic: topicConfig.getString("topic"),
+         consumerConfig: {
+           groupId: topicConfig.getString("kafka.groupId"),
+           sessionTimeout: config.readOptionalHumanDurationInMs(
+             topicConfig,
+             "kafka.sessionTimeout"
+           ),
+           rebalanceTimeout: config.readOptionalHumanDurationInMs(
+             topicConfig,
+             "kafka.rebalanceTimeout"
+           ),
+           heartbeatInterval: config.readOptionalHumanDurationInMs(
+             topicConfig,
+             "kafka.heartbeatInterval"
+           ),
+           metadataMaxAge: config.readOptionalHumanDurationInMs(
+             topicConfig,
+             "kafka.metadataMaxAge"
+           ),
+           maxBytesPerPartition: topicConfig.getOptionalNumber(
+             "kafka.maxBytesPerPartition"
+           ),
+           minBytes: topicConfig.getOptionalNumber("kafka.minBytes"),
+           maxBytes: topicConfig.getOptionalNumber("kafka.maxBytes"),
+           maxWaitTimeInMs: config.readOptionalHumanDurationInMs(
+             topicConfig,
+             "kafka.maxWaitTime"
+           )
+         },
+         consumerSubscribeTopics: {
+           topics: topicConfig.getStringArray("kafka.topics")
+         }
+       };
+     })
+   };
+ };
+ const readConsumerConfig = (config, logger) => {
+   const publishersConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);
+   if (publishersConfig?.getOptionalString("clientId")) {
+     logger.warn(
+       "Legacy single config format detected at events.modules.kafka.kafkaConsumingEventPublisher."
+     );
+     return [
+       processSinglePublisher(
+         "default",
+         // use `default` as instance name for legacy single config
+         publishersConfig
+       )
+     ];
+   }
+   return publishersConfig?.keys()?.map(
+     (publisherKey) => processSinglePublisher(
+       publisherKey,
+       publishersConfig.getConfig(publisherKey)
+     )
+   ) ?? [];
+ };
+
+ exports.readConsumerConfig = readConsumerConfig;
+ //# sourceMappingURL=config.cjs.js.map
package/dist/KafkaConsumingEventPublisher/config.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"config.cjs.js","sources":["../../src/KafkaConsumingEventPublisher/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Config } from '@backstage/config';\nimport { ConsumerConfig, ConsumerSubscribeTopics, KafkaConfig } from 'kafkajs';\nimport {\n readKafkaConfig,\n readOptionalHumanDurationInMs,\n} from '../utils/config';\nimport { LoggerService } from '@backstage/backend-plugin-api';\n\nexport interface KafkaConsumerConfig {\n backstageTopic: string;\n consumerConfig: ConsumerConfig;\n consumerSubscribeTopics: ConsumerSubscribeTopics;\n}\n\nexport interface KafkaConsumingEventPublisherConfig {\n instance: string;\n kafkaConfig: KafkaConfig;\n kafkaConsumerConfigs: KafkaConsumerConfig[];\n}\n\nconst CONFIG_PREFIX_PUBLISHER =\n 'events.modules.kafka.kafkaConsumingEventPublisher';\n\nconst processSinglePublisher = (\n instanceName: string,\n publisherConfig: Config,\n): KafkaConsumingEventPublisherConfig => {\n return {\n instance: instanceName,\n kafkaConfig: readKafkaConfig(publisherConfig),\n kafkaConsumerConfigs: publisherConfig\n .getConfigArray('topics')\n .map(topicConfig => {\n return {\n backstageTopic: topicConfig.getString('topic'),\n consumerConfig: {\n groupId: topicConfig.getString('kafka.groupId'),\n sessionTimeout: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.sessionTimeout',\n ),\n rebalanceTimeout: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.rebalanceTimeout',\n ),\n heartbeatInterval: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.heartbeatInterval',\n ),\n metadataMaxAge: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.metadataMaxAge',\n ),\n maxBytesPerPartition: topicConfig.getOptionalNumber(\n 'kafka.maxBytesPerPartition',\n ),\n minBytes: topicConfig.getOptionalNumber('kafka.minBytes'),\n maxBytes: topicConfig.getOptionalNumber('kafka.maxBytes'),\n maxWaitTimeInMs: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.maxWaitTime',\n ),\n },\n consumerSubscribeTopics: {\n topics: topicConfig.getStringArray('kafka.topics'),\n },\n };\n }),\n };\n};\n\nexport const readConsumerConfig = (\n config: Config,\n logger: LoggerService,\n): KafkaConsumingEventPublisherConfig[] => {\n const publishersConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);\n\n // Check for legacy single publisher format\n if (publishersConfig?.getOptionalString('clientId')) {\n logger.warn(\n 'Legacy single config format detected at events.modules.kafka.kafkaConsumingEventPublisher.',\n );\n return [\n processSinglePublisher(\n 'default', // use `default` as instance name for legacy single config\n publishersConfig,\n ),\n ];\n }\n\n return (\n publishersConfig\n ?.keys()\n ?.map(publisherKey =>\n processSinglePublisher(\n publisherKey,\n publishersConfig.getConfig(publisherKey),\n ),\n ) ?? 
[]\n );\n};\n"],"names":["readKafkaConfig","readOptionalHumanDurationInMs"],"mappings":";;;;AAmCA,MAAM,uBAAA,GACJ,mDAAA;AAEF,MAAM,sBAAA,GAAyB,CAC7B,YAAA,EACA,eAAA,KACuC;AACvC,EAAA,OAAO;AAAA,IACL,QAAA,EAAU,YAAA;AAAA,IACV,WAAA,EAAaA,uBAAgB,eAAe,CAAA;AAAA,IAC5C,sBAAsB,eAAA,CACnB,cAAA,CAAe,QAAQ,CAAA,CACvB,IAAI,CAAA,WAAA,KAAe;AAClB,MAAA,OAAO;AAAA,QACL,cAAA,EAAgB,WAAA,CAAY,SAAA,CAAU,OAAO,CAAA;AAAA,QAC7C,cAAA,EAAgB;AAAA,UACd,OAAA,EAAS,WAAA,CAAY,SAAA,CAAU,eAAe,CAAA;AAAA,UAC9C,cAAA,EAAgBC,oCAAA;AAAA,YACd,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,gBAAA,EAAkBA,oCAAA;AAAA,YAChB,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,iBAAA,EAAmBA,oCAAA;AAAA,YACjB,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,cAAA,EAAgBA,oCAAA;AAAA,YACd,WAAA;AAAA,YACA;AAAA,WACF;AAAA,UACA,sBAAsB,WAAA,CAAY,iBAAA;AAAA,YAChC;AAAA,WACF;AAAA,UACA,QAAA,EAAU,WAAA,CAAY,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,UACxD,QAAA,EAAU,WAAA,CAAY,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,UACxD,eAAA,EAAiBA,oCAAA;AAAA,YACf,WAAA;AAAA,YACA;AAAA;AACF,SACF;AAAA,QACA,uBAAA,EAAyB;AAAA,UACvB,MAAA,EAAQ,WAAA,CAAY,cAAA,CAAe,cAAc;AAAA;AACnD,OACF;AAAA,IACF,CAAC;AAAA,GACL;AACF,CAAA;AAEO,MAAM,kBAAA,GAAqB,CAChC,MAAA,EACA,MAAA,KACyC;AACzC,EAAA,MAAM,gBAAA,GAAmB,MAAA,CAAO,iBAAA,CAAkB,uBAAuB,CAAA;AAGzE,EAAA,IAAI,gBAAA,EAAkB,iBAAA,CAAkB,UAAU,CAAA,EAAG;AACnD,IAAA,MAAA,CAAO,IAAA;AAAA,MACL;AAAA,KACF;AACA,IAAA,OAAO;AAAA,MACL,sBAAA;AAAA,QACE,SAAA;AAAA;AAAA,QACA;AAAA;AACF,KACF;AAAA,EACF;AAEA,EAAA,OACE,gBAAA,EACI,MAAK,EACL,GAAA;AAAA,IAAI,CAAA,YAAA,KACJ,sBAAA;AAAA,MACE,YAAA;AAAA,MACA,gBAAA,CAAiB,UAAU,YAAY;AAAA;AACzC,OACG,EAAC;AAEZ;;;;"}
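The new `readConsumerConfig` supports several named instances instead of one fixed block. Below is a minimal shape sketch of what it reads, using `ConfigReader` from `@backstage/config`; the instance name `prod` and all broker, group, and topic values are illustrative.

```ts
import { ConfigReader } from '@backstage/config';

// Shape sketch of the new multi-instance consumer configuration.
const config = new ConfigReader({
  events: {
    modules: {
      kafka: {
        kafkaConsumingEventPublisher: {
          prod: {
            clientId: 'backstage-events',
            brokers: ['kafka-1:9092', 'kafka-2:9092'],
            topics: [
              {
                // Backstage topic the Kafka messages are published to
                topic: 'my-kafka-events',
                kafka: {
                  // Kafka topics to subscribe to
                  topics: ['backstage-events-v1'],
                  groupId: 'backstage-consumer',
                  // HumanDuration values are converted to milliseconds
                  sessionTimeout: { seconds: 30 },
                },
              },
            ],
          },
        },
      },
    },
  },
});
```

If `clientId` is set directly under `kafkaConsumingEventPublisher`, the block is treated as the legacy single-instance format: it is read as one publisher named `default` and a warning is logged.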
package/dist/{service/eventsModuleKafkaConsumingEventPublisher.cjs.js → KafkaConsumingEventPublisher/module.cjs.js} RENAMED
@@ -1,8 +1,8 @@
  'use strict';

  var backendPluginApi = require('@backstage/backend-plugin-api');
- var KafkaConsumerClient = require('../publisher/KafkaConsumerClient.cjs.js');
  var pluginEventsNode = require('@backstage/plugin-events-node');
+ var KafkaConsumingEventPublisher = require('./KafkaConsumingEventPublisher.cjs.js');

  const eventsModuleKafkaConsumingEventPublisher = backendPluginApi.createBackendModule({
    pluginId: "events",
@@ -13,23 +13,24 @@ const eventsModuleKafkaConsumingEventPublisher = backendPluginApi.createBackendM
          config: backendPluginApi.coreServices.rootConfig,
          events: pluginEventsNode.eventsServiceRef,
          logger: backendPluginApi.coreServices.logger,
-         lifecycle: backendPluginApi.coreServices.lifecycle
+         lifecycle: backendPluginApi.coreServices.rootLifecycle
        },
        async init({ config, logger, events, lifecycle }) {
-         const kafka = KafkaConsumerClient.KafkaConsumerClient.fromConfig({
+         const consumers = KafkaConsumingEventPublisher.KafkaConsumingEventPublisher.fromConfig({
            config,
            events,
            logger
          });
-         if (!kafka) {
-           return;
-         }
-         await kafka.start();
-         lifecycle.addShutdownHook(async () => await kafka.shutdown());
+         lifecycle.addStartupHook(async () => {
+           await Promise.all(consumers.map((consumer) => consumer.start()));
+         });
+         lifecycle.addShutdownHook(async () => {
+           await Promise.all(consumers.map((consumer) => consumer.shutdown()));
+         });
        }
      });
    }
  });

  exports.eventsModuleKafkaConsumingEventPublisher = eventsModuleKafkaConsumingEventPublisher;
- //# sourceMappingURL=eventsModuleKafkaConsumingEventPublisher.cjs.js.map
+ //# sourceMappingURL=module.cjs.js.map
package/dist/KafkaConsumingEventPublisher/module.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"module.cjs.js","sources":["../../src/KafkaConsumingEventPublisher/module.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n coreServices,\n createBackendModule,\n} from '@backstage/backend-plugin-api';\nimport { eventsServiceRef } from '@backstage/plugin-events-node';\nimport { KafkaConsumingEventPublisher } from './KafkaConsumingEventPublisher';\n\n/**\n * Reads messages off of Kafka topics and forwards them into the Backstage events system.\n *\n * @public\n */\nexport const eventsModuleKafkaConsumingEventPublisher = createBackendModule({\n pluginId: 'events',\n moduleId: 'kafka-consuming-event-publisher',\n register(env) {\n env.registerInit({\n deps: {\n config: coreServices.rootConfig,\n events: eventsServiceRef,\n logger: coreServices.logger,\n lifecycle: coreServices.rootLifecycle,\n },\n async init({ config, logger, events, lifecycle }) {\n const consumers = KafkaConsumingEventPublisher.fromConfig({\n config,\n events,\n logger,\n });\n\n lifecycle.addStartupHook(async () => {\n await Promise.all(consumers.map(consumer => consumer.start()));\n });\n\n lifecycle.addShutdownHook(async () => {\n await Promise.all(consumers.map(consumer => consumer.shutdown()));\n });\n },\n });\n },\n});\n"],"names":["createBackendModule","coreServices","eventsServiceRef","KafkaConsumingEventPublisher"],"mappings":";;;;;;AA2BO,MAAM,2CAA2CA,oCAAA,CAAoB;AAAA,EAC1E,QAAA,EAAU,QAAA;AAAA,EACV,QAAA,EAAU,iCAAA;AAAA,EACV,SAAS,GAAA,EAAK;AACZ,IAAA,GAAA,CAAI,YAAA,CAAa;AAAA,MACf,IAAA,EAAM;AAAA,QACJ,QAAQC,6BAAA,CAAa,UAAA;AAAA,QACrB,MAAA,EAAQC,iCAAA;AAAA,QACR,QAAQD,6BAAA,CAAa,MAAA;AAAA,QACrB,WAAWA,6BAAA,CAAa;AAAA,OAC1B;AAAA,MACA,MAAM,IAAA,CAAK,EAAE,QAAQ,MAAA,EAAQ,MAAA,EAAQ,WAAU,EAAG;AAChD,QAAA,MAAM,SAAA,GAAYE,0DAA6B,UAAA,CAAW;AAAA,UACxD,MAAA;AAAA,UACA,MAAA;AAAA,UACA;AAAA,SACD,CAAA;AAED,QAAA,SAAA,CAAU,eAAe,YAAY;AACnC,UAAA,MAAM,OAAA,CAAQ,IAAI,SAAA,CAAU,GAAA,CAAI,cAAY,QAAA,CAAS,KAAA,EAAO,CAAC,CAAA;AAAA,QAC/D,CAAC,CAAA;AAED,QAAA,SAAA,CAAU,gBAAgB,YAAY;AACpC,UAAA,MAAM,OAAA,CAAQ,IAAI,SAAA,CAAU,GAAA,CAAI,cAAY,QAAA,CAAS,QAAA,EAAU,CAAC,CAAA;AAAA,QAClE,CAAC,CAAA;AAAA,MACH;AAAA,KACD,CAAA;AAAA,EACH;AACF,CAAC;;;;"}
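A rough sketch of how the new root-lifecycle wiring could be exercised with the test utilities from the package's devDependencies. The config data is illustrative, the broker address is a placeholder (connection failures are caught and logged by the consumer rather than thrown), and using `TestEventsService` as an `eventsServiceRef` override is an assumption about test setup, not something this diff prescribes.

```ts
import { createServiceFactory } from '@backstage/backend-plugin-api';
import { mockServices, startTestBackend } from '@backstage/backend-test-utils';
import { TestEventsService } from '@backstage/plugin-events-backend-test-utils';
import { eventsServiceRef } from '@backstage/plugin-events-node';
import kafkaEventsModule from '@backstage/plugin-events-backend-module-kafka';

it('starts the configured Kafka consumers via the root lifecycle', async () => {
  // Record published events instead of using the real events service.
  const events = new TestEventsService();

  await startTestBackend({
    features: [
      kafkaEventsModule,
      createServiceFactory({
        service: eventsServiceRef,
        deps: {},
        factory: () => events,
      }),
      mockServices.rootConfig.factory({
        data: {
          events: {
            modules: {
              kafka: {
                kafkaConsumingEventPublisher: {
                  default: {
                    clientId: 'backstage-events',
                    brokers: ['localhost:9092'], // placeholder broker
                    topics: [
                      {
                        topic: 'fake-topic',
                        kafka: { topics: ['topic-A'], groupId: 'my-group' },
                      },
                    ],
                  },
                },
              },
            },
          },
        },
      }),
    ],
  });
});
```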
package/dist/KafkaPublishingEventConsumer/KafkaPublishingEventConsumer.cjs.js ADDED
@@ -0,0 +1,73 @@
+ 'use strict';
+
+ var kafkajs = require('kafkajs');
+ var config = require('./config.cjs.js');
+ var LoggerServiceAdapter = require('../utils/LoggerServiceAdapter.cjs.js');
+ var kafkaTransformers = require('../utils/kafkaTransformers.cjs.js');
+
+ class KafkaPublishingEventConsumer {
+   constructor(logger, events, config) {
+     this.events = events;
+     this.logger = logger.child({
+       class: KafkaPublishingEventConsumer.prototype.constructor.name,
+       instance: config.instance
+     });
+     const kafka = new kafkajs.Kafka({
+       ...config.kafkaConfig,
+       logCreator: LoggerServiceAdapter.loggerServiceAdapter(this.logger)
+     });
+     this.kafkaPublishers = config.kafkaPublisherConfigs.map(
+       (publisherConfig) => ({
+         producer: kafka.producer(publisherConfig.producerConfig),
+         config: publisherConfig
+       })
+     );
+   }
+   kafkaPublishers;
+   logger;
+   static fromConfig(env) {
+     const configs = config.readPublisherConfig(env.config);
+     return configs.map(
+       (kafkaConfig) => new KafkaPublishingEventConsumer(env.logger, env.events, kafkaConfig)
+     );
+   }
+   async start() {
+     await Promise.all(
+       this.kafkaPublishers.map(async ({ producer, config }) => {
+         try {
+           await producer.connect();
+           this.events.subscribe({
+             id: `kafka:publisher:${config.backstageTopic}`,
+             topics: [config.backstageTopic],
+             onEvent: async (params) => {
+               await producer.send({
+                 topic: config.kafkaTopic,
+                 messages: [
+                   {
+                     value: kafkaTransformers.payloadToBuffer(params.eventPayload)
+                   }
+                 ]
+               });
+             }
+           });
+           this.logger.info(
+             `Subscribed to EventService, publishing events to external topic: ${config.backstageTopic}`
+           );
+         } catch (error) {
+           this.logger.error(
+             `Kafka producer connection failed for topic ${config.backstageTopic}`,
+             error
+           );
+         }
+       })
+     );
+   }
+   async shutdown() {
+     await Promise.all(
+       this.kafkaPublishers.map(({ producer }) => producer.disconnect())
+     );
+   }
+ }
+
+ exports.KafkaPublishingEventConsumer = KafkaPublishingEventConsumer;
+ //# sourceMappingURL=KafkaPublishingEventConsumer.cjs.js.map
package/dist/KafkaPublishingEventConsumer/KafkaPublishingEventConsumer.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"KafkaPublishingEventConsumer.cjs.js","sources":["../../src/KafkaPublishingEventConsumer/KafkaPublishingEventConsumer.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { EventParams, EventsService } from '@backstage/plugin-events-node';\nimport { Kafka, Producer } from 'kafkajs';\nimport {\n KafkaPublisherConfig,\n KafkaPublishingEventConsumerConfig,\n readPublisherConfig,\n} from './config';\nimport { Config } from '@backstage/config';\nimport { loggerServiceAdapter } from '../utils/LoggerServiceAdapter';\nimport { payloadToBuffer } from '../utils/kafkaTransformers';\n\ntype KafkaPublisher = {\n producer: Producer;\n config: KafkaPublisherConfig;\n};\n\n/**\n * This class subscribes to Backstage internal events and publishes them to Kafka topics.\n * The internal event payload will be serialized and sent to the configured Kafka topic.\n */\nexport class KafkaPublishingEventConsumer {\n private readonly kafkaPublishers: KafkaPublisher[];\n private readonly logger: LoggerService;\n\n static fromConfig(env: {\n config: Config;\n events: EventsService;\n logger: LoggerService;\n }): KafkaPublishingEventConsumer[] {\n const configs = readPublisherConfig(env.config);\n\n return configs.map(\n kafkaConfig =>\n new KafkaPublishingEventConsumer(env.logger, env.events, kafkaConfig),\n );\n }\n\n private constructor(\n logger: LoggerService,\n private readonly events: EventsService,\n config: KafkaPublishingEventConsumerConfig,\n ) {\n this.logger = logger.child({\n class: KafkaPublishingEventConsumer.prototype.constructor.name,\n instance: config.instance,\n });\n\n const kafka = new Kafka({\n ...config.kafkaConfig,\n logCreator: loggerServiceAdapter(this.logger),\n });\n\n this.kafkaPublishers = config.kafkaPublisherConfigs.map(\n publisherConfig => ({\n producer: kafka.producer(publisherConfig.producerConfig),\n config: publisherConfig,\n }),\n );\n }\n\n async start(): Promise<void> {\n await Promise.all(\n this.kafkaPublishers.map(async ({ producer, config }) => {\n try {\n await producer.connect();\n\n this.events.subscribe({\n id: `kafka:publisher:${config.backstageTopic}`,\n topics: [config.backstageTopic],\n onEvent: async (params: EventParams) => {\n await producer.send({\n topic: config.kafkaTopic,\n messages: [\n {\n value: payloadToBuffer(params.eventPayload),\n },\n ],\n });\n },\n });\n this.logger.info(\n `Subscribed to EventService, publishing events to external topic: ${config.backstageTopic}`,\n );\n } catch (error: any) {\n this.logger.error(\n `Kafka producer connection failed for topic ${config.backstageTopic}`,\n error,\n );\n }\n }),\n );\n }\n\n async shutdown(): Promise<void> {\n await Promise.all(\n this.kafkaPublishers.map(({ producer }) => producer.disconnect()),\n );\n 
}\n}\n"],"names":["Kafka","loggerServiceAdapter","readPublisherConfig","payloadToBuffer"],"mappings":";;;;;;;AAoCO,MAAM,4BAAA,CAA6B;AAAA,EAiBhC,WAAA,CACN,MAAA,EACiB,MAAA,EACjB,MAAA,EACA;AAFiB,IAAA,IAAA,CAAA,MAAA,GAAA,MAAA;AAGjB,IAAA,IAAA,CAAK,MAAA,GAAS,OAAO,KAAA,CAAM;AAAA,MACzB,KAAA,EAAO,4BAAA,CAA6B,SAAA,CAAU,WAAA,CAAY,IAAA;AAAA,MAC1D,UAAU,MAAA,CAAO;AAAA,KAClB,CAAA;AAED,IAAA,MAAM,KAAA,GAAQ,IAAIA,aAAA,CAAM;AAAA,MACtB,GAAG,MAAA,CAAO,WAAA;AAAA,MACV,UAAA,EAAYC,yCAAA,CAAqB,IAAA,CAAK,MAAM;AAAA,KAC7C,CAAA;AAED,IAAA,IAAA,CAAK,eAAA,GAAkB,OAAO,qBAAA,CAAsB,GAAA;AAAA,MAClD,CAAA,eAAA,MAAoB;AAAA,QAClB,QAAA,EAAU,KAAA,CAAM,QAAA,CAAS,eAAA,CAAgB,cAAc,CAAA;AAAA,QACvD,MAAA,EAAQ;AAAA,OACV;AAAA,KACF;AAAA,EACF;AAAA,EArCiB,eAAA;AAAA,EACA,MAAA;AAAA,EAEjB,OAAO,WAAW,GAAA,EAIiB;AACjC,IAAA,MAAM,OAAA,GAAUC,0BAAA,CAAoB,GAAA,CAAI,MAAM,CAAA;AAE9C,IAAA,OAAO,OAAA,CAAQ,GAAA;AAAA,MACb,iBACE,IAAI,4BAAA,CAA6B,IAAI,MAAA,EAAQ,GAAA,CAAI,QAAQ,WAAW;AAAA,KACxE;AAAA,EACF;AAAA,EAyBA,MAAM,KAAA,GAAuB;AAC3B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,KAAK,eAAA,CAAgB,GAAA,CAAI,OAAO,EAAE,QAAA,EAAU,QAAO,KAAM;AACvD,QAAA,IAAI;AACF,UAAA,MAAM,SAAS,OAAA,EAAQ;AAEvB,UAAA,IAAA,CAAK,OAAO,SAAA,CAAU;AAAA,YACpB,EAAA,EAAI,CAAA,gBAAA,EAAmB,MAAA,CAAO,cAAc,CAAA,CAAA;AAAA,YAC5C,MAAA,EAAQ,CAAC,MAAA,CAAO,cAAc,CAAA;AAAA,YAC9B,OAAA,EAAS,OAAO,MAAA,KAAwB;AACtC,cAAA,MAAM,SAAS,IAAA,CAAK;AAAA,gBAClB,OAAO,MAAA,CAAO,UAAA;AAAA,gBACd,QAAA,EAAU;AAAA,kBACR;AAAA,oBACE,KAAA,EAAOC,iCAAA,CAAgB,MAAA,CAAO,YAAY;AAAA;AAC5C;AACF,eACD,CAAA;AAAA,YACH;AAAA,WACD,CAAA;AACD,UAAA,IAAA,CAAK,MAAA,CAAO,IAAA;AAAA,YACV,CAAA,iEAAA,EAAoE,OAAO,cAAc,CAAA;AAAA,WAC3F;AAAA,QACF,SAAS,KAAA,EAAY;AACnB,UAAA,IAAA,CAAK,MAAA,CAAO,KAAA;AAAA,YACV,CAAA,2CAAA,EAA8C,OAAO,cAAc,CAAA,CAAA;AAAA,YACnE;AAAA,WACF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,KACH;AAAA,EACF;AAAA,EAEA,MAAM,QAAA,GAA0B;AAC9B,IAAA,MAAM,OAAA,CAAQ,GAAA;AAAA,MACZ,IAAA,CAAK,gBAAgB,GAAA,CAAI,CAAC,EAAE,QAAA,EAAS,KAAM,QAAA,CAAS,UAAA,EAAY;AAAA,KAClE;AAAA,EACF;AACF;;;;"}
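Seen from the producing side, any plugin with access to the `EventsService` can emit events that this class forwards to Kafka. A hypothetical helper, where `my-kafka-events` stands in for a configured `topic` value:

```ts
import { EventsService } from '@backstage/plugin-events-node';

// Illustrative producer-side usage: any event published to a configured
// backstage `topic` is picked up by KafkaPublishingEventConsumer and
// forwarded to the mapped Kafka topic, serialized via payloadToBuffer.
export async function emitExampleEvent(events: EventsService): Promise<void> {
  await events.publish({
    topic: 'my-kafka-events', // assumed to match a configured `topic` value
    eventPayload: { entityRef: 'component:default/example' },
  });
}
```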
package/dist/KafkaPublishingEventConsumer/config.cjs.js ADDED
@@ -0,0 +1,44 @@
+ 'use strict';
+
+ var config = require('../utils/config.cjs.js');
+
+ const CONFIG_PREFIX_PUBLISHER = "events.modules.kafka.kafkaPublishingEventConsumer";
+ const readPublisherConfig = (config$1) => {
+   const publishers = config$1.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);
+   return publishers?.keys()?.map((publisherKey) => {
+     const publisherConfig = publishers.getConfig(publisherKey);
+     return {
+       instance: publisherKey,
+       kafkaConfig: config.readKafkaConfig(publisherConfig),
+       kafkaPublisherConfigs: publisherConfig.getConfigArray("topics").map((topicConfig) => {
+         return {
+           backstageTopic: topicConfig.getString("topic"),
+           kafkaTopic: topicConfig.getString("kafka.topic"),
+           producerConfig: {
+             allowAutoTopicCreation: topicConfig.getOptionalBoolean(
+               "kafka.allowAutoTopicCreation"
+             ),
+             metadataMaxAge: config.readOptionalHumanDurationInMs(
+               topicConfig,
+               "kafka.metadataMaxAge"
+             ),
+             transactionTimeout: config.readOptionalHumanDurationInMs(
+               topicConfig,
+               "kafka.transactionTimeout"
+             ),
+             idempotent: topicConfig.getOptionalBoolean("kafka.idempotent"),
+             maxInFlightRequests: topicConfig.getOptionalNumber(
+               "kafka.maxInFlightRequests"
+             ),
+             retry: config.readRetryConfig(
+               topicConfig.getOptionalConfig("kafka.retry")
+             )
+           }
+         };
+       })
+     };
+   }) ?? [];
+ };
+
+ exports.readPublisherConfig = readPublisherConfig;
+ //# sourceMappingURL=config.cjs.js.map
package/dist/KafkaPublishingEventConsumer/config.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"config.cjs.js","sources":["../../src/KafkaPublishingEventConsumer/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Config } from '@backstage/config';\nimport {\n readKafkaConfig,\n readOptionalHumanDurationInMs,\n readRetryConfig,\n} from '../utils/config';\nimport { KafkaConfig, ProducerConfig } from 'kafkajs';\n\nexport interface KafkaPublisherConfig {\n backstageTopic: string;\n kafkaTopic: string;\n producerConfig: ProducerConfig;\n}\n\nexport interface KafkaPublishingEventConsumerConfig {\n instance: string;\n kafkaConfig: KafkaConfig;\n kafkaPublisherConfigs: KafkaPublisherConfig[];\n}\n\nconst CONFIG_PREFIX_PUBLISHER =\n 'events.modules.kafka.kafkaPublishingEventConsumer';\n\nexport const readPublisherConfig = (\n config: Config,\n): KafkaPublishingEventConsumerConfig[] => {\n const publishers = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);\n\n return (\n publishers?.keys()?.map(publisherKey => {\n const publisherConfig = publishers.getConfig(publisherKey);\n\n return {\n instance: publisherKey,\n kafkaConfig: readKafkaConfig(publisherConfig),\n kafkaPublisherConfigs: publisherConfig\n .getConfigArray('topics')\n .map(topicConfig => {\n return {\n backstageTopic: topicConfig.getString('topic'),\n kafkaTopic: topicConfig.getString('kafka.topic'),\n producerConfig: {\n allowAutoTopicCreation: topicConfig.getOptionalBoolean(\n 'kafka.allowAutoTopicCreation',\n ),\n metadataMaxAge: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.metadataMaxAge',\n ),\n transactionTimeout: readOptionalHumanDurationInMs(\n topicConfig,\n 'kafka.transactionTimeout',\n ),\n idempotent: topicConfig.getOptionalBoolean('kafka.idempotent'),\n maxInFlightRequests: topicConfig.getOptionalNumber(\n 'kafka.maxInFlightRequests',\n ),\n retry: readRetryConfig(\n topicConfig.getOptionalConfig('kafka.retry'),\n ),\n },\n };\n }),\n };\n }) ?? 
[]\n );\n};\n"],"names":["config","readKafkaConfig","readOptionalHumanDurationInMs","readRetryConfig"],"mappings":";;;;AAmCA,MAAM,uBAAA,GACJ,mDAAA;AAEK,MAAM,mBAAA,GAAsB,CACjCA,QAAA,KACyC;AACzC,EAAA,MAAM,UAAA,GAAaA,QAAA,CAAO,iBAAA,CAAkB,uBAAuB,CAAA;AAEnE,EAAA,OACE,UAAA,EAAY,IAAA,EAAK,EAAG,GAAA,CAAI,CAAA,YAAA,KAAgB;AACtC,IAAA,MAAM,eAAA,GAAkB,UAAA,CAAW,SAAA,CAAU,YAAY,CAAA;AAEzD,IAAA,OAAO;AAAA,MACL,QAAA,EAAU,YAAA;AAAA,MACV,WAAA,EAAaC,uBAAgB,eAAe,CAAA;AAAA,MAC5C,uBAAuB,eAAA,CACpB,cAAA,CAAe,QAAQ,CAAA,CACvB,IAAI,CAAA,WAAA,KAAe;AAClB,QAAA,OAAO;AAAA,UACL,cAAA,EAAgB,WAAA,CAAY,SAAA,CAAU,OAAO,CAAA;AAAA,UAC7C,UAAA,EAAY,WAAA,CAAY,SAAA,CAAU,aAAa,CAAA;AAAA,UAC/C,cAAA,EAAgB;AAAA,YACd,wBAAwB,WAAA,CAAY,kBAAA;AAAA,cAClC;AAAA,aACF;AAAA,YACA,cAAA,EAAgBC,oCAAA;AAAA,cACd,WAAA;AAAA,cACA;AAAA,aACF;AAAA,YACA,kBAAA,EAAoBA,oCAAA;AAAA,cAClB,WAAA;AAAA,cACA;AAAA,aACF;AAAA,YACA,UAAA,EAAY,WAAA,CAAY,kBAAA,CAAmB,kBAAkB,CAAA;AAAA,YAC7D,qBAAqB,WAAA,CAAY,iBAAA;AAAA,cAC/B;AAAA,aACF;AAAA,YACA,KAAA,EAAOC,sBAAA;AAAA,cACL,WAAA,CAAY,kBAAkB,aAAa;AAAA;AAC7C;AACF,SACF;AAAA,MACF,CAAC;AAAA,KACL;AAAA,EACF,CAAC,KAAK,EAAC;AAEX;;;;"}
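A shape sketch of the configuration `readPublisherConfig` reads; unlike the consumer side, there is no legacy single-instance fallback here. The instance name `prod` and all values are illustrative.

```ts
import { ConfigReader } from '@backstage/config';

// Shape sketch of the publisher configuration.
const config = new ConfigReader({
  events: {
    modules: {
      kafka: {
        kafkaPublishingEventConsumer: {
          prod: {
            clientId: 'backstage-events',
            brokers: ['kafka-1:9092'],
            topics: [
              {
                topic: 'my-kafka-events', // Backstage topic to subscribe to
                kafka: {
                  topic: 'backstage-events-out', // Kafka topic to produce to
                  allowAutoTopicCreation: false,
                  idempotent: true,
                  retry: { retries: 5 },
                },
              },
            ],
          },
        },
      },
    },
  },
});
```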
package/dist/KafkaPublishingEventConsumer/module.cjs.js ADDED
@@ -0,0 +1,36 @@
+ 'use strict';
+
+ var backendPluginApi = require('@backstage/backend-plugin-api');
+ var pluginEventsNode = require('@backstage/plugin-events-node');
+ var KafkaPublishingEventConsumer = require('./KafkaPublishingEventConsumer.cjs.js');
+
+ const eventsModuleKafkaPublishingEventConsumer = backendPluginApi.createBackendModule({
+   pluginId: "events",
+   moduleId: "kafka-publishing-event-consumer",
+   register(env) {
+     env.registerInit({
+       deps: {
+         config: backendPluginApi.coreServices.rootConfig,
+         events: pluginEventsNode.eventsServiceRef,
+         logger: backendPluginApi.coreServices.logger,
+         lifecycle: backendPluginApi.coreServices.rootLifecycle
+       },
+       async init({ config, logger, events, lifecycle }) {
+         const consumers = KafkaPublishingEventConsumer.KafkaPublishingEventConsumer.fromConfig({
+           config,
+           events,
+           logger
+         });
+         lifecycle.addStartupHook(async () => {
+           await Promise.all(consumers.map((consumer) => consumer.start()));
+         });
+         lifecycle.addShutdownHook(async () => {
+           await Promise.all(consumers.map((consumer) => consumer.shutdown()));
+         });
+       }
+     });
+   }
+ });
+
+ exports.eventsModuleKafkaPublishingEventConsumer = eventsModuleKafkaPublishingEventConsumer;
+ //# sourceMappingURL=module.cjs.js.map
package/dist/KafkaPublishingEventConsumer/module.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"module.cjs.js","sources":["../../src/KafkaPublishingEventConsumer/module.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n coreServices,\n createBackendModule,\n} from '@backstage/backend-plugin-api';\nimport { eventsServiceRef } from '@backstage/plugin-events-node';\nimport { KafkaPublishingEventConsumer } from './KafkaPublishingEventConsumer';\n\n/**\n * Reads internal Backstage events and forwards them to Kafka topics.\n *\n * @public\n */\nexport const eventsModuleKafkaPublishingEventConsumer = createBackendModule({\n pluginId: 'events',\n moduleId: 'kafka-publishing-event-consumer',\n register(env) {\n env.registerInit({\n deps: {\n config: coreServices.rootConfig,\n events: eventsServiceRef,\n logger: coreServices.logger,\n lifecycle: coreServices.rootLifecycle,\n },\n async init({ config, logger, events, lifecycle }) {\n const consumers = KafkaPublishingEventConsumer.fromConfig({\n config,\n events,\n logger,\n });\n\n lifecycle.addStartupHook(async () => {\n await Promise.all(consumers.map(consumer => consumer.start()));\n });\n\n lifecycle.addShutdownHook(async () => {\n await Promise.all(consumers.map(consumer => consumer.shutdown()));\n });\n },\n });\n },\n});\n"],"names":["createBackendModule","coreServices","eventsServiceRef","KafkaPublishingEventConsumer"],"mappings":";;;;;;AA2BO,MAAM,2CAA2CA,oCAAA,CAAoB;AAAA,EAC1E,QAAA,EAAU,QAAA;AAAA,EACV,QAAA,EAAU,iCAAA;AAAA,EACV,SAAS,GAAA,EAAK;AACZ,IAAA,GAAA,CAAI,YAAA,CAAa;AAAA,MACf,IAAA,EAAM;AAAA,QACJ,QAAQC,6BAAA,CAAa,UAAA;AAAA,QACrB,MAAA,EAAQC,iCAAA;AAAA,QACR,QAAQD,6BAAA,CAAa,MAAA;AAAA,QACrB,WAAWA,6BAAA,CAAa;AAAA,OAC1B;AAAA,MACA,MAAM,IAAA,CAAK,EAAE,QAAQ,MAAA,EAAQ,MAAA,EAAQ,WAAU,EAAG;AAChD,QAAA,MAAM,SAAA,GAAYE,0DAA6B,UAAA,CAAW;AAAA,UACxD,MAAA;AAAA,UACA,MAAA;AAAA,UACA;AAAA,SACD,CAAA;AAED,QAAA,SAAA,CAAU,eAAe,YAAY;AACnC,UAAA,MAAM,OAAA,CAAQ,IAAI,SAAA,CAAU,GAAA,CAAI,cAAY,QAAA,CAAS,KAAA,EAAO,CAAC,CAAA;AAAA,QAC/D,CAAC,CAAA;AAED,QAAA,SAAA,CAAU,gBAAgB,YAAY;AACpC,UAAA,MAAM,OAAA,CAAQ,IAAI,SAAA,CAAU,GAAA,CAAI,cAAY,QAAA,CAAS,QAAA,EAAU,CAAC,CAAA;AAAA,QAClE,CAAC,CAAA;AAAA,MACH;AAAA,KACD,CAAA;AAAA,EACH;AACF,CAAC;;;;"}
package/dist/index.cjs.js CHANGED
@@ -2,9 +2,16 @@

  Object.defineProperty(exports, '__esModule', { value: true });

- var eventsModuleKafkaConsumingEventPublisher = require('./service/eventsModuleKafkaConsumingEventPublisher.cjs.js');
+ var backendPluginApi = require('@backstage/backend-plugin-api');
+ var module$1 = require('./KafkaConsumingEventPublisher/module.cjs.js');
+ var module$2 = require('./KafkaPublishingEventConsumer/module.cjs.js');

+ var index = backendPluginApi.createBackendFeatureLoader({
+   *loader() {
+     yield module$1.eventsModuleKafkaConsumingEventPublisher;
+     yield module$2.eventsModuleKafkaPublishingEventConsumer;
+   }
+ });

-
- exports.default = eventsModuleKafkaConsumingEventPublisher.eventsModuleKafkaConsumingEventPublisher;
+ exports.default = index;
  //# sourceMappingURL=index.cjs.js.map
package/dist/index.cjs.js.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.cjs.js","sources":[],"sourcesContent":[],"names":[],"mappings":";;;;;;;;"}
+ {"version":3,"file":"index.cjs.js","sources":["../src/index.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { createBackendFeatureLoader } from '@backstage/backend-plugin-api';\nimport { eventsModuleKafkaConsumingEventPublisher } from './KafkaConsumingEventPublisher';\nimport { eventsModuleKafkaPublishingEventConsumer } from './KafkaPublishingEventConsumer';\n\n/**\n * The module \"kafka\" for the Backstage backend plugin \"events\"\n * adding Kafka-based event handling:\n * - Consumer: receives events from Kafka topics and passes them to the internal event broker\n * - Publisher: receives internal events and publishes them to Kafka topics\n *\n * @packageDocumentation\n */\n\nexport default createBackendFeatureLoader({\n *loader() {\n yield eventsModuleKafkaConsumingEventPublisher;\n yield eventsModuleKafkaPublishingEventConsumer;\n },\n});\n"],"names":["createBackendFeatureLoader","eventsModuleKafkaConsumingEventPublisher","eventsModuleKafkaPublishingEventConsumer"],"mappings":";;;;;;;;AA4BA,YAAeA,2CAAA,CAA2B;AAAA,EACxC,CAAC,MAAA,GAAS;AACR,IAAA,MAAMC,iDAAA;AACN,IAAA,MAAMC,iDAAA;AAAA,EACR;AACF,CAAC,CAAA;;;;"}
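With the default export now a feature loader, a single `backend.add` installs both modules. A minimal sketch, assuming a standard new-backend setup with `@backstage/backend-defaults`:

```ts
import { createBackend } from '@backstage/backend-defaults';

const backend = createBackend();
// The package's default export is a feature loader that installs both the
// consuming publisher and the publishing consumer modules in one add().
backend.add(import('@backstage/plugin-events-backend-module-kafka'));
backend.start();
```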
package/dist/index.d.ts CHANGED
@@ -1,10 +1,13 @@
  import * as _backstage_backend_plugin_api from '@backstage/backend-plugin-api';

  /**
- * Kafka module for the Events plugin.
+ * The module "kafka" for the Backstage backend plugin "events"
+ * adding Kafka-based event handling:
+ * - Consumer: receives events from Kafka topics and passes them to the internal event broker
+ * - Publisher: receives internal events and publishes them to Kafka topics
  *
- * @public
+ * @packageDocumentation
  */
- declare const eventsModuleKafkaConsumingEventPublisher: _backstage_backend_plugin_api.BackendFeature;
+ declare const _default: _backstage_backend_plugin_api.BackendFeature;

- export { eventsModuleKafkaConsumingEventPublisher as default };
+ export { _default as default };
package/dist/utils/LoggerServiceAdapter.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"LoggerServiceAdapter.cjs.js","sources":["../../src/utils/LoggerServiceAdapter.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { LogEntry, logLevel } from 'kafkajs';\n\nexport const loggerServiceAdapter = (loggerService: LoggerService) => {\n const logMethods: Record<logLevel, (message: string, meta?: object) => void> =\n {\n [logLevel.ERROR]: loggerService.error,\n [logLevel.WARN]: loggerService.warn,\n [logLevel.INFO]: loggerService.info,\n [logLevel.DEBUG]: loggerService.debug,\n [logLevel.NOTHING]: () => {},\n };\n\n return (_level: logLevel) => {\n return (entry: LogEntry) => {\n const { namespace, level, log } = entry;\n const { message, ...extra } = log;\n\n // Use loggerService method that matches the level\n logMethods[level].call(\n loggerService,\n `Kafka ${namespace} ${log.message}`,\n {\n ...extra,\n },\n );\n };\n };\n};\n"],"names":["logLevel"],"mappings":";;;;AAkBO,MAAM,oBAAA,GAAuB,CAAC,aAAA,KAAiC;AACpE,EAAA,MAAM,UAAA,GACJ;AAAA,IACE,CAACA,gBAAA,CAAS,KAAK,GAAG,aAAA,CAAc,KAAA;AAAA,IAChC,CAACA,gBAAA,CAAS,IAAI,GAAG,aAAA,CAAc,IAAA;AAAA,IAC/B,CAACA,gBAAA,CAAS,IAAI,GAAG,aAAA,CAAc,IAAA;AAAA,IAC/B,CAACA,gBAAA,CAAS,KAAK,GAAG,aAAA,CAAc,KAAA;AAAA,IAChC,CAACA,gBAAA,CAAS,OAAO,GAAG,MAAM;AAAA,IAAC;AAAA,GAC7B;AAEF,EAAA,OAAO,CAAC,MAAA,KAAqB;AAC3B,IAAA,OAAO,CAAC,KAAA,KAAoB;AAC1B,MAAA,MAAM,EAAE,SAAA,EAAW,KAAA,EAAO,GAAA,EAAI,GAAI,KAAA;AAClC,MAAA,MAAM,EAAE,OAAA,EAAS,GAAG,KAAA,EAAM,GAAI,GAAA;AAG9B,MAAA,UAAA,CAAW,KAAK,CAAA,CAAE,IAAA;AAAA,QAChB,aAAA;AAAA,QACA,CAAA,MAAA,EAAS,SAAS,CAAA,CAAA,EAAI,GAAA,CAAI,OAAO,CAAA,CAAA;AAAA,QACjC;AAAA,UACE,GAAG;AAAA;AACL,OACF;AAAA,IACF,CAAA;AAAA,EACF,CAAA;AACF;;;;"}
package/dist/utils/config.cjs.js ADDED
@@ -0,0 +1,46 @@
+ 'use strict';
+
+ var config = require('@backstage/config');
+ var types = require('@backstage/types');
+
+ const readOptionalHumanDurationInMs = (config$1, key) => {
+   const humanDuration = config$1.has(key) ? config.readDurationFromConfig(config$1, { key }) : void 0;
+   if (!humanDuration) return void 0;
+   return types.durationToMilliseconds(humanDuration);
+ };
+ const readRetryConfig = (config) => {
+   if (!config) {
+     return {};
+   }
+   return {
+     maxRetryTime: readOptionalHumanDurationInMs(config, "maxRetryTime"),
+     initialRetryTime: readOptionalHumanDurationInMs(config, "initialRetryTime"),
+     factor: config.getOptionalNumber("factor"),
+     multiplier: config.getOptionalNumber("multiplier"),
+     retries: config.getOptionalNumber("retries")
+   };
+ };
+ const readKafkaConfig = (config) => {
+   return {
+     clientId: config.getString("clientId"),
+     brokers: config.getStringArray("brokers"),
+     authenticationTimeout: readOptionalHumanDurationInMs(
+       config,
+       "authenticationTimeout"
+     ),
+     connectionTimeout: readOptionalHumanDurationInMs(
+       config,
+       "connectionTimeout"
+     ),
+     requestTimeout: readOptionalHumanDurationInMs(config, "requestTimeout"),
+     enforceRequestTimeout: config.getOptionalBoolean("enforceRequestTimeout"),
+     ssl: config.getOptional("ssl"),
+     sasl: config.getOptional("sasl"),
+     retry: readRetryConfig(config.getOptionalConfig("retry"))
+   };
+ };
+
+ exports.readKafkaConfig = readKafkaConfig;
+ exports.readOptionalHumanDurationInMs = readOptionalHumanDurationInMs;
+ exports.readRetryConfig = readRetryConfig;
+ //# sourceMappingURL=config.cjs.js.map
package/dist/utils/config.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"config.cjs.js","sources":["../../src/utils/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Config, readDurationFromConfig } from '@backstage/config';\nimport { durationToMilliseconds } from '@backstage/types';\nimport { KafkaConfig, RetryOptions } from 'kafkajs';\n\n/**\n * Reads an optional HumanDuration from the config and returns the value in milliseconds if the key is defined.\n *\n * @param config - The configuration object to read from.\n * @param key - The key to look up in the configuration.\n * @returns The duration in milliseconds, or undefined if the key is not defined.\n */\nexport const readOptionalHumanDurationInMs = (\n config: Config,\n key: string,\n): number | undefined => {\n const humanDuration = config.has(key)\n ? readDurationFromConfig(config, { key })\n : undefined;\n\n if (!humanDuration) return undefined;\n\n return durationToMilliseconds(humanDuration);\n};\n\n/**\n * Reads retry configuration options from the provided config object.\n *\n * @param config - The configuration object to read retry options from, or undefined.\n * @returns A RetryOptions object with optional retry settings, or an empty object if config is undefined.\n */\nexport const readRetryConfig = (config: Config | undefined): RetryOptions => {\n if (!config) {\n return {};\n }\n\n return {\n maxRetryTime: readOptionalHumanDurationInMs(config, 'maxRetryTime'),\n initialRetryTime: readOptionalHumanDurationInMs(config, 'initialRetryTime'),\n factor: config.getOptionalNumber('factor'),\n multiplier: config.getOptionalNumber('multiplier'),\n retries: config.getOptionalNumber('retries'),\n };\n};\n\n/**\n * Reads Kafka configuration from the provided config object.\n *\n * @param config - The configuration object containing Kafka settings.\n * @returns A KafkaConfig object with all necessary Kafka connection and authentication settings.\n */\nexport const readKafkaConfig = (config: Config): KafkaConfig => {\n return {\n clientId: config.getString('clientId'),\n brokers: config.getStringArray('brokers'),\n authenticationTimeout: readOptionalHumanDurationInMs(\n config,\n 'authenticationTimeout',\n ),\n connectionTimeout: readOptionalHumanDurationInMs(\n config,\n 'connectionTimeout',\n ),\n requestTimeout: readOptionalHumanDurationInMs(config, 'requestTimeout'),\n enforceRequestTimeout: config.getOptionalBoolean('enforceRequestTimeout'),\n ssl: config.getOptional('ssl') as KafkaConfig['ssl'],\n sasl: config.getOptional('sasl') as KafkaConfig['sasl'],\n retry: readRetryConfig(config.getOptionalConfig('retry')),\n 
};\n};\n"],"names":["config","readDurationFromConfig","durationToMilliseconds"],"mappings":";;;;;AA0BO,MAAM,6BAAA,GAAgC,CAC3CA,QAAA,EACA,GAAA,KACuB;AACvB,EAAA,MAAM,aAAA,GAAgBA,QAAA,CAAO,GAAA,CAAI,GAAG,CAAA,GAChCC,8BAAuBD,QAAA,EAAQ,EAAE,GAAA,EAAK,CAAA,GACtC,MAAA;AAEJ,EAAA,IAAI,CAAC,eAAe,OAAO,MAAA;AAE3B,EAAA,OAAOE,6BAAuB,aAAa,CAAA;AAC7C;AAQO,MAAM,eAAA,GAAkB,CAAC,MAAA,KAA6C;AAC3E,EAAA,IAAI,CAAC,MAAA,EAAQ;AACX,IAAA,OAAO,EAAC;AAAA,EACV;AAEA,EAAA,OAAO;AAAA,IACL,YAAA,EAAc,6BAAA,CAA8B,MAAA,EAAQ,cAAc,CAAA;AAAA,IAClE,gBAAA,EAAkB,6BAAA,CAA8B,MAAA,EAAQ,kBAAkB,CAAA;AAAA,IAC1E,MAAA,EAAQ,MAAA,CAAO,iBAAA,CAAkB,QAAQ,CAAA;AAAA,IACzC,UAAA,EAAY,MAAA,CAAO,iBAAA,CAAkB,YAAY,CAAA;AAAA,IACjD,OAAA,EAAS,MAAA,CAAO,iBAAA,CAAkB,SAAS;AAAA,GAC7C;AACF;AAQO,MAAM,eAAA,GAAkB,CAAC,MAAA,KAAgC;AAC9D,EAAA,OAAO;AAAA,IACL,QAAA,EAAU,MAAA,CAAO,SAAA,CAAU,UAAU,CAAA;AAAA,IACrC,OAAA,EAAS,MAAA,CAAO,cAAA,CAAe,SAAS,CAAA;AAAA,IACxC,qBAAA,EAAuB,6BAAA;AAAA,MACrB,MAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,iBAAA,EAAmB,6BAAA;AAAA,MACjB,MAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,cAAA,EAAgB,6BAAA,CAA8B,MAAA,EAAQ,gBAAgB,CAAA;AAAA,IACtE,qBAAA,EAAuB,MAAA,CAAO,kBAAA,CAAmB,uBAAuB,CAAA;AAAA,IACxE,GAAA,EAAK,MAAA,CAAO,WAAA,CAAY,KAAK,CAAA;AAAA,IAC7B,IAAA,EAAM,MAAA,CAAO,WAAA,CAAY,MAAM,CAAA;AAAA,IAC/B,KAAA,EAAO,eAAA,CAAgB,MAAA,CAAO,iBAAA,CAAkB,OAAO,CAAC;AAAA,GAC1D;AACF;;;;;;"}
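The duration helper accepts `HumanDuration` values from config and converts them to the millisecond numbers kafkajs expects. An equivalent sketch built from the same `@backstage/config` and `@backstage/types` calls the helper uses (the key `kafka.sessionTimeout` and its value are illustrative):

```ts
import { ConfigReader, readDurationFromConfig } from '@backstage/config';
import { durationToMilliseconds } from '@backstage/types';

// Inline equivalent of readOptionalHumanDurationInMs (internal helper).
const config = new ConfigReader({
  kafka: { sessionTimeout: { seconds: 45 } },
});

const sessionTimeoutMs = config.has('kafka.sessionTimeout')
  ? durationToMilliseconds(
      readDurationFromConfig(config, { key: 'kafka.sessionTimeout' }),
    )
  : undefined;
// sessionTimeoutMs === 45000; a missing key yields undefined
```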
package/dist/utils/kafkaTransformers.cjs.js ADDED
@@ -0,0 +1,24 @@
+ 'use strict';
+
+ const convertHeadersToMetadata = (headers) => {
+   if (!headers) return void 0;
+   const metadata = {};
+   Object.entries(headers).forEach(([key, value]) => {
+     if (Array.isArray(value)) metadata[key] = value.map((v) => v.toString());
+     else metadata[key] = value?.toString();
+   });
+   return metadata;
+ };
+ const payloadToBuffer = (payload) => {
+   if (Buffer.isBuffer(payload)) {
+     return payload;
+   }
+   if (typeof payload === "string") {
+     return Buffer.from(payload, "utf8");
+   }
+   return Buffer.from(JSON.stringify(payload), "utf8");
+ };
+
+ exports.convertHeadersToMetadata = convertHeadersToMetadata;
+ exports.payloadToBuffer = payloadToBuffer;
+ //# sourceMappingURL=kafkaTransformers.cjs.js.map
package/dist/utils/kafkaTransformers.cjs.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"kafkaTransformers.cjs.js","sources":["../../src/utils/kafkaTransformers.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { EventParams } from '@backstage/plugin-events-node';\nimport { IHeaders } from 'kafkajs';\n\ntype EventMetadata = EventParams['metadata'];\n\nexport const convertHeadersToMetadata = (\n headers: IHeaders | undefined,\n): EventMetadata => {\n if (!headers) return undefined;\n\n const metadata: EventMetadata = {};\n\n Object.entries(headers).forEach(([key, value]) => {\n // If value is an array use toString() on all values converting any Buffer types to valid strings\n if (Array.isArray(value)) metadata[key] = value.map(v => v.toString());\n // Always return the values using toString() to catch all Buffer types that should be converted to strings\n else metadata[key] = value?.toString();\n });\n\n return metadata;\n};\n\nexport const payloadToBuffer = (payload: unknown): Buffer => {\n if (Buffer.isBuffer(payload)) {\n return payload;\n }\n\n if (typeof payload === 'string') {\n return Buffer.from(payload, 'utf8'); // More explicit encoding\n }\n\n // Convert to JSON string then encode\n return Buffer.from(JSON.stringify(payload), 'utf8');\n};\n"],"names":[],"mappings":";;AAoBO,MAAM,wBAAA,GAA2B,CACtC,OAAA,KACkB;AAClB,EAAA,IAAI,CAAC,SAAS,OAAO,MAAA;AAErB,EAAA,MAAM,WAA0B,EAAC;AAEjC,EAAA,MAAA,CAAO,OAAA,CAAQ,OAAO,CAAA,CAAE,OAAA,CAAQ,CAAC,CAAC,GAAA,EAAK,KAAK,CAAA,KAAM;AAEhD,IAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA,EAAG,QAAA,CAAS,GAAG,CAAA,GAAI,KAAA,CAAM,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,QAAA,EAAU,CAAA;AAAA,SAEhE,QAAA,CAAS,GAAG,CAAA,GAAI,KAAA,EAAO,QAAA,EAAS;AAAA,EACvC,CAAC,CAAA;AAED,EAAA,OAAO,QAAA;AACT;AAEO,MAAM,eAAA,GAAkB,CAAC,OAAA,KAA6B;AAC3D,EAAA,IAAI,MAAA,CAAO,QAAA,CAAS,OAAO,CAAA,EAAG;AAC5B,IAAA,OAAO,OAAA;AAAA,EACT;AAEA,EAAA,IAAI,OAAO,YAAY,QAAA,EAAU;AAC/B,IAAA,OAAO,MAAA,CAAO,IAAA,CAAK,OAAA,EAAS,MAAM,CAAA;AAAA,EACpC;AAGA,EAAA,OAAO,OAAO,IAAA,CAAK,IAAA,CAAK,SAAA,CAAU,OAAO,GAAG,MAAM,CAAA;AACpD;;;;;"}
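A round-trip sketch of the two helpers: what `payloadToBuffer` writes on the producing side is what the consumer's `eachMessage` handler parses back, and `convertHeadersToMetadata` flattens kafkajs header Buffers into strings. All values are illustrative.

```ts
// payloadToBuffer(eventPayload) for a non-string, non-Buffer payload:
const eventPayload = { entityRef: 'component:default/example' };
const messageValue = Buffer.from(JSON.stringify(eventPayload), 'utf8');

// ...which the consumer restores via JSON.parse(message.value?.toString()):
const restored = JSON.parse(messageValue.toString());
console.log(restored.entityRef); // 'component:default/example'

// convertHeadersToMetadata turns header values (Buffers or arrays of
// Buffers) into strings via toString():
const headers = { traceId: Buffer.from('abc123') };
const metadata = { traceId: headers.traceId.toString() }; // { traceId: 'abc123' }
```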
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@backstage/plugin-events-backend-module-kafka",
-   "version": "0.1.6-next.0",
+   "version": "0.2.0",
    "description": "The kafka backend module for the events plugin.",
    "backstage": {
      "role": "backend-plugin-module",
@@ -38,16 +38,16 @@
      "test": "backstage-cli package test"
    },
    "dependencies": {
-     "@backstage/backend-plugin-api": "1.5.1-next.0",
-     "@backstage/config": "1.3.6",
-     "@backstage/plugin-events-node": "0.4.18-next.0",
-     "@backstage/types": "1.2.2",
+     "@backstage/backend-plugin-api": "^1.6.0",
+     "@backstage/config": "^1.3.6",
+     "@backstage/plugin-events-node": "^0.4.18",
+     "@backstage/types": "^1.2.2",
      "kafkajs": "^2.2.4"
    },
    "devDependencies": {
-     "@backstage/backend-test-utils": "1.10.1-next.0",
-     "@backstage/cli": "0.34.6-next.0",
-     "@backstage/plugin-events-backend-test-utils": "0.1.51-next.0"
+     "@backstage/backend-test-utils": "^1.10.2",
+     "@backstage/cli": "^0.35.0",
+     "@backstage/plugin-events-backend-test-utils": "^0.1.51"
    },
    "configSchema": "config.d.ts",
    "typesVersions": {
package/dist/publisher/KafkaConsumerClient.cjs.js DELETED
@@ -1,44 +0,0 @@
- 'use strict';
-
- var kafkajs = require('kafkajs');
- var config = require('./config.cjs.js');
- var KafkaConsumingEventPublisher = require('./KafkaConsumingEventPublisher.cjs.js');
- var LoggerServiceAdapter = require('./LoggerServiceAdapter.cjs.js');
-
- class KafkaConsumerClient {
-   kafka;
-   consumers;
-   static fromConfig(options) {
-     const kafkaConfig = config.readConfig(options.config);
-     if (!kafkaConfig) {
-       options.logger.info(
-         "Kafka consumer not configured, skipping initialization"
-       );
-       return void 0;
-     }
-     return new KafkaConsumerClient(options.logger, options.events, kafkaConfig);
-   }
-   constructor(logger, events, config) {
-     this.kafka = new kafkajs.Kafka({
-       ...config.kafkaConfig,
-       logCreator: LoggerServiceAdapter.loggerServiceAdapter(logger)
-     });
-     this.consumers = config.kafkaConsumerConfigs.map(
-       (consumerConfig) => KafkaConsumingEventPublisher.KafkaConsumingEventPublisher.fromConfig({
-         kafkaClient: this.kafka,
-         config: consumerConfig,
-         logger,
-         events
-       })
-     );
-   }
-   async start() {
-     this.consumers.map(async (consumer) => await consumer.start());
-   }
-   async shutdown() {
-     this.consumers.map(async (consumer) => await consumer.shutdown());
-   }
- }
-
- exports.KafkaConsumerClient = KafkaConsumerClient;
- //# sourceMappingURL=KafkaConsumerClient.cjs.js.map