@backstage/plugin-events-backend-module-kafka 0.1.6-next.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/CHANGELOG.md +15 -0
  2. package/README.md +67 -25
  3. package/config.d.ts +504 -136
  4. package/dist/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.cjs.js +67 -0
  5. package/dist/KafkaConsumingEventPublisher/KafkaConsumingEventPublisher.cjs.js.map +1 -0
  6. package/dist/KafkaConsumingEventPublisher/config.cjs.js +71 -0
  7. package/dist/KafkaConsumingEventPublisher/config.cjs.js.map +1 -0
  8. package/dist/{service/eventsModuleKafkaConsumingEventPublisher.cjs.js → KafkaConsumingEventPublisher/module.cjs.js} +10 -9
  9. package/dist/KafkaConsumingEventPublisher/module.cjs.js.map +1 -0
  10. package/dist/KafkaPublishingEventConsumer/KafkaPublishingEventConsumer.cjs.js +73 -0
  11. package/dist/KafkaPublishingEventConsumer/KafkaPublishingEventConsumer.cjs.js.map +1 -0
  12. package/dist/KafkaPublishingEventConsumer/config.cjs.js +44 -0
  13. package/dist/KafkaPublishingEventConsumer/config.cjs.js.map +1 -0
  14. package/dist/KafkaPublishingEventConsumer/module.cjs.js +36 -0
  15. package/dist/KafkaPublishingEventConsumer/module.cjs.js.map +1 -0
  16. package/dist/index.cjs.js +10 -3
  17. package/dist/index.cjs.js.map +1 -1
  18. package/dist/index.d.ts +7 -4
  19. package/dist/utils/LoggerServiceAdapter.cjs.js.map +1 -0
  20. package/dist/utils/config.cjs.js +46 -0
  21. package/dist/utils/config.cjs.js.map +1 -0
  22. package/dist/utils/kafkaTransformers.cjs.js +24 -0
  23. package/dist/utils/kafkaTransformers.cjs.js.map +1 -0
  24. package/package.json +8 -8
  25. package/dist/publisher/KafkaConsumerClient.cjs.js +0 -44
  26. package/dist/publisher/KafkaConsumerClient.cjs.js.map +0 -1
  27. package/dist/publisher/KafkaConsumingEventPublisher.cjs.js +0 -63
  28. package/dist/publisher/KafkaConsumingEventPublisher.cjs.js.map +0 -1
  29. package/dist/publisher/LoggerServiceAdapter.cjs.js.map +0 -1
  30. package/dist/publisher/config.cjs.js +0 -102
  31. package/dist/publisher/config.cjs.js.map +0 -1
  32. package/dist/service/eventsModuleKafkaConsumingEventPublisher.cjs.js.map +0 -1
  33. package/dist/{publisher → utils}/LoggerServiceAdapter.cjs.js +0 -0
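The rename entries above show the 0.2.0 restructuring: the old publisher/ directory becomes KafkaConsumingEventPublisher/ (Kafka messages in, Backstage events out), a new KafkaPublishingEventConsumer/ (Backstage events in, Kafka messages out) is added, and shared helpers (LoggerServiceAdapter, config and transformer utilities) move to utils/. The hunks below are the deleted pre-0.2.0 publisher/ sources.

For orientation, a minimal wiring sketch — not part of this diff, assuming the standard new-backend setup from @backstage/backend-defaults. The module stays inert unless events.modules.kafka configuration is present, as the deleted KafkaConsumerClient below also shows:

import { createBackend } from '@backstage/backend-defaults';

const backend = createBackend();
// The events plugin provides the EventsService this module talks to.
backend.add(import('@backstage/plugin-events-backend'));
// Registers the Kafka module(s) with the events plugin.
backend.add(import('@backstage/plugin-events-backend-module-kafka'));
backend.start();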
package/dist/publisher/KafkaConsumerClient.cjs.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"KafkaConsumerClient.cjs.js","sources":["../../src/publisher/KafkaConsumerClient.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { Config } from '@backstage/config';\nimport { EventsService } from '@backstage/plugin-events-node';\nimport { Kafka } from 'kafkajs';\nimport { KafkaEventSourceConfig, readConfig } from './config';\nimport { KafkaConsumingEventPublisher } from './KafkaConsumingEventPublisher';\nimport { loggerServiceAdapter } from './LoggerServiceAdapter';\n\n/**\n * KafkaConsumerClient\n *\n * This class creates the Kafka client that will be used to create the KafkaConsumingEventPublisher\n */\nexport class KafkaConsumerClient {\n private readonly kafka: Kafka;\n private readonly consumers: KafkaConsumingEventPublisher[];\n\n static fromConfig(options: {\n config: Config;\n events: EventsService;\n logger: LoggerService;\n }): KafkaConsumerClient | undefined {\n const kafkaConfig = readConfig(options.config);\n\n if (!kafkaConfig) {\n options.logger.info(\n 'Kafka consumer not configured, skipping initialization',\n );\n return undefined;\n }\n\n return new KafkaConsumerClient(options.logger, options.events, kafkaConfig);\n }\n\n private constructor(\n logger: LoggerService,\n events: EventsService,\n config: KafkaEventSourceConfig,\n ) {\n this.kafka = new Kafka({\n ...config.kafkaConfig,\n logCreator: loggerServiceAdapter(logger),\n });\n\n this.consumers = config.kafkaConsumerConfigs.map(consumerConfig =>\n KafkaConsumingEventPublisher.fromConfig({\n kafkaClient: this.kafka,\n config: consumerConfig,\n logger,\n events,\n }),\n );\n }\n\n async start(): Promise<void> {\n this.consumers.map(async consumer => await consumer.start());\n }\n\n async shutdown(): Promise<void> {\n this.consumers.map(async consumer => await consumer.shutdown());\n }\n}\n"],"names":["readConfig","Kafka","loggerServiceAdapter","KafkaConsumingEventPublisher"],"mappings":";;;;;;;AA4BO,MAAM,mBAAA,CAAoB;AAAA,EACd,KAAA;AAAA,EACA,SAAA;AAAA,EAEjB,OAAO,WAAW,OAAA,EAIkB;AAClC,IAAA,MAAM,WAAA,GAAcA,iBAAA,CAAW,OAAA,CAAQ,MAAM,CAAA;AAE7C,IAAA,IAAI,CAAC,WAAA,EAAa;AAChB,MAAA,OAAA,CAAQ,MAAA,CAAO,IAAA;AAAA,QACb;AAAA,OACF;AACA,MAAA,OAAO,MAAA;AAAA,IACT;AAEA,IAAA,OAAO,IAAI,mBAAA,CAAoB,OAAA,CAAQ,MAAA,EAAQ,OAAA,CAAQ,QAAQ,WAAW,CAAA;AAAA,EAC5E;AAAA,EAEQ,WAAA,CACN,MAAA,EACA,MAAA,EACA,MAAA,EACA;AACA,IAAA,IAAA,CAAK,KAAA,GAAQ,IAAIC,aAAA,CAAM;AAAA,MACrB,GAAG,MAAA,CAAO,WAAA;AAAA,MACV,UAAA,EAAYC,0CAAqB,MAAM;AAAA,KACxC,CAAA;AAED,IAAA,IAAA,CAAK,SAAA,GAAY,OAAO,oBAAA,CAAqB,GAAA;AAAA,MAAI,CAAA,cAAA,KAC/CC,0DAA6B,UAAA,CAAW;AAAA,QACtC,aAAa,IAAA,CAAK,KAAA;AAAA,QAClB,MAAA,EAAQ,cAAA;AAAA,QACR,MAAA;AAAA,QACA;AAAA,OACD;AAAA,KACH;AAAA,EACF;AAAA,EAEA,MAAM,KAAA,GAAuB;AAC3B,IAAA,IAAA,CAAK,UAAU,GAAA,CAAI,OAAM,aAAY,MAAM,QAAA,CAAS,OAAO,CAAA;AAAA,EAC7D;AAAA,EAEA,MAAM,QAAA,GAA0B;AAC9B,IAAA,IAAA,CAAK,UAAU,GAAA,CAAI,OAAM,aAAY,MAAM,QAAA,CAAS,UAAU,CAAA;AAAA,EAChE;AACF;;;;"}
package/dist/publisher/KafkaConsumingEventPublisher.cjs.js
@@ -1,63 +0,0 @@
- 'use strict';
-
- class KafkaConsumingEventPublisher {
-   kafkaConsumer;
-   consumerSubscribeTopics;
-   backstageTopic;
-   logger;
-   static fromConfig(env) {
-     return new KafkaConsumingEventPublisher(
-       env.kafkaClient,
-       env.logger,
-       env.events,
-       env.config
-     );
-   }
-   events;
-   constructor(kafkaClient, logger, events, config) {
-     this.events = events;
-     this.kafkaConsumer = kafkaClient.consumer(config.consumerConfig);
-     this.consumerSubscribeTopics = config.consumerSubscribeTopics;
-     this.backstageTopic = config.backstageTopic;
-     const id = `events.kafka.publisher:${this.backstageTopic}`;
-     this.logger = logger.child({
-       class: KafkaConsumingEventPublisher.prototype.constructor.name,
-       groupId: config.consumerConfig.groupId,
-       kafkaTopics: config.consumerSubscribeTopics.topics.toString(),
-       backstageTopic: config.backstageTopic,
-       taskId: id
-     });
-   }
-   async start() {
-     try {
-       await this.kafkaConsumer.connect();
-       await this.kafkaConsumer.subscribe(this.consumerSubscribeTopics);
-       await this.kafkaConsumer.run({
-         eachMessage: async ({ message }) => {
-           this.events.publish({
-             topic: this.backstageTopic,
-             eventPayload: JSON.parse(message.value?.toString()),
-             metadata: this.convertHeadersToMetadata(message.headers)
-           });
-         }
-       });
-     } catch (error) {
-       this.logger.error("Kafka consumer connection failed ", error);
-     }
-   }
-   async shutdown() {
-     await this.kafkaConsumer.disconnect();
-   }
-   convertHeadersToMetadata = (headers) => {
-     if (!headers) return void 0;
-     const metadata = {};
-     Object.entries(headers).forEach(([key, value]) => {
-       if (Array.isArray(value)) metadata[key] = value.map((v) => v.toString());
-       else metadata[key] = value?.toString();
-     });
-     return metadata;
-   };
- }
-
- exports.KafkaConsumingEventPublisher = KafkaConsumingEventPublisher;
- //# sourceMappingURL=KafkaConsumingEventPublisher.cjs.js.map
package/dist/publisher/KafkaConsumingEventPublisher.cjs.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"KafkaConsumingEventPublisher.cjs.js","sources":["../../src/publisher/KafkaConsumingEventPublisher.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { EventParams, EventsService } from '@backstage/plugin-events-node';\nimport { Consumer, ConsumerSubscribeTopics, IHeaders, Kafka } from 'kafkajs';\nimport { KafkaConsumerConfig } from './config';\n\ntype EventMetadata = EventParams['metadata'];\n\n/**\n * This class subscribes to Kafka topics and publishes events received to the registered subscriber.\n * The message payload will be used as the event payload and passed to the subscribers.\n */\nexport class KafkaConsumingEventPublisher {\n private readonly kafkaConsumer: Consumer;\n private readonly consumerSubscribeTopics: ConsumerSubscribeTopics;\n private readonly backstageTopic: string;\n private readonly logger: LoggerService;\n\n static fromConfig(env: {\n kafkaClient: Kafka;\n config: KafkaConsumerConfig;\n events: EventsService;\n logger: LoggerService;\n }): KafkaConsumingEventPublisher {\n return new KafkaConsumingEventPublisher(\n env.kafkaClient,\n env.logger,\n env.events,\n env.config,\n );\n }\n\n private readonly events: EventsService;\n\n private constructor(\n kafkaClient: Kafka,\n logger: LoggerService,\n events: EventsService,\n config: KafkaConsumerConfig,\n ) {\n this.events = events;\n this.kafkaConsumer = kafkaClient.consumer(config.consumerConfig);\n this.consumerSubscribeTopics = config.consumerSubscribeTopics;\n this.backstageTopic = config.backstageTopic;\n const id = `events.kafka.publisher:${this.backstageTopic}`;\n this.logger = logger.child({\n class: KafkaConsumingEventPublisher.prototype.constructor.name,\n groupId: config.consumerConfig.groupId,\n kafkaTopics: config.consumerSubscribeTopics.topics.toString(),\n backstageTopic: config.backstageTopic,\n taskId: id,\n });\n }\n\n async start(): Promise<void> {\n try {\n await this.kafkaConsumer.connect();\n\n await this.kafkaConsumer.subscribe(this.consumerSubscribeTopics);\n\n await this.kafkaConsumer.run({\n eachMessage: async ({ message }) => {\n this.events.publish({\n topic: this.backstageTopic,\n eventPayload: JSON.parse(message.value?.toString()!),\n metadata: this.convertHeadersToMetadata(message.headers),\n });\n },\n });\n } catch (error: any) {\n this.logger.error('Kafka consumer connection failed ', error);\n }\n }\n\n async shutdown(): Promise<void> {\n await this.kafkaConsumer.disconnect();\n }\n\n private convertHeadersToMetadata = (\n headers: IHeaders | undefined,\n ): EventParams['metadata'] => {\n if (!headers) return undefined;\n\n const metadata: EventMetadata = {};\n\n Object.entries(headers).forEach(([key, value]) => {\n // If value is an array use toString() on all values converting any Buffer types to valid strings\n if (Array.isArray(value)) metadata[key] = value.map(v => v.toString());\n // Always return the 
values using toString() to catch all Buffer types that should be converted to strings\n else metadata[key] = value?.toString();\n });\n\n return metadata;\n };\n}\n"],"names":[],"mappings":";;AA0BO,MAAM,4BAAA,CAA6B;AAAA,EACvB,aAAA;AAAA,EACA,uBAAA;AAAA,EACA,cAAA;AAAA,EACA,MAAA;AAAA,EAEjB,OAAO,WAAW,GAAA,EAKe;AAC/B,IAAA,OAAO,IAAI,4BAAA;AAAA,MACT,GAAA,CAAI,WAAA;AAAA,MACJ,GAAA,CAAI,MAAA;AAAA,MACJ,GAAA,CAAI,MAAA;AAAA,MACJ,GAAA,CAAI;AAAA,KACN;AAAA,EACF;AAAA,EAEiB,MAAA;AAAA,EAET,WAAA,CACN,WAAA,EACA,MAAA,EACA,MAAA,EACA,MAAA,EACA;AACA,IAAA,IAAA,CAAK,MAAA,GAAS,MAAA;AACd,IAAA,IAAA,CAAK,aAAA,GAAgB,WAAA,CAAY,QAAA,CAAS,MAAA,CAAO,cAAc,CAAA;AAC/D,IAAA,IAAA,CAAK,0BAA0B,MAAA,CAAO,uBAAA;AACtC,IAAA,IAAA,CAAK,iBAAiB,MAAA,CAAO,cAAA;AAC7B,IAAA,MAAM,EAAA,GAAK,CAAA,uBAAA,EAA0B,IAAA,CAAK,cAAc,CAAA,CAAA;AACxD,IAAA,IAAA,CAAK,MAAA,GAAS,OAAO,KAAA,CAAM;AAAA,MACzB,KAAA,EAAO,4BAAA,CAA6B,SAAA,CAAU,WAAA,CAAY,IAAA;AAAA,MAC1D,OAAA,EAAS,OAAO,cAAA,CAAe,OAAA;AAAA,MAC/B,WAAA,EAAa,MAAA,CAAO,uBAAA,CAAwB,MAAA,CAAO,QAAA,EAAS;AAAA,MAC5D,gBAAgB,MAAA,CAAO,cAAA;AAAA,MACvB,MAAA,EAAQ;AAAA,KACT,CAAA;AAAA,EACH;AAAA,EAEA,MAAM,KAAA,GAAuB;AAC3B,IAAA,IAAI;AACF,MAAA,MAAM,IAAA,CAAK,cAAc,OAAA,EAAQ;AAEjC,MAAA,MAAM,IAAA,CAAK,aAAA,CAAc,SAAA,CAAU,IAAA,CAAK,uBAAuB,CAAA;AAE/D,MAAA,MAAM,IAAA,CAAK,cAAc,GAAA,CAAI;AAAA,QAC3B,WAAA,EAAa,OAAO,EAAE,OAAA,EAAQ,KAAM;AAClC,UAAA,IAAA,CAAK,OAAO,OAAA,CAAQ;AAAA,YAClB,OAAO,IAAA,CAAK,cAAA;AAAA,YACZ,cAAc,IAAA,CAAK,KAAA,CAAM,OAAA,CAAQ,KAAA,EAAO,UAAW,CAAA;AAAA,YACnD,QAAA,EAAU,IAAA,CAAK,wBAAA,CAAyB,OAAA,CAAQ,OAAO;AAAA,WACxD,CAAA;AAAA,QACH;AAAA,OACD,CAAA;AAAA,IACH,SAAS,KAAA,EAAY;AACnB,MAAA,IAAA,CAAK,MAAA,CAAO,KAAA,CAAM,mCAAA,EAAqC,KAAK,CAAA;AAAA,IAC9D;AAAA,EACF;AAAA,EAEA,MAAM,QAAA,GAA0B;AAC9B,IAAA,MAAM,IAAA,CAAK,cAAc,UAAA,EAAW;AAAA,EACtC;AAAA,EAEQ,wBAAA,GAA2B,CACjC,OAAA,KAC4B;AAC5B,IAAA,IAAI,CAAC,SAAS,OAAO,MAAA;AAErB,IAAA,MAAM,WAA0B,EAAC;AAEjC,IAAA,MAAA,CAAO,OAAA,CAAQ,OAAO,CAAA,CAAE,OAAA,CAAQ,CAAC,CAAC,GAAA,EAAK,KAAK,CAAA,KAAM;AAEhD,MAAA,IAAI,KAAA,CAAM,OAAA,CAAQ,KAAK,CAAA,EAAG,QAAA,CAAS,GAAG,CAAA,GAAI,KAAA,CAAM,GAAA,CAAI,CAAA,CAAA,KAAK,CAAA,CAAE,QAAA,EAAU,CAAA;AAAA,WAEhE,QAAA,CAAS,GAAG,CAAA,GAAI,KAAA,EAAO,QAAA,EAAS;AAAA,IACvC,CAAC,CAAA;AAED,IAAA,OAAO,QAAA;AAAA,EACT,CAAA;AACF;;;;"}
package/dist/publisher/LoggerServiceAdapter.cjs.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"LoggerServiceAdapter.cjs.js","sources":["../../src/publisher/LoggerServiceAdapter.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { LoggerService } from '@backstage/backend-plugin-api';\nimport { LogEntry, logLevel } from 'kafkajs';\n\nexport const loggerServiceAdapter = (loggerService: LoggerService) => {\n const logMethods: Record<logLevel, (message: string, meta?: object) => void> =\n {\n [logLevel.ERROR]: loggerService.error,\n [logLevel.WARN]: loggerService.warn,\n [logLevel.INFO]: loggerService.info,\n [logLevel.DEBUG]: loggerService.debug,\n [logLevel.NOTHING]: () => {},\n };\n\n return (_level: logLevel) => {\n return (entry: LogEntry) => {\n const { namespace, level, log } = entry;\n const { message, ...extra } = log;\n\n // Use loggerService method that matches the level\n logMethods[level].call(\n loggerService,\n `Kafka ${namespace} ${log.message}`,\n {\n ...extra,\n },\n );\n };\n };\n};\n"],"names":["logLevel"],"mappings":";;;;AAkBO,MAAM,oBAAA,GAAuB,CAAC,aAAA,KAAiC;AACpE,EAAA,MAAM,UAAA,GACJ;AAAA,IACE,CAACA,gBAAA,CAAS,KAAK,GAAG,aAAA,CAAc,KAAA;AAAA,IAChC,CAACA,gBAAA,CAAS,IAAI,GAAG,aAAA,CAAc,IAAA;AAAA,IAC/B,CAACA,gBAAA,CAAS,IAAI,GAAG,aAAA,CAAc,IAAA;AAAA,IAC/B,CAACA,gBAAA,CAAS,KAAK,GAAG,aAAA,CAAc,KAAA;AAAA,IAChC,CAACA,gBAAA,CAAS,OAAO,GAAG,MAAM;AAAA,IAAC;AAAA,GAC7B;AAEF,EAAA,OAAO,CAAC,MAAA,KAAqB;AAC3B,IAAA,OAAO,CAAC,KAAA,KAAoB;AAC1B,MAAA,MAAM,EAAE,SAAA,EAAW,KAAA,EAAO,GAAA,EAAI,GAAI,KAAA;AAClC,MAAA,MAAM,EAAE,OAAA,EAAS,GAAG,KAAA,EAAM,GAAI,GAAA;AAG9B,MAAA,UAAA,CAAW,KAAK,CAAA,CAAE,IAAA;AAAA,QAChB,aAAA;AAAA,QACA,CAAA,MAAA,EAAS,SAAS,CAAA,CAAA,EAAI,GAAA,CAAI,OAAO,CAAA,CAAA;AAAA,QACjC;AAAA,UACE,GAAG;AAAA;AACL,OACF;AAAA,IACF,CAAA;AAAA,EACF,CAAA;AACF;;;;"}
package/dist/publisher/config.cjs.js
@@ -1,102 +0,0 @@
- 'use strict';
-
- var config = require('@backstage/config');
- var types = require('@backstage/types');
-
- const CONFIG_PREFIX_PUBLISHER = "events.modules.kafka.kafkaConsumingEventPublisher";
- const readOptionalHumanDurationInMs = (config$1, key) => {
-   const humanDuration = config$1.has(key) ? config.readDurationFromConfig(config$1, { key }) : void 0;
-   if (!humanDuration) return void 0;
-   return types.durationToMilliseconds(humanDuration);
- };
- const readConfig = (config) => {
-   const kafkaConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);
-   if (!kafkaConfig) {
-     return void 0;
-   }
-   const clientId = kafkaConfig.getString("clientId");
-   const brokers = kafkaConfig.getStringArray("brokers");
-   const authenticationTimeout = readOptionalHumanDurationInMs(
-     kafkaConfig,
-     "authenticationTimeout"
-   );
-   const connectionTimeout = readOptionalHumanDurationInMs(
-     kafkaConfig,
-     "connectionTimeout"
-   );
-   const requestTimeout = readOptionalHumanDurationInMs(
-     kafkaConfig,
-     "requestTimeout"
-   );
-   const enforceRequestTimeout = kafkaConfig.getOptionalBoolean(
-     "enforceRequestTimeout"
-   );
-   const ssl = kafkaConfig.getOptional("ssl");
-   const sasl = kafkaConfig.getOptional("sasl");
-   const retry = {
-     maxRetryTime: readOptionalHumanDurationInMs(
-       kafkaConfig,
-       "retry.maxRetryTime"
-     ),
-     initialRetryTime: readOptionalHumanDurationInMs(
-       kafkaConfig,
-       "retry.initialRetryTime"
-     ),
-     factor: kafkaConfig.getOptionalNumber("retry.factor"),
-     multiplier: kafkaConfig.getOptionalNumber("retry.multiplier"),
-     retries: kafkaConfig.getOptionalNumber("retry.retries")
-   };
-   const kafkaConsumerConfigs = kafkaConfig.getConfigArray("topics").map((topic) => {
-     return {
-       backstageTopic: topic.getString("topic"),
-       consumerConfig: {
-         groupId: topic.getString("kafka.groupId"),
-         sessionTimeout: readOptionalHumanDurationInMs(
-           topic,
-           "kafka.sessionTimeout"
-         ),
-         rebalanceTimeout: readOptionalHumanDurationInMs(
-           topic,
-           "kafka.rebalanceTimeout"
-         ),
-         heartbeatInterval: readOptionalHumanDurationInMs(
-           topic,
-           "kafka.heartbeatInterval"
-         ),
-         metadataMaxAge: readOptionalHumanDurationInMs(
-           topic,
-           "kafka.metadataMaxAge"
-         ),
-         maxBytesPerPartition: topic.getOptionalNumber(
-           "kafka.maxBytesPerPartition"
-         ),
-         minBytes: topic.getOptionalNumber("kafka.minBytes"),
-         maxBytes: topic.getOptionalNumber("kafka.maxBytes"),
-         maxWaitTimeInMs: readOptionalHumanDurationInMs(
-           topic,
-           "kafka.maxWaitTime"
-         )
-       },
-       consumerSubscribeTopics: {
-         topics: topic.getStringArray("kafka.topics")
-       }
-     };
-   });
-   return {
-     kafkaConfig: {
-       clientId,
-       brokers,
-       ssl,
-       sasl,
-       authenticationTimeout,
-       connectionTimeout,
-       requestTimeout,
-       enforceRequestTimeout,
-       retry
-     },
-     kafkaConsumerConfigs
-   };
- };
-
- exports.readConfig = readConfig;
- //# sourceMappingURL=config.cjs.js.map
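The reader above expects its settings under events.modules.kafka.kafkaConsumingEventPublisher: a kafkajs client section (clientId, brokers, optional ssl/sasl, timeouts, retry) plus one entry per Backstage topic under topics. A sketch of a matching shape, expressed with ConfigReader; all values are made up, only the keys come from the readConfig code above:

import { ConfigReader } from '@backstage/config';

const config = new ConfigReader({
  events: {
    modules: {
      kafka: {
        kafkaConsumingEventPublisher: {
          clientId: 'backstage-events',        // hypothetical
          brokers: ['kafka.example.com:9092'], // hypothetical
          topics: [
            {
              // Backstage events topic that consumed messages are published to.
              topic: 'fake-topic',
              kafka: {
                groupId: 'backstage-consumer-group',
                topics: ['topic-A'], // Kafka topics to subscribe to
                // Durations are read via readDurationFromConfig,
                // so HumanDuration objects work here.
                sessionTimeout: { seconds: 30 },
              },
            },
          ],
        },
      },
    },
  },
});

Note that config.d.ts changes by +504 -136 lines in this release, so 0.2.0 likely extends this schema (for the new KafkaPublishingEventConsumer, among other things); the shape above reflects only the deleted 0.1.x reader.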
package/dist/publisher/config.cjs.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"config.cjs.js","sources":["../../src/publisher/config.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport { Config, readDurationFromConfig } from '@backstage/config';\nimport { durationToMilliseconds } from '@backstage/types';\nimport { ConsumerConfig, ConsumerSubscribeTopics, KafkaConfig } from 'kafkajs';\n\nexport interface KafkaConsumerConfig {\n backstageTopic: string;\n consumerConfig: ConsumerConfig;\n consumerSubscribeTopics: ConsumerSubscribeTopics;\n}\n\nexport interface KafkaEventSourceConfig {\n kafkaConfig: KafkaConfig;\n kafkaConsumerConfigs: KafkaConsumerConfig[];\n}\n\nconst CONFIG_PREFIX_PUBLISHER =\n 'events.modules.kafka.kafkaConsumingEventPublisher';\n\n/**\n * Reads an optional HumanDuration from the config and returns the value in milliseconds if the key is defined.\n *\n * @param config - The configuration object to read from.\n * @param key - The key to look up in the configuration.\n * @returns The duration in milliseconds, or undefined if the key is not defined.\n */\nconst readOptionalHumanDurationInMs = (\n config: Config,\n key: string,\n): number | undefined => {\n const humanDuration = config.has(key)\n ? readDurationFromConfig(config, { key })\n : undefined;\n\n if (!humanDuration) return undefined;\n\n return durationToMilliseconds(humanDuration);\n};\n\nexport const readConfig = (\n config: Config,\n): KafkaEventSourceConfig | undefined => {\n const kafkaConfig = config.getOptionalConfig(CONFIG_PREFIX_PUBLISHER);\n\n if (!kafkaConfig) {\n return undefined;\n }\n\n const clientId = kafkaConfig.getString('clientId');\n const brokers = kafkaConfig.getStringArray('brokers');\n\n const authenticationTimeout = readOptionalHumanDurationInMs(\n kafkaConfig,\n 'authenticationTimeout',\n );\n\n const connectionTimeout = readOptionalHumanDurationInMs(\n kafkaConfig,\n 'connectionTimeout',\n );\n const requestTimeout = readOptionalHumanDurationInMs(\n kafkaConfig,\n 'requestTimeout',\n );\n const enforceRequestTimeout = kafkaConfig.getOptionalBoolean(\n 'enforceRequestTimeout',\n );\n\n const ssl = kafkaConfig.getOptional('ssl') as KafkaConfig['ssl'];\n const sasl = kafkaConfig.getOptional('sasl') as KafkaConfig['sasl'];\n\n const retry: KafkaConfig['retry'] = {\n maxRetryTime: readOptionalHumanDurationInMs(\n kafkaConfig,\n 'retry.maxRetryTime',\n ),\n initialRetryTime: readOptionalHumanDurationInMs(\n kafkaConfig,\n 'retry.initialRetryTime',\n ),\n factor: kafkaConfig.getOptionalNumber('retry.factor'),\n multiplier: kafkaConfig.getOptionalNumber('retry.multiplier'),\n retries: kafkaConfig.getOptionalNumber('retry.retries'),\n };\n\n const kafkaConsumerConfigs: KafkaConsumerConfig[] = kafkaConfig\n .getConfigArray('topics')\n .map(topic => {\n return {\n backstageTopic: topic.getString('topic'),\n consumerConfig: {\n groupId: topic.getString('kafka.groupId'),\n sessionTimeout: readOptionalHumanDurationInMs(\n topic,\n 'kafka.sessionTimeout',\n ),\n 
rebalanceTimeout: readOptionalHumanDurationInMs(\n topic,\n 'kafka.rebalanceTimeout',\n ),\n heartbeatInterval: readOptionalHumanDurationInMs(\n topic,\n 'kafka.heartbeatInterval',\n ),\n metadataMaxAge: readOptionalHumanDurationInMs(\n topic,\n 'kafka.metadataMaxAge',\n ),\n maxBytesPerPartition: topic.getOptionalNumber(\n 'kafka.maxBytesPerPartition',\n ),\n minBytes: topic.getOptionalNumber('kafka.minBytes'),\n maxBytes: topic.getOptionalNumber('kafka.maxBytes'),\n maxWaitTimeInMs: readOptionalHumanDurationInMs(\n topic,\n 'kafka.maxWaitTime',\n ),\n },\n consumerSubscribeTopics: {\n topics: topic.getStringArray('kafka.topics'),\n },\n };\n });\n\n return {\n kafkaConfig: {\n clientId,\n brokers,\n ssl,\n sasl,\n authenticationTimeout,\n connectionTimeout,\n requestTimeout,\n enforceRequestTimeout,\n retry,\n },\n kafkaConsumerConfigs,\n };\n};\n"],"names":["config","readDurationFromConfig","durationToMilliseconds"],"mappings":";;;;;AA8BA,MAAM,uBAAA,GACJ,mDAAA;AASF,MAAM,6BAAA,GAAgC,CACpCA,QAAA,EACA,GAAA,KACuB;AACvB,EAAA,MAAM,aAAA,GAAgBA,QAAA,CAAO,GAAA,CAAI,GAAG,CAAA,GAChCC,8BAAuBD,QAAA,EAAQ,EAAE,GAAA,EAAK,CAAA,GACtC,MAAA;AAEJ,EAAA,IAAI,CAAC,eAAe,OAAO,MAAA;AAE3B,EAAA,OAAOE,6BAAuB,aAAa,CAAA;AAC7C,CAAA;AAEO,MAAM,UAAA,GAAa,CACxB,MAAA,KACuC;AACvC,EAAA,MAAM,WAAA,GAAc,MAAA,CAAO,iBAAA,CAAkB,uBAAuB,CAAA;AAEpE,EAAA,IAAI,CAAC,WAAA,EAAa;AAChB,IAAA,OAAO,MAAA;AAAA,EACT;AAEA,EAAA,MAAM,QAAA,GAAW,WAAA,CAAY,SAAA,CAAU,UAAU,CAAA;AACjD,EAAA,MAAM,OAAA,GAAU,WAAA,CAAY,cAAA,CAAe,SAAS,CAAA;AAEpD,EAAA,MAAM,qBAAA,GAAwB,6BAAA;AAAA,IAC5B,WAAA;AAAA,IACA;AAAA,GACF;AAEA,EAAA,MAAM,iBAAA,GAAoB,6BAAA;AAAA,IACxB,WAAA;AAAA,IACA;AAAA,GACF;AACA,EAAA,MAAM,cAAA,GAAiB,6BAAA;AAAA,IACrB,WAAA;AAAA,IACA;AAAA,GACF;AACA,EAAA,MAAM,wBAAwB,WAAA,CAAY,kBAAA;AAAA,IACxC;AAAA,GACF;AAEA,EAAA,MAAM,GAAA,GAAM,WAAA,CAAY,WAAA,CAAY,KAAK,CAAA;AACzC,EAAA,MAAM,IAAA,GAAO,WAAA,CAAY,WAAA,CAAY,MAAM,CAAA;AAE3C,EAAA,MAAM,KAAA,GAA8B;AAAA,IAClC,YAAA,EAAc,6BAAA;AAAA,MACZ,WAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,gBAAA,EAAkB,6BAAA;AAAA,MAChB,WAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA,MAAA,EAAQ,WAAA,CAAY,iBAAA,CAAkB,cAAc,CAAA;AAAA,IACpD,UAAA,EAAY,WAAA,CAAY,iBAAA,CAAkB,kBAAkB,CAAA;AAAA,IAC5D,OAAA,EAAS,WAAA,CAAY,iBAAA,CAAkB,eAAe;AAAA,GACxD;AAEA,EAAA,MAAM,uBAA8C,WAAA,CACjD,cAAA,CAAe,QAAQ,CAAA,CACvB,IAAI,CAAA,KAAA,KAAS;AACZ,IAAA,OAAO;AAAA,MACL,cAAA,EAAgB,KAAA,CAAM,SAAA,CAAU,OAAO,CAAA;AAAA,MACvC,cAAA,EAAgB;AAAA,QACd,OAAA,EAAS,KAAA,CAAM,SAAA,CAAU,eAAe,CAAA;AAAA,QACxC,cAAA,EAAgB,6BAAA;AAAA,UACd,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,gBAAA,EAAkB,6BAAA;AAAA,UAChB,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,iBAAA,EAAmB,6BAAA;AAAA,UACjB,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,cAAA,EAAgB,6BAAA;AAAA,UACd,KAAA;AAAA,UACA;AAAA,SACF;AAAA,QACA,sBAAsB,KAAA,CAAM,iBAAA;AAAA,UAC1B;AAAA,SACF;AAAA,QACA,QAAA,EAAU,KAAA,CAAM,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,QAClD,QAAA,EAAU,KAAA,CAAM,iBAAA,CAAkB,gBAAgB,CAAA;AAAA,QAClD,eAAA,EAAiB,6BAAA;AAAA,UACf,KAAA;AAAA,UACA;AAAA;AACF,OACF;AAAA,MACA,uBAAA,EAAyB;AAAA,QACvB,MAAA,EAAQ,KAAA,CAAM,cAAA,CAAe,cAAc;AAAA;AAC7C,KACF;AAAA,EACF,CAAC,CAAA;AAEH,EAAA,OAAO;AAAA,IACL,WAAA,EAAa;AAAA,MACX,QAAA;AAAA,MACA,OAAA;AAAA,MACA,GAAA;AAAA,MACA,IAAA;AAAA,MACA,qBAAA;AAAA,MACA,iBAAA;AAAA,MACA,cAAA;AAAA,MACA,qBAAA;AAAA,MACA;AAAA,KACF;AAAA,IACA;AAAA,GACF;AACF;;;;"}
package/dist/service/eventsModuleKafkaConsumingEventPublisher.cjs.js.map
@@ -1 +0,0 @@
- {"version":3,"file":"eventsModuleKafkaConsumingEventPublisher.cjs.js","sources":["../../src/service/eventsModuleKafkaConsumingEventPublisher.ts"],"sourcesContent":["/*\n * Copyright 2025 The Backstage Authors\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n */\nimport {\n coreServices,\n createBackendModule,\n} from '@backstage/backend-plugin-api';\nimport { KafkaConsumerClient } from '../publisher/KafkaConsumerClient';\nimport { eventsServiceRef } from '@backstage/plugin-events-node';\n\n/**\n * Kafka module for the Events plugin.\n *\n * @public\n */\nexport const eventsModuleKafkaConsumingEventPublisher = createBackendModule({\n pluginId: 'events',\n moduleId: 'kafka-consuming-event-publisher',\n register(env) {\n env.registerInit({\n deps: {\n config: coreServices.rootConfig,\n events: eventsServiceRef,\n logger: coreServices.logger,\n lifecycle: coreServices.lifecycle,\n },\n async init({ config, logger, events, lifecycle }) {\n const kafka = KafkaConsumerClient.fromConfig({\n config,\n events,\n logger,\n });\n\n if (!kafka) {\n return;\n }\n\n await kafka.start();\n\n lifecycle.addShutdownHook(async () => await kafka.shutdown());\n },\n });\n },\n});\n"],"names":["createBackendModule","coreServices","eventsServiceRef","KafkaConsumerClient"],"mappings":";;;;;;AA2BO,MAAM,2CAA2CA,oCAAA,CAAoB;AAAA,EAC1E,QAAA,EAAU,QAAA;AAAA,EACV,QAAA,EAAU,iCAAA;AAAA,EACV,SAAS,GAAA,EAAK;AACZ,IAAA,GAAA,CAAI,YAAA,CAAa;AAAA,MACf,IAAA,EAAM;AAAA,QACJ,QAAQC,6BAAA,CAAa,UAAA;AAAA,QACrB,MAAA,EAAQC,iCAAA;AAAA,QACR,QAAQD,6BAAA,CAAa,MAAA;AAAA,QACrB,WAAWA,6BAAA,CAAa;AAAA,OAC1B;AAAA,MACA,MAAM,IAAA,CAAK,EAAE,QAAQ,MAAA,EAAQ,MAAA,EAAQ,WAAU,EAAG;AAChD,QAAA,MAAM,KAAA,GAAQE,wCAAoB,UAAA,CAAW;AAAA,UAC3C,MAAA;AAAA,UACA,MAAA;AAAA,UACA;AAAA,SACD,CAAA;AAED,QAAA,IAAI,CAAC,KAAA,EAAO;AACV,UAAA;AAAA,QACF;AAEA,QAAA,MAAM,MAAM,KAAA,EAAM;AAElB,QAAA,SAAA,CAAU,eAAA,CAAgB,YAAY,MAAM,KAAA,CAAM,UAAU,CAAA;AAAA,MAC9D;AAAA,KACD,CAAA;AAAA,EACH;AACF,CAAC;;;;"}