@dbos-inc/confluent-kafka-receive 3.0.16-preview

package/README.md ADDED
@@ -0,0 +1,152 @@
+ # DBOS Confluent Kafka Receiver
+
+ Publish/subscribe message queues are a common building block for distributed systems.
+ Message queues allow processing to occur at a different place or time, perhaps across multiple client programming environments.
+ Due to its performance, flexibility, and simple, scalable design, [Kafka](https://www.confluent.io/cloud-kafka) is a popular choice for publish/subscribe.
+
+ This package includes a [DBOS](https://docs.dbos.dev/) receiver for Kafka messages, which reliably invokes a
+ [DBOS workflow](https://docs.dbos.dev/typescript/tutorials/workflow-tutorial) for every Kafka message received.
+
+ This package is based on the [Confluent JavaScript Client for Apache Kafka](https://github.com/confluentinc/confluent-kafka-javascript/).
+
+ ## Configuring a Confluent Kafka Receiver
+
+ First, ensure that the DBOS Confluent Kafka Receiver package is installed into the application:
+
+ ```shell
+ npm install --save @dbos-inc/confluent-kafka-receive
+ ```
+
+ Then, create a `ConfluentKafkaReceiver` instance, providing the Kafka configuration information to the constructor.
+
+ ```ts
+ import { ConfluentKafkaReceiver } from '@dbos-inc/confluent-kafka-receive';
+
+ const kafkaConfig = {
+   clientId: 'example-dbos-kafka-client',
+   brokers: ['kafka-host:9092'],
+ };
+
+ const kafkaReceiver = new ConfluentKafkaReceiver(kafkaConfig);
+ ```
+
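+ The constructor also accepts an optional second argument that controls how topic subscription is retried with exponential backoff when a topic does not yet exist. A minimal sketch, restating the defaults from the package source:
+
+ ```ts
+ const receiverWithRetries = new ConfluentKafkaReceiver(kafkaConfig, {
+   maxRetries: 5, // give up after this many subscribe attempts
+   retryTime: 300, // initial delay between attempts, in milliseconds
+   multiplier: 2, // backoff multiplier applied after each failed attempt
+ });
+ ```
+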
+ Finally, register a DBOS workflow as a Kafka topic consumer via the `ConfluentKafkaReceiver` instance.
+ This can be done with the receiver's `consumer` decorator or its `registerConsumer` function.
+
+ ```ts
+ import { DBOS } from '@dbos-inc/dbos-sdk';
+ import { KafkaJS as ConfluentKafkaJS } from '@confluentinc/kafka-javascript';
+
+ class KafkaExample {
+   @kafkaReceiver.consumer('example-topic')
+   @DBOS.workflow()
+   static async consumerWorkflow(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     DBOS.logger.info(`Message received: ${message.value}`);
+   }
+
+   static async registeredConsumerWorkflow(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     DBOS.logger.info(`Message received: ${message.value}`);
+   }
+ }
+
+ KafkaExample.registeredConsumerWorkflow = DBOS.registerWorkflow(
+   KafkaExample.registeredConsumerWorkflow,
+   'registeredConsumerWorkflow',
+ );
+ kafkaReceiver.registerConsumer(KafkaExample.registeredConsumerWorkflow, 'another-example-topic');
+ ```
+
+ When registering a Kafka consumer workflow, you can specify a single topic or an array of topics, and each topic may be given as a string or a regular expression; the sketch below shows both forms.
+
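+ A minimal sketch (the topic names here are illustrative):
+
+ ```ts
+ // Match every topic whose name starts with "orders-".
+ kafkaReceiver.registerConsumer(KafkaExample.registeredConsumerWorkflow, /^orders-.*/);
+
+ // Subscribe the same workflow to a fixed topic plus a topic pattern.
+ kafkaReceiver.registerConsumer(KafkaExample.registeredConsumerWorkflow, ['audit-topic', /^payments-.*/]);
+ ```
+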
+ ### Kafka Consumer Configuration
+
+ If you need more control, you can pass consumer configuration into the decorator or the `registerConsumer` function.
+ Additionally, if you need managed concurrency, you can specify the [DBOS Queue](https://docs.dbos.dev/typescript/tutorials/queue-tutorial)
+ to use when executing the workflow.
+
+ ```ts
+ class KafkaExample {
+   @kafkaReceiver.consumer('example-topic', {
+     config: { 'group.id': 'custom-group-id' },
+   })
+   @DBOS.workflow()
+   static async consumerWorkflow(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     DBOS.logger.info(`Message received: ${message.value}`);
+   }
+
+   static async registeredConsumerWorkflow(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     DBOS.logger.info(`Message received: ${message.value}`);
+   }
+ }
+
+ KafkaExample.registeredConsumerWorkflow = DBOS.registerWorkflow(
+   KafkaExample.registeredConsumerWorkflow,
+   'registeredConsumerWorkflow',
+ );
+ kafkaReceiver.registerConsumer(KafkaExample.registeredConsumerWorkflow, 'another-example-topic', {
+   config: { 'group.id': 'custom-group-id' },
+ });
+ ```
+
+ ### Concurrency and Rate Limiting
+
+ By default, Kafka consumer workflows are started immediately after message receipt.
+ If `queueName` is specified in the consumer options, the workflow will instead be enqueued in a [workflow queue](https://docs.dbos.dev/typescript/reference/transactapi/workflow-queues).
+
+ ```ts
+ class KafkaExample {
+   @kafkaReceiver.consumer('example-topic', { queueName: 'example-queue' })
+   @DBOS.workflow()
+   static async consumerWorkflow(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     DBOS.logger.info(`Message received: ${message.value}`);
+   }
+
+   static async registeredConsumerWorkflow(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     DBOS.logger.info(`Message received: ${message.value}`);
+   }
+ }
+
+ KafkaExample.registeredConsumerWorkflow = DBOS.registerWorkflow(
+   KafkaExample.registeredConsumerWorkflow,
+   'registeredConsumerWorkflow',
+ );
+ kafkaReceiver.registerConsumer(KafkaExample.registeredConsumerWorkflow, 'another-example-topic', {
+   queueName: 'example-queue',
+ });
+ ```
+
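+ Note that the named queue must be declared somewhere in the application. A minimal sketch, assuming the `WorkflowQueue` class from `@dbos-inc/dbos-sdk` and an illustrative concurrency limit:
+
+ ```ts
+ import { WorkflowQueue } from '@dbos-inc/dbos-sdk';
+
+ // Run at most three Kafka-triggered workflows from this queue at a time.
+ const exampleQueue = new WorkflowQueue('example-queue', { concurrency: 3 });
+ ```
+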
+ ## Sending Messages
+
+ Sending Kafka messages is done directly through the Confluent client's KafkaJS-compatible API.
+ You can wrap the message send call in a DBOS step to make it reliable.
+
+ ```ts
+ import { KafkaJS } from '@confluentinc/kafka-javascript';
+
+ class KafkaTestClass {
+   @DBOS.workflow()
+   static async kafkaSendWorkflow(name: string, value: number) {
+     const kafka = new KafkaJS.Kafka({ kafkaJS: kafkaConfig });
+
+     const producer = kafka.producer();
+     await producer.connect();
+
+     try {
+       await DBOS.runStep(
+         async () => {
+           const message = JSON.stringify({ name, value });
+           await producer.send({
+             topic: 'example-topic',
+             messages: [{ value: message }],
+           });
+         },
+         { name: 'send-kafka-message' },
+       );
+     } finally {
+       await producer.disconnect();
+     }
+   }
+ }
+ ```
+
+ ## Next Steps
+
+ - To start a DBOS app from a template, visit our [quickstart](https://docs.dbos.dev/quickstart).
+ - For DBOS programming tutorials, check out our [programming guide](https://docs.dbos.dev/typescript/programming-guide).
+ - To learn more about DBOS, take a look at [our documentation](https://docs.dbos.dev/) or our [source code](https://github.com/dbos-inc/dbos-transact-ts).
package/dist/index.d.ts ADDED
@@ -0,0 +1,30 @@
+ import { DBOSLifecycleCallback } from '@dbos-inc/dbos-sdk';
+ import { KafkaJS } from '@confluentinc/kafka-javascript';
+ interface KafkaRetryConfig {
+     maxRetries: number;
+     retryTime: number;
+     multiplier: number;
+ }
+ export type ConsumerTopics = string | RegExp | Array<string | RegExp>;
+ export declare class ConfluentKafkaReceiver extends DBOSLifecycleCallback {
+     #private;
+     private readonly config;
+     private readonly retryConfig;
+     constructor(config: KafkaJS.KafkaConfig, retryConfig?: KafkaRetryConfig);
+     initialize(): Promise<void>;
+     destroy(): Promise<void>;
+     logRegisteredEndpoints(): void;
+     registerConsumer<This, Args extends unknown[], Return>(func: (this: This, ...args: Args) => Promise<Return>, topics: ConsumerTopics, options?: {
+         classOrInst?: object;
+         className?: string;
+         name?: string;
+         queueName?: string;
+         config?: KafkaJS.ConsumerConstructorConfig;
+     }): void;
+     consumer(topics: ConsumerTopics, options?: {
+         queueName?: string;
+         config?: KafkaJS.ConsumerConstructorConfig;
+     }): <This, Args extends [string, number, KafkaJS.Message], Return>(target: object, propertyKey: PropertyKey, descriptor: TypedPropertyDescriptor<(this: This, ...args: Args) => Promise<Return>>) => TypedPropertyDescriptor<(this: This, ...args: Args) => Promise<Return>>;
+ }
+ export {};
+ //# sourceMappingURL=index.d.ts.map
package/dist/index.js ADDED
@@ -0,0 +1,131 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ConfluentKafkaReceiver = void 0;
+ const dbos_sdk_1 = require("@dbos-inc/dbos-sdk");
+ const kafka_javascript_1 = require("@confluentinc/kafka-javascript");
+ const sleepms = (ms) => new Promise((r) => setTimeout(r, ms));
+ function safeGroupName(className, methodName, topics) {
+     const safeGroupIdPart = [className, methodName, ...topics]
+         .map((r) => r.toString())
+         .map((r) => r.replaceAll(/[^a-zA-Z0-9\\-]/g, ''))
+         .join('-');
+     return `dbos-kafka-group-${safeGroupIdPart}`.slice(0, 255);
+ }
+ function isKafkaError(e) {
+     if (e && typeof e === 'object') {
+         return 'code' in e && typeof e.code === 'number';
+     }
+     return false;
+ }
+ class ConfluentKafkaReceiver extends dbos_sdk_1.DBOSLifecycleCallback {
+     config;
+     retryConfig;
+     #consumers = new Array();
+     constructor(config, retryConfig = { maxRetries: 5, retryTime: 300, multiplier: 2 }) {
+         super();
+         this.config = config;
+         this.retryConfig = retryConfig;
+         dbos_sdk_1.DBOS.registerLifecycleCallback(this);
+     }
+     async initialize() {
+         const { maxRetries, multiplier } = this.retryConfig;
+         const clientId = this.config.clientId ?? 'dbos-confluent-kafka-receiver';
+         const kafka = new kafka_javascript_1.KafkaJS.Kafka({ kafkaJS: { ...this.config, clientId } });
+         for (const regOp of dbos_sdk_1.DBOS.getAssociatedInfo(this)) {
+             const func = regOp.methodReg.registeredFunction;
+             if (func === undefined) {
+                 continue; // TODO: Log?
+             }
+             const methodConfig = regOp.methodConfig;
+             const topics = methodConfig.topics ?? [];
+             if (topics.length === 0) {
+                 continue; // TODO: Log?
+             }
+             const { name, className } = regOp.methodReg;
+             const config = methodConfig.config ?? {
+                 'group.id': safeGroupName(className, name, topics),
+             };
+             const consumer = kafka.consumer({ ...config, 'auto.offset.reset': 'earliest' });
+             await consumer.connect();
+             // A temporary workaround for https://github.com/tulios/kafkajs/pull/1558 until it gets fixed
+             // If topic auto-creation is on and you try to subscribe to a nonexistent topic, KafkaJS should retry until the topic is created.
+             // However, it has a bug where it won't. Thus, we retry instead.
+             let { retryTime } = this.retryConfig;
+             for (let i = 1; i <= maxRetries; i++) {
+                 try {
+                     await consumer.subscribe({ topics });
+                     break;
+                 }
+                 catch (e) {
+                     if (isKafkaError(e) && e.code === 3 && i < maxRetries) {
+                         await sleepms(retryTime);
+                         retryTime *= multiplier;
+                     }
+                     else {
+                         throw e;
+                     }
+                 }
+             }
+             await consumer.run({
+                 eachMessage: async ({ topic, partition, message }) => {
+                     dbos_sdk_1.DBOS.logger.debug(`ConfluentKafkaReceiver message on topic ${topic} partition ${partition} offset ${message.offset}`);
+                     try {
+                         const workflowID = `confluent-kafka-${topic}-${partition}-${config['group.id']}-${message.offset}`;
+                         const wfParams = { workflowID, queueName: methodConfig.queueName };
+                         await dbos_sdk_1.DBOS.startWorkflow(func, wfParams)(topic, partition, message);
+                     }
+                     catch (e) {
+                         const message = e instanceof Error ? e.message : String(e);
+                         dbos_sdk_1.DBOS.logger.error(`Error processing Kafka message ${message}`);
+                         throw e;
+                     }
+                 },
+             });
+             this.#consumers.push(consumer);
+         }
+     }
+     async destroy() {
+         const disconnectPromises = this.#consumers.splice(0, this.#consumers.length).map((c) => c.disconnect());
+         await Promise.allSettled(disconnectPromises);
+     }
+     logRegisteredEndpoints() {
+         dbos_sdk_1.DBOS.logger.info('KafkaJS receiver endpoints:');
+         const regOps = dbos_sdk_1.DBOS.getAssociatedInfo(this);
+         for (const regOp of regOps) {
+             const methodConfig = regOp.methodConfig;
+             const { name, className } = regOp.methodReg;
+             for (const topic of methodConfig.topics ?? []) {
+                 dbos_sdk_1.DBOS.logger.info(` ${topic} -> ${className}.${name}`);
+             }
+         }
+     }
+     registerConsumer(func, topics, options = {}) {
+         const { regInfo } = dbos_sdk_1.DBOS.associateFunctionWithInfo(this, func, {
+             classOrInst: options.classOrInst,
+             className: options.className,
+             name: options.name ?? func.name,
+         });
+         const kafkaRegInfo = regInfo;
+         kafkaRegInfo.topics = Array.isArray(topics) ? topics : [topics];
+         kafkaRegInfo.queueName = options.queueName;
+         kafkaRegInfo.config = options.config;
+     }
+     consumer(topics, options = {}) {
+         // eslint-disable-next-line @typescript-eslint/no-this-alias
+         const $this = this;
+         function methodDecorator(target, propertyKey, descriptor) {
+             if (descriptor.value) {
+                 $this.registerConsumer(descriptor.value, topics, {
+                     classOrInst: target,
+                     name: String(propertyKey),
+                     queueName: options.queueName,
+                     config: options.config,
+                 });
+             }
+             return descriptor;
+         }
+         return methodDecorator;
+     }
+ }
+ exports.ConfluentKafkaReceiver = ConfluentKafkaReceiver;
+ //# sourceMappingURL=index.js.map
package/index.ts ADDED
@@ -0,0 +1,173 @@
+ import { DBOS, DBOSLifecycleCallback } from '@dbos-inc/dbos-sdk';
+
+ import { KafkaJS, LibrdKafkaError as KafkaError } from '@confluentinc/kafka-javascript';
+
+ type KafkaMessageHandler<Return> = (topic: string, partition: number, message: KafkaJS.Message) => Promise<Return>;
+
+ const sleepms = (ms: number) => new Promise((r) => setTimeout(r, ms));
+
+ interface KafkaMethodConfig {
+   topics?: Array<string | RegExp>;
+   config?: KafkaJS.ConsumerConstructorConfig;
+   queueName?: string;
+ }
+
+ interface KafkaRetryConfig {
+   maxRetries: number;
+   retryTime: number;
+   multiplier: number;
+ }
+
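+ // Build a deterministic, Kafka-legal consumer group id from the class name, method name, and topic list.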
+ function safeGroupName(className: string, methodName: string, topics: Array<string | RegExp>) {
+   const safeGroupIdPart = [className, methodName, ...topics]
+     .map((r) => r.toString())
+     .map((r) => r.replaceAll(/[^a-zA-Z0-9\\-]/g, ''))
+     .join('-');
+   return `dbos-kafka-group-${safeGroupIdPart}`.slice(0, 255);
+ }
+
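+ // Type guard: errors raised by librdkafka carry a numeric `code` property.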
+ function isKafkaError(e: unknown): e is KafkaError {
+   if (e && typeof e === 'object') {
+     return 'code' in e && typeof e.code === 'number';
+   }
+   return false;
+ }
+
+ export type ConsumerTopics = string | RegExp | Array<string | RegExp>;
+
+ export class ConfluentKafkaReceiver extends DBOSLifecycleCallback {
+   readonly #consumers = new Array<KafkaJS.Consumer>();
+
+   constructor(
+     private readonly config: KafkaJS.KafkaConfig,
+     private readonly retryConfig: KafkaRetryConfig = { maxRetries: 5, retryTime: 300, multiplier: 2 },
+   ) {
+     super();
+     DBOS.registerLifecycleCallback(this);
+   }
+
+   override async initialize() {
+     const { maxRetries, multiplier } = this.retryConfig;
+     const clientId = this.config.clientId ?? 'dbos-confluent-kafka-receiver';
+     const kafka = new KafkaJS.Kafka({ kafkaJS: { ...this.config, clientId } });
+
+     for (const regOp of DBOS.getAssociatedInfo(this)) {
+       const func = regOp.methodReg.registeredFunction as KafkaMessageHandler<unknown> | undefined;
+       if (func === undefined) {
+         continue; // TODO: Log?
+       }
+
+       const methodConfig = regOp.methodConfig as KafkaMethodConfig;
+       const topics = methodConfig.topics ?? [];
+       if (topics.length === 0) {
+         continue; // TODO: Log?
+       }
+
+       const { name, className } = regOp.methodReg;
+       const config: KafkaJS.ConsumerConstructorConfig = methodConfig.config ?? {
+         'group.id': safeGroupName(className, name, topics),
+       };
+       const consumer = kafka.consumer({ ...config, 'auto.offset.reset': 'earliest' });
+       await consumer.connect();
+
+       // A temporary workaround for https://github.com/tulios/kafkajs/pull/1558 until it gets fixed
+       // If topic auto-creation is on and you try to subscribe to a nonexistent topic, KafkaJS should retry until the topic is created.
+       // However, it has a bug where it won't. Thus, we retry instead.
+       let { retryTime } = this.retryConfig;
+       for (let i = 1; i <= maxRetries; i++) {
+         try {
+           await consumer.subscribe({ topics });
+           break;
+         } catch (e) {
+           if (isKafkaError(e) && e.code === 3 && i < maxRetries) {
+             await sleepms(retryTime);
+             retryTime *= multiplier;
+           } else {
+             throw e;
+           }
+         }
+       }
+
+       await consumer.run({
+         eachMessage: async ({ topic, partition, message }) => {
+           DBOS.logger.debug(
+             `ConfluentKafkaReceiver message on topic ${topic} partition ${partition} offset ${message.offset}`,
+           );
+           try {
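+             // The workflow ID encodes topic, partition, group, and offset, so a redelivered
+             // message maps to the same workflow and is executed only once.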
+             const workflowID = `confluent-kafka-${topic}-${partition}-${config['group.id']}-${message.offset}`;
+             const wfParams = { workflowID, queueName: methodConfig.queueName };
+             await DBOS.startWorkflow(func, wfParams)(topic, partition, message);
+           } catch (e) {
+             const message = e instanceof Error ? e.message : String(e);
+             DBOS.logger.error(`Error processing Kafka message ${message}`);
+             throw e;
+           }
+         },
+       });
+
+       this.#consumers.push(consumer);
+     }
+   }
+
+   override async destroy() {
+     const disconnectPromises = this.#consumers.splice(0, this.#consumers.length).map((c) => c.disconnect());
+     await Promise.allSettled(disconnectPromises);
+   }
+
+   override logRegisteredEndpoints() {
+     DBOS.logger.info('KafkaJS receiver endpoints:');
+
+     const regOps = DBOS.getAssociatedInfo(this);
+     for (const regOp of regOps) {
+       const methodConfig = regOp.methodConfig as KafkaMethodConfig;
+       const { name, className } = regOp.methodReg;
+       for (const topic of methodConfig.topics ?? []) {
+         DBOS.logger.info(` ${topic} -> ${className}.${name}`);
+       }
+     }
+   }
+
+   registerConsumer<This, Args extends unknown[], Return>(
+     func: (this: This, ...args: Args) => Promise<Return>,
+     topics: ConsumerTopics,
+     options: {
+       classOrInst?: object;
+       className?: string;
+       name?: string;
+       queueName?: string;
+       config?: KafkaJS.ConsumerConstructorConfig;
+     } = {},
+   ) {
+     const { regInfo } = DBOS.associateFunctionWithInfo(this, func, {
+       classOrInst: options.classOrInst,
+       className: options.className,
+       name: options.name ?? func.name,
+     });
+
+     const kafkaRegInfo = regInfo as KafkaMethodConfig;
+     kafkaRegInfo.topics = Array.isArray(topics) ? topics : [topics];
+     kafkaRegInfo.queueName = options.queueName;
+     kafkaRegInfo.config = options.config;
+   }
+
+   consumer(topics: ConsumerTopics, options: { queueName?: string; config?: KafkaJS.ConsumerConstructorConfig } = {}) {
+     // eslint-disable-next-line @typescript-eslint/no-this-alias
+     const $this = this;
+     function methodDecorator<This, Args extends [string, number, KafkaJS.Message], Return>(
+       target: object,
+       propertyKey: PropertyKey,
+       descriptor: TypedPropertyDescriptor<(this: This, ...args: Args) => Promise<Return>>,
+     ) {
+       if (descriptor.value) {
+         $this.registerConsumer(descriptor.value, topics, {
+           classOrInst: target,
+           name: String(propertyKey),
+           queueName: options.queueName,
+           config: options.config,
+         });
+       }
+       return descriptor;
+     }
+     return methodDecorator;
+   }
+ }
package/jest.config.js ADDED
@@ -0,0 +1,8 @@
+ /** @type {import('ts-jest').JestConfigWithTsJest} */
+ module.exports = {
+   preset: 'ts-jest',
+   testEnvironment: 'node',
+   testRegex: '((\\.|/)(test|spec))\\.ts?$',
+   moduleFileExtensions: ['ts', 'tsx', 'js', 'jsx', 'json', 'node'],
+   modulePaths: ['./'],
+ };
package/package.json ADDED
@@ -0,0 +1,29 @@
+ {
+   "name": "@dbos-inc/confluent-kafka-receive",
+   "version": "3.0.16-preview",
+   "description": "DBOS event receiver for Kafka using the Confluent JavaScript Client for Apache Kafka",
+   "license": "MIT",
+   "main": "dist/index.js",
+   "types": "dist/index.d.ts",
+   "homepage": "https://docs.dbos.dev/",
+   "repository": {
+     "type": "git",
+     "url": "https://github.com/dbos-inc/dbos-transact-ts",
+     "directory": "packages/confluent-kafka-receive"
+   },
+   "scripts": {
+     "build": "tsc --project tsconfig.build.json",
+     "pretest": "npm run build",
+     "test": "tsx --test tests/*.test.ts"
+   },
+   "dependencies": {
+     "@confluentinc/kafka-javascript": "^1.3.2"
+   },
+   "peerDependencies": {
+     "@dbos-inc/dbos-sdk": "*"
+   },
+   "devDependencies": {
+     "kafkajs": "^2.2.4",
+     "tsx": "^4.20.3"
+   }
+ }
@@ -0,0 +1,248 @@
+ import { after, afterEach, before, beforeEach, suite, test } from 'node:test';
+ import assert from 'node:assert/strict';
+
+ import { DBOS } from '@dbos-inc/dbos-sdk';
+ import { Client } from 'pg';
+ import { dropDB, withTimeout } from './test-helpers';
+ import { Kafka, KafkaConfig, Producer, Admin, ConfigResourceTypes } from 'kafkajs';
+ import { EventEmitter } from 'node:events';
+ import { ConfluentKafkaReceiver } from '..';
+ import { KafkaJS as ConfluentKafkaJS } from '@confluentinc/kafka-javascript';
+
+ const kafkaConfig = {
+   clientId: 'dbos-conf-kafka-test',
+   brokers: [process.env['KAFKA_BROKER'] ?? 'localhost:9092'],
+   retry: { retries: 5 },
+   logLevel: 2,
+ };
+
+ const kafkaReceiver = new ConfluentKafkaReceiver(kafkaConfig);
+
+ interface KafkaEvents {
+   message: (functionName: string, topic: string, partition: number, message: ConfluentKafkaJS.Message) => void;
+ }
+
+ class KafkaEmitter extends EventEmitter {
+   override on<K extends keyof KafkaEvents>(event: K, listener: KafkaEvents[K]): this {
+     return super.on(event, listener);
+   }
+
+   override emit<K extends keyof KafkaEvents>(event: K, ...args: Parameters<KafkaEvents[K]>): boolean {
+     DBOS.logger.info(`KafkaEmitter topic ${args[1]} partition ${args[2]}`);
+     return super.emit(event, ...args);
+   }
+ }
+
+ type KafkaMessageEvent = {
+   topic: string;
+   partition: number;
+   message: ConfluentKafkaJS.Message;
+ };
+
+ function waitForMessage(
+   emitter: KafkaEmitter,
+   funcName: string,
+   topic: string,
+   timeoutMS = 45000,
+ ): Promise<KafkaMessageEvent> {
+   return withTimeout(
+     new Promise<KafkaMessageEvent>((resolve) => {
+       const handler = (f: string, t: string, partition: number, message: ConfluentKafkaJS.Message) => {
+         if (f === funcName && t === topic) {
+           emitter.off('message', handler);
+           resolve({ topic: t, partition, message });
+         }
+       };
+       emitter.on('message', handler);
+     }),
+     timeoutMS,
+     `Timeout waiting for message for function ${funcName}`,
+   );
+ }
+
+ class KafkaTestClass {
+   static readonly emitter = new KafkaEmitter();
+
+   @kafkaReceiver.consumer('string-topic')
+   @DBOS.workflow()
+   static async stringTopic(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     await Promise.resolve();
+     KafkaTestClass.emitter.emit('message', 'stringTopic', topic, partition, message);
+   }
+
+   @kafkaReceiver.consumer(/^regex-topic-.*/)
+   @DBOS.workflow()
+   static async regexTopic(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     await Promise.resolve();
+     KafkaTestClass.emitter.emit('message', 'regexTopic', topic, partition, message);
+   }
+
+   @kafkaReceiver.consumer(['a-topic', 'b-topic'])
+   @DBOS.workflow()
+   static async stringArrayTopic(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     await Promise.resolve();
+     KafkaTestClass.emitter.emit('message', 'stringArrayTopic', topic, partition, message);
+   }
+
+   @kafkaReceiver.consumer([/^z-topic-.*/, /^y-topic-.*/])
+   @DBOS.workflow()
+   static async regexArrayTopic(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     await Promise.resolve();
+     KafkaTestClass.emitter.emit('message', 'regexArrayTopic', topic, partition, message);
+   }
+
+   static async registeredConsumer(topic: string, partition: number, message: ConfluentKafkaJS.Message) {
+     await Promise.resolve();
+     KafkaTestClass.emitter.emit('message', 'registeredConsumer', topic, partition, message);
+   }
+ }
+
+ KafkaTestClass.registeredConsumer = DBOS.registerWorkflow(KafkaTestClass.registeredConsumer, 'registeredConsumer');
+ kafkaReceiver.registerConsumer(KafkaTestClass.registeredConsumer, 'registered-topic');
+
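+ // Probe the broker with a short-lived admin connection so the suite can skip its tests when Kafka is unavailable.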
+ async function validateKafka(config: KafkaConfig) {
+   const kafka = new Kafka(config);
+   const admin = kafka.admin();
+   try {
+     await admin.connect();
+     await admin.listTopics();
+     return true;
+   } catch (e) {
+     const message = e instanceof Error ? e.message : String(e);
+     DBOS.logger.error(message);
+     return false;
+   } finally {
+     await admin.disconnect();
+   }
+ }
+
+ async function createTopics(admin: Admin, topics: string[]) {
+   const existingTopics = await admin.listTopics();
+   const topicsToCreate = topics.filter((t) => !existingTopics.includes(t));
+   await admin.createTopics({
+     topics: topicsToCreate.map((t) => ({
+       topic: t,
+       numPartitions: 1,
+       replicationFactor: 1,
+     })),
+     timeout: 5000,
+   });
+ }
+
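+ // Force-expire all records on a topic by dropping retention.ms to 0, waiting for the broker
+ // to delete them, then restoring the original retention value.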
+ async function purgeTopic(admin: Admin, topic: string) {
+   const { resources } = await admin.describeConfigs({
+     includeSynonyms: false,
+     resources: [
+       {
+         type: ConfigResourceTypes.TOPIC,
+         name: topic,
+         configNames: ['retention.ms'],
+       },
+     ],
+   });
+
+   const resource = resources.find((r) => r.resourceName === topic);
+   const configEntry = (resource?.configEntries ?? []).find((ce) => ce.configName === 'retention.ms');
+   const retentionMS = configEntry?.configValue ?? '604800000';
+
+   await admin.alterConfigs({
+     validateOnly: false,
+     resources: [
+       {
+         type: ConfigResourceTypes.TOPIC,
+         name: topic,
+         configEntries: [{ name: 'retention.ms', value: '0' }],
+       },
+     ],
+   });
+
+   await new Promise((resolve) => setTimeout(resolve, 2000));
+
+   await admin.alterConfigs({
+     validateOnly: false,
+     resources: [
+       {
+         type: ConfigResourceTypes.TOPIC,
+         name: topic,
+         configEntries: [{ name: 'retention.ms', value: retentionMS }],
+       },
+     ],
+   });
+
+   await new Promise((resolve) => setTimeout(resolve, 2000));
+ }
+
+ // eslint-disable-next-line @typescript-eslint/no-floating-promises
+ suite('confluent-kafka-receive', async () => {
+   const kafkaAvailable = await validateKafka(kafkaConfig);
+   let producer: Producer | undefined = undefined;
+   let admin: Admin | undefined = undefined;
+
+   const testCases = [
+     { topic: 'string-topic', functionName: 'stringTopic' },
+     { topic: 'regex-topic-foo', functionName: 'regexTopic' },
+     { topic: 'a-topic', functionName: 'stringArrayTopic' },
+     { topic: 'b-topic', functionName: 'stringArrayTopic' },
+     { topic: 'z-topic-foo', functionName: 'regexArrayTopic' },
+     { topic: 'y-topic-foo', functionName: 'regexArrayTopic' },
+     { topic: 'registered-topic', functionName: 'registeredConsumer' },
+   ];
+
+   before(async () => {
+     if (!kafkaAvailable) {
+       return;
+     }
+
+     const kafka = new Kafka(kafkaConfig);
+     producer = kafka.producer();
+     admin = kafka.admin();
+     await Promise.all([producer.connect(), admin.connect()]);
+
+     await createTopics(
+       admin,
+       testCases.map((tc) => tc.topic),
+     );
+
+     const client = new Client({ user: 'postgres', database: 'postgres' });
+     try {
+       await client.connect();
+       await Promise.all([
+         dropDB(client, 'conf_kafka_recv_test', true),
+         dropDB(client, 'conf_kafka_recv_test_dbos_sys', true),
+       ]);
+     } finally {
+       await client.end();
+     }
+   });
+
+   after(async () => {
+     await admin?.disconnect();
+     await producer?.disconnect();
+   });
+
+   beforeEach(async () => {
+     if (kafkaAvailable) {
+       DBOS.setConfig({ name: 'conf-kafka-recv-test' });
+       await DBOS.launch();
+     }
+   });
+
+   afterEach(async () => {
+     if (kafkaAvailable) {
+       await DBOS.shutdown();
+     }
+   });
+
+   for (const { functionName, topic } of testCases) {
+     // eslint-disable-next-line @typescript-eslint/no-floating-promises
+     test(`${topic}-${functionName}`, { skip: !kafkaAvailable, timeout: 125000 }, async () => {
+       await purgeTopic(admin!, topic);
+       const message = `test-message-${Date.now()}`;
+       await producer!.send({ topic, messages: [{ value: message }] });
+       const result = await waitForMessage(KafkaTestClass.emitter, functionName, topic, 120000);
+
+       assert.equal(topic, result.topic);
+       assert.equal(message, String(result.message.value));
+     });
+   }
+ });
package/tests/test-helpers.ts ADDED
@@ -0,0 +1,23 @@
+ import { Client } from 'pg';
+
+ export async function ensureDB(client: Client, name: string) {
+   const results = await client.query('SELECT 1 FROM pg_database WHERE datname = $1', [name]);
+   if (results.rows.length === 0) {
+     await client.query(`CREATE DATABASE ${name}`);
+   }
+ }
+
+ export async function dropDB(client: Client, name: string, force: boolean = false) {
+   const withForce = force ? ' WITH (FORCE)' : '';
+   await client.query(`DROP DATABASE IF EXISTS ${name} ${withForce}`);
+ }
+
+ export function withTimeout<T>(promise: Promise<T>, ms: number, message = 'Timeout'): Promise<T> {
+   let timeoutId: ReturnType<typeof setTimeout>;
+
+   const timeout = new Promise<never>((_, reject) => {
+     timeoutId = setTimeout(() => reject(new Error(message)), ms);
+   });
+
+   return Promise.race([promise, timeout]).finally(() => clearTimeout(timeoutId));
+ }
package/tsconfig.build.json ADDED
@@ -0,0 +1,8 @@
+ /* Visit https://aka.ms/tsconfig to read more about this file */
+ {
+   "extends": "../../tsconfig.shared.json",
+   "compilerOptions": {
+     "outDir": "./dist"
+   },
+   "include": ["index.ts"]
+ }
package/tsconfig.json ADDED
@@ -0,0 +1,8 @@
+ /* Visit https://aka.ms/tsconfig to read more about this file */
+ {
+   "extends": "../../tsconfig.shared.json",
+   "compilerOptions": {
+     "noEmit": true
+   },
+   "include": ["index.ts", "tests/*.ts"]
+ }