@stepflowjs/trigger-kafka 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +65 -0
- package/dist/index.js +114 -0
- package/dist/index.js.map +1 -0
- package/package.json +58 -0
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
import { Trigger, TriggerHandler } from '@stepflowjs/core';
|
|
2
|
+
|
|
3
|
+
interface KafkaTriggerConfig {
|
|
4
|
+
/** Kafka broker URLs */
|
|
5
|
+
brokers: string[];
|
|
6
|
+
/** Topic to consume from */
|
|
7
|
+
topic: string;
|
|
8
|
+
/** Consumer group ID */
|
|
9
|
+
groupId: string;
|
|
10
|
+
/** Client identifier (optional) */
|
|
11
|
+
clientId?: string;
|
|
12
|
+
/** Start from beginning (default: false) */
|
|
13
|
+
fromBeginning?: boolean;
|
|
14
|
+
/** Enable SSL */
|
|
15
|
+
ssl?: boolean;
|
|
16
|
+
/** SASL authentication */
|
|
17
|
+
sasl?: {
|
|
18
|
+
mechanism: "plain" | "scram-sha-256" | "scram-sha-512";
|
|
19
|
+
username: string;
|
|
20
|
+
password: string;
|
|
21
|
+
};
|
|
22
|
+
}
|
|
23
|
+
/**
|
|
24
|
+
* Kafka trigger for Stepflow workflows
|
|
25
|
+
*
|
|
26
|
+
* Consumes messages from a Kafka topic using consumer groups and triggers
|
|
27
|
+
* workflows for each message. Supports SSL and SASL authentication.
|
|
28
|
+
*
|
|
29
|
+
* @example
|
|
30
|
+
* ```typescript
|
|
31
|
+
* const trigger = new KafkaTrigger({
|
|
32
|
+
* brokers: ['localhost:9092'],
|
|
33
|
+
* topic: 'orders',
|
|
34
|
+
* groupId: 'stepflow-orders',
|
|
35
|
+
* fromBeginning: false,
|
|
36
|
+
* });
|
|
37
|
+
*
|
|
38
|
+
* await trigger.start(async (event) => {
|
|
39
|
+
* await stepflow.trigger('process-order', event.data);
|
|
40
|
+
* });
|
|
41
|
+
* ```
|
|
42
|
+
*/
|
|
43
|
+
declare class KafkaTrigger implements Trigger<KafkaTriggerConfig> {
|
|
44
|
+
readonly config: KafkaTriggerConfig;
|
|
45
|
+
readonly type = "kafka";
|
|
46
|
+
private kafka?;
|
|
47
|
+
private consumer?;
|
|
48
|
+
private running;
|
|
49
|
+
constructor(config: KafkaTriggerConfig);
|
|
50
|
+
/**
|
|
51
|
+
* Start the Kafka consumer and begin processing messages
|
|
52
|
+
* @param handler Function to call for each Kafka message
|
|
53
|
+
*/
|
|
54
|
+
start(handler: TriggerHandler): Promise<void>;
|
|
55
|
+
/**
|
|
56
|
+
* Stop the Kafka consumer and disconnect
|
|
57
|
+
*/
|
|
58
|
+
stop(): Promise<void>;
|
|
59
|
+
/**
|
|
60
|
+
* Health check - returns true if consumer is running
|
|
61
|
+
*/
|
|
62
|
+
healthCheck(): Promise<boolean>;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
export { KafkaTrigger, type KafkaTriggerConfig };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
// src/index.ts
import { Kafka } from "kafkajs";
// FIX: explicit import — the bare `crypto` global used previously is only
// available in Node >= 19; on older runtimes it threw a ReferenceError at
// message time.
import { randomUUID } from "node:crypto";

/**
 * Kafka trigger for Stepflow workflows.
 *
 * Joins a consumer group on the configured topic and invokes the supplied
 * handler once per message. Supports SSL and SASL authentication.
 */
var KafkaTrigger = class {
  constructor(config) {
    this.config = config;
  }
  type = "kafka";
  kafka;
  consumer;
  running = false;
  /**
   * Start the Kafka consumer and begin processing messages.
   * @param handler Function to call for each Kafka message
   * @throws Error if the trigger is already running or connect/subscribe fails
   */
  async start(handler) {
    if (this.running) {
      throw new Error("KafkaTrigger is already running");
    }
    try {
      const kafkaConfig = {
        clientId: this.config.clientId || "stepflow-trigger",
        brokers: this.config.brokers
      };
      if (this.config.ssl) {
        kafkaConfig.ssl = true;
      }
      if (this.config.sasl) {
        kafkaConfig.sasl = this.config.sasl;
      }
      this.kafka = new Kafka(kafkaConfig);
      this.consumer = this.kafka.consumer({ groupId: this.config.groupId });
      await this.consumer.connect();
      await this.consumer.subscribe({
        topic: this.config.topic,
        fromBeginning: this.config.fromBeginning ?? false
      });
      this.running = true;
      await this.consumer.run({
        eachMessage: async ({ topic, partition, message }) => {
          // Ignore messages delivered after stop() flipped the flag.
          if (!this.running) {
            return;
          }
          try {
            // Parse the value as JSON; fall back to the raw string.
            let data = null;
            if (message.value) {
              try {
                data = JSON.parse(message.value.toString());
              } catch {
                data = message.value.toString();
              }
            }
            const event = {
              id: randomUUID(),
              type: this.type,
              source: topic,
              data,
              metadata: {
                topic,
                partition,
                offset: message.offset,
                key: message.key?.toString(),
                headers: message.headers ? Object.fromEntries(
                  Object.entries(message.headers).map(([k, v]) => [
                    k,
                    v?.toString()
                  ])
                ) : void 0,
                timestamp: message.timestamp
              },
              timestamp: /* @__PURE__ */ new Date()
            };
            await handler(event);
          } catch (error) {
            // Deliberately swallowed: let the consumer group handle retries.
            console.error(
              `Error processing Kafka message from ${topic}:${partition}@${message.offset}:`,
              error
            );
          }
        }
      });
    } catch (error) {
      this.running = false;
      // FIX: best-effort cleanup so a failed start (e.g. subscribe error
      // after a successful connect) doesn't leak a connected consumer.
      if (this.consumer) {
        try {
          await this.consumer.disconnect();
        } catch {
          // ignore secondary failures during cleanup
        }
        this.consumer = void 0;
        this.kafka = void 0;
      }
      // FIX: narrow the unknown catch value before reading .message.
      throw new Error(
        `Failed to start KafkaTrigger: ${error instanceof Error ? error.message : String(error)}`
      );
    }
  }
  /**
   * Stop the Kafka consumer and disconnect.
   */
  async stop() {
    this.running = false;
    if (this.consumer) {
      try {
        await this.consumer.disconnect();
      } catch (error) {
        console.error("Error disconnecting Kafka consumer:", error);
      } finally {
        this.consumer = void 0;
        this.kafka = void 0;
      }
    }
  }
  /**
   * Health check - returns true if consumer is running.
   */
  async healthCheck() {
    return this.running && this.consumer !== void 0;
  }
};
export {
  KafkaTrigger
};
//# sourceMappingURL=index.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"sources":["../src/index.ts"],"sourcesContent":["import type { Trigger, TriggerHandler, TriggerEvent } from \"@stepflowjs/core\";\nimport { Kafka, type Consumer, type KafkaConfig } from \"kafkajs\";\n\n// ============================================================================\n// Types\n// ============================================================================\n\nexport interface KafkaTriggerConfig {\n /** Kafka broker URLs */\n brokers: string[];\n /** Topic to consume from */\n topic: string;\n /** Consumer group ID */\n groupId: string;\n /** Client identifier (optional) */\n clientId?: string;\n /** Start from beginning (default: false) */\n fromBeginning?: boolean;\n /** Enable SSL */\n ssl?: boolean;\n /** SASL authentication */\n sasl?: {\n mechanism: \"plain\" | \"scram-sha-256\" | \"scram-sha-512\";\n username: string;\n password: string;\n };\n}\n\n// ============================================================================\n// KafkaTrigger Implementation\n// ============================================================================\n\n/**\n * Kafka trigger for Stepflow workflows\n *\n * Consumes messages from a Kafka topic using consumer groups and triggers\n * workflows for each message. 
Supports SSL and SASL authentication.\n *\n * @example\n * ```typescript\n * const trigger = new KafkaTrigger({\n * brokers: ['localhost:9092'],\n * topic: 'orders',\n * groupId: 'stepflow-orders',\n * fromBeginning: false,\n * });\n *\n * await trigger.start(async (event) => {\n * await stepflow.trigger('process-order', event.data);\n * });\n * ```\n */\nexport class KafkaTrigger implements Trigger<KafkaTriggerConfig> {\n readonly type = \"kafka\";\n private kafka?: Kafka;\n private consumer?: Consumer;\n private running = false;\n\n constructor(readonly config: KafkaTriggerConfig) {}\n\n /**\n * Start the Kafka consumer and begin processing messages\n * @param handler Function to call for each Kafka message\n */\n async start(handler: TriggerHandler): Promise<void> {\n if (this.running) {\n throw new Error(\"KafkaTrigger is already running\");\n }\n\n try {\n // Create Kafka client configuration\n const kafkaConfig: KafkaConfig = {\n clientId: this.config.clientId || \"stepflow-trigger\",\n brokers: this.config.brokers,\n };\n\n // Add SSL if configured\n if (this.config.ssl) {\n kafkaConfig.ssl = true;\n }\n\n // Add SASL if configured\n if (this.config.sasl) {\n kafkaConfig.sasl = this.config.sasl as any;\n }\n\n // Create Kafka client and consumer\n this.kafka = new Kafka(kafkaConfig);\n this.consumer = this.kafka.consumer({ groupId: this.config.groupId });\n\n // Connect to Kafka\n await this.consumer.connect();\n\n // Subscribe to topic\n await this.consumer.subscribe({\n topic: this.config.topic,\n fromBeginning: this.config.fromBeginning ?? 
false,\n });\n\n this.running = true;\n\n // Start consuming messages\n await this.consumer.run({\n eachMessage: async ({ topic, partition, message }) => {\n if (!this.running) {\n return;\n }\n\n try {\n // Parse message value as JSON\n let data: unknown = null;\n if (message.value) {\n try {\n data = JSON.parse(message.value.toString());\n } catch {\n // If not JSON, use raw string\n data = message.value.toString();\n }\n }\n\n // Create trigger event\n const event: TriggerEvent = {\n id: crypto.randomUUID(),\n type: this.type,\n source: topic,\n data,\n metadata: {\n topic,\n partition,\n offset: message.offset,\n key: message.key?.toString(),\n headers: message.headers\n ? Object.fromEntries(\n Object.entries(message.headers).map(([k, v]) => [\n k,\n v?.toString(),\n ]),\n )\n : undefined,\n timestamp: message.timestamp,\n },\n timestamp: new Date(),\n };\n\n // Invoke handler\n await handler(event);\n } catch (error) {\n console.error(\n `Error processing Kafka message from ${topic}:${partition}@${message.offset}:`,\n error,\n );\n // Don't throw - let Kafka handle retries via consumer group\n }\n },\n });\n } catch (error) {\n this.running = false;\n throw new Error(\n `Failed to start KafkaTrigger: ${(error as Error).message}`,\n );\n }\n }\n\n /**\n * Stop the Kafka consumer and disconnect\n */\n async stop(): Promise<void> {\n this.running = false;\n\n if (this.consumer) {\n try {\n await this.consumer.disconnect();\n } catch (error) {\n console.error(\"Error disconnecting Kafka consumer:\", error);\n } finally {\n this.consumer = undefined;\n this.kafka = undefined;\n }\n }\n }\n\n /**\n * Health check - returns true if consumer is running\n */\n async healthCheck(): Promise<boolean> {\n return this.running && this.consumer !== undefined;\n 
}\n}\n"],"mappings":";AACA,SAAS,aAA8C;AAmDhD,IAAM,eAAN,MAA0D;AAAA,EAM/D,YAAqB,QAA4B;AAA5B;AAAA,EAA6B;AAAA,EALzC,OAAO;AAAA,EACR;AAAA,EACA;AAAA,EACA,UAAU;AAAA;AAAA;AAAA;AAAA;AAAA,EAQlB,MAAM,MAAM,SAAwC;AAClD,QAAI,KAAK,SAAS;AAChB,YAAM,IAAI,MAAM,iCAAiC;AAAA,IACnD;AAEA,QAAI;AAEF,YAAM,cAA2B;AAAA,QAC/B,UAAU,KAAK,OAAO,YAAY;AAAA,QAClC,SAAS,KAAK,OAAO;AAAA,MACvB;AAGA,UAAI,KAAK,OAAO,KAAK;AACnB,oBAAY,MAAM;AAAA,MACpB;AAGA,UAAI,KAAK,OAAO,MAAM;AACpB,oBAAY,OAAO,KAAK,OAAO;AAAA,MACjC;AAGA,WAAK,QAAQ,IAAI,MAAM,WAAW;AAClC,WAAK,WAAW,KAAK,MAAM,SAAS,EAAE,SAAS,KAAK,OAAO,QAAQ,CAAC;AAGpE,YAAM,KAAK,SAAS,QAAQ;AAG5B,YAAM,KAAK,SAAS,UAAU;AAAA,QAC5B,OAAO,KAAK,OAAO;AAAA,QACnB,eAAe,KAAK,OAAO,iBAAiB;AAAA,MAC9C,CAAC;AAED,WAAK,UAAU;AAGf,YAAM,KAAK,SAAS,IAAI;AAAA,QACtB,aAAa,OAAO,EAAE,OAAO,WAAW,QAAQ,MAAM;AACpD,cAAI,CAAC,KAAK,SAAS;AACjB;AAAA,UACF;AAEA,cAAI;AAEF,gBAAI,OAAgB;AACpB,gBAAI,QAAQ,OAAO;AACjB,kBAAI;AACF,uBAAO,KAAK,MAAM,QAAQ,MAAM,SAAS,CAAC;AAAA,cAC5C,QAAQ;AAEN,uBAAO,QAAQ,MAAM,SAAS;AAAA,cAChC;AAAA,YACF;AAGA,kBAAM,QAAsB;AAAA,cAC1B,IAAI,OAAO,WAAW;AAAA,cACtB,MAAM,KAAK;AAAA,cACX,QAAQ;AAAA,cACR;AAAA,cACA,UAAU;AAAA,gBACR;AAAA,gBACA;AAAA,gBACA,QAAQ,QAAQ;AAAA,gBAChB,KAAK,QAAQ,KAAK,SAAS;AAAA,gBAC3B,SAAS,QAAQ,UACb,OAAO;AAAA,kBACL,OAAO,QAAQ,QAAQ,OAAO,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM;AAAA,oBAC9C;AAAA,oBACA,GAAG,SAAS;AAAA,kBACd,CAAC;AAAA,gBACH,IACA;AAAA,gBACJ,WAAW,QAAQ;AAAA,cACrB;AAAA,cACA,WAAW,oBAAI,KAAK;AAAA,YACtB;AAGA,kBAAM,QAAQ,KAAK;AAAA,UACrB,SAAS,OAAO;AACd,oBAAQ;AAAA,cACN,uCAAuC,KAAK,IAAI,SAAS,IAAI,QAAQ,MAAM;AAAA,cAC3E;AAAA,YACF;AAAA,UAEF;AAAA,QACF;AAAA,MACF,CAAC;AAAA,IACH,SAAS,OAAO;AACd,WAAK,UAAU;AACf,YAAM,IAAI;AAAA,QACR,iCAAkC,MAAgB,OAAO;AAAA,MAC3D;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,OAAsB;AAC1B,SAAK,UAAU;AAEf,QAAI,KAAK,UAAU;AACjB,UAAI;AACF,cAAM,KAAK,SAAS,WAAW;AAAA,MACjC,SAAS,OAAO;AACd,gBAAQ,MAAM,uCAAuC,KAAK;AAAA,MAC5D,UAAE;AACA,aAAK,WAAW;AAChB,aAAK,QAAQ;AAAA,MACf;AAAA,IACF;AAAA,EACF;AAAA;AAAA;AAAA;AAAA,EAKA,MAAM,cAAgC;AACpC,WAAO,KAAK,WAAW,KAAK,aAAa;AAAA,EAC3C;AACF;","names":[]}
|
package/package.json
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
1
|
+
{
  "name": "@stepflowjs/trigger-kafka",
  "version": "0.0.1",
  "description": "Kafka trigger for Stepflow with consumer group management",
  "type": "module",
  "main": "./dist/index.js",
  "module": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "exports": {
    ".": {
      "types": "./dist/index.d.ts",
      "import": "./dist/index.js"
    }
  },
  "files": [
    "dist"
  ],
  "dependencies": {
    "kafkajs": "^2.2.4",
    "@stepflowjs/core": "0.0.1"
  },
  "devDependencies": {
    "tsup": "^8.5.1",
    "vitest": "^4.0.17"
  },
  "peerDependencies": {
    "typescript": "^5.0.0"
  },
  "license": "MIT",
  "author": "Stepflow Contributors",
  "repository": {
    "type": "git",
    "url": "https://stepflow-production.up.railway.app",
    "directory": "packages/triggers/kafka"
  },
  "homepage": "https://stepflow-production.up.railway.app",
  "bugs": {
    "url": "https://stepflow-production.up.railway.app"
  },
  "keywords": [
    "stepflow",
    "trigger",
    "kafka",
    "stream",
    "workflow",
    "orchestration"
  ],
  "publishConfig": {
    "access": "public"
  },
  "scripts": {
    "build": "tsup",
    "dev": "tsup --watch",
    "typecheck": "tsc --noEmit",
    "test": "vitest",
    "clean": "rm -rf dist"
  }
}
|