@forklaunch/implementation-worker-kafka 0.4.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/consumers/index.d.mts +4 -4
- package/lib/consumers/index.d.ts +4 -4
- package/lib/consumers/index.js +8 -4
- package/lib/consumers/index.mjs +8 -4
- package/lib/domain/schemas/index.js +8 -8
- package/lib/domain/schemas/index.mjs +8 -8
- package/lib/eject/consumers/kafkaWorker.consumer.ts +12 -4
- package/lib/eject/producers/kafkaWorker.producer.ts +5 -4
- package/lib/producers/index.d.mts +1 -1
- package/lib/producers/index.d.ts +1 -1
- package/lib/producers/index.js +3 -1
- package/lib/producers/index.mjs +3 -1
- package/package.json +10 -10
package/lib/consumers/index.d.mts
CHANGED
@@ -3,14 +3,14 @@ import { WorkerEventEntity, WorkerProcessFunction, WorkerFailureHandler } from '
 import { KafkaWorkerOptions } from '../domain/types/index.mjs';
 
 declare class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> implements WorkerConsumer<EventEntity> {
-    protected readonly queueName: string;
-    protected readonly options: Options;
-    protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
-    protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
     private kafka;
     private producer;
     private consumer;
     private processedMessages;
+    protected readonly queueName: string;
+    protected readonly options: Options;
+    protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+    protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
     constructor(queueName: string, options: Options, processEventsFunction: WorkerProcessFunction<EventEntity>, failureHandler: WorkerFailureHandler<EventEntity>);
     private setupConsumer;
     peekEvents(): Promise<EventEntity[]>;
package/lib/consumers/index.d.ts
CHANGED
@@ -3,14 +3,14 @@ import { WorkerEventEntity, WorkerProcessFunction, WorkerFailureHandler } from '
 import { KafkaWorkerOptions } from '../domain/types/index.js';
 
 declare class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> implements WorkerConsumer<EventEntity> {
-    protected readonly queueName: string;
-    protected readonly options: Options;
-    protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
-    protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
     private kafka;
     private producer;
     private consumer;
     private processedMessages;
+    protected readonly queueName: string;
+    protected readonly options: Options;
+    protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+    protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
     constructor(queueName: string, options: Options, processEventsFunction: WorkerProcessFunction<EventEntity>, failureHandler: WorkerFailureHandler<EventEntity>);
     private setupConsumer;
     peekEvents(): Promise<EventEntity[]>;
package/lib/consumers/index.js
CHANGED
@@ -27,6 +27,14 @@ module.exports = __toCommonJS(consumers_exports);
 // consumers/kafkaWorker.consumer.ts
 var import_kafkajs = require("kafkajs");
 var KafkaWorkerConsumer = class {
+  kafka;
+  producer;
+  consumer;
+  processedMessages = /* @__PURE__ */ new Set();
+  queueName;
+  options;
+  processEventsFunction;
+  failureHandler;
   constructor(queueName, options, processEventsFunction, failureHandler) {
     this.queueName = queueName;
     this.options = options;
@@ -41,10 +49,6 @@ var KafkaWorkerConsumer = class {
       groupId: this.options.groupId
     });
   }
-  kafka;
-  producer;
-  consumer;
-  processedMessages = /* @__PURE__ */ new Set();
   async setupConsumer() {
     await this.consumer.connect();
     await this.consumer.subscribe({
package/lib/consumers/index.mjs
CHANGED
@@ -1,6 +1,14 @@
 // consumers/kafkaWorker.consumer.ts
 import { Kafka } from "kafkajs";
 var KafkaWorkerConsumer = class {
+  kafka;
+  producer;
+  consumer;
+  processedMessages = /* @__PURE__ */ new Set();
+  queueName;
+  options;
+  processEventsFunction;
+  failureHandler;
   constructor(queueName, options, processEventsFunction, failureHandler) {
     this.queueName = queueName;
     this.options = options;
@@ -15,10 +23,6 @@ var KafkaWorkerConsumer = class {
       groupId: this.options.groupId
     });
   }
-  kafka;
-  producer;
-  consumer;
-  processedMessages = /* @__PURE__ */ new Set();
   async setupConsumer() {
     await this.consumer.connect();
     await this.consumer.subscribe({
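
In the emitted index.js and index.mjs above, the change is purely where the class fields are declared. With native class fields, every instance field initializer of a base class runs before the constructor body regardless of whether the field appears above or below the constructor in the class body, so the reordering itself does not change runtime behavior. A standalone sketch of that semantics (illustrative names, not package code):

// Both classes log "init": instance field initializers run before the
// constructor body no matter where the field sits in the class body.
class FieldAfterConstructor {
  constructor() {
    console.log(this.tag); // "init"
    this.tag = "ctor";
  }
  tag = "init";
}

class FieldBeforeConstructor {
  tag = "init";
  constructor() {
    console.log(this.tag); // "init"
    this.tag = "ctor";
  }
}

new FieldAfterConstructor();
new FieldBeforeConstructor();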
package/lib/domain/schemas/index.js
CHANGED
@@ -28,7 +28,7 @@ module.exports = __toCommonJS(schemas_exports);
 // domain/schemas/kafka.schema.ts
 var import_internal = require("@forklaunch/internal");
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/typebox/index.mjs
 var typebox_exports = {};
 __export(typebox_exports, {
   SchemaValidator: () => SchemaValidator,
@@ -68,7 +68,7 @@ __export(typebox_exports, {
 });
 __reExport(typebox_exports, require("@sinclair/typebox"));
 
-// ../../../node_modules/.pnpm/@forklaunch+common@0.4.
+// ../../../node_modules/.pnpm/@forklaunch+common@0.4.6/node_modules/@forklaunch/common/lib/index.mjs
 var InMemoryBlob = class extends Blob {
   constructor(content) {
     super([Buffer.from(content)]);
@@ -76,7 +76,7 @@ var InMemoryBlob = class extends Blob {
   }
 };
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/typebox/index.mjs
 var import_typebox = require("@sinclair/typebox");
 var import_compiler = require("@sinclair/typebox/compiler");
 var import_errors = require("@sinclair/typebox/errors");
@@ -310,12 +310,12 @@ var TypeboxSchemaValidator = class {
    * @returns {TResolve<T>} The resolved schema.
    */
   schemify(schema) {
-    if (import_typebox.KindGuard.IsSchema(schema) || schema instanceof import_compiler.TypeCheck) {
-      return schema;
-    }
     if (typeof schema === "string" || typeof schema === "number" || typeof schema === "boolean") {
       return import_typebox.Type.Literal(schema);
     }
+    if (import_typebox.KindGuard.IsSchema(schema) || schema instanceof import_compiler.TypeCheck) {
+      return schema;
+    }
     const newSchema = {};
     Object.getOwnPropertyNames(schema).forEach((key) => {
       if (import_typebox.KindGuard.IsSchema(schema[key])) {
@@ -586,7 +586,7 @@ var KafkaWorkerOptionsSchema = {
   peekCount: number
 };
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/zod/index.mjs
 var import_v3 = require("zod/v3");
 
 // ../../../node_modules/.pnpm/ts-deepmerge@7.0.3/node_modules/ts-deepmerge/esm/index.js
@@ -636,7 +636,7 @@ merge.withOptions = (options, ...objects) => {
   return result;
 };
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/zod/index.mjs
 var import_v32 = require("zod/v3");
 function extendApi(schema, schemaObject = {}) {
   const This = schema.constructor;
package/lib/domain/schemas/index.mjs
CHANGED
@@ -19,7 +19,7 @@ var __reExport = (target, mod, secondTarget) => (__copyProps(target, mod, "defau
 // domain/schemas/kafka.schema.ts
 import { serviceSchemaResolver } from "@forklaunch/internal";
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/typebox/index.mjs
 var typebox_exports = {};
 __export(typebox_exports, {
   SchemaValidator: () => SchemaValidator,
@@ -60,7 +60,7 @@ __export(typebox_exports, {
 __reExport(typebox_exports, typebox_star);
 import * as typebox_star from "@sinclair/typebox";
 
-// ../../../node_modules/.pnpm/@forklaunch+common@0.4.
+// ../../../node_modules/.pnpm/@forklaunch+common@0.4.6/node_modules/@forklaunch/common/lib/index.mjs
 var InMemoryBlob = class extends Blob {
   constructor(content) {
     super([Buffer.from(content)]);
@@ -68,7 +68,7 @@ var InMemoryBlob = class extends Blob {
   }
 };
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/typebox/index.mjs
 import {
   FormatRegistry,
   Kind,
@@ -311,12 +311,12 @@ var TypeboxSchemaValidator = class {
    * @returns {TResolve<T>} The resolved schema.
    */
   schemify(schema) {
-    if (KindGuard.IsSchema(schema) || schema instanceof TypeCheck) {
-      return schema;
-    }
     if (typeof schema === "string" || typeof schema === "number" || typeof schema === "boolean") {
       return Type.Literal(schema);
     }
+    if (KindGuard.IsSchema(schema) || schema instanceof TypeCheck) {
+      return schema;
+    }
     const newSchema = {};
     Object.getOwnPropertyNames(schema).forEach((key) => {
       if (KindGuard.IsSchema(schema[key])) {
@@ -587,7 +587,7 @@ var KafkaWorkerOptionsSchema = {
   peekCount: number
 };
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/zod/index.mjs
 import {
   z as z2,
   ZodType
@@ -640,7 +640,7 @@ merge.withOptions = (options, ...objects) => {
   return result;
 };
 
-// ../../../node_modules/.pnpm/@forklaunch+validator@0.
+// ../../../node_modules/.pnpm/@forklaunch+validator@0.8.0/node_modules/@forklaunch/validator/lib/src/zod/index.mjs
 import { z } from "zod/v3";
 function extendApi(schema, schemaObject = {}) {
   const This = schema.constructor;
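
Within the bundled validator, the hunks above show two kinds of change: the resolved pnpm paths of the bundled @forklaunch/validator and @forklaunch/common sources changed (the old paths are truncated in this view), and the two early returns in schemify swap order so primitive literals are handled before the schema pass-through. A simplified standalone sketch of that dispatch order, using only calls visible in the diff (the object-recursion branch is omitted):

import { KindGuard, Type } from "@sinclair/typebox";
import { TypeCheck } from "@sinclair/typebox/compiler";

function schemifySketch(schema: unknown) {
  // 1. Primitive values become literal schemas first.
  if (typeof schema === "string" || typeof schema === "number" || typeof schema === "boolean") {
    return Type.Literal(schema);
  }
  // 2. Anything that is already a TypeBox schema (or a compiled TypeCheck) passes through.
  if (KindGuard.IsSchema(schema) || schema instanceof TypeCheck) {
    return schema;
  }
  // 3. The real implementation walks plain-object properties here (see the diff above).
  throw new Error("object case omitted in this sketch");
}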
package/lib/eject/consumers/kafkaWorker.consumer.ts
CHANGED
@@ -16,13 +16,21 @@ export class KafkaWorkerConsumer<
   private producer: Producer;
   private consumer: Consumer;
   private processedMessages: Set<string> = new Set();
+  protected readonly queueName: string;
+  protected readonly options: Options;
+  protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+  protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
 
   constructor(
-
-
-
-
+    queueName: string,
+    options: Options,
+    processEventsFunction: WorkerProcessFunction<EventEntity>,
+    failureHandler: WorkerFailureHandler<EventEntity>
   ) {
+    this.queueName = queueName;
+    this.options = options;
+    this.processEventsFunction = processEventsFunction;
+    this.failureHandler = failureHandler;
     this.kafka = new Kafka({
       clientId: this.options.clientId,
       brokers: this.options.brokers
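
The ejected TypeScript source shows the underlying refactor most directly: constructor parameter properties (the removed lines are truncated in this view) are replaced with explicit field declarations plus assignments, which is also why the declaration (.d.mts/.d.ts) diffs above show the protected fields moving rather than changing type. A minimal standalone sketch of the two equivalent forms (illustrative names, not the package's code):

// Old style: parameter properties declare and assign the fields implicitly.
class WithParameterProperties {
  constructor(
    protected readonly queueName: string,
    protected readonly options: { clientId: string; brokers: string[] }
  ) {}
}

// New style: the same contract with explicit declarations and assignments.
class WithExplicitFields {
  protected readonly queueName: string;
  protected readonly options: { clientId: string; brokers: string[] };

  constructor(queueName: string, options: { clientId: string; brokers: string[] }) {
    this.queueName = queueName;
    this.options = options;
  }
}

Both forms type-check to the same public surface; the explicit form avoids parameter-property syntax, which, among other things, is not accepted by erasable-syntax-only toolchains.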
package/lib/eject/producers/kafkaWorker.producer.ts
CHANGED
@@ -6,11 +6,12 @@ export class KafkaWorkerProducer<
   Options extends KafkaWorkerOptions
 > {
   private producer;
+  private readonly queueName: string;
+  private readonly options: Options;
 
-  constructor(
-
-
-  ) {
+  constructor(queueName: string, options: Options) {
+    this.queueName = queueName;
+    this.options = options;
     const kafka = new Kafka({
       clientId: this.options.clientId,
       brokers: this.options.brokers
package/lib/producers/index.d.mts
CHANGED
@@ -2,9 +2,9 @@ import { WorkerEventEntity } from '@forklaunch/interfaces-worker/types';
 import { KafkaWorkerOptions } from '../domain/types/index.mjs';
 
 declare class KafkaWorkerProducer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> {
+    private producer;
     private readonly queueName;
     private readonly options;
-    private producer;
     constructor(queueName: string, options: Options);
     enqueueJob(event: EventEntity): Promise<void>;
     enqueueBatchJobs(events: EventEntity[]): Promise<void>;
package/lib/producers/index.d.ts
CHANGED
@@ -2,9 +2,9 @@ import { WorkerEventEntity } from '@forklaunch/interfaces-worker/types';
 import { KafkaWorkerOptions } from '../domain/types/index.js';
 
 declare class KafkaWorkerProducer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> {
+    private producer;
     private readonly queueName;
     private readonly options;
-    private producer;
     constructor(queueName: string, options: Options);
     enqueueJob(event: EventEntity): Promise<void>;
     enqueueBatchJobs(events: EventEntity[]): Promise<void>;
package/lib/producers/index.js
CHANGED
@@ -27,6 +27,9 @@ module.exports = __toCommonJS(producers_exports);
 // producers/kafkaWorker.producer.ts
 var import_kafkajs = require("kafkajs");
 var KafkaWorkerProducer = class {
+  producer;
+  queueName;
+  options;
   constructor(queueName, options) {
     this.queueName = queueName;
     this.options = options;
@@ -37,7 +40,6 @@ var KafkaWorkerProducer = class {
     this.producer = kafka.producer();
     this.producer.connect();
   }
-  producer;
   async enqueueJob(event) {
     await this.producer.send({
       topic: this.queueName,
package/lib/producers/index.mjs
CHANGED
@@ -1,6 +1,9 @@
 // producers/kafkaWorker.producer.ts
 import { Kafka } from "kafkajs";
 var KafkaWorkerProducer = class {
+  producer;
+  queueName;
+  options;
   constructor(queueName, options) {
     this.queueName = queueName;
     this.options = options;
@@ -11,7 +14,6 @@ var KafkaWorkerProducer = class {
     this.producer = kafka.producer();
     this.producer.connect();
   }
-  producer;
   async enqueueJob(event) {
     await this.producer.send({
       topic: this.queueName,
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@forklaunch/implementation-worker-kafka",
-  "version": "0.
+  "version": "0.5.1",
   "description": "Kafka implementation for forklaunch workers",
   "homepage": "https://github.com/forklaunch/forklaunch-js#readme",
   "bugs": {
@@ -42,20 +42,20 @@
     "lib/**"
   ],
   "dependencies": {
-    "@forklaunch/core": "^0.
-    "@forklaunch/internal": "^0.1.
-    "@sinclair/typebox": "^0.34.
+    "@forklaunch/core": "^0.12.0",
+    "@forklaunch/internal": "^0.1.9",
+    "@sinclair/typebox": "^0.34.38",
     "kafkajs": "^2.2.4",
-    "zod": "^4.0.
-    "@forklaunch/interfaces-worker": "0.
+    "zod": "^4.0.14",
+    "@forklaunch/interfaces-worker": "0.4.0"
   },
   "devDependencies": {
-    "@typescript/native-preview": "7.0.0-dev.
+    "@typescript/native-preview": "7.0.0-dev.20250803.1",
     "depcheck": "^1.4.7",
-    "eslint": "^9.
+    "eslint": "^9.32.0",
     "prettier": "^3.6.2",
-    "typedoc": "^0.28.
-    "typescript-eslint": "^8.
+    "typedoc": "^0.28.9",
+    "typescript-eslint": "^8.38.0"
   },
   "scripts": {
     "build": "tsc --noEmit && tsup producers/index.ts consumers/index.ts domain/schemas/index.ts domain/types/index.ts --format cjs,esm --no-splitting --dts --tsconfig tsconfig.json --out-dir lib --clean && if [ -f eject-package.bash ]; then pnpm package:eject; fi",