@forklaunch/implementation-worker-kafka 0.3.3 → 0.3.4
This diff shows the changes between publicly released versions of this package as published to a supported public registry. It is provided for informational purposes only.
- package/lib/consumers/index.d.mts +15 -28
- package/lib/consumers/index.d.ts +15 -28
- package/lib/consumers/index.js +11 -14
- package/lib/consumers/index.mjs +7 -3
- package/lib/domain/schemas/index.d.mts +8 -52
- package/lib/domain/schemas/index.d.ts +8 -52
- package/lib/domain/schemas/index.js +489 -860
- package/lib/domain/schemas/index.mjs +497 -837
- package/lib/domain/types/index.d.mts +6 -6
- package/lib/domain/types/index.d.ts +6 -6
- package/lib/domain/types/index.js +4 -8
- package/lib/eject/domain/schemas/kafkaWorker.schema.ts +1 -1
- package/lib/producers/index.d.mts +7 -10
- package/lib/producers/index.d.ts +7 -10
- package/lib/producers/index.js +8 -13
- package/lib/producers/index.mjs +4 -2
- package/package.json +1 -1
package/lib/consumers/index.d.mts
CHANGED

@@ -1,34 +1,21 @@
 import { WorkerConsumer } from '@forklaunch/interfaces-worker/interfaces';
-import {
-  WorkerEventEntity,
-  WorkerProcessFunction,
-  WorkerFailureHandler
-} from '@forklaunch/interfaces-worker/types';
+import { WorkerEventEntity, WorkerProcessFunction, WorkerFailureHandler } from '@forklaunch/interfaces-worker/types';
 import { KafkaWorkerOptions } from '../domain/types/index.mjs';
 
-declare class KafkaWorkerConsumer<
-  …
-    queueName: string,
-    options: Options,
-    processEventsFunction: WorkerProcessFunction<EventEntity>,
-    failureHandler: WorkerFailureHandler<EventEntity>
-  );
-  private setupConsumer;
-  peekEvents(): Promise<EventEntity[]>;
-  start(): Promise<void>;
-  close(): Promise<void>;
+declare class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> implements WorkerConsumer<EventEntity> {
+  protected readonly queueName: string;
+  protected readonly options: Options;
+  protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+  protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
+  private kafka;
+  private producer;
+  private consumer;
+  private processedMessages;
+  constructor(queueName: string, options: Options, processEventsFunction: WorkerProcessFunction<EventEntity>, failureHandler: WorkerFailureHandler<EventEntity>);
+  private setupConsumer;
+  peekEvents(): Promise<EventEntity[]>;
+  start(): Promise<void>;
+  close(): Promise<void>;
 }
 
 export { KafkaWorkerConsumer };
package/lib/consumers/index.d.ts
CHANGED
@@ -1,34 +1,21 @@
 import { WorkerConsumer } from '@forklaunch/interfaces-worker/interfaces';
-import {
-  WorkerEventEntity,
-  WorkerProcessFunction,
-  WorkerFailureHandler
-} from '@forklaunch/interfaces-worker/types';
+import { WorkerEventEntity, WorkerProcessFunction, WorkerFailureHandler } from '@forklaunch/interfaces-worker/types';
 import { KafkaWorkerOptions } from '../domain/types/index.js';
 
-declare class KafkaWorkerConsumer<
-  …
-    queueName: string,
-    options: Options,
-    processEventsFunction: WorkerProcessFunction<EventEntity>,
-    failureHandler: WorkerFailureHandler<EventEntity>
-  );
-  private setupConsumer;
-  peekEvents(): Promise<EventEntity[]>;
-  start(): Promise<void>;
-  close(): Promise<void>;
+declare class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> implements WorkerConsumer<EventEntity> {
+  protected readonly queueName: string;
+  protected readonly options: Options;
+  protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+  protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
+  private kafka;
+  private producer;
+  private consumer;
+  private processedMessages;
+  constructor(queueName: string, options: Options, processEventsFunction: WorkerProcessFunction<EventEntity>, failureHandler: WorkerFailureHandler<EventEntity>);
+  private setupConsumer;
+  peekEvents(): Promise<EventEntity[]>;
+  start(): Promise<void>;
+  close(): Promise<void>;
 }
 
 export { KafkaWorkerConsumer };
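For orientation, the declaration above implies a constructor taking a queue name, a KafkaWorkerOptions object, a process function, and a failure handler. A minimal consumer wired against that shape might look like the sketch below; the "consumers" subpath import, the EmailEvent type, the handler declarations, and all option values are assumptions for illustration, not part of the published package.

// Sketch only: the subpath import and every concrete value below are
// assumptions; the published declaration only guarantees the class shape.
import { KafkaWorkerConsumer } from '@forklaunch/implementation-worker-kafka/consumers';
import {
  WorkerEventEntity,
  WorkerProcessFunction,
  WorkerFailureHandler
} from '@forklaunch/interfaces-worker/types';

// Hypothetical event entity extending the worker event base type.
type EmailEvent = WorkerEventEntity & { recipient: string };

// Assumed to satisfy the handler types; their exact signatures are defined
// in @forklaunch/interfaces-worker/types, not in this sketch.
declare const processEmailEvents: WorkerProcessFunction<EmailEvent>;
declare const handleEmailFailure: WorkerFailureHandler<EmailEvent>;

// Placeholder option values; the field names mirror the KafkaWorkerSchemas
// declaration shown further down in this diff.
const options = {
  brokers: ['localhost:9092'],
  clientId: 'email-worker',
  groupId: 'email-worker-group',
  retries: 3,
  interval: 5000,
  peekCount: 10
};

const consumer = new KafkaWorkerConsumer(
  'email-events',
  options,
  processEmailEvents,
  handleEmailFailure
);

await consumer.start();                      // connect and begin consuming
const pending = await consumer.peekEvents(); // collect up to peekCount events
await consumer.close();                      // disconnect the underlying clients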
package/lib/consumers/index.js
CHANGED
@@ -1,4 +1,4 @@
-'use strict';
+"use strict";
 var __defProp = Object.defineProperty;
 var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
 var __getOwnPropNames = Object.getOwnPropertyNames;
@@ -8,18 +8,14 @@ var __export = (target, all) => {
     __defProp(target, name, { get: all[name], enumerable: true });
 };
 var __copyProps = (to, from, except, desc) => {
-  if (from && typeof from === 'object' || typeof from === 'function') {
+  if (from && typeof from === "object" || typeof from === "function") {
     for (let key of __getOwnPropNames(from))
       if (!__hasOwnProp.call(to, key) && key !== except)
-        __defProp(to, key, {
-          get: () => from[key],
-          enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
-        });
+        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
   }
   return to;
 };
-var __toCommonJS = (mod) =>
-  __copyProps(__defProp({}, '__esModule', { value: true }), mod);
+var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
 // consumers/index.ts
 var consumers_exports = {};
@@ -29,7 +25,7 @@ __export(consumers_exports, {
 module.exports = __toCommonJS(consumers_exports);
 
 // consumers/kafkaWorker.consumer.ts
-var import_kafkajs = require('kafkajs');
+var import_kafkajs = require("kafkajs");
 var KafkaWorkerConsumer = class {
   constructor(queueName, options, processEventsFunction, failureHandler) {
     this.queueName = queueName;
@@ -135,7 +131,9 @@ var KafkaWorkerConsumer = class {
     peekConsumer.run({
       eachMessage: async ({ message }) => {
         if (message.value && events.length < this.options.peekCount) {
-          const messageEvents = JSON.parse(message.value.toString());
+          const messageEvents = JSON.parse(
+            message.value.toString()
+          );
           events.push(...messageEvents);
           if (events.length >= this.options.peekCount) {
             resolve();
@@ -170,7 +168,6 @@ var KafkaWorkerConsumer = class {
   }
 };
 // Annotate the CommonJS export names for ESM import in node:
-0 &&
-  (module.exports = {
-    KafkaWorkerConsumer
-  });
+0 && (module.exports = {
+  KafkaWorkerConsumer
+});
package/lib/consumers/index.mjs
CHANGED
@@ -1,5 +1,5 @@
 // consumers/kafkaWorker.consumer.ts
-import { Kafka } from 'kafkajs';
+import { Kafka } from "kafkajs";
 var KafkaWorkerConsumer = class {
   constructor(queueName, options, processEventsFunction, failureHandler) {
     this.queueName = queueName;
@@ -105,7 +105,9 @@ var KafkaWorkerConsumer = class {
     peekConsumer.run({
       eachMessage: async ({ message }) => {
         if (message.value && events.length < this.options.peekCount) {
-          const messageEvents = JSON.parse(message.value.toString());
+          const messageEvents = JSON.parse(
+            message.value.toString()
+          );
           events.push(...messageEvents);
           if (events.length >= this.options.peekCount) {
             resolve();
@@ -139,4 +141,6 @@ var KafkaWorkerConsumer = class {
     await this.consumer.disconnect();
   }
 };
-export { KafkaWorkerConsumer };
+export {
+  KafkaWorkerConsumer
+};
package/lib/domain/schemas/index.d.mts
CHANGED

@@ -3,66 +3,22 @@ import * as zod from 'zod';
 import * as _sinclair_typebox from '@sinclair/typebox';
 import * as _forklaunch_validator from '@forklaunch/validator';
 
-declare const KafkaWorkerSchemas: <
-  SchemaValidator extends _forklaunch_validator.AnySchemaValidator
->(
-  options: Record<string, unknown> & {
+declare const KafkaWorkerSchemas: <SchemaValidator extends _forklaunch_validator.AnySchemaValidator>(options: Record<string, unknown> & {
     validator: SchemaValidator;
-  }
-) => _forklaunch_internal.SchemasByValidator<
-  SchemaValidator,
-  (options: Record<string, unknown>) => {
+}) => _forklaunch_internal.SchemasByValidator<SchemaValidator, (options: Record<string, unknown>) => {
     brokers: _sinclair_typebox.TArray<_sinclair_typebox.TString>;
     clientId: _sinclair_typebox.TString;
     groupId: _sinclair_typebox.TString;
-    retries: _sinclair_typebox.TTransform<
-      _sinclair_typebox.TUnion<
-        [
-          _sinclair_typebox.TNumber,
-          _sinclair_typebox.TString,
-          _sinclair_typebox.TBoolean,
-          _sinclair_typebox.TNull,
-          _sinclair_typebox.TBigInt,
-          _sinclair_typebox.TDate
-        ]
-      >,
-      number
-    >;
-    interval: _sinclair_typebox.TTransform<
-      _sinclair_typebox.TUnion<
-        [
-          _sinclair_typebox.TNumber,
-          _sinclair_typebox.TString,
-          _sinclair_typebox.TBoolean,
-          _sinclair_typebox.TNull,
-          _sinclair_typebox.TBigInt,
-          _sinclair_typebox.TDate
-        ]
-      >,
-      number
-    >;
-    peekCount: _sinclair_typebox.TTransform<
-      _sinclair_typebox.TUnion<
-        [
-          _sinclair_typebox.TNumber,
-          _sinclair_typebox.TString,
-          _sinclair_typebox.TBoolean,
-          _sinclair_typebox.TNull,
-          _sinclair_typebox.TBigInt,
-          _sinclair_typebox.TDate
-        ]
-      >,
-      number
-    >;
-  },
-  (options: Record<string, unknown>) => {
-    brokers: zod.ZodArray<zod.ZodString, 'many'>;
+    retries: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+    interval: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+    peekCount: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+}, (options: Record<string, unknown>) => {
+    brokers: zod.ZodArray<zod.ZodString, "many">;
     clientId: zod.ZodString;
     groupId: zod.ZodString;
     retries: zod.ZodEffects<zod.ZodNumber, number, unknown>;
     interval: zod.ZodEffects<zod.ZodNumber, number, unknown>;
     peekCount: zod.ZodEffects<zod.ZodNumber, number, unknown>;
-  }
->;
+}>;
 
 export { KafkaWorkerSchemas };
package/lib/domain/schemas/index.d.ts
CHANGED

@@ -3,66 +3,22 @@ import * as zod from 'zod';
 import * as _sinclair_typebox from '@sinclair/typebox';
 import * as _forklaunch_validator from '@forklaunch/validator';
 
-declare const KafkaWorkerSchemas: <
-  SchemaValidator extends _forklaunch_validator.AnySchemaValidator
->(
-  options: Record<string, unknown> & {
+declare const KafkaWorkerSchemas: <SchemaValidator extends _forklaunch_validator.AnySchemaValidator>(options: Record<string, unknown> & {
    validator: SchemaValidator;
-  }
-) => _forklaunch_internal.SchemasByValidator<
-  SchemaValidator,
-  (options: Record<string, unknown>) => {
+}) => _forklaunch_internal.SchemasByValidator<SchemaValidator, (options: Record<string, unknown>) => {
    brokers: _sinclair_typebox.TArray<_sinclair_typebox.TString>;
    clientId: _sinclair_typebox.TString;
    groupId: _sinclair_typebox.TString;
-    retries: _sinclair_typebox.TTransform<
-      _sinclair_typebox.TUnion<
-        [
-          _sinclair_typebox.TNumber,
-          _sinclair_typebox.TString,
-          _sinclair_typebox.TBoolean,
-          _sinclair_typebox.TNull,
-          _sinclair_typebox.TBigInt,
-          _sinclair_typebox.TDate
-        ]
-      >,
-      number
-    >;
-    interval: _sinclair_typebox.TTransform<
-      _sinclair_typebox.TUnion<
-        [
-          _sinclair_typebox.TNumber,
-          _sinclair_typebox.TString,
-          _sinclair_typebox.TBoolean,
-          _sinclair_typebox.TNull,
-          _sinclair_typebox.TBigInt,
-          _sinclair_typebox.TDate
-        ]
-      >,
-      number
-    >;
-    peekCount: _sinclair_typebox.TTransform<
-      _sinclair_typebox.TUnion<
-        [
-          _sinclair_typebox.TNumber,
-          _sinclair_typebox.TString,
-          _sinclair_typebox.TBoolean,
-          _sinclair_typebox.TNull,
-          _sinclair_typebox.TBigInt,
-          _sinclair_typebox.TDate
-        ]
-      >,
-      number
-    >;
-  },
-  (options: Record<string, unknown>) => {
-    brokers: zod.ZodArray<zod.ZodString, 'many'>;
+    retries: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+    interval: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+    peekCount: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+}, (options: Record<string, unknown>) => {
+    brokers: zod.ZodArray<zod.ZodString, "many">;
    clientId: zod.ZodString;
    groupId: zod.ZodString;
    retries: zod.ZodEffects<zod.ZodNumber, number, unknown>;
    interval: zod.ZodEffects<zod.ZodNumber, number, unknown>;
    peekCount: zod.ZodEffects<zod.ZodNumber, number, unknown>;
-  }
->;
+}>;
 
 export { KafkaWorkerSchemas };
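Stripped of the validator-specific wrappers, both schema variants above describe the same runtime options object. A configuration satisfying either schema could look like the sketch below; the values are placeholders, and because retries, interval, and peekCount are declared as coercing transforms (TTransform<…, number> and ZodEffects<ZodNumber, number, unknown>), string inputs such as environment variables appear to be coerced to numbers at validation time.

// Shape implied by the KafkaWorkerSchemas declaration above; the field names
// come from the diff, while the comments and values are assumptions.
type KafkaWorkerConfig = {
  brokers: string[];   // Kafka broker addresses
  clientId: string;    // kafkajs client id
  groupId: string;     // consumer group id
  retries: number;
  interval: number;
  peekCount: number;   // upper bound on events returned by peekEvents()
};

const config: KafkaWorkerConfig = {
  brokers: ['kafka-1:9092', 'kafka-2:9092'],
  clientId: 'worker-client',
  groupId: 'worker-group',
  retries: 3,
  interval: 5000,
  peekCount: 10
};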