@forklaunch/implementation-worker-kafka 0.1.0 → 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,2 +1,2 @@
  export {};
- //# sourceMappingURL=schemaEquality.test.d.ts.map
+ //# sourceMappingURL=schemaEquality.test.d.ts.map
@@ -3,14 +3,22 @@ import { testSchemaEquality } from '@forklaunch/core/test';
  import { KafkaWorkerOptionsSchema as TypeboxKafkaWorkerOptionsSchema } from '../schemas/typebox/kafkaWorker.schema';
  import { KafkaWorkerOptionsSchema as ZodKafkaWorkerOptionsSchema } from '../schemas/zod/kafkaWorker.schema';
  describe('schema equality', () => {
- it('should be equal for bullmq worker', () => {
- expect(isTrue(testSchemaEquality(ZodKafkaWorkerOptionsSchema, TypeboxKafkaWorkerOptionsSchema, {
+ it('should be equal for bullmq worker', () => {
+ expect(
+ isTrue(
+ testSchemaEquality(
+ ZodKafkaWorkerOptionsSchema,
+ TypeboxKafkaWorkerOptionsSchema,
+ {
  brokers: ['localhost:9092'],
  clientId: 'test',
  groupId: 'test',
  retries: 1,
  interval: 1000,
  peekCount: 1
- }))).toBeTruthy();
- });
+ }
+ )
+ )
+ ).toBeTruthy();
+ });
  });
@@ -1,2 +1,2 @@
  export * from './kafkaWorker.consumer';
- //# sourceMappingURL=index.d.ts.map
+ //# sourceMappingURL=index.d.ts.map
@@ -1,19 +1,32 @@
  import { WorkerConsumer } from '@forklaunch/interfaces-worker/interfaces';
- import { WorkerEventEntity, WorkerFailureHandler, WorkerProcessFunction } from '@forklaunch/interfaces-worker/types';
+ import {
+ WorkerEventEntity,
+ WorkerFailureHandler,
+ WorkerProcessFunction
+ } from '@forklaunch/interfaces-worker/types';
  import { KafkaWorkerOptions } from '../types/kafkaWorker.types';
- export declare class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity> implements WorkerConsumer<EventEntity> {
- protected readonly queueName: string;
- protected readonly options: KafkaWorkerOptions;
- protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
- protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
- private kafka;
- private producer;
- private consumer;
- private processedMessages;
- constructor(queueName: string, options: KafkaWorkerOptions, processEventsFunction: WorkerProcessFunction<EventEntity>, failureHandler: WorkerFailureHandler<EventEntity>);
- private setupConsumer;
- peekEvents(): Promise<EventEntity[]>;
- start(): Promise<void>;
- close(): Promise<void>;
+ export declare class KafkaWorkerConsumer<
+ EventEntity extends WorkerEventEntity,
+ Options extends KafkaWorkerOptions
+ > implements WorkerConsumer<EventEntity>
+ {
+ protected readonly queueName: string;
+ protected readonly options: Options;
+ protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+ protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
+ private kafka;
+ private producer;
+ private consumer;
+ private processedMessages;
+ constructor(
+ queueName: string,
+ options: Options,
+ processEventsFunction: WorkerProcessFunction<EventEntity>,
+ failureHandler: WorkerFailureHandler<EventEntity>
+ );
+ private setupConsumer;
+ peekEvents(): Promise<EventEntity[]>;
+ start(): Promise<void>;
+ close(): Promise<void>;
  }
- //# sourceMappingURL=kafkaWorker.consumer.d.ts.map
+ //# sourceMappingURL=kafkaWorker.consumer.d.ts.map
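The declaration above shows the main API change in 0.1.1: `KafkaWorkerConsumer` now takes a second generic parameter, `Options extends KafkaWorkerOptions`, so the `options` member keeps a caller-supplied, narrowed options type instead of widening to the base `KafkaWorkerOptions`. A minimal usage sketch follows; the subpath import locations, the event shape, and the `ExtendedOptions` type are assumptions for illustration, not part of the package's documented API.

```ts
// Hypothetical sketch: import paths, OrderEvent, and ExtendedOptions are assumptions;
// only the constructor arity and the generic parameters come from the declaration above.
import { KafkaWorkerConsumer } from '@forklaunch/implementation-worker-kafka/consumers';
import { KafkaWorkerOptions } from '@forklaunch/implementation-worker-kafka/types';

// Narrowed options type carried through the new Options generic.
type ExtendedOptions = KafkaWorkerOptions & { deadLetterTopic: string };

// Assumed minimal event shape; the consumer's retry path reads `id` and `retryCount`.
type OrderEvent = { id: string; retryCount: number; payload: unknown };

const consumer = new KafkaWorkerConsumer<OrderEvent, ExtendedOptions>(
  'orders', // queueName is also used as the Kafka topic
  {
    brokers: ['localhost:9092'],
    clientId: 'orders-service',
    groupId: 'orders-workers',
    retries: 3,
    interval: 1000,
    peekCount: 10,
    deadLetterTopic: 'orders.dlq'
  },
  async (events) => {
    for (const event of events) {
      // process each event in the batch
      console.log('processing', event.id);
    }
  },
  (failures) => {
    for (const { value, error } of failures) {
      console.error('failed event', value?.id, error);
    }
  }
);

await consumer.start();
```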
@@ -1 +1 @@
- {"version":3,"file":"kafkaWorker.consumer.d.ts","sourceRoot":"","sources":["../../consumers/kafkaWorker.consumer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,0CAA0C,CAAC;AAC1E,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,qBAAqB,EACtB,MAAM,qCAAqC,CAAC;AAE7C,OAAO,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAEhE,qBAAa,mBAAmB,CAAC,WAAW,SAAS,iBAAiB,CACpE,YAAW,cAAc,CAAC,WAAW,CAAC;IAQpC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,MAAM;IACpC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,kBAAkB;IAC9C,SAAS,CAAC,QAAQ,CAAC,qBAAqB,EAAE,qBAAqB,CAAC,WAAW,CAAC;IAC5E,SAAS,CAAC,QAAQ,CAAC,cAAc,EAAE,oBAAoB,CAAC,WAAW,CAAC;IATtE,OAAO,CAAC,KAAK,CAAQ;IACrB,OAAO,CAAC,QAAQ,CAAW;IAC3B,OAAO,CAAC,QAAQ,CAAW;IAC3B,OAAO,CAAC,iBAAiB,CAA0B;gBAG9B,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,kBAAkB,EAC3B,qBAAqB,EAAE,qBAAqB,CAAC,WAAW,CAAC,EACzD,cAAc,EAAE,oBAAoB,CAAC,WAAW,CAAC;YAaxD,aAAa;IA4DrB,UAAU,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;IA4EpC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAKtB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAI7B"}
+ {"version":3,"file":"kafkaWorker.consumer.d.ts","sourceRoot":"","sources":["../../consumers/kafkaWorker.consumer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,0CAA0C,CAAC;AAC1E,OAAO,EACL,iBAAiB,EACjB,oBAAoB,EACpB,qBAAqB,EACtB,MAAM,qCAAqC,CAAC;AAE7C,OAAO,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAEhE,qBAAa,mBAAmB,CAC9B,WAAW,SAAS,iBAAiB,EACrC,OAAO,SAAS,kBAAkB,CAClC,YAAW,cAAc,CAAC,WAAW,CAAC;IAQpC,SAAS,CAAC,QAAQ,CAAC,SAAS,EAAE,MAAM;IACpC,SAAS,CAAC,QAAQ,CAAC,OAAO,EAAE,OAAO;IACnC,SAAS,CAAC,QAAQ,CAAC,qBAAqB,EAAE,qBAAqB,CAAC,WAAW,CAAC;IAC5E,SAAS,CAAC,QAAQ,CAAC,cAAc,EAAE,oBAAoB,CAAC,WAAW,CAAC;IATtE,OAAO,CAAC,KAAK,CAAQ;IACrB,OAAO,CAAC,QAAQ,CAAW;IAC3B,OAAO,CAAC,QAAQ,CAAW;IAC3B,OAAO,CAAC,iBAAiB,CAA0B;gBAG9B,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,OAAO,EAChB,qBAAqB,EAAE,qBAAqB,CAAC,WAAW,CAAC,EACzD,cAAc,EAAE,oBAAoB,CAAC,WAAW,CAAC;YAaxD,aAAa;IA4DrB,UAAU,IAAI,OAAO,CAAC,WAAW,EAAE,CAAC;IA4EpC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAKtB,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;CAI7B"}
@@ -1,149 +1,147 @@
  import { Kafka } from 'kafkajs';
  export class KafkaWorkerConsumer {
- queueName;
- options;
- processEventsFunction;
- failureHandler;
- kafka;
- producer;
- consumer;
- processedMessages = new Set();
- constructor(queueName, options, processEventsFunction, failureHandler) {
- this.queueName = queueName;
- this.options = options;
- this.processEventsFunction = processEventsFunction;
- this.failureHandler = failureHandler;
- this.kafka = new Kafka({
- clientId: this.options.clientId,
- brokers: this.options.brokers
- });
- this.producer = this.kafka.producer();
- this.consumer = this.kafka.consumer({
- groupId: this.options.groupId
+ queueName;
+ options;
+ processEventsFunction;
+ failureHandler;
+ kafka;
+ producer;
+ consumer;
+ processedMessages = new Set();
+ constructor(queueName, options, processEventsFunction, failureHandler) {
+ this.queueName = queueName;
+ this.options = options;
+ this.processEventsFunction = processEventsFunction;
+ this.failureHandler = failureHandler;
+ this.kafka = new Kafka({
+ clientId: this.options.clientId,
+ brokers: this.options.brokers
+ });
+ this.producer = this.kafka.producer();
+ this.consumer = this.kafka.consumer({
+ groupId: this.options.groupId
+ });
+ }
+ async setupConsumer() {
+ await this.consumer.connect();
+ await this.consumer.subscribe({
+ topic: this.queueName,
+ fromBeginning: false
+ });
+ await this.consumer.run({
+ eachMessage: async ({ topic, partition, message }) => {
+ if (!message.value) return;
+ const messageKey = `${topic}-${partition}-${message.offset}`;
+ if (this.processedMessages.has(messageKey)) {
+ return;
+ }
+ const events = JSON.parse(message.value.toString());
+ try {
+ await this.processEventsFunction(events);
+ this.processedMessages.add(messageKey);
+ await this.consumer.commitOffsets([
+ {
+ topic,
+ partition,
+ offset: (parseInt(message.offset) + 1).toString()
+ }
+ ]);
+ } catch (error) {
+ this.failureHandler([
+ {
+ value: events[0],
+ error: error
+ }
+ ]);
+ for (const event of events) {
+ if (event.retryCount <= this.options.retries) {
+ await this.producer.send({
+ topic: this.queueName,
+ messages: [
+ {
+ value: JSON.stringify([
+ {
+ ...event,
+ retryCount: event.retryCount + 1
+ }
+ ]),
+ key: event.id
+ }
+ ]
+ });
+ }
+ }
+ }
+ }
+ });
+ }
+ async peekEvents() {
+ const events = [];
+ const admin = this.kafka.admin();
+ await admin.connect();
+ try {
+ // Get topic metadata to find partitions
+ const metadata = await admin.fetchTopicMetadata({
+ topics: [this.queueName]
+ });
+ const topic = metadata.topics[0];
+ if (!topic) {
+ return events;
+ }
+ // For each partition, get the latest offset
+ for (const partition of topic.partitions) {
+ const offsets = await admin.fetchTopicOffsets(this.queueName);
+ const partitionOffset = offsets.find(
+ (o) => o.partition === partition.partitionId
+ );
+ if (!partitionOffset) {
+ continue;
+ }
+ // Create a temporary consumer to read messages
+ const peekConsumer = this.kafka.consumer({
+ groupId: `${this.options.groupId}-peek-${Date.now()}`
  });
- }
- async setupConsumer() {
- await this.consumer.connect();
- await this.consumer.subscribe({
+ try {
+ await peekConsumer.connect();
+ await peekConsumer.subscribe({
  topic: this.queueName,
  fromBeginning: false
- });
- await this.consumer.run({
- eachMessage: async ({ topic, partition, message }) => {
- if (!message.value)
- return;
- const messageKey = `${topic}-${partition}-${message.offset}`;
- if (this.processedMessages.has(messageKey)) {
- return;
- }
- const events = JSON.parse(message.value.toString());
- try {
- await this.processEventsFunction(events);
- this.processedMessages.add(messageKey);
- await this.consumer.commitOffsets([
- {
- topic,
- partition,
- offset: (parseInt(message.offset) + 1).toString()
- }
- ]);
- }
- catch (error) {
- this.failureHandler([
- {
- value: events[0],
- error: error
- }
- ]);
- for (const event of events) {
- if (event.retryCount <= this.options.retries) {
- await this.producer.send({
- topic: this.queueName,
- messages: [
- {
- value: JSON.stringify([
- {
- ...event,
- retryCount: event.retryCount + 1
- }
- ]),
- key: event.id
- }
- ]
- });
- }
- }
+ });
+ const messagePromise = new Promise((resolve) => {
+ peekConsumer.run({
+ eachMessage: async ({ message }) => {
+ if (message.value && events.length < this.options.peekCount) {
+ const messageEvents = JSON.parse(message.value.toString());
+ events.push(...messageEvents);
+ if (events.length >= this.options.peekCount) {
+ resolve();
+ }
  }
- }
- });
- }
- async peekEvents() {
- const events = [];
- const admin = this.kafka.admin();
- await admin.connect();
- try {
- // Get topic metadata to find partitions
- const metadata = await admin.fetchTopicMetadata({
- topics: [this.queueName]
+ }
  });
- const topic = metadata.topics[0];
- if (!topic) {
- return events;
- }
- // For each partition, get the latest offset
- for (const partition of topic.partitions) {
- const offsets = await admin.fetchTopicOffsets(this.queueName);
- const partitionOffset = offsets.find((o) => o.partition === partition.partitionId);
- if (!partitionOffset) {
- continue;
- }
- // Create a temporary consumer to read messages
- const peekConsumer = this.kafka.consumer({
- groupId: `${this.options.groupId}-peek-${Date.now()}`
- });
- try {
- await peekConsumer.connect();
- await peekConsumer.subscribe({
- topic: this.queueName,
- fromBeginning: false
- });
- const messagePromise = new Promise((resolve) => {
- peekConsumer.run({
- eachMessage: async ({ message }) => {
- if (message.value && events.length < this.options.peekCount) {
- const messageEvents = JSON.parse(message.value.toString());
- events.push(...messageEvents);
- if (events.length >= this.options.peekCount) {
- resolve();
- }
- }
- }
- });
- });
- await Promise.race([
- messagePromise,
- new Promise((resolve) => setTimeout(resolve, 5000))
- ]);
- if (events.length >= this.options.peekCount) {
- break;
- }
- }
- finally {
- await peekConsumer.disconnect();
- }
- }
- return events;
+ });
+ await Promise.race([
+ messagePromise,
+ new Promise((resolve) => setTimeout(resolve, 5000))
+ ]);
+ if (events.length >= this.options.peekCount) {
+ break;
+ }
+ } finally {
+ await peekConsumer.disconnect();
  }
- finally {
- await admin.disconnect();
- }
- }
- async start() {
- await this.setupConsumer();
- await this.producer.connect();
- }
- async close() {
- await this.producer.disconnect();
- await this.consumer.disconnect();
+ }
+ return events;
+ } finally {
+ await admin.disconnect();
  }
+ }
+ async start() {
+ await this.setupConsumer();
+ await this.producer.connect();
+ }
+ async close() {
+ await this.producer.disconnect();
+ await this.consumer.disconnect();
+ }
  }
@@ -7,8 +7,10 @@ import {
  import { Consumer, Kafka, Producer } from 'kafkajs';
  import { KafkaWorkerOptions } from '../types/kafkaWorker.types';

- export class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity>
- implements WorkerConsumer<EventEntity>
+ export class KafkaWorkerConsumer<
+ EventEntity extends WorkerEventEntity,
+ Options extends KafkaWorkerOptions
+ > implements WorkerConsumer<EventEntity>
  {
  private kafka: Kafka;
  private producer: Producer;
@@ -17,7 +19,7 @@ export class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity>

  constructor(
  protected readonly queueName: string,
- protected readonly options: KafkaWorkerOptions,
+ protected readonly options: Options,
  protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>,
  protected readonly failureHandler: WorkerFailureHandler<EventEntity>
  ) {
@@ -2,12 +2,15 @@ import { WorkerEventEntity } from '@forklaunch/interfaces-worker/types';
  import { Kafka } from 'kafkajs';
  import { KafkaWorkerOptions } from '../types/kafkaWorker.types';

- export class KafkaWorkerProducer<EventEntity extends WorkerEventEntity> {
+ export class KafkaWorkerProducer<
+ EventEntity extends WorkerEventEntity,
+ Options extends KafkaWorkerOptions
+ > {
  private producer;

  constructor(
  private readonly queueName: string,
- private readonly options: KafkaWorkerOptions
+ private readonly options: Options
  ) {
  const kafka = new Kafka({
  clientId: this.options.clientId,
@@ -1,4 +1,4 @@
  import type { JestConfigWithTsJest } from 'ts-jest';
  declare const jestConfig: JestConfigWithTsJest;
  export default jestConfig;
- //# sourceMappingURL=jest.config.d.ts.map
+ //# sourceMappingURL=jest.config.d.ts.map
@@ -1,19 +1,19 @@
  const jestConfig = {
- preset: 'ts-jest/presets/default-esm', // or other ESM presets
- moduleNameMapper: {
- '^(\\.{1,2}/.*)\\.js$': '$1'
- },
- transform: {
- // '^.+\\.[tj]sx?$' to process ts,js,tsx,jsx with `ts-jest`
- // '^.+\\.m?[tj]sx?$' to process ts,js,tsx,jsx,mts,mjs,mtsx,mjsx with `ts-jest`
- '^.+\\.[tj]sx?$': [
- 'ts-jest',
- {
- useESM: true
- }
- ],
- '^.+\\.js$': 'babel-jest'
- },
- testPathIgnorePatterns: ['.*dist/', '.*node_modules/']
+ preset: 'ts-jest/presets/default-esm', // or other ESM presets
+ moduleNameMapper: {
+ '^(\\.{1,2}/.*)\\.js$': '$1'
+ },
+ transform: {
+ // '^.+\\.[tj]sx?$' to process ts,js,tsx,jsx with `ts-jest`
+ // '^.+\\.m?[tj]sx?$' to process ts,js,tsx,jsx,mts,mjs,mtsx,mjsx with `ts-jest`
+ '^.+\\.[tj]sx?$': [
+ 'ts-jest',
+ {
+ useESM: true
+ }
+ ],
+ '^.+\\.js$': 'babel-jest'
+ },
+ testPathIgnorePatterns: ['.*dist/', '.*node_modules/']
  };
  export default jestConfig;
@@ -1,2 +1,2 @@
  export * from './kafkaWorker.producer';
- //# sourceMappingURL=index.d.ts.map
+ //# sourceMappingURL=index.d.ts.map
@@ -1,11 +1,14 @@
  import { WorkerEventEntity } from '@forklaunch/interfaces-worker/types';
  import { KafkaWorkerOptions } from '../types/kafkaWorker.types';
- export declare class KafkaWorkerProducer<EventEntity extends WorkerEventEntity> {
- private readonly queueName;
- private readonly options;
- private producer;
- constructor(queueName: string, options: KafkaWorkerOptions);
- enqueueJob(event: EventEntity): Promise<void>;
- enqueueBatchJobs(events: EventEntity[]): Promise<void>;
+ export declare class KafkaWorkerProducer<
+ EventEntity extends WorkerEventEntity,
+ Options extends KafkaWorkerOptions
+ > {
+ private readonly queueName;
+ private readonly options;
+ private producer;
+ constructor(queueName: string, options: Options);
+ enqueueJob(event: EventEntity): Promise<void>;
+ enqueueBatchJobs(events: EventEntity[]): Promise<void>;
  }
- //# sourceMappingURL=kafkaWorker.producer.d.ts.map
+ //# sourceMappingURL=kafkaWorker.producer.d.ts.map
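`KafkaWorkerProducer` picks up the same `Options` generic. A companion sketch, reusing the hypothetical types from the consumer example above:

```ts
// Hypothetical sketch; import paths, OrderEvent, and ExtendedOptions mirror the
// assumptions in the consumer example. Method names come from the declaration above.
import { KafkaWorkerProducer } from '@forklaunch/implementation-worker-kafka/producers';
import { KafkaWorkerOptions } from '@forklaunch/implementation-worker-kafka/types';

type ExtendedOptions = KafkaWorkerOptions & { deadLetterTopic: string };
type OrderEvent = { id: string; retryCount: number; payload: unknown };

const producer = new KafkaWorkerProducer<OrderEvent, ExtendedOptions>('orders', {
  brokers: ['localhost:9092'],
  clientId: 'orders-service',
  groupId: 'orders-workers',
  retries: 3,
  interval: 1000,
  peekCount: 10,
  deadLetterTopic: 'orders.dlq'
});

// enqueueJob wraps the event in a single-element array; enqueueBatchJobs sends
// one message per event (see the compiled producer code further down this diff).
await producer.enqueueJob({ id: 'evt-1', retryCount: 0, payload: { total: 42 } });
await producer.enqueueBatchJobs([
  { id: 'evt-2', retryCount: 0, payload: {} },
  { id: 'evt-3', retryCount: 0, payload: {} }
]);
```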
@@ -1 +1 @@
- {"version":3,"file":"kafkaWorker.producer.d.ts","sourceRoot":"","sources":["../../producers/kafkaWorker.producer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,qCAAqC,CAAC;AAExE,OAAO,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAEhE,qBAAa,mBAAmB,CAAC,WAAW,SAAS,iBAAiB;IAIlE,OAAO,CAAC,QAAQ,CAAC,SAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,OAAO;IAJ1B,OAAO,CAAC,QAAQ,CAAC;gBAGE,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,kBAAkB;IAUxC,UAAU,CAAC,KAAK,EAAE,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC;IAO7C,gBAAgB,CAAC,MAAM,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CAM7D"}
+ {"version":3,"file":"kafkaWorker.producer.d.ts","sourceRoot":"","sources":["../../producers/kafkaWorker.producer.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,iBAAiB,EAAE,MAAM,qCAAqC,CAAC;AAExE,OAAO,EAAE,kBAAkB,EAAE,MAAM,4BAA4B,CAAC;AAEhE,qBAAa,mBAAmB,CAC9B,WAAW,SAAS,iBAAiB,EACrC,OAAO,SAAS,kBAAkB;IAKhC,OAAO,CAAC,QAAQ,CAAC,SAAS;IAC1B,OAAO,CAAC,QAAQ,CAAC,OAAO;IAJ1B,OAAO,CAAC,QAAQ,CAAC;gBAGE,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,OAAO;IAU7B,UAAU,CAAC,KAAK,EAAE,WAAW,GAAG,OAAO,CAAC,IAAI,CAAC;IAO7C,gBAAgB,CAAC,MAAM,EAAE,WAAW,EAAE,GAAG,OAAO,CAAC,IAAI,CAAC;CAM7D"}
@@ -1,28 +1,28 @@
  import { Kafka } from 'kafkajs';
  export class KafkaWorkerProducer {
- queueName;
- options;
- producer;
- constructor(queueName, options) {
- this.queueName = queueName;
- this.options = options;
- const kafka = new Kafka({
- clientId: this.options.clientId,
- brokers: this.options.brokers
- });
- this.producer = kafka.producer();
- this.producer.connect();
- }
- async enqueueJob(event) {
- await this.producer.send({
- topic: this.queueName,
- messages: [{ value: JSON.stringify([event]) }]
- });
- }
- async enqueueBatchJobs(events) {
- await this.producer.send({
- topic: this.queueName,
- messages: events.map((event) => ({ value: JSON.stringify(event) }))
- });
- }
+ queueName;
+ options;
+ producer;
+ constructor(queueName, options) {
+ this.queueName = queueName;
+ this.options = options;
+ const kafka = new Kafka({
+ clientId: this.options.clientId,
+ brokers: this.options.brokers
+ });
+ this.producer = kafka.producer();
+ this.producer.connect();
+ }
+ async enqueueJob(event) {
+ await this.producer.send({
+ topic: this.queueName,
+ messages: [{ value: JSON.stringify([event]) }]
+ });
+ }
+ async enqueueBatchJobs(events) {
+ await this.producer.send({
+ topic: this.queueName,
+ messages: events.map((event) => ({ value: JSON.stringify(event) }))
+ });
+ }
  }
@@ -1,2 +1,2 @@
  export * from './kafka.schema';
- //# sourceMappingURL=index.d.ts.map
+ //# sourceMappingURL=index.d.ts.map
@@ -1,18 +1,64 @@
- export declare const KafkaWorkerSchemas: <SchemaValidator extends import("@forklaunch/validator").AnySchemaValidator>(options: Record<string, unknown> & {
+ export declare const KafkaWorkerSchemas: <
+ SchemaValidator extends import('@forklaunch/validator').AnySchemaValidator
+ >(
+ options: Record<string, unknown> & {
  validator: SchemaValidator;
- }) => import("@forklaunch/core/mappers").SchemasByValidator<SchemaValidator, (options: Record<string, unknown>) => {
- brokers: import("@sinclair/typebox").TArray<import("@sinclair/typebox").TString>;
- clientId: import("@sinclair/typebox").TString;
- groupId: import("@sinclair/typebox").TString;
- retries: import("@sinclair/typebox").TTransform<import("@sinclair/typebox").TUnion<[import("@sinclair/typebox").TNumber, import("@sinclair/typebox").TString, import("@sinclair/typebox").TBoolean, import("@sinclair/typebox").TNull, import("@sinclair/typebox").TDate, import("@sinclair/typebox").TBigInt]>, number>;
- interval: import("@sinclair/typebox").TTransform<import("@sinclair/typebox").TUnion<[import("@sinclair/typebox").TNumber, import("@sinclair/typebox").TString, import("@sinclair/typebox").TBoolean, import("@sinclair/typebox").TNull, import("@sinclair/typebox").TDate, import("@sinclair/typebox").TBigInt]>, number>;
- peekCount: import("@sinclair/typebox").TTransform<import("@sinclair/typebox").TUnion<[import("@sinclair/typebox").TNumber, import("@sinclair/typebox").TString, import("@sinclair/typebox").TBoolean, import("@sinclair/typebox").TNull, import("@sinclair/typebox").TDate, import("@sinclair/typebox").TBigInt]>, number>;
- }, (options: Record<string, unknown>) => {
- brokers: import("zod").ZodArray<import("zod").ZodString, "many">;
- clientId: import("zod").ZodString;
- groupId: import("zod").ZodString;
- retries: import("zod").ZodNumber;
- interval: import("zod").ZodNumber;
- peekCount: import("zod").ZodNumber;
- }>;
- //# sourceMappingURL=kafka.schema.d.ts.map
+ }
+ ) => import('@forklaunch/core/mappers').SchemasByValidator<
+ SchemaValidator,
+ (options: Record<string, unknown>) => {
+ brokers: import('@sinclair/typebox').TArray<
+ import('@sinclair/typebox').TString
+ >;
+ clientId: import('@sinclair/typebox').TString;
+ groupId: import('@sinclair/typebox').TString;
+ retries: import('@sinclair/typebox').TTransform<
+ import('@sinclair/typebox').TUnion<
+ [
+ import('@sinclair/typebox').TNumber,
+ import('@sinclair/typebox').TString,
+ import('@sinclair/typebox').TBoolean,
+ import('@sinclair/typebox').TNull,
+ import('@sinclair/typebox').TDate,
+ import('@sinclair/typebox').TBigInt
+ ]
+ >,
+ number
+ >;
+ interval: import('@sinclair/typebox').TTransform<
+ import('@sinclair/typebox').TUnion<
+ [
+ import('@sinclair/typebox').TNumber,
+ import('@sinclair/typebox').TString,
+ import('@sinclair/typebox').TBoolean,
+ import('@sinclair/typebox').TNull,
+ import('@sinclair/typebox').TDate,
+ import('@sinclair/typebox').TBigInt
+ ]
+ >,
+ number
+ >;
+ peekCount: import('@sinclair/typebox').TTransform<
+ import('@sinclair/typebox').TUnion<
+ [
+ import('@sinclair/typebox').TNumber,
+ import('@sinclair/typebox').TString,
+ import('@sinclair/typebox').TBoolean,
+ import('@sinclair/typebox').TNull,
+ import('@sinclair/typebox').TDate,
+ import('@sinclair/typebox').TBigInt
+ ]
+ >,
+ number
+ >;
+ },
+ (options: Record<string, unknown>) => {
+ brokers: import('zod').ZodArray<import('zod').ZodString, 'many'>;
+ clientId: import('zod').ZodString;
+ groupId: import('zod').ZodString;
+ retries: import('zod').ZodNumber;
+ interval: import('zod').ZodNumber;
+ peekCount: import('zod').ZodNumber;
+ }
+ >;
+ //# sourceMappingURL=kafka.schema.d.ts.map
@@ -1,4 +1,7 @@
  import { serviceSchemaResolver } from '@forklaunch/core/mappers';
  import { KafkaWorkerOptionsSchema as TypeBoxSchemas } from './typebox/kafkaWorker.schema';
  import { KafkaWorkerOptionsSchema as ZodSchemas } from './zod/kafkaWorker.schema';
- export const KafkaWorkerSchemas = serviceSchemaResolver(() => TypeBoxSchemas, () => ZodSchemas);
+ export const KafkaWorkerSchemas = serviceSchemaResolver(
+ () => TypeBoxSchemas,
+ () => ZodSchemas
+ );