@forklaunch/implementation-worker-kafka 0.2.3 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. package/lib/consumers/index.d.mts +21 -0
  2. package/lib/consumers/index.d.ts +21 -2
  3. package/lib/consumers/index.js +173 -1
  4. package/lib/consumers/{kafkaWorker.consumer.js → index.mjs} +16 -17
  5. package/lib/producers/index.d.mts +13 -0
  6. package/lib/producers/index.d.ts +13 -2
  7. package/lib/producers/index.js +57 -1
  8. package/lib/producers/{kafkaWorker.producer.js → index.mjs} +8 -6
  9. package/lib/schemas/index.d.mts +24 -0
  10. package/lib/schemas/index.d.ts +24 -2
  11. package/lib/schemas/index.js +1377 -1
  12. package/lib/schemas/index.mjs +1381 -0
  13. package/lib/types/index.d.mts +10 -0
  14. package/lib/types/index.d.ts +10 -2
  15. package/lib/types/index.js +18 -1
  16. package/lib/types/index.mjs +0 -0
  17. package/package.json +5 -4
  18. package/lib/__test__/schemaEquality.test.d.ts +0 -2
  19. package/lib/__test__/schemaEquality.test.d.ts.map +0 -1
  20. package/lib/__test__/schemaEquality.test.js +0 -24
  21. package/lib/consumers/index.d.ts.map +0 -1
  22. package/lib/consumers/kafkaWorker.consumer.d.ts +0 -32
  23. package/lib/consumers/kafkaWorker.consumer.d.ts.map +0 -1
  24. package/lib/jest.config.d.ts +0 -4
  25. package/lib/jest.config.d.ts.map +0 -1
  26. package/lib/jest.config.js +0 -19
  27. package/lib/producers/index.d.ts.map +0 -1
  28. package/lib/producers/kafkaWorker.producer.d.ts +0 -14
  29. package/lib/producers/kafkaWorker.producer.d.ts.map +0 -1
  30. package/lib/schemas/index.d.ts.map +0 -1
  31. package/lib/schemas/kafka.schema.d.ts +0 -72
  32. package/lib/schemas/kafka.schema.d.ts.map +0 -1
  33. package/lib/schemas/kafka.schema.js +0 -7
  34. package/lib/schemas/typebox/kafkaWorker.schema.d.ts +0 -47
  35. package/lib/schemas/typebox/kafkaWorker.schema.d.ts.map +0 -1
  36. package/lib/schemas/typebox/kafkaWorker.schema.js +0 -9
  37. package/lib/schemas/zod/kafkaWorker.schema.d.ts +0 -9
  38. package/lib/schemas/zod/kafkaWorker.schema.d.ts.map +0 -1
  39. package/lib/schemas/zod/kafkaWorker.schema.js +0 -9
  40. package/lib/tsconfig.tsbuildinfo +0 -1
  41. package/lib/types/index.d.ts.map +0 -1
  42. package/lib/types/kafkaWorker.types.d.ts +0 -9
  43. package/lib/types/kafkaWorker.types.d.ts.map +0 -1
  44. package/lib/types/kafkaWorker.types.js +0 -1
  45. package/lib/vitest.config.d.ts +0 -3
  46. package/lib/vitest.config.d.ts.map +0 -1
  47. package/lib/vitest.config.js +0 -7
package/lib/consumers/index.d.mts
@@ -0,0 +1,21 @@
+ import { WorkerConsumer } from '@forklaunch/interfaces-worker/interfaces';
+ import { WorkerEventEntity, WorkerProcessFunction, WorkerFailureHandler } from '@forklaunch/interfaces-worker/types';
+ import { KafkaWorkerOptions } from '../types/index.mjs';
+
+ declare class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> implements WorkerConsumer<EventEntity> {
+   protected readonly queueName: string;
+   protected readonly options: Options;
+   protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+   protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
+   private kafka;
+   private producer;
+   private consumer;
+   private processedMessages;
+   constructor(queueName: string, options: Options, processEventsFunction: WorkerProcessFunction<EventEntity>, failureHandler: WorkerFailureHandler<EventEntity>);
+   private setupConsumer;
+   peekEvents(): Promise<EventEntity[]>;
+   start(): Promise<void>;
+   close(): Promise<void>;
+ }
+
+ export { KafkaWorkerConsumer };
package/lib/consumers/index.d.ts
@@ -1,2 +1,21 @@
- export * from './kafkaWorker.consumer';
- //# sourceMappingURL=index.d.ts.map
+ import { WorkerConsumer } from '@forklaunch/interfaces-worker/interfaces';
+ import { WorkerEventEntity, WorkerProcessFunction, WorkerFailureHandler } from '@forklaunch/interfaces-worker/types';
+ import { KafkaWorkerOptions } from '../types/index.js';
+
+ declare class KafkaWorkerConsumer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> implements WorkerConsumer<EventEntity> {
+   protected readonly queueName: string;
+   protected readonly options: Options;
+   protected readonly processEventsFunction: WorkerProcessFunction<EventEntity>;
+   protected readonly failureHandler: WorkerFailureHandler<EventEntity>;
+   private kafka;
+   private producer;
+   private consumer;
+   private processedMessages;
+   constructor(queueName: string, options: Options, processEventsFunction: WorkerProcessFunction<EventEntity>, failureHandler: WorkerFailureHandler<EventEntity>);
+   private setupConsumer;
+   peekEvents(): Promise<EventEntity[]>;
+   start(): Promise<void>;
+   close(): Promise<void>;
+ }
+
+ export { KafkaWorkerConsumer };
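
For orientation, the declarations above correspond to a construction pattern like the following sketch. The import subpath, broker addresses, topic name, and event shape are illustrative assumptions, not values taken from the package; the option fields mirror the KafkaWorkerSchemas fields shown later in this diff.

```ts
import { KafkaWorkerConsumer } from '@forklaunch/implementation-worker-kafka/consumers';

// Hypothetical event shape. The real constraint is WorkerEventEntity from
// @forklaunch/interfaces-worker/types; the implementation reads `id` and
// `retryCount` off each event, so both are included here.
type JobEvent = { id: string; retryCount: number; payload?: unknown };

const consumer = new KafkaWorkerConsumer(
  'email-jobs', // queueName doubles as the Kafka topic
  {
    brokers: ['localhost:9092'], // assumed local broker
    clientId: 'email-worker',
    groupId: 'email-worker-group',
    retries: 3,   // events are re-enqueued until retryCount exceeds this
    interval: 5000,
    peekCount: 10 // cap on events returned by peekEvents()
  },
  async (events: JobEvent[]) => {
    // processEventsFunction: receives the event batch parsed from one message
    for (const event of events) console.log('processing', event.id);
  },
  async (failures) => {
    // failureHandler: invoked with [{ value, error }] when processing throws
    for (const failure of failures) console.error(failure.error);
  }
);

await consumer.start(); // connects producer and consumer, begins consuming
await consumer.close(); // disconnects both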
package/lib/consumers/index.js
@@ -1 +1,173 @@
- export * from './kafkaWorker.consumer';
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // consumers/index.ts
+ var consumers_exports = {};
+ __export(consumers_exports, {
+   KafkaWorkerConsumer: () => KafkaWorkerConsumer
+ });
+ module.exports = __toCommonJS(consumers_exports);
+
+ // consumers/kafkaWorker.consumer.ts
+ var import_kafkajs = require("kafkajs");
+ var KafkaWorkerConsumer = class {
+   constructor(queueName, options, processEventsFunction, failureHandler) {
+     this.queueName = queueName;
+     this.options = options;
+     this.processEventsFunction = processEventsFunction;
+     this.failureHandler = failureHandler;
+     this.kafka = new import_kafkajs.Kafka({
+       clientId: this.options.clientId,
+       brokers: this.options.brokers
+     });
+     this.producer = this.kafka.producer();
+     this.consumer = this.kafka.consumer({
+       groupId: this.options.groupId
+     });
+   }
+   kafka;
+   producer;
+   consumer;
+   processedMessages = /* @__PURE__ */ new Set();
+   async setupConsumer() {
+     await this.consumer.connect();
+     await this.consumer.subscribe({
+       topic: this.queueName,
+       fromBeginning: false
+     });
+     await this.consumer.run({
+       eachMessage: async ({ topic, partition, message }) => {
+         if (!message.value) return;
+         const messageKey = `${topic}-${partition}-${message.offset}`;
+         if (this.processedMessages.has(messageKey)) {
+           return;
+         }
+         const events = JSON.parse(message.value.toString());
+         try {
+           await this.processEventsFunction(events);
+           this.processedMessages.add(messageKey);
+           await this.consumer.commitOffsets([
+             {
+               topic,
+               partition,
+               offset: (parseInt(message.offset) + 1).toString()
+             }
+           ]);
+         } catch (error) {
+           this.failureHandler([
+             {
+               value: events[0],
+               error
+             }
+           ]);
+           for (const event of events) {
+             if (event.retryCount <= this.options.retries) {
+               await this.producer.send({
+                 topic: this.queueName,
+                 messages: [
+                   {
+                     value: JSON.stringify([
+                       {
+                         ...event,
+                         retryCount: event.retryCount + 1
+                       }
+                     ]),
+                     key: event.id
+                   }
+                 ]
+               });
+             }
+           }
+         }
+       }
+     });
+   }
+   async peekEvents() {
+     const events = [];
+     const admin = this.kafka.admin();
+     await admin.connect();
+     try {
+       const metadata = await admin.fetchTopicMetadata({
+         topics: [this.queueName]
+       });
+       const topic = metadata.topics[0];
+       if (!topic) {
+         return events;
+       }
+       for (const partition of topic.partitions) {
+         const offsets = await admin.fetchTopicOffsets(this.queueName);
+         const partitionOffset = offsets.find(
+           (o) => o.partition === partition.partitionId
+         );
+         if (!partitionOffset) {
+           continue;
+         }
+         const peekConsumer = this.kafka.consumer({
+           groupId: `${this.options.groupId}-peek-${Date.now()}`
+         });
+         try {
+           await peekConsumer.connect();
+           await peekConsumer.subscribe({
+             topic: this.queueName,
+             fromBeginning: false
+           });
+           const messagePromise = new Promise((resolve) => {
+             peekConsumer.run({
+               eachMessage: async ({ message }) => {
+                 if (message.value && events.length < this.options.peekCount) {
+                   const messageEvents = JSON.parse(
+                     message.value.toString()
+                   );
+                   events.push(...messageEvents);
+                   if (events.length >= this.options.peekCount) {
+                     resolve();
+                   }
+                 }
+               }
+             });
+           });
+           await Promise.race([
+             messagePromise,
+             new Promise((resolve) => setTimeout(resolve, 5e3))
+           ]);
+           if (events.length >= this.options.peekCount) {
+             break;
+           }
+         } finally {
+           await peekConsumer.disconnect();
+         }
+       }
+       return events;
+     } finally {
+       await admin.disconnect();
+     }
+   }
+   async start() {
+     await this.setupConsumer();
+     await this.producer.connect();
+   }
+   async close() {
+     await this.producer.disconnect();
+     await this.consumer.disconnect();
+   }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   KafkaWorkerConsumer
+ });
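
One detail worth noting in the handler above: each Kafka message value is parsed as a JSON array of events (`events[0]`, `for (const event of events)`), and the retry path re-publishes a single-element array keyed by `event.id`. A message produced outside this package would need the same shape; a minimal sketch with kafkajs, assuming a local broker and an illustrative topic name:

```ts
import { Kafka } from 'kafkajs';

const kafka = new Kafka({ clientId: 'manual-producer', brokers: ['localhost:9092'] });
const producer = kafka.producer();
await producer.connect();

// The value must be a JSON-encoded *array* of events, matching what
// eachMessage above hands to processEventsFunction.
await producer.send({
  topic: 'email-jobs', // assumed; this is the consumer's queueName
  messages: [
    { key: 'evt-1', value: JSON.stringify([{ id: 'evt-1', retryCount: 0 }]) }
  ]
});

await producer.disconnect();
```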
package/lib/consumers/{kafkaWorker.consumer.js → index.mjs}
@@ -1,13 +1,6 @@
- import { Kafka } from 'kafkajs';
- export class KafkaWorkerConsumer {
-   queueName;
-   options;
-   processEventsFunction;
-   failureHandler;
-   kafka;
-   producer;
-   consumer;
-   processedMessages = new Set();
+ // consumers/kafkaWorker.consumer.ts
+ import { Kafka } from "kafkajs";
+ var KafkaWorkerConsumer = class {
    constructor(queueName, options, processEventsFunction, failureHandler) {
      this.queueName = queueName;
      this.options = options;
@@ -22,6 +15,10 @@ export class KafkaWorkerConsumer {
        groupId: this.options.groupId
      });
    }
+   kafka;
+   producer;
+   consumer;
+   processedMessages = /* @__PURE__ */ new Set();
    async setupConsumer() {
      await this.consumer.connect();
      await this.consumer.subscribe({
@@ -50,7 +47,7 @@ export class KafkaWorkerConsumer {
            this.failureHandler([
              {
                value: events[0],
-               error: error
+               error
              }
            ]);
            for (const event of events) {
@@ -80,7 +77,6 @@ export class KafkaWorkerConsumer {
      const admin = this.kafka.admin();
      await admin.connect();
      try {
-       // Get topic metadata to find partitions
        const metadata = await admin.fetchTopicMetadata({
          topics: [this.queueName]
        });
@@ -88,7 +84,6 @@ export class KafkaWorkerConsumer {
        if (!topic) {
          return events;
        }
-       // For each partition, get the latest offset
        for (const partition of topic.partitions) {
          const offsets = await admin.fetchTopicOffsets(this.queueName);
          const partitionOffset = offsets.find(
@@ -97,7 +92,6 @@ export class KafkaWorkerConsumer {
          if (!partitionOffset) {
            continue;
          }
-         // Create a temporary consumer to read messages
          const peekConsumer = this.kafka.consumer({
            groupId: `${this.options.groupId}-peek-${Date.now()}`
          });
@@ -111,7 +105,9 @@ export class KafkaWorkerConsumer {
            peekConsumer.run({
              eachMessage: async ({ message }) => {
                if (message.value && events.length < this.options.peekCount) {
-                 const messageEvents = JSON.parse(message.value.toString());
+                 const messageEvents = JSON.parse(
+                   message.value.toString()
+                 );
                  events.push(...messageEvents);
                  if (events.length >= this.options.peekCount) {
                    resolve();
@@ -122,7 +118,7 @@ export class KafkaWorkerConsumer {
            });
            await Promise.race([
              messagePromise,
-             new Promise((resolve) => setTimeout(resolve, 5000))
+             new Promise((resolve) => setTimeout(resolve, 5e3))
            ]);
            if (events.length >= this.options.peekCount) {
              break;
@@ -144,4 +140,7 @@ export class KafkaWorkerConsumer {
      await this.producer.disconnect();
      await this.consumer.disconnect();
    }
- }
+ };
+ export {
+   KafkaWorkerConsumer
+ };
package/lib/producers/index.d.mts
@@ -0,0 +1,13 @@
+ import { WorkerEventEntity } from '@forklaunch/interfaces-worker/types';
+ import { KafkaWorkerOptions } from '../types/index.mjs';
+
+ declare class KafkaWorkerProducer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> {
+   private readonly queueName;
+   private readonly options;
+   private producer;
+   constructor(queueName: string, options: Options);
+   enqueueJob(event: EventEntity): Promise<void>;
+   enqueueBatchJobs(events: EventEntity[]): Promise<void>;
+ }
+
+ export { KafkaWorkerProducer };
package/lib/producers/index.d.ts
@@ -1,2 +1,13 @@
- export * from './kafkaWorker.producer';
- //# sourceMappingURL=index.d.ts.map
+ import { WorkerEventEntity } from '@forklaunch/interfaces-worker/types';
+ import { KafkaWorkerOptions } from '../types/index.js';
+
+ declare class KafkaWorkerProducer<EventEntity extends WorkerEventEntity, Options extends KafkaWorkerOptions> {
+   private readonly queueName;
+   private readonly options;
+   private producer;
+   constructor(queueName: string, options: Options);
+   enqueueJob(event: EventEntity): Promise<void>;
+   enqueueBatchJobs(events: EventEntity[]): Promise<void>;
+ }
+
+ export { KafkaWorkerProducer };
package/lib/producers/index.js
@@ -1 +1,57 @@
- export * from './kafkaWorker.producer';
+ "use strict";
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // producers/index.ts
+ var producers_exports = {};
+ __export(producers_exports, {
+   KafkaWorkerProducer: () => KafkaWorkerProducer
+ });
+ module.exports = __toCommonJS(producers_exports);
+
+ // producers/kafkaWorker.producer.ts
+ var import_kafkajs = require("kafkajs");
+ var KafkaWorkerProducer = class {
+   constructor(queueName, options) {
+     this.queueName = queueName;
+     this.options = options;
+     const kafka = new import_kafkajs.Kafka({
+       clientId: this.options.clientId,
+       brokers: this.options.brokers
+     });
+     this.producer = kafka.producer();
+     this.producer.connect();
+   }
+   producer;
+   async enqueueJob(event) {
+     await this.producer.send({
+       topic: this.queueName,
+       messages: [{ value: JSON.stringify([event]) }]
+     });
+   }
+   async enqueueBatchJobs(events) {
+     await this.producer.send({
+       topic: this.queueName,
+       messages: events.map((event) => ({ value: JSON.stringify(event) }))
+     });
+   }
+ };
+ // Annotate the CommonJS export names for ESM import in node:
+ 0 && (module.exports = {
+   KafkaWorkerProducer
+ });
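
A minimal usage sketch for the producer above; the import subpath and option values are assumptions. Note that the constructor calls `producer.connect()` without awaiting it, so an immediate send can race the connection, and that `enqueueBatchJobs` serializes each event individually (`JSON.stringify(event)`) rather than as the single-element array `enqueueJob` produces and the consumer parses.

```ts
import { KafkaWorkerProducer } from '@forklaunch/implementation-worker-kafka/producers';

const producer = new KafkaWorkerProducer('email-jobs', {
  brokers: ['localhost:9092'], // assumed local broker
  clientId: 'email-api',
  groupId: 'email-api-group',
  retries: 3,
  interval: 5000,
  peekCount: 10
});

// One message whose value is JSON.stringify([event]) — the array shape
// the consumer's eachMessage handler expects.
await producer.enqueueJob({ id: 'evt-1', retryCount: 0 });

// One message per event, each value a bare JSON object.
await producer.enqueueBatchJobs([
  { id: 'evt-2', retryCount: 0 },
  { id: 'evt-3', retryCount: 0 }
]);
```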
package/lib/producers/{kafkaWorker.producer.js → index.mjs}
@@ -1,8 +1,6 @@
- import { Kafka } from 'kafkajs';
- export class KafkaWorkerProducer {
-   queueName;
-   options;
-   producer;
+ // producers/kafkaWorker.producer.ts
+ import { Kafka } from "kafkajs";
+ var KafkaWorkerProducer = class {
    constructor(queueName, options) {
      this.queueName = queueName;
      this.options = options;
@@ -13,6 +11,7 @@ export class KafkaWorkerProducer {
      this.producer = kafka.producer();
      this.producer.connect();
    }
+   producer;
    async enqueueJob(event) {
      await this.producer.send({
        topic: this.queueName,
@@ -25,4 +24,7 @@ export class KafkaWorkerProducer {
        messages: events.map((event) => ({ value: JSON.stringify(event) }))
      });
    }
- }
+ };
+ export {
+   KafkaWorkerProducer
+ };
package/lib/schemas/index.d.mts
@@ -0,0 +1,24 @@
+ import * as _forklaunch_core_mappers from '@forklaunch/core/mappers';
+ import * as zod from 'zod';
+ import * as _sinclair_typebox from '@sinclair/typebox';
+ import * as _forklaunch_validator from '@forklaunch/validator';
+
+ declare const KafkaWorkerSchemas: <SchemaValidator extends _forklaunch_validator.AnySchemaValidator>(options: Record<string, unknown> & {
+   validator: SchemaValidator;
+ }) => _forklaunch_core_mappers.SchemasByValidator<SchemaValidator, (options: Record<string, unknown>) => {
+   brokers: _sinclair_typebox.TArray<_sinclair_typebox.TString>;
+   clientId: _sinclair_typebox.TString;
+   groupId: _sinclair_typebox.TString;
+   retries: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+   interval: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+   peekCount: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+ }, (options: Record<string, unknown>) => {
+   brokers: zod.ZodArray<zod.ZodString, "many">;
+   clientId: zod.ZodString;
+   groupId: zod.ZodString;
+   retries: zod.ZodEffects<zod.ZodNumber, number, unknown>;
+   interval: zod.ZodEffects<zod.ZodNumber, number, unknown>;
+   peekCount: zod.ZodEffects<zod.ZodNumber, number, unknown>;
+ }>;
+
+ export { KafkaWorkerSchemas };
package/lib/schemas/index.d.ts
@@ -1,2 +1,24 @@
- export * from './kafka.schema';
- //# sourceMappingURL=index.d.ts.map
+ import * as _forklaunch_core_mappers from '@forklaunch/core/mappers';
+ import * as zod from 'zod';
+ import * as _sinclair_typebox from '@sinclair/typebox';
+ import * as _forklaunch_validator from '@forklaunch/validator';
+
+ declare const KafkaWorkerSchemas: <SchemaValidator extends _forklaunch_validator.AnySchemaValidator>(options: Record<string, unknown> & {
+   validator: SchemaValidator;
+ }) => _forklaunch_core_mappers.SchemasByValidator<SchemaValidator, (options: Record<string, unknown>) => {
+   brokers: _sinclair_typebox.TArray<_sinclair_typebox.TString>;
+   clientId: _sinclair_typebox.TString;
+   groupId: _sinclair_typebox.TString;
+   retries: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+   interval: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+   peekCount: _sinclair_typebox.TTransform<_sinclair_typebox.TUnion<[_sinclair_typebox.TNumber, _sinclair_typebox.TString, _sinclair_typebox.TBoolean, _sinclair_typebox.TNull, _sinclair_typebox.TBigInt, _sinclair_typebox.TDate]>, number>;
+ }, (options: Record<string, unknown>) => {
+   brokers: zod.ZodArray<zod.ZodString, "many">;
+   clientId: zod.ZodString;
+   groupId: zod.ZodString;
+   retries: zod.ZodEffects<zod.ZodNumber, number, unknown>;
+   interval: zod.ZodEffects<zod.ZodNumber, number, unknown>;
+   peekCount: zod.ZodEffects<zod.ZodNumber, number, unknown>;
+ }>;
+
+ export { KafkaWorkerSchemas };
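
The schema factory above dispatches on the supplied validator: a TypeBox-backed validator yields the TypeBox field set, a Zod-backed one the Zod field set, both describing the same six worker options. A hedged sketch of a call site follows; the validator construction is hypothetical, since this diff does not show @forklaunch/validator's exports.

```ts
import { KafkaWorkerSchemas } from '@forklaunch/implementation-worker-kafka/schemas';
// Hypothetical: some Zod-backed AnySchemaValidator instance from
// @forklaunch/validator; the concrete export name isn't shown in this diff.
import { zodValidator } from '@forklaunch/validator/zod';

// Per the declaration, the result is typed via SchemasByValidator and resolves
// to the Zod variant here: brokers, clientId, groupId, retries, interval, and
// peekCount, with the last three coerced to number.
const schemas = KafkaWorkerSchemas({ validator: zodValidator });
```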