@drarzter/kafka-client 0.8.0 → 0.9.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -3
- package/dist/{chunk-CHFLNQXK.mjs → chunk-Z2DOJQRI.mjs} +2914 -2870
- package/dist/chunk-Z2DOJQRI.mjs.map +1 -0
- package/dist/core.d.mts +30 -381
- package/dist/core.d.ts +30 -381
- package/dist/core.js +2913 -2869
- package/dist/core.js.map +1 -1
- package/dist/core.mjs +1 -1
- package/dist/index.d.mts +2 -2
- package/dist/index.d.ts +2 -2
- package/dist/index.js +2912 -2868
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +1 -1
- package/dist/otel.d.mts +1 -1
- package/dist/otel.d.ts +1 -1
- package/dist/testing.d.mts +215 -2
- package/dist/testing.d.ts +215 -2
- package/dist/testing.js +298 -2
- package/dist/testing.js.map +1 -1
- package/dist/testing.mjs +293 -2
- package/dist/testing.mjs.map +1 -1
- package/dist/{types-CNfeoF3_.d.mts → types-4XNxkici.d.mts} +263 -1
- package/dist/{types-CNfeoF3_.d.ts → types-4XNxkici.d.ts} +263 -1
- package/package.json +1 -1
- package/dist/chunk-CHFLNQXK.mjs.map +0 -1
package/dist/testing.js
CHANGED
|
@@ -20,6 +20,11 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
|
|
|
20
20
|
// src/testing.ts
|
|
21
21
|
var testing_exports = {};
|
|
22
22
|
__export(testing_exports, {
|
|
23
|
+
FakeAdmin: () => FakeAdmin,
|
|
24
|
+
FakeConsumer: () => FakeConsumer,
|
|
25
|
+
FakeProducer: () => FakeProducer,
|
|
26
|
+
FakeTransaction: () => FakeTransaction,
|
|
27
|
+
FakeTransport: () => FakeTransport,
|
|
23
28
|
KafkaTestContainer: () => KafkaTestContainer,
|
|
24
29
|
createMockKafkaClient: () => createMockKafkaClient
|
|
25
30
|
});
|
|
@@ -69,11 +74,13 @@ function createMockKafkaClient(mockFactory) {
|
|
|
69
74
|
),
|
|
70
75
|
startConsumer: resolved({
|
|
71
76
|
groupId: "mock-group",
|
|
72
|
-
stop: mock().mockResolvedValue(void 0)
|
|
77
|
+
stop: mock().mockResolvedValue(void 0),
|
|
78
|
+
ready: mock().mockResolvedValue(void 0)
|
|
73
79
|
}),
|
|
74
80
|
startBatchConsumer: resolved({
|
|
75
81
|
groupId: "mock-group",
|
|
76
|
-
stop: mock().mockResolvedValue(void 0)
|
|
82
|
+
stop: mock().mockResolvedValue(void 0),
|
|
83
|
+
ready: mock().mockResolvedValue(void 0)
|
|
77
84
|
}),
|
|
78
85
|
stopConsumer: resolved(void 0),
|
|
79
86
|
consume: returning(
|
|
@@ -181,8 +188,297 @@ var KafkaTestContainer = class {
|
|
|
181
188
|
return [`${host}:${port}`];
|
|
182
189
|
}
|
|
183
190
|
};
|
|
191
|
+
|
|
192
|
+
// src/testing/fake-transport.ts
|
|
193
|
+
var FakeTransaction = class {
  constructor(producer) {
    // Owning FakeProducer; committed records are appended to its `sent` log.
    this.producer = producer;
  }
  /** Records staged within this transaction (not yet committed). */
  staged = [];
  /** True after `commit()` was called. */
  committed = false;
  /** True after `abort()` was called. */
  aborted = false;
  /** sendOffsets calls (for EOS assertions). */
  offsetsCommitted = [];
  /** Stage a record; it only reaches `producer.sent` on commit(). */
  async send(record) {
    this.staged.push(record);
  }
  /** Capture offsets committed within this transaction (EOS assertions). */
  async sendOffsets(options) {
    this.offsetsCommitted.push(options);
  }
  /**
   * Deliver all staged records to the owning producer's `sent` log.
   * Idempotent: a repeated commit() is a no-op. (Previously each extra
   * commit() re-pushed every staged record, duplicating them in
   * `producer.sent`.)
   * Throws if the transaction was already aborted.
   */
  async commit() {
    if (this.aborted) throw new Error("FakeTransaction: already aborted");
    if (this.committed) return;
    this.committed = true;
    for (const record of this.staged) {
      this.producer.sent.push(record);
    }
  }
  /**
   * Discard all staged records. Throws if the transaction was already
   * committed (mirrors the commit-side guard).
   */
  async abort() {
    if (this.committed) throw new Error("FakeTransaction: already committed");
    this.aborted = true;
    this.staged.length = 0;
  }
};
|
|
224
|
+
var FakeProducer = class {
  /** All records delivered via `send()` (direct + committed transactions). */
  sent = [];
  /** All transactions opened via `transaction()`. */
  transactions = [];
  options;
  connected = false;
  constructor(options) {
    this.options = options;
  }
  /** Flip the connected flag on; no real broker is contacted. */
  async connect() {
    this.connected = true;
  }
  /** Flip the connected flag off. */
  async disconnect() {
    this.connected = false;
  }
  /** Record an outgoing produce request in the `sent` log. */
  async send(record) {
    this.sent.push(record);
  }
  /** Open a new fake transaction and remember it for later assertions. */
  async transaction() {
    const opened = new FakeTransaction(this);
    this.transactions.push(opened);
    return opened;
  }
  /** Return the last committed transaction, or throw if none exist. */
  get lastTransaction() {
    const latest = this.transactions[this.transactions.length - 1];
    if (!latest) throw new Error("FakeProducer: no transactions opened yet");
    return latest;
  }
  /** All topic names that received at least one message. */
  sentTopics() {
    const unique = new Set();
    for (const record of this.sent) {
      unique.add(record.topic);
    }
    return [...unique];
  }
  /** All messages sent to a specific topic. */
  sentTo(topic) {
    const messages = [];
    for (const record of this.sent) {
      if (record.topic === topic) messages.push(...record.messages);
    }
    return messages;
  }
};
|
|
263
|
+
var FakeConsumer = class {
  groupId;
  fromBeginning;
  /** Topics subscribed via `subscribe()`. */
  subscribed = [];
  _runConfig;
  _assignments = [];
  pausedTopics = new Set();
  connected = false;
  onRebalance;
  constructor(options) {
    const { groupId, fromBeginning, onRebalance } = options;
    this.groupId = groupId;
    this.fromBeginning = fromBeginning ?? false;
    this.onRebalance = onRebalance;
  }
  /** Pretend to establish a broker connection. */
  async connect() {
    this.connected = true;
  }
  /** Pretend to tear the connection down. */
  async disconnect() {
    this.connected = false;
  }
  /**
   * Track string topic subscriptions, assign partition 0 for each, and
   * fire an "assign" rebalance event if a callback was provided.
   */
  async subscribe(options) {
    const names = options.topics.filter((entry) => typeof entry === "string");
    this.subscribed.push(...names);
    this._assignments = this.subscribed.map((topic) => ({ topic, partition: 0 }));
    this.onRebalance?.("assign", this._assignments);
  }
  /** Store the run config; messages are injected via `deliver()`. */
  async run(config) {
    this._runConfig = config;
  }
  pause(assignments) {
    assignments.forEach(({ topic }) => this.pausedTopics.add(topic));
  }
  resume(assignments) {
    assignments.forEach(({ topic }) => this.pausedTopics.delete(topic));
  }
  seek(_options) {
  }
  assignment() {
    return this._assignments;
  }
  async commitOffsets(_offsets) {
  }
  async stop() {
    this.connected = false;
  }
  // ── Test helpers ─────────────────────────────────────────────────────
  /**
   * Push a message through the `eachMessage` handler.
   * Throws if `run()` has not been called yet.
   */
  async deliver(topic, message, partition = 0, offset = "0") {
    if (!this._runConfig?.eachMessage) {
      throw new Error(
        `FakeConsumer(${this.groupId}): run() with eachMessage not called yet`
      );
    }
    const envelope = {
      topic,
      partition,
      message: {
        value: message.value,
        headers: message.headers ?? {},
        offset,
        key: message.key
      }
    };
    await this._runConfig.eachMessage(envelope);
  }
  /**
   * Simulate a partition-assign rebalance event.
   * Useful for testing onRebalance callbacks.
   */
  triggerRebalance(type, assignments) {
    this.onRebalance?.(type, assignments);
  }
  /** Whether `run()` has been called (consumer is active). */
  get isRunning() {
    return this._runConfig !== void 0;
  }
};
|
|
344
|
+
var FakeAdmin = class {
  /** Topics returned by `listTopics()`. Add to this from your test. */
  existingTopics = [];
  /** Per-topic partition watermarks returned by `fetchTopicOffsets()`. */
  topicOffsets = new Map();
  /** Per-groupId committed offsets returned by `fetchOffsets()`. */
  groupOffsets = new Map();
  /** Calls captured by `setOffsets()` — inspect in tests. */
  setOffsetsCalls = [];
  /** Group IDs deleted via `deleteGroups()`. */
  deletedGroups = [];
  /** Records deleted via `deleteTopicRecords()`. */
  deletedRecords = [];
  connected = false;
  /** Flip the connected flag on; no real broker is contacted. */
  async connect() {
    this.connected = true;
  }
  /** Flip the connected flag off. */
  async disconnect() {
    this.connected = false;
  }
  /** Register each requested topic once (duplicates are ignored). */
  async createTopics(options) {
    options.topics.forEach(({ topic }) => {
      if (!this.existingTopics.includes(topic)) {
        this.existingTopics.push(topic);
      }
    });
  }
  /** Stubbed watermarks; defaults to a single empty partition 0. */
  async fetchTopicOffsets(topic) {
    const configured = this.topicOffsets.get(topic);
    return configured ?? [{ partition: 0, low: "0", high: "0" }];
  }
  async fetchTopicOffsetsByTimestamp(_topic, _timestamp) {
    return [];
  }
  /** Committed offsets configured per group; empty when unset. */
  async fetchOffsets(options) {
    const configured = this.groupOffsets.get(options.groupId);
    return configured ?? [];
  }
  /** Only records the call; offsets are never applied anywhere. */
  async setOffsets(options) {
    this.setOffsetsCalls.push(options);
  }
  async listTopics() {
    return this.existingTopics;
  }
  async listGroups() {
    return { groups: [] };
  }
  async fetchTopicMetadata(_options) {
    return { topics: [] };
  }
  /** Only records the group IDs; nothing is actually deleted. */
  async deleteGroups(groupIds) {
    this.deletedGroups.push(...groupIds);
  }
  /** Only records the call; nothing is actually deleted. */
  async deleteTopicRecords(options) {
    this.deletedRecords.push(options);
  }
};
|
|
397
|
+
var FakeTransport = class {
  _producers = [];
  _consumers = [];
  _admin = new FakeAdmin();
  /** Create and track a new FakeProducer. */
  producer(options) {
    const created = new FakeProducer(options);
    this._producers.push(created);
    return created;
  }
  /** Create and track a new FakeConsumer. */
  consumer(options) {
    const created = new FakeConsumer(options);
    this._consumers.push(created);
    return created;
  }
  /** Always returns the single shared FakeAdmin instance. */
  admin() {
    return this._admin;
  }
  // ── Convenience accessors ─────────────────────────────────────────
  /** The admin client shared across all admin() calls. */
  get fakeAdmin() {
    return this._admin;
  }
  /**
   * The first (default) producer — the non-transactional producer
   * created during `KafkaClient` construction.
   */
  get mainProducer() {
    const [first] = this._producers;
    if (!first) throw new Error("FakeTransport: no producers created yet");
    return first;
  }
  /** All producers created so far (main + transactional). */
  get producers() {
    return this._producers;
  }
  /** All consumers created so far. */
  get consumers() {
    return this._consumers;
  }
  /**
   * Find the consumer for a given group ID.
   * Throws if no consumer with that group exists.
   */
  consumerFor(groupId) {
    const match = this._consumers.find(
      (candidate) => candidate.groupId === groupId || candidate.groupId.startsWith(groupId)
    );
    if (match) return match;
    throw new Error(
      `FakeTransport: no consumer for group "${groupId}". Available: ${this._consumers.map((c2) => c2.groupId).join(", ") || "(none)"}`
    );
  }
  /**
   * Deliver a JSON-serialized message to the first consumer subscribed to `topic`.
   * Simulates a broker dispatching a message to the consumer handler.
   */
  async deliver(topic, payload, options = {}) {
    const target = this._consumers.find((candidate) => candidate.subscribed.includes(topic));
    if (!target) {
      throw new Error(
        `FakeTransport: no consumer subscribed to "${topic}". Subscribed topics: ${this._consumers.flatMap((c) => c.subscribed).join(", ") || "(none)"}`
      );
    }
    // Wrap header string values in single-element arrays, matching the
    // broker-style header representation.
    const headers = {};
    for (const [name, value] of Object.entries(options.headers ?? {})) {
      headers[name] = [value];
    }
    await target.deliver(
      topic,
      {
        value: Buffer.from(JSON.stringify(payload)),
        headers,
        key: options.key !== void 0 ? Buffer.from(options.key) : void 0
      },
      options.partition ?? 0,
      options.offset ?? "0"
    );
  }
};
|
|
184
475
|
// Annotate the CommonJS export names for ESM import in node:
|
|
185
476
|
0 && (module.exports = {
|
|
477
|
+
FakeAdmin,
|
|
478
|
+
FakeConsumer,
|
|
479
|
+
FakeProducer,
|
|
480
|
+
FakeTransaction,
|
|
481
|
+
FakeTransport,
|
|
186
482
|
KafkaTestContainer,
|
|
187
483
|
createMockKafkaClient
|
|
188
484
|
});
|
package/dist/testing.js.map
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
{"version":3,"sources":["../src/testing.ts","../src/testing/mock-client.ts","../src/testing/test-container.ts"],"sourcesContent":["export * from \"./testing/index\";\n","import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). 
Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n consume: returning(\n (function* () {})() as unknown as AsyncIterableIterator<any>,\n ),\n replayDlq: resolved({ replayed: 0, skipped: 0 }),\n resetOffsets: resolved(undefined),\n seekToOffset: resolved(undefined),\n seekToTimestamp: resolved(undefined),\n getCircuitState: returning(undefined),\n pauseConsumer: mock(),\n resumeConsumer: mock(),\n 
getMetrics: returning({\n processedCount: 0,\n retryCount: 0,\n dlqCount: 0,\n dedupCount: 0,\n }),\n resetMetrics: mock(),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... 
run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. `[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 
1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n 
}\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC1C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,SAAS;AAAA,OACN,aAAa;AAAA,MAAC,GAAG;AAAA,IACpB;AAAA,IACA,WAAW,SAAS,EAAE,UAAU,GAAG,SAAS,EAAE,CAAC;AAAA,IAC/C,cAAc,SAAS,MAAS;AAAA,IAChC,cAAc,SAAS,MAAS;AAAA,IAChC,iBAAiB,SAAS,MAAS;AAAA,IACnC,iBAAiB,UAAU,MAAS;AAAA,IACpC,eAAe,KAAK;AAAA,IACpB,gBAAgB,KAAK;AAAA,IACrB,YAAY,UAAU;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,YAAY;AAAA,IACd,CAAC;AAAA,IACD,cAAc,KAAK;AAAA,IACnB,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;AC1HA,mBAGO;AACP,8BAAwB;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,4BAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBAAiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AA
CpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;","names":[]}
|
|
1
|
+
{"version":3,"sources":["../src/testing.ts","../src/testing/mock-client.ts","../src/testing/test-container.ts","../src/testing/fake-transport.ts"],"sourcesContent":["export * from \"./testing/index\";\n","import type { IKafkaClient, TopicMapConstraint } from \"../client/types\";\n\n/**\n * Fully typed mock of `IKafkaClient<T>` where every method is a mock function.\n * Compatible with Jest, Vitest, or any framework whose `fn()` returns\n * an object with `.mock`, `.mockResolvedValue`, etc.\n */\nexport type MockKafkaClient<T extends TopicMapConstraint<T>> = {\n [K in keyof IKafkaClient<T>]: IKafkaClient<T>[K] & Record<string, any>;\n};\n\n/** Factory that creates a no-op mock function (e.g. `() => jest.fn()`). */\nexport type MockFactory = () => (...args: any[]) => any;\n\nfunction detectMockFactory(): MockFactory {\n // Jest and Vitest inject their globals (`jest` / `vi`) as module-scope\n // bindings, not as properties of `globalThis`. The only reliable way to\n // detect them without a hard import is via `eval`, which evaluates in the\n // current module scope where those bindings are available.\n try {\n if (eval(\"typeof jest === 'object' && typeof jest.fn === 'function'\")) {\n return () => eval(\"jest.fn()\");\n }\n } catch {\n /* not available */\n }\n try {\n if (eval(\"typeof vi === 'object' && typeof vi.fn === 'function'\")) {\n return () => eval(\"vi.fn()\");\n }\n } catch {\n /* not available */\n }\n throw new Error(\n \"createMockKafkaClient: no mock framework detected (jest/vitest). \" +\n \"Pass a custom mockFactory.\",\n );\n}\n\n/**\n * Create a fully typed mock implementing every `IKafkaClient<T>` method.\n * Useful for unit-testing services that depend on `KafkaClient` without\n * touching a real broker.\n *\n * Auto-detects Jest (`jest.fn()`) or Vitest (`vi.fn()`). 
Pass a custom\n * `mockFactory` for other frameworks.\n *\n * All methods resolve to sensible defaults:\n * - `checkStatus()` → `{ status: 'up', clientId: 'mock-client', topics: [] }`\n * - `getClientId()` → `\"mock-client\"`\n * - void methods → `undefined`\n *\n * @example\n * ```ts\n * const kafka = createMockKafkaClient<MyTopics>();\n *\n * const service = new OrdersService(kafka);\n * await service.createOrder();\n *\n * expect(kafka.sendMessage).toHaveBeenCalledWith(\n * 'order.created',\n * expect.objectContaining({ orderId: '123' }),\n * );\n * ```\n */\nexport function createMockKafkaClient<T extends TopicMapConstraint<T>>(\n mockFactory?: MockFactory,\n): MockKafkaClient<T> {\n const fn = mockFactory ?? detectMockFactory();\n\n const mock = () => fn() as any;\n const resolved = (value: unknown) => mock().mockResolvedValue(value);\n const returning = (value: unknown) => mock().mockReturnValue(value);\n\n return {\n checkStatus: resolved({\n status: \"up\",\n clientId: \"mock-client\",\n topics: [],\n }),\n getConsumerLag: resolved([]),\n getClientId: returning(\"mock-client\"),\n sendMessage: resolved(undefined),\n sendBatch: resolved(undefined),\n transaction: mock().mockImplementation(\n async (cb: (ctx: Record<string, unknown>) => Promise<void>) => {\n const ctx = {\n send: resolved(undefined),\n sendBatch: resolved(undefined),\n };\n await cb(ctx);\n },\n ),\n startConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n ready: mock().mockResolvedValue(undefined),\n }),\n startBatchConsumer: resolved({\n groupId: \"mock-group\",\n stop: mock().mockResolvedValue(undefined),\n ready: mock().mockResolvedValue(undefined),\n }),\n stopConsumer: resolved(undefined),\n consume: returning(\n (function* () {})() as unknown as AsyncIterableIterator<any>,\n ),\n replayDlq: resolved({ replayed: 0, skipped: 0 }),\n resetOffsets: resolved(undefined),\n seekToOffset: resolved(undefined),\n seekToTimestamp: resolved(undefined),\n 
getCircuitState: returning(undefined),\n pauseConsumer: mock(),\n resumeConsumer: mock(),\n getMetrics: returning({\n processedCount: 0,\n retryCount: 0,\n dlqCount: 0,\n dedupCount: 0,\n }),\n resetMetrics: mock(),\n disconnect: resolved(undefined),\n enableGracefulShutdown: mock(),\n } as unknown as MockKafkaClient<T>;\n}\n","import {\n KafkaContainer,\n type StartedKafkaContainer,\n} from \"@testcontainers/kafka\";\nimport { KafkaJS } from \"@confluentinc/kafka-javascript\";\nconst { Kafka, logLevel: KafkaLogLevel } = KafkaJS;\n\n/** Options for `KafkaTestContainer`. */\nexport interface KafkaTestContainerOptions {\n /** Docker image. Default: `\"confluentinc/cp-kafka:7.7.0\"`. */\n image?: string;\n /** Warm up the transactional coordinator on start. Default: `true`. */\n transactionWarmup?: boolean;\n /** Topics to pre-create. Each entry can be a string (1 partition) or `{ topic, numPartitions }`. */\n topics?: Array<string | { topic: string; numPartitions?: number }>;\n}\n\n/**\n * Thin wrapper around `@testcontainers/kafka` that starts a single-node\n * KRaft Kafka container and exposes `brokers` for use with `KafkaClient`.\n *\n * Handles common setup pain points:\n * - Transaction coordinator warmup (avoids transactional producer hangs)\n * - Topic pre-creation (avoids race conditions)\n *\n * @example\n * ```ts\n * const container = new KafkaTestContainer({ topics: ['orders', 'payments'] });\n * const brokers = await container.start();\n *\n * const kafka = new KafkaClient('test', 'test-group', brokers);\n * // ... 
run tests ...\n *\n * await container.stop();\n * ```\n *\n * @example Jest lifecycle\n * ```ts\n * let container: KafkaTestContainer;\n * let brokers: string[];\n *\n * beforeAll(async () => {\n * container = new KafkaTestContainer({ topics: ['orders'] });\n * brokers = await container.start();\n * }, 120_000);\n *\n * afterAll(() => container.stop());\n * ```\n */\nexport class KafkaTestContainer {\n private container: StartedKafkaContainer | undefined;\n private readonly image: string;\n private readonly transactionWarmup: boolean;\n private readonly topics: Array<\n string | { topic: string; numPartitions?: number }\n >;\n\n constructor(options?: KafkaTestContainerOptions) {\n this.image = options?.image ?? \"confluentinc/cp-kafka:7.7.0\";\n this.transactionWarmup = options?.transactionWarmup ?? true;\n this.topics = options?.topics ?? [];\n }\n\n /**\n * Start the Kafka container, pre-create topics, and optionally warm up\n * the transaction coordinator.\n *\n * @returns Broker connection strings, e.g. `[\"localhost:55123\"]`.\n */\n async start(): Promise<string[]> {\n this.container = await new KafkaContainer(this.image)\n .withKraft()\n .withExposedPorts(9093)\n .withEnvironment({\n KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: \"1\",\n KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: \"1\",\n })\n .start();\n\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n const brokers = [`${host}:${port}`];\n\n const kafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-setup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n\n if (this.topics.length > 0) {\n const admin = kafka.admin();\n await admin.connect();\n await admin.createTopics({\n topics: this.topics.map((t) =>\n typeof t === \"string\"\n ? { topic: t, numPartitions: 1 }\n : { topic: t.topic, numPartitions: t.numPartitions ?? 
1 },\n ),\n });\n await admin.disconnect();\n }\n\n if (this.transactionWarmup) {\n const warmupKafka = new Kafka({\n kafkaJS: {\n clientId: \"test-container-warmup\",\n brokers,\n logLevel: KafkaLogLevel.NOTHING,\n },\n });\n const txProducer = warmupKafka.producer({\n kafkaJS: {\n transactionalId: \"test-container-warmup-tx\",\n idempotent: true,\n maxInFlightRequests: 1,\n },\n });\n await txProducer.connect();\n const tx = await txProducer.transaction();\n await tx.abort();\n await txProducer.disconnect();\n }\n\n return brokers;\n }\n\n /** Stop and remove the container. */\n async stop(): Promise<void> {\n await this.container?.stop();\n this.container = undefined;\n }\n\n /** Broker connection strings. Throws if container is not started. */\n get brokers(): string[] {\n if (!this.container) {\n throw new Error(\"KafkaTestContainer is not started. Call start() first.\");\n }\n const host = this.container.getHost();\n const port = this.container.getMappedPort(9093);\n return [`${host}:${port}`];\n }\n}\n","import type {\n KafkaTransport,\n IProducer,\n IConsumer,\n IAdmin,\n ITransaction,\n IProducerRecord,\n IProducerCreationOptions,\n IConsumerCreationOptions,\n IConsumerRunConfig,\n ITopicPartition,\n ITopicPartitions,\n ITopicPartitionOffset,\n IPartitionWatermarks,\n IPartitionOffset,\n IGroupTopicOffsets,\n IGroupDescription,\n ITopicMetadata,\n IMessage,\n} from \"../client/transport\";\n\n// ── FakeTransaction ───────────────────────────────────────────────────────────\n\n/**\n * An in-memory Kafka transaction.\n * Staged sends are visible in `staged`; committed sends are flushed\n * to the owning `FakeProducer.sent` on `commit()`.\n */\nexport class FakeTransaction implements ITransaction {\n /** Records staged within this transaction (not yet committed). */\n readonly staged: IProducerRecord[] = [];\n /** True after `commit()` was called. */\n committed = false;\n /** True after `abort()` was called. 
*/\n aborted = false;\n /** sendOffsets calls (for EOS assertions). */\n readonly offsetsCommitted: Array<{\n consumer: IConsumer;\n topics: Array<{ topic: string; partitions: IPartitionOffset[] }>;\n }> = [];\n\n constructor(private readonly producer: FakeProducer) {}\n\n async send(record: IProducerRecord): Promise<void> {\n this.staged.push(record);\n }\n\n async sendOffsets(options: {\n consumer: IConsumer;\n topics: Array<{ topic: string; partitions: Array<{ partition: number; offset: string }> }>;\n }): Promise<void> {\n this.offsetsCommitted.push(options);\n }\n\n async commit(): Promise<void> {\n if (this.aborted) throw new Error(\"FakeTransaction: already aborted\");\n this.committed = true;\n for (const record of this.staged) {\n this.producer.sent.push(record);\n }\n }\n\n async abort(): Promise<void> {\n if (this.committed) throw new Error(\"FakeTransaction: already committed\");\n this.aborted = true;\n this.staged.length = 0;\n }\n}\n\n// ── FakeProducer ──────────────────────────────────────────────────────────────\n\n/**\n * In-memory producer. All `send()` calls are captured in `sent`.\n * Transactions are backed by `FakeTransaction`.\n */\nexport class FakeProducer implements IProducer {\n /** All records delivered via `send()` (direct + committed transactions). */\n readonly sent: IProducerRecord[] = [];\n /** All transactions opened via `transaction()`. 
*/\n readonly transactions: FakeTransaction[] = [];\n\n readonly options: IProducerCreationOptions | undefined;\n connected = false;\n\n constructor(options?: IProducerCreationOptions) {\n this.options = options;\n }\n\n async connect(): Promise<void> {\n this.connected = true;\n }\n\n async disconnect(): Promise<void> {\n this.connected = false;\n }\n\n async send(record: IProducerRecord): Promise<void> {\n this.sent.push(record);\n }\n\n async transaction(): Promise<ITransaction> {\n const tx = new FakeTransaction(this);\n this.transactions.push(tx);\n return tx;\n }\n\n /** Return the last committed transaction, or throw if none exist. */\n get lastTransaction(): FakeTransaction {\n const tx = this.transactions.at(-1);\n if (!tx) throw new Error(\"FakeProducer: no transactions opened yet\");\n return tx;\n }\n\n /** All topic names that received at least one message. */\n sentTopics(): string[] {\n return [...new Set(this.sent.map((r) => r.topic))];\n }\n\n /** All messages sent to a specific topic. */\n sentTo(topic: string): IProducerRecord[\"messages\"] {\n return this.sent.filter((r) => r.topic === topic).flatMap((r) => r.messages);\n }\n}\n\n// ── FakeConsumer ──────────────────────────────────────────────────────────────\n\n/**\n * In-memory consumer.\n * Call `deliver(topic, message)` from your test to push messages through\n * the `eachMessage` handler without a real broker.\n */\nexport class FakeConsumer implements IConsumer {\n readonly groupId: string;\n readonly fromBeginning: boolean;\n\n /** Topics subscribed via `subscribe()`. */\n readonly subscribed: string[] = [];\n\n private _runConfig: IConsumerRunConfig | undefined;\n private _assignments: ITopicPartition[] = [];\n readonly pausedTopics = new Set<string>();\n connected = false;\n\n private readonly onRebalance: IConsumerCreationOptions[\"onRebalance\"];\n\n constructor(options: IConsumerCreationOptions) {\n this.groupId = options.groupId;\n this.fromBeginning = options.fromBeginning ?? 
false;\n this.onRebalance = options.onRebalance;\n }\n\n async connect(): Promise<void> {\n this.connected = true;\n }\n\n async disconnect(): Promise<void> {\n this.connected = false;\n }\n\n async subscribe(options: { topics: (string | RegExp)[] }): Promise<void> {\n for (const t of options.topics) {\n if (typeof t === \"string\") this.subscribed.push(t);\n }\n // Auto-assign one partition per subscribed topic and immediately fire the\n // rebalance callback so that handle.ready() resolves without a real broker.\n this._assignments = this.subscribed.map((topic) => ({ topic, partition: 0 }));\n this.onRebalance?.(\"assign\", this._assignments);\n }\n\n async run(config: IConsumerRunConfig): Promise<void> {\n this._runConfig = config;\n }\n\n pause(assignments: ITopicPartitions[]): void {\n for (const { topic } of assignments) this.pausedTopics.add(topic);\n }\n\n resume(assignments: ITopicPartitions[]): void {\n for (const { topic } of assignments) this.pausedTopics.delete(topic);\n }\n\n seek(_options: ITopicPartitionOffset): void {}\n\n assignment(): ITopicPartition[] {\n return this._assignments;\n }\n\n async commitOffsets(_offsets: ITopicPartitionOffset[]): Promise<void> {}\n\n async stop(): Promise<void> {\n this.connected = false;\n }\n\n // ── Test helpers ─────────────────────────────────────────────────────\n\n /**\n * Push a message through the `eachMessage` handler.\n * Throws if `run()` has not been called yet.\n */\n async deliver(\n topic: string,\n message: Partial<IMessage> & { value: Buffer | null },\n partition = 0,\n offset = \"0\",\n ): Promise<void> {\n if (!this._runConfig?.eachMessage) {\n throw new Error(\n `FakeConsumer(${this.groupId}): run() with eachMessage not called yet`,\n );\n }\n await this._runConfig.eachMessage({\n topic,\n partition,\n message: {\n value: message.value,\n headers: message.headers ?? 
{},\n offset,\n key: message.key,\n },\n });\n }\n\n /**\n * Simulate a partition-assign rebalance event.\n * Useful for testing onRebalance callbacks.\n */\n triggerRebalance(\n type: \"assign\" | \"revoke\",\n assignments: ITopicPartition[],\n ): void {\n this.onRebalance?.(type, assignments);\n }\n\n /** Whether `run()` has been called (consumer is active). */\n get isRunning(): boolean {\n return this._runConfig !== undefined;\n }\n}\n\n// ── FakeAdmin ─────────────────────────────────────────────────────────────────\n\n/**\n * In-memory admin client.\n * Pre-populate `topicOffsets`, `groupOffsets`, and `existingTopics`\n * to control what admin queries return.\n */\nexport class FakeAdmin implements IAdmin {\n /** Topics returned by `listTopics()`. Add to this from your test. */\n readonly existingTopics: string[] = [];\n\n /** Per-topic partition watermarks returned by `fetchTopicOffsets()`. */\n readonly topicOffsets = new Map<string, IPartitionWatermarks[]>();\n\n /** Per-groupId committed offsets returned by `fetchOffsets()`. */\n readonly groupOffsets = new Map<string, IGroupTopicOffsets[]>();\n\n /** Calls captured by `setOffsets()` — inspect in tests. */\n readonly setOffsetsCalls: Array<{\n groupId: string;\n topic: string;\n partitions: IPartitionOffset[];\n }> = [];\n\n /** Group IDs deleted via `deleteGroups()`. */\n readonly deletedGroups: string[] = [];\n\n /** Records deleted via `deleteTopicRecords()`. 
*/\n readonly deletedRecords: Array<{\n topic: string;\n partitions: IPartitionOffset[];\n }> = [];\n\n connected = false;\n\n async connect(): Promise<void> {\n this.connected = true;\n }\n\n async disconnect(): Promise<void> {\n this.connected = false;\n }\n\n async createTopics(options: {\n topics: Array<{ topic: string; numPartitions: number }>;\n }): Promise<void> {\n for (const { topic } of options.topics) {\n if (!this.existingTopics.includes(topic)) this.existingTopics.push(topic);\n }\n }\n\n async fetchTopicOffsets(topic: string): Promise<IPartitionWatermarks[]> {\n return this.topicOffsets.get(topic) ?? [{ partition: 0, low: \"0\", high: \"0\" }];\n }\n\n async fetchTopicOffsetsByTimestamp(\n _topic: string,\n _timestamp: number,\n ): Promise<IPartitionOffset[]> {\n return [];\n }\n\n async fetchOffsets(options: {\n groupId: string;\n }): Promise<IGroupTopicOffsets[]> {\n return this.groupOffsets.get(options.groupId) ?? [];\n }\n\n async setOffsets(options: {\n groupId: string;\n topic: string;\n partitions: IPartitionOffset[];\n }): Promise<void> {\n this.setOffsetsCalls.push(options);\n }\n\n async listTopics(): Promise<string[]> {\n return this.existingTopics;\n }\n\n async listGroups(): Promise<{ groups: IGroupDescription[] }> {\n return { groups: [] };\n }\n\n async fetchTopicMetadata(_options?: {\n topics?: string[];\n }): Promise<{ topics: ITopicMetadata[] }> {\n return { topics: [] };\n }\n\n async deleteGroups(groupIds: string[]): Promise<void> {\n this.deletedGroups.push(...groupIds);\n }\n\n async deleteTopicRecords(options: {\n topic: string;\n partitions: IPartitionOffset[];\n }): Promise<void> {\n this.deletedRecords.push(options);\n }\n}\n\n// ── FakeTransport ─────────────────────────────────────────────────────────────\n\n/**\n * In-memory `KafkaTransport` for unit testing.\n *\n * Inject into `KafkaClient` via `KafkaClientOptions.transport` to test\n * producer/consumer logic without `jest.mock('@confluentinc/kafka-javascript')`.\n *\n 
* @example\n * ```ts\n * const transport = new FakeTransport();\n * const client = new KafkaClient('svc', 'grp', [], { transport });\n *\n * await client.connectProducer();\n * await client.sendMessage('orders', { id: '1' });\n *\n * expect(transport.mainProducer.sentTo('orders')).toHaveLength(1);\n * ```\n */\nexport class FakeTransport implements KafkaTransport {\n private readonly _producers: FakeProducer[] = [];\n private readonly _consumers: FakeConsumer[] = [];\n private readonly _admin = new FakeAdmin();\n\n producer(options?: IProducerCreationOptions): IProducer {\n const p = new FakeProducer(options);\n this._producers.push(p);\n return p;\n }\n\n consumer(options: IConsumerCreationOptions): IConsumer {\n const c = new FakeConsumer(options);\n this._consumers.push(c);\n return c;\n }\n\n admin(): IAdmin {\n return this._admin;\n }\n\n // ── Convenience accessors ─────────────────────────────────────────\n\n /** The admin client shared across all admin() calls. */\n get fakeAdmin(): FakeAdmin {\n return this._admin;\n }\n\n /**\n * The first (default) producer — the non-transactional producer\n * created during `KafkaClient` construction.\n */\n get mainProducer(): FakeProducer {\n const p = this._producers[0];\n if (!p) throw new Error(\"FakeTransport: no producers created yet\");\n return p;\n }\n\n /** All producers created so far (main + transactional). */\n get producers(): readonly FakeProducer[] {\n return this._producers;\n }\n\n /** All consumers created so far. */\n get consumers(): readonly FakeConsumer[] {\n return this._consumers;\n }\n\n /**\n * Find the consumer for a given group ID.\n * Throws if no consumer with that group exists.\n */\n consumerFor(groupId: string): FakeConsumer {\n const c = this._consumers.find(\n (c) => c.groupId === groupId || c.groupId.startsWith(groupId),\n );\n if (!c)\n throw new Error(\n `FakeTransport: no consumer for group \"${groupId}\". 
` +\n `Available: ${this._consumers.map((c) => c.groupId).join(\", \") || \"(none)\"}`,\n );\n return c;\n }\n\n /**\n * Deliver a JSON-serialized message to the first consumer subscribed to `topic`.\n * Simulates a broker dispatching a message to the consumer handler.\n */\n async deliver<T>(\n topic: string,\n payload: T,\n options: {\n key?: string;\n headers?: Record<string, string>;\n partition?: number;\n offset?: string;\n } = {},\n ): Promise<void> {\n const consumer = this._consumers.find((c) => c.subscribed.includes(topic));\n if (!consumer) {\n throw new Error(\n `FakeTransport: no consumer subscribed to \"${topic}\". ` +\n `Subscribed topics: ${this._consumers.flatMap((c) => c.subscribed).join(\", \") || \"(none)\"}`,\n );\n }\n await consumer.deliver(\n topic,\n {\n value: Buffer.from(JSON.stringify(payload)),\n headers: options.headers\n ? Object.fromEntries(\n Object.entries(options.headers).map(([k, v]) => [k, [v]]),\n )\n : {},\n key: options.key !== undefined ? Buffer.from(options.key) : undefined,\n },\n options.partition ?? 0,\n options.offset ?? 
\"0\",\n );\n }\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;;;ACcA,SAAS,oBAAiC;AAKxC,MAAI;AACF,QAAI,KAAK,2DAA2D,GAAG;AACrE,aAAO,MAAM,KAAK,WAAW;AAAA,IAC/B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,MAAI;AACF,QAAI,KAAK,uDAAuD,GAAG;AACjE,aAAO,MAAM,KAAK,SAAS;AAAA,IAC7B;AAAA,EACF,QAAQ;AAAA,EAER;AACA,QAAM,IAAI;AAAA,IACR;AAAA,EAEF;AACF;AA4BO,SAAS,sBACd,aACoB;AACpB,QAAM,KAAK,eAAe,kBAAkB;AAE5C,QAAM,OAAO,MAAM,GAAG;AACtB,QAAM,WAAW,CAAC,UAAmB,KAAK,EAAE,kBAAkB,KAAK;AACnE,QAAM,YAAY,CAAC,UAAmB,KAAK,EAAE,gBAAgB,KAAK;AAElE,SAAO;AAAA,IACL,aAAa,SAAS;AAAA,MACpB,QAAQ;AAAA,MACR,UAAU;AAAA,MACV,QAAQ,CAAC;AAAA,IACX,CAAC;AAAA,IACD,gBAAgB,SAAS,CAAC,CAAC;AAAA,IAC3B,aAAa,UAAU,aAAa;AAAA,IACpC,aAAa,SAAS,MAAS;AAAA,IAC/B,WAAW,SAAS,MAAS;AAAA,IAC7B,aAAa,KAAK,EAAE;AAAA,MAClB,OAAO,OAAwD;AAC7D,cAAM,MAAM;AAAA,UACV,MAAM,SAAS,MAAS;AAAA,UACxB,WAAW,SAAS,MAAS;AAAA,QAC/B;AACA,cAAM,GAAG,GAAG;AAAA,MACd;AAAA,IACF;AAAA,IACA,eAAe,SAAS;AAAA,MACtB,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,MACxC,OAAO,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC3C,CAAC;AAAA,IACD,oBAAoB,SAAS;AAAA,MAC3B,SAAS;AAAA,MACT,MAAM,KAAK,EAAE,kBAAkB,MAAS;AAAA,MACxC,OAAO,KAAK,EAAE,kBAAkB,MAAS;AAAA,IAC3C,CAAC;AAAA,IACD,cAAc,SAAS,MAAS;AAAA,IAChC,SAAS;AAAA,OACN,aAAa;AAAA,MAAC,GAAG;AAAA,IACpB;AAAA,IACA,WAAW,SAAS,EAAE,UAAU,GAAG,SAAS,EAAE,CAAC;AAAA,IAC/C,cAAc,SAAS,MAAS;AAAA,IAChC,cAAc,SAAS,MAAS;AAAA,IAChC,iBAAiB,SAAS,MAAS;AAAA,IACnC,iBAAiB,UAAU,MAAS;AAAA,IACpC,eAAe,KAAK;AAAA,IACpB,gBAAgB,KAAK;AAAA,IACrB,YAAY,UAAU;AAAA,MACpB,gBAAgB;AAAA,MAChB,YAAY;AAAA,MACZ,UAAU;AAAA,MACV,YAAY;AAAA,IACd,CAAC;AAAA,IACD,cAAc,KAAK;AAAA,IACnB,YAAY,SAAS,MAAS;AAAA,IAC9B,wBAAwB,KAAK;AAAA,EAC/B;AACF;;;AC5HA,mBAGO;AACP,8BAAwB;AACxB,IAAM,EAAE,OAAO,UAAU,cAAc,IAAI;AA4CpC,IAAM,qBAAN,MAAyB;AAAA,EACtB;AAAA,EACS;AAAA,EACA;AAAA,EACA;AAAA,EAIjB,YAAY,SAAqC;AAC/C,SAAK,QAAQ,SAAS,SAAS;AAC/B,SAAK,oBAAoB,SAAS,qBAAqB;AACvD,SAAK,SAAS,SAAS,UAAU,CAAC;AAAA,EACpC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QAA2B;AAC/B,SAAK,YAAY,MAAM,IAAI,4BAAe,KAAK,KAAK,EACjD,UAAU,EACV,iBA
AiB,IAAI,EACrB,gBAAgB;AAAA,MACf,gDAAgD;AAAA,MAChD,qCAAqC;AAAA,IACvC,CAAC,EACA,MAAM;AAET,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,UAAM,UAAU,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAElC,UAAM,QAAQ,IAAI,MAAM;AAAA,MACtB,SAAS;AAAA,QACP,UAAU;AAAA,QACV;AAAA,QACA,UAAU,cAAc;AAAA,MAC1B;AAAA,IACF,CAAC;AAED,QAAI,KAAK,OAAO,SAAS,GAAG;AAC1B,YAAM,QAAQ,MAAM,MAAM;AAC1B,YAAM,MAAM,QAAQ;AACpB,YAAM,MAAM,aAAa;AAAA,QACvB,QAAQ,KAAK,OAAO;AAAA,UAAI,CAAC,MACvB,OAAO,MAAM,WACT,EAAE,OAAO,GAAG,eAAe,EAAE,IAC7B,EAAE,OAAO,EAAE,OAAO,eAAe,EAAE,iBAAiB,EAAE;AAAA,QAC5D;AAAA,MACF,CAAC;AACD,YAAM,MAAM,WAAW;AAAA,IACzB;AAEA,QAAI,KAAK,mBAAmB;AAC1B,YAAM,cAAc,IAAI,MAAM;AAAA,QAC5B,SAAS;AAAA,UACP,UAAU;AAAA,UACV;AAAA,UACA,UAAU,cAAc;AAAA,QAC1B;AAAA,MACF,CAAC;AACD,YAAM,aAAa,YAAY,SAAS;AAAA,QACtC,SAAS;AAAA,UACP,iBAAiB;AAAA,UACjB,YAAY;AAAA,UACZ,qBAAqB;AAAA,QACvB;AAAA,MACF,CAAC;AACD,YAAM,WAAW,QAAQ;AACzB,YAAM,KAAK,MAAM,WAAW,YAAY;AACxC,YAAM,GAAG,MAAM;AACf,YAAM,WAAW,WAAW;AAAA,IAC9B;AAEA,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,MAAM,OAAsB;AAC1B,UAAM,KAAK,WAAW,KAAK;AAC3B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA,EAGA,IAAI,UAAoB;AACtB,QAAI,CAAC,KAAK,WAAW;AACnB,YAAM,IAAI,MAAM,wDAAwD;AAAA,IAC1E;AACA,UAAM,OAAO,KAAK,UAAU,QAAQ;AACpC,UAAM,OAAO,KAAK,UAAU,cAAc,IAAI;AAC9C,WAAO,CAAC,GAAG,IAAI,IAAI,IAAI,EAAE;AAAA,EAC3B;AACF;;;ACnHO,IAAM,kBAAN,MAA8C;AAAA,EAanD,YAA6B,UAAwB;AAAxB;AAAA,EAAyB;AAAA;AAAA,EAX7C,SAA4B,CAAC;AAAA;AAAA,EAEtC,YAAY;AAAA;AAAA,EAEZ,UAAU;AAAA;AAAA,EAED,mBAGJ,CAAC;AAAA,EAIN,MAAM,KAAK,QAAwC;AACjD,SAAK,OAAO,KAAK,MAAM;AAAA,EACzB;AAAA,EAEA,MAAM,YAAY,SAGA;AAChB,SAAK,iBAAiB,KAAK,OAAO;AAAA,EACpC;AAAA,EAEA,MAAM,SAAwB;AAC5B,QAAI,KAAK,QAAS,OAAM,IAAI,MAAM,kCAAkC;AACpE,SAAK,YAAY;AACjB,eAAW,UAAU,KAAK,QAAQ;AAChC,WAAK,SAAS,KAAK,KAAK,MAAM;AAAA,IAChC;AAAA,EACF;AAAA,EAEA,MAAM,QAAuB;AAC3B,QAAI,KAAK,UAAW,OAAM,IAAI,MAAM,oCAAoC;AACxE,SAAK,UAAU;AACf,SAAK,OAAO,SAAS;AAAA,EACvB;AACF;AAQO,IAAM,eAAN,MAAwC;AAAA;AAAA,EAEpC,OAA0B,CAAC;AAAA;AAAA,EAE3B,eAAkC,CAAC;AAAA,EAEnC;AAAA,EACT,YAAY;AAAA,EAEZ,YAAY,SAAoC;AAC9C,SAAK,UAAU;AAAA,EACjB;AAAA,EAEA,MAAM,UAAyB;AAC7
B,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,aAA4B;AAChC,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,KAAK,QAAwC;AACjD,SAAK,KAAK,KAAK,MAAM;AAAA,EACvB;AAAA,EAEA,MAAM,cAAqC;AACzC,UAAM,KAAK,IAAI,gBAAgB,IAAI;AACnC,SAAK,aAAa,KAAK,EAAE;AACzB,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,IAAI,kBAAmC;AACrC,UAAM,KAAK,KAAK,aAAa,GAAG,EAAE;AAClC,QAAI,CAAC,GAAI,OAAM,IAAI,MAAM,0CAA0C;AACnE,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,aAAuB;AACrB,WAAO,CAAC,GAAG,IAAI,IAAI,KAAK,KAAK,IAAI,CAAC,MAAM,EAAE,KAAK,CAAC,CAAC;AAAA,EACnD;AAAA;AAAA,EAGA,OAAO,OAA4C;AACjD,WAAO,KAAK,KAAK,OAAO,CAAC,MAAM,EAAE,UAAU,KAAK,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ;AAAA,EAC7E;AACF;AASO,IAAM,eAAN,MAAwC;AAAA,EACpC;AAAA,EACA;AAAA;AAAA,EAGA,aAAuB,CAAC;AAAA,EAEzB;AAAA,EACA,eAAkC,CAAC;AAAA,EAClC,eAAe,oBAAI,IAAY;AAAA,EACxC,YAAY;AAAA,EAEK;AAAA,EAEjB,YAAY,SAAmC;AAC7C,SAAK,UAAU,QAAQ;AACvB,SAAK,gBAAgB,QAAQ,iBAAiB;AAC9C,SAAK,cAAc,QAAQ;AAAA,EAC7B;AAAA,EAEA,MAAM,UAAyB;AAC7B,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,aAA4B;AAChC,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,UAAU,SAAyD;AACvE,eAAW,KAAK,QAAQ,QAAQ;AAC9B,UAAI,OAAO,MAAM,SAAU,MAAK,WAAW,KAAK,CAAC;AAAA,IACnD;AAGA,SAAK,eAAe,KAAK,WAAW,IAAI,CAAC,WAAW,EAAE,OAAO,WAAW,EAAE,EAAE;AAC5E,SAAK,cAAc,UAAU,KAAK,YAAY;AAAA,EAChD;AAAA,EAEA,MAAM,IAAI,QAA2C;AACnD,SAAK,aAAa;AAAA,EACpB;AAAA,EAEA,MAAM,aAAuC;AAC3C,eAAW,EAAE,MAAM,KAAK,YAAa,MAAK,aAAa,IAAI,KAAK;AAAA,EAClE;AAAA,EAEA,OAAO,aAAuC;AAC5C,eAAW,EAAE,MAAM,KAAK,YAAa,MAAK,aAAa,OAAO,KAAK;AAAA,EACrE;AAAA,EAEA,KAAK,UAAuC;AAAA,EAAC;AAAA,EAE7C,aAAgC;AAC9B,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,cAAc,UAAkD;AAAA,EAAC;AAAA,EAEvE,MAAM,OAAsB;AAC1B,SAAK,YAAY;AAAA,EACnB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAQA,MAAM,QACJ,OACA,SACA,YAAY,GACZ,SAAS,KACM;AACf,QAAI,CAAC,KAAK,YAAY,aAAa;AACjC,YAAM,IAAI;AAAA,QACR,gBAAgB,KAAK,OAAO;AAAA,MAC9B;AAAA,IACF;AACA,UAAM,KAAK,WAAW,YAAY;AAAA,MAChC;AAAA,MACA;AAAA,MACA,SAAS;AAAA,QACP,OAAO,QAAQ;AAAA,QACf,SAAS,QAAQ,WAAW,CAAC;AAAA,QAC7B;AAAA,QACA,KAAK,QAAQ;AAAA,MACf;AAAA,IACF,CAAC;AAAA,EACH;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,iBACE,MACA,aACM;AACN,SAAK,cAAc,MAAM,WAAW;AAAA,EACtC;AAAA;AAAA,EAGA,IAAI,YAAqB;AACvB
,WAAO,KAAK,eAAe;AAAA,EAC7B;AACF;AASO,IAAM,YAAN,MAAkC;AAAA;AAAA,EAE9B,iBAA2B,CAAC;AAAA;AAAA,EAG5B,eAAe,oBAAI,IAAoC;AAAA;AAAA,EAGvD,eAAe,oBAAI,IAAkC;AAAA;AAAA,EAGrD,kBAIJ,CAAC;AAAA;AAAA,EAGG,gBAA0B,CAAC;AAAA;AAAA,EAG3B,iBAGJ,CAAC;AAAA,EAEN,YAAY;AAAA,EAEZ,MAAM,UAAyB;AAC7B,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,aAA4B;AAChC,SAAK,YAAY;AAAA,EACnB;AAAA,EAEA,MAAM,aAAa,SAED;AAChB,eAAW,EAAE,MAAM,KAAK,QAAQ,QAAQ;AACtC,UAAI,CAAC,KAAK,eAAe,SAAS,KAAK,EAAG,MAAK,eAAe,KAAK,KAAK;AAAA,IAC1E;AAAA,EACF;AAAA,EAEA,MAAM,kBAAkB,OAAgD;AACtE,WAAO,KAAK,aAAa,IAAI,KAAK,KAAK,CAAC,EAAE,WAAW,GAAG,KAAK,KAAK,MAAM,IAAI,CAAC;AAAA,EAC/E;AAAA,EAEA,MAAM,6BACJ,QACA,YAC6B;AAC7B,WAAO,CAAC;AAAA,EACV;AAAA,EAEA,MAAM,aAAa,SAEe;AAChC,WAAO,KAAK,aAAa,IAAI,QAAQ,OAAO,KAAK,CAAC;AAAA,EACpD;AAAA,EAEA,MAAM,WAAW,SAIC;AAChB,SAAK,gBAAgB,KAAK,OAAO;AAAA,EACnC;AAAA,EAEA,MAAM,aAAgC;AACpC,WAAO,KAAK;AAAA,EACd;AAAA,EAEA,MAAM,aAAuD;AAC3D,WAAO,EAAE,QAAQ,CAAC,EAAE;AAAA,EACtB;AAAA,EAEA,MAAM,mBAAmB,UAEiB;AACxC,WAAO,EAAE,QAAQ,CAAC,EAAE;AAAA,EACtB;AAAA,EAEA,MAAM,aAAa,UAAmC;AACpD,SAAK,cAAc,KAAK,GAAG,QAAQ;AAAA,EACrC;AAAA,EAEA,MAAM,mBAAmB,SAGP;AAChB,SAAK,eAAe,KAAK,OAAO;AAAA,EAClC;AACF;AAqBO,IAAM,gBAAN,MAA8C;AAAA,EAClC,aAA6B,CAAC;AAAA,EAC9B,aAA6B,CAAC;AAAA,EAC9B,SAAS,IAAI,UAAU;AAAA,EAExC,SAAS,SAA+C;AACtD,UAAM,IAAI,IAAI,aAAa,OAAO;AAClC,SAAK,WAAW,KAAK,CAAC;AACtB,WAAO;AAAA,EACT;AAAA,EAEA,SAAS,SAA8C;AACrD,UAAM,IAAI,IAAI,aAAa,OAAO;AAClC,SAAK,WAAW,KAAK,CAAC;AACtB,WAAO;AAAA,EACT;AAAA,EAEA,QAAgB;AACd,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA,EAKA,IAAI,YAAuB;AACzB,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,IAAI,eAA6B;AAC/B,UAAM,IAAI,KAAK,WAAW,CAAC;AAC3B,QAAI,CAAC,EAAG,OAAM,IAAI,MAAM,yCAAyC;AACjE,WAAO;AAAA,EACT;AAAA;AAAA,EAGA,IAAI,YAAqC;AACvC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA,EAGA,IAAI,YAAqC;AACvC,WAAO,KAAK;AAAA,EACd;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,YAAY,SAA+B;AACzC,UAAM,IAAI,KAAK,WAAW;AAAA,MACxB,CAACA,OAAMA,GAAE,YAAY,WAAWA,GAAE,QAAQ,WAAW,OAAO;AAAA,IAC9D;AACA,QAAI,CAAC;AACH,YAAM,IAAI;AAAA,QACR,yCAAyC,OAAO,iBAChC,KAAK,WAAW,IAAI,CAACA,OAAMA,GAAE,OAAO,EAAE,KAAK,IAAI
,KAAK,QAAQ;AAAA,MAC9E;AACF,WAAO;AAAA,EACT;AAAA;AAAA;AAAA;AAAA;AAAA,EAMA,MAAM,QACJ,OACA,SACA,UAKI,CAAC,GACU;AACf,UAAM,WAAW,KAAK,WAAW,KAAK,CAAC,MAAM,EAAE,WAAW,SAAS,KAAK,CAAC;AACzE,QAAI,CAAC,UAAU;AACb,YAAM,IAAI;AAAA,QACR,6CAA6C,KAAK,yBAC1B,KAAK,WAAW,QAAQ,CAAC,MAAM,EAAE,UAAU,EAAE,KAAK,IAAI,KAAK,QAAQ;AAAA,MAC7F;AAAA,IACF;AACA,UAAM,SAAS;AAAA,MACb;AAAA,MACA;AAAA,QACE,OAAO,OAAO,KAAK,KAAK,UAAU,OAAO,CAAC;AAAA,QAC1C,SAAS,QAAQ,UACb,OAAO;AAAA,UACL,OAAO,QAAQ,QAAQ,OAAO,EAAE,IAAI,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;AAAA,QAC1D,IACA,CAAC;AAAA,QACL,KAAK,QAAQ,QAAQ,SAAY,OAAO,KAAK,QAAQ,GAAG,IAAI;AAAA,MAC9D;AAAA,MACA,QAAQ,aAAa;AAAA,MACrB,QAAQ,UAAU;AAAA,IACpB;AAAA,EACF;AACF;","names":["c"]}
|