kafka-ts 0.0.1-beta.2 → 0.0.1-beta.4

@@ -14,7 +14,7 @@ const trace = createTracer('FetchManager');
 export type BatchGranularity = 'partition' | 'topic' | 'broker';
 
 type FetchManagerOptions = {
-    fetch: (nodeId: number, assignment: Assignment) => Promise<ReturnType<(typeof API.FETCH)['response']>>;
+    fetch: (nodeId: number, assignment: Assignment) => Promise<Awaited<ReturnType<(typeof API.FETCH)['response']>>>;
     process: (batch: Batch) => Promise<void>;
     metadata: Metadata;
     consumerGroup?: ConsumerGroup;
@@ -86,6 +86,7 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
 
         const batch = this.queue.shift();
         if (!batch) {
+            // wait until new data is available or fetch manager is requested to stop
             await new Promise<void>((resolve) => {
                 const onData = () => {
                     this.removeListener('stop', onStop);
@@ -109,35 +110,40 @@ export class FetchManager extends EventEmitter<{ data: []; checkpoint: [number];
         return batch as Exclude<Entry, Checkpoint>;
     }
 
-    private async onResponse(fetcherId: number, response: ReturnType<(typeof API.FETCH)['response']>) {
+    private async onResponse(fetcherId: number, response: Awaited<ReturnType<(typeof API.FETCH)['response']>>) {
         const { metadata, batchGranularity } = this.options;
 
         const batches = fetchResponseToBatches(response, batchGranularity, metadata);
-        if (batches.length) {
+        if (!batches.length) {
+            return;
+        }
+
+        // wait until all broker batches have been processed or fetch manager is requested to stop
+        await new Promise<void>((resolve) => {
+            const onCheckpoint = (id: number) => {
+                if (id === fetcherId) {
+                    this.removeListener('checkpoint', onCheckpoint);
+                    this.removeListener('stop', onStop);
+                    resolve();
+                }
+            };
+            const onStop = () => {
+                this.removeListener('checkpoint', onCheckpoint);
+                resolve();
+            };
+            this.on('checkpoint', onCheckpoint);
+            this.once('stop', onStop);
+
             this.queue.push(...batches);
             this.queue.push({ kind: 'checkpoint', fetcherId });
 
             this.emit('data');
-            await new Promise<void>((resolve) => {
-                const onCheckpoint = (id: number) => {
-                    if (id === fetcherId) {
-                        this.removeListener('stop', onStop);
-                        resolve();
-                    }
-                };
-                const onStop = () => {
-                    this.removeListener('checkpoint', onCheckpoint);
-                    resolve();
-                };
-                this.once('checkpoint', onCheckpoint);
-                this.once('stop', onStop);
-            });
-        }
+        });
     }
 }
 
 const fetchResponseToBatches = (
-    batch: ReturnType<typeof API.FETCH.response>,
+    batch: Awaited<ReturnType<typeof API.FETCH.response>>,
     batchGranularity: BatchGranularity,
     metadata: Metadata,
 ): Batch[] => {
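
The reworked onResponse above flips the flow: it bails out early when a fetch returns no batches, and it now registers the checkpoint/stop listeners before pushing batches onto the queue and emitting 'data', switching from once('checkpoint', ...) to on(...) so a checkpoint for another fetcher no longer consumes the only listener. A minimal, generic sketch of that "attach listeners, then enqueue" ordering, using Node's plain EventEmitter and a hypothetical waitForCheckpoint helper (not kafka-ts API):

    import { EventEmitter } from 'events';

    // Resolves once 'checkpoint' fires for this fetcher, or as soon as 'stop' fires.
    // Listeners are attached *before* enqueue() runs, so a checkpoint emitted right
    // after the batches are queued cannot be missed.
    const waitForCheckpoint = (emitter: EventEmitter, fetcherId: number, enqueue: () => void) =>
        new Promise<void>((resolve) => {
            const onCheckpoint = (id: number) => {
                if (id !== fetcherId) return; // 'on' rather than 'once': ignore other fetchers
                emitter.removeListener('checkpoint', onCheckpoint);
                emitter.removeListener('stop', onStop);
                resolve();
            };
            const onStop = () => {
                emitter.removeListener('checkpoint', onCheckpoint);
                resolve();
            };
            emitter.on('checkpoint', onCheckpoint);
            emitter.once('stop', onStop);
            enqueue(); // push batches + checkpoint marker, then emit 'data'
        });
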
@@ -10,8 +10,8 @@ type FetcherOptions = {
     nodeId: number;
     assignment: Assignment;
     consumerGroup?: ConsumerGroup;
-    fetch: (nodeId: number, assignment: Assignment) => Promise<ReturnType<(typeof API.FETCH)['response']>>;
-    onResponse: (fetcherId: number, response: ReturnType<(typeof API.FETCH)['response']>) => Promise<void>;
+    fetch: (nodeId: number, assignment: Assignment) => Promise<Awaited<ReturnType<(typeof API.FETCH)['response']>>>;
+    onResponse: (fetcherId: number, response: Awaited<ReturnType<(typeof API.FETCH)['response']>>) => Promise<void>;
 };
 
 export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; drain: [] }> {
@@ -26,10 +26,10 @@ export class Fetcher extends EventEmitter<{ stop: []; stopped: []; data: []; drain: [] }> {
 
     public async loop() {
         const { nodeId, assignment, consumerGroup, fetch, onResponse } = this.options;
-
+
         this.isRunning = true;
         this.once('stop', () => (this.isRunning = false));
-
+
         try {
             while (this.isRunning) {
                 const response = await fetch(nodeId, assignment);
@@ -25,7 +25,9 @@ export class Processor extends EventEmitter<{ stop: []; stopped: [] }> {
         try {
             while (this.isRunning) {
                 const batch = await poll();
-                await process(batch);
+                if (batch.length) {
+                    await process(batch);
+                }
             }
         } finally {
             this.isRunning = false;
package/src/index.ts CHANGED
@@ -5,3 +5,5 @@ export * from './client';
 export * from './distributors/partitioner';
 export * from './types';
 export * from './utils/error';
+export * from './utils/logger';
+
@@ -34,7 +34,7 @@ export class Producer {
         this.partition = this.options.partitioner({ metadata: this.metadata });
     }
 
-    public async send(messages: Message[]) {
+    public async send(messages: Message[], { acks = -1 }: { acks?: -1 | 1 } = {}) {
         await this.ensureConnected();
 
         const { allowTopicAutoCreation } = this.options;
@@ -44,7 +44,7 @@
         await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
 
         const nodeTopicPartitionMessages = distributeMessagesToTopicPartitionLeaders(
-            messages.map(message => ({ ...message, partition: this.partition(message) })),
+            messages.map((message) => ({ ...message, partition: this.partition(message) })),
             this.metadata.getTopicPartitionLeaderIds(),
         );
 
@@ -52,7 +52,7 @@
             Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
                 await this.cluster.sendRequestToNode(parseInt(nodeId))(API.PRODUCE, {
                     transactionalId: null,
-                    acks: 1,
+                    acks,
                     timeoutMs: 5000,
                     topicData: Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
                         name: topic,
@@ -85,7 +85,7 @@ export class Producer {
                         attributes: 0,
                         timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
                         offsetDelta: index,
-                        key: message.key,
+                        key: message.key ?? null,
                         value: message.value,
                         headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({
                             key: Buffer.from(key),
package/src/types.ts CHANGED
@@ -1,9 +1,9 @@
 export type Message = {
     topic: string;
-    partition: number;
+    partition?: number;
     offset?: bigint;
     timestamp?: bigint;
-    key: Buffer | null;
+    key?: Buffer | null;
     value: Buffer | null;
     headers?: Record<string, string>;
 };
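
Combined with the producer changes above, Message no longer requires partition or key, and send now takes an optional acks setting that defaults to -1 (wait for all in-sync replicas) instead of the previously hardcoded acks: 1. A rough usage sketch; the producer variable stands in for an already-constructed kafka-ts producer and is not part of this diff:

    // partition is filled in by the configured partitioner; a missing key is sent as null
    await producer.send(
        [
            { topic: 'my-topic', value: Buffer.from('hello') },
            { topic: 'my-topic', key: Buffer.from('user-1'), value: Buffer.from('world') },
        ],
        { acks: 1 }, // omit the options object to use the new default of acks: -1
    );
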
package/src/utils/api.ts CHANGED
@@ -5,7 +5,7 @@ export type Api<Request, Response> = {
     apiKey: number;
     apiVersion: number;
     request: (encoder: Encoder, body: Request) => Encoder;
-    response: (buffer: Decoder) => Response;
+    response: (buffer: Decoder) => Promise<Response> | Response;
 };
 
 export const createApi = <Request, Response>(api: Api<Request, Response>) => api;
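
This is the change the Awaited<ReturnType<...>> edits in the fetch manager and fetcher follow from: a response parser may now return either a plain value or a Promise, so ReturnType alone would sometimes include the Promise wrapper. A small type-level sketch in plain TypeScript (not kafka-ts code):

    // A parser in the style of Api['response'] that happens to be async.
    const parseExample = async (buffer: Buffer) => ({ topics: [] as string[] });

    type Raw = ReturnType<typeof parseExample>;                // Promise<{ topics: string[] }>
    type Resolved = Awaited<ReturnType<typeof parseExample>>;  // { topics: string[] }

    // Awaited lets callers type the resolved response regardless of whether the
    // parser returns Response or Promise<Response>.
    const handle = (response: Resolved) => response.topics.length;
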
@@ -7,6 +7,10 @@ export class Decoder {
         return this.offset;
     }
 
+    public getBuffer() {
+        return this.buffer;
+    }
+
     public getBufferLength() {
         return this.buffer.length;
     }
@@ -132,6 +136,12 @@
         return results;
     }
 
+    public readVarIntArray<T>(callback: (opts: Decoder) => T): T[] {
+        const length = this.readVarInt();
+        const results = Array.from({ length }).map(() => callback(this));
+        return results;
+    }
+
     public readRecords<T>(callback: (opts: Decoder) => T): T[] {
         const length = this.readInt32();
 
@@ -143,9 +153,9 @@
         });
     }
 
-    public read(length: number) {
-        const value = this.buffer.subarray(this.offset, this.offset + length);
-        this.offset += length;
+    public read(length?: number) {
+        const value = this.buffer.subarray(this.offset, length !== undefined ? this.offset + length : undefined);
+        this.offset += Buffer.byteLength(value);
         return value;
     }
 
@@ -0,0 +1,37 @@
+export interface Logger {
+    debug: (message: string, metadata?: unknown) => void;
+    info: (message: string, metadata?: unknown) => void;
+    warn: (message: string, metadata?: unknown) => void;
+    error: (message: string, metadata?: unknown) => void;
+}
+
+export const jsonSerializer = (_: unknown, v: unknown) => {
+    if (v instanceof Error) {
+        return { name: v.name, message: v.message, stack: v.stack, cause: v.cause };
+    }
+    if (typeof v === 'bigint') {
+        return v.toString();
+    }
+    return v;
+};
+
+class JsonLogger implements Logger {
+    debug(message: string, metadata?: unknown) {
+        console.debug(JSON.stringify({ message, metadata }, jsonSerializer));
+    }
+    info(message: string, metadata?: unknown) {
+        console.info(JSON.stringify({ message, metadata }, jsonSerializer));
+    }
+    warn(message: string, metadata?: unknown) {
+        console.warn(JSON.stringify({ message, metadata }, jsonSerializer));
+    }
+    error(message: string, metadata?: unknown) {
+        console.error(JSON.stringify({ message, metadata }, jsonSerializer));
+    }
+}
+
+export let log: Logger = new JsonLogger();
+
+export const setLogger = (newLogger: Logger) => {
+    log = newLogger;
+};
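
The new logger module is re-exported from the package index (see the index.ts hunk above), so the default JSON console logger can be replaced. A sketch, assuming the symbols are imported from the package root; the adapter itself is hypothetical:

    import { setLogger, type Logger } from 'kafka-ts';

    // Forward kafka-ts log lines to your own structured logger.
    const customLogger: Logger = {
        debug: (message, metadata) => console.debug('[kafka-ts]', message, metadata),
        info: (message, metadata) => console.info('[kafka-ts]', message, metadata),
        warn: (message, metadata) => console.warn('[kafka-ts]', message, metadata),
        error: (message, metadata) => console.error('[kafka-ts]', message, metadata),
    };

    setLogger(customLogger);

The tracer change further down routes trace output through log.debug, so a custom logger also controls trace visibility.
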
@@ -0,0 +1,31 @@
+export const createMutex = () => {
+    const queue: (() => void)[] = [];
+    let isLocked = false;
+
+    const acquire = () => {
+        return new Promise<void>((resolve) => {
+            if (!isLocked) {
+                isLocked = true;
+                return resolve();
+            }
+            queue.push(resolve);
+        });
+    };
+
+    const release = () => {
+        isLocked = false;
+        const next = queue.shift();
+        next && next();
+    };
+
+    const exclusive = async (fn: () => Promise<void>) => {
+        await acquire();
+        try {
+            await fn();
+        } finally {
+            release();
+        }
+    };
+
+    return { exclusive };
+};
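
The mutex helper appears to be internal (it is not added to the index exports in this diff); exclusive serializes async sections in FIFO order and releases the lock in a finally block even if the wrapped function throws. A usage sketch based only on the code above:

    const mutex = createMutex();

    // Only one exclusive() body runs at a time; concurrent callers wait their turn.
    await mutex.exclusive(async () => {
        // ...critical section...
    });
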
@@ -1,4 +1,4 @@
-import { serializer } from './debug';
+import { log } from './logger';
 
 export const createTracer =
     (module: string, attributes?: Record<string, unknown>) =>
@@ -12,9 +12,11 @@ export const createTracer =
             const metadata = fn?.(...args);
 
             const onEnd = <T>(result: T): T => {
-                console.log(
-                    `[${module}.${propertyKey}] +${Date.now() - startTime}ms ${JSON.stringify({ ...attributes, ...metadata, result }, serializer)}`,
-                );
+                log.debug(`[${module}.${propertyKey}] ${metadata?.message ?? ''} +${Date.now() - startTime}ms`, {
+                    ...attributes,
+                    ...metadata,
+                    ...result && { result},
+                });
                 return result;
             };
 
@@ -1,9 +0,0 @@
-export const serializer = (_: string, value: unknown) => (typeof value === 'bigint' ? value.toString() : value);
-
-export const createDebugger = (module: string) => (func: string, message: string, data?: unknown) => {
-    if (!process.env.DEBUG?.includes('kafka-ts')) return;
-    console.debug(
-        `[${module}] ${func}: ${message}`,
-        data && `(${data instanceof Error ? data : JSON.stringify(data, serializer, 4)})`,
-    );
-};