@platformatic/kafka 1.27.0-alpha.1 → 1.27.0-alpha.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55) hide show
  1. package/README.md +58 -0
  2. package/dist/clients/consumer/consumer.js +24 -1
  3. package/dist/clients/consumer/messages-stream.js +66 -10
  4. package/dist/clients/consumer/options.d.ts +24 -0
  5. package/dist/clients/consumer/options.js +3 -1
  6. package/dist/clients/consumer/types.d.ts +4 -1
  7. package/dist/clients/producer/options.d.ts +2 -18
  8. package/dist/clients/producer/options.js +3 -1
  9. package/dist/clients/producer/producer.js +75 -15
  10. package/dist/clients/producer/types.d.ts +4 -1
  11. package/dist/clients/serde.d.ts +11 -6
  12. package/dist/errors.d.ts +5 -1
  13. package/dist/errors.js +8 -0
  14. package/dist/index.d.ts +1 -0
  15. package/dist/index.js +2 -0
  16. package/dist/network/connection.d.ts +7 -6
  17. package/dist/protocol/compression.js +45 -6
  18. package/dist/protocol/crc32c.js +4 -3
  19. package/dist/protocol/definitions.js +1 -1
  20. package/dist/protocol/reader.js +1 -1
  21. package/dist/protocol/records.d.ts +7 -18
  22. package/dist/protocol/records.js +2 -6
  23. package/dist/protocol/sasl/oauth-bearer.d.ts +3 -3
  24. package/dist/protocol/sasl/plain.d.ts +3 -3
  25. package/dist/protocol/sasl/scram-sha.d.ts +3 -3
  26. package/dist/protocol/sasl/utils.d.ts +3 -3
  27. package/dist/protocol/writer.js +1 -1
  28. package/dist/registries/abstract.d.ts +22 -0
  29. package/dist/registries/abstract.js +38 -0
  30. package/dist/registries/confluent-schema-registry.d.ts +41 -0
  31. package/dist/registries/confluent-schema-registry.js +222 -0
  32. package/dist/registries/index.d.ts +2 -0
  33. package/dist/registries/index.js +2 -0
  34. package/dist/typescript-4/dist/clients/consumer/options.d.ts +24 -0
  35. package/dist/typescript-4/dist/clients/consumer/types.d.ts +4 -1
  36. package/dist/typescript-4/dist/clients/producer/options.d.ts +2 -18
  37. package/dist/typescript-4/dist/clients/producer/types.d.ts +4 -1
  38. package/dist/typescript-4/dist/clients/serde.d.ts +11 -6
  39. package/dist/typescript-4/dist/errors.d.ts +5 -1
  40. package/dist/typescript-4/dist/index.d.ts +2 -1
  41. package/dist/typescript-4/dist/network/connection.d.ts +7 -6
  42. package/dist/typescript-4/dist/protocol/records.d.ts +7 -18
  43. package/dist/typescript-4/dist/protocol/sasl/oauth-bearer.d.ts +3 -3
  44. package/dist/typescript-4/dist/protocol/sasl/plain.d.ts +3 -3
  45. package/dist/typescript-4/dist/protocol/sasl/scram-sha.d.ts +3 -3
  46. package/dist/typescript-4/dist/protocol/sasl/utils.d.ts +3 -3
  47. package/dist/typescript-4/dist/registries/abstract.d.ts +22 -0
  48. package/dist/typescript-4/dist/registries/confluent-schema-registry.d.ts +41 -0
  49. package/dist/typescript-4/dist/registries/index.d.ts +2 -0
  50. package/dist/version.js +1 -1
  51. package/package.json +12 -12
  52. package/dist/native.wasm +0 -0
  53. package/dist/protocol/native.d.ts +0 -8
  54. package/dist/protocol/native.js +0 -48
  55. package/dist/typescript-4/dist/protocol/native.d.ts +0 -8
package/README.md CHANGED
@@ -10,6 +10,7 @@ A modern, high-performance, pure TypeScript/JavaScript type safe client for Apac
10
10
  - **Flexible API**: You can use promises or callbacks on all APIs.
11
11
  - **Streaming or Event-based Consumers**: Thanks to Node.js streams, you can choose your preferred consuming method.
12
12
  - **Flexible Serialisation**: Pluggable serialisers and deserialisers.
13
+ - **Schema Registry Support**: Built-in Confluent Schema Registry integration with AVRO, Protobuf, and JSON Schema support.
13
14
  - **Connection Management**: Automatic connection pooling and recovery.
14
15
  - **Low Dependencies**: Minimal external dependencies.
15
16
 
@@ -141,6 +142,63 @@ await admin.deleteTopics({ topics: ['my-topic'] })
141
142
  await admin.close()
142
143
  ```
143
144
 
145
+ ### Schema Registry
146
+
147
+ The library includes built-in support for Confluent Schema Registry with AVRO, Protocol Buffers, and JSON Schema:
148
+
149
+ ```typescript
150
+ import { Producer, Consumer } from '@platformatic/kafka'
151
+ import { ConfluentSchemaRegistry } from '@platformatic/kafka/registries'
152
+
153
+ // Create a schema registry instance
154
+ const registry = new ConfluentSchemaRegistry({
155
+ url: 'http://localhost:8081',
156
+ auth: {
157
+ username: 'user',
158
+ password: 'password'
159
+ }
160
+ })
161
+
162
+ // Producer with schema registry
163
+ const producer = new Producer({
164
+ clientId: 'schema-producer',
165
+ bootstrapBrokers: ['localhost:9092'],
166
+ registry // Automatic serialization with schemas
167
+ })
168
+
169
+ // Send messages with schema IDs
170
+ await producer.send({
171
+ messages: [{
172
+ topic: 'users',
173
+ value: { id: 1, name: 'Alice' },
174
+ metadata: {
175
+ schemas: {
176
+ value: 100 // Schema ID in the registry
177
+ }
178
+ }
179
+ }]
180
+ })
181
+
182
+ // Consumer with schema registry
183
+ const consumer = new Consumer({
184
+ groupId: 'schema-consumers',
185
+ clientId: 'schema-consumer',
186
+ bootstrapBrokers: ['localhost:9092'],
187
+ registry // Automatic deserialization with schemas
188
+ })
189
+
190
+ const stream = await consumer.consume({
191
+ topics: ['users']
192
+ })
193
+
194
+ // Messages are automatically deserialized
195
+ for await (const message of stream) {
196
+ console.log('User:', message.value) // Typed object
197
+ }
198
+ ```
199
+
200
+ For more details, see the [Confluent Schema Registry documentation](./docs/confluent-schema-registry.md).
201
+
144
202
  ## TLS and SASL
145
203
 
146
204
  See the relevant sections in the [Base Client](./docs/base.md) page.
@@ -40,6 +40,15 @@ export class Consumer extends Base {
40
40
  constructor(options) {
41
41
  super({ ...defaultConsumerOptions, ...options });
42
42
  this[kValidateOptions](options, consumerOptionsValidator, '/options');
43
+ if (options.registry) {
44
+ if (options.beforeDeserialization) {
45
+ throw new UserError('/options/beforeDeserialization cannot be provided when /options/registry is provided.');
46
+ }
47
+ else if (options.deserializers) {
48
+ throw new UserError('/options/deserializers cannot be provided when /options/registry is provided.');
49
+ }
50
+ options.registry.getDeserializers();
51
+ }
43
52
  this.groupId = options.groupId;
44
53
  this.groupInstanceId = options.groupInstanceId ?? null;
45
54
  this.generationId = 0;
@@ -150,8 +159,22 @@ export class Consumer extends Base {
150
159
  }
151
160
  options.autocommit ??= this[kOptions].autocommit ?? true;
152
161
  options.maxBytes ??= this[kOptions].maxBytes;
153
- options.deserializers = Object.assign({}, options.deserializers, this[kOptions].deserializers);
154
162
  options.highWaterMark ??= this[kOptions].highWaterMark;
163
+ options.registry ??= this[kOptions].registry;
164
+ options.beforeDeserialization ??= this[kOptions].beforeDeserialization;
165
+ if (options.registry) {
166
+ if (options.beforeDeserialization) {
167
+ throw new UserError('/options/beforeDeserialization cannot be provided when /options/registry is provided.');
168
+ /* c8 ignore next - Hard to test */
169
+ }
170
+ else if (options.deserializers || this[kOptions].deserializers) {
171
+ throw new UserError('/options/deserializers cannot be provided when /options/registry is provided.');
172
+ }
173
+ options.deserializers = options.registry.getDeserializers();
174
+ }
175
+ else {
176
+ options.deserializers = Object.assign({}, options.deserializers, this[kOptions].deserializers);
177
+ }
155
178
  this.#consume(options, callback);
156
179
  return callback[kCallbackPromise];
157
180
  }
@@ -4,6 +4,7 @@ import { ListOffsetTimestamps } from "../../apis/enumerations.js";
4
4
  import { consumerReceivesChannel, createDiagnosticContext, notifyCreation } from "../../diagnostic.js";
5
5
  import { UserError } from "../../errors.js";
6
6
  import { IS_CONTROL } from "../../protocol/records.js";
7
+ import { runAsyncSeries } from "../../registries/abstract.js";
7
8
  import { kAutocommit, kInstance, kRefreshOffsetsAndFetch } from "../../symbols.js";
8
9
  import { kConnections, kCreateConnectionPool, kInspect, kPrometheus } from "../base/base.js";
9
10
  import { ensureMetric } from "../metrics.js";
@@ -56,6 +57,7 @@ export class MessagesStream extends Readable {
56
57
  #closeCallbacks;
57
58
  #metricsConsumedMessages;
58
59
  #corruptedMessageHandler;
60
+ #pushRecordsOperation;
59
61
  [kInstance];
60
62
  /*
61
63
  The following requests are blocking in Kafka:
@@ -74,7 +76,7 @@ export class MessagesStream extends Readable {
74
76
  */
75
77
  [kConnections];
76
78
  constructor(consumer, options) {
77
- const { autocommit, mode, fallbackMode, maxFetches, offsets, deserializers, onCorruptedMessage,
79
+ const { autocommit, mode, fallbackMode, maxFetches, offsets, deserializers, onCorruptedMessage, registry, beforeDeserialization,
78
80
  // The options below are only destructured to avoid being part of structuredClone below
79
81
  partitionAssigner: _partitionAssigner, ...otherOptions } = options;
80
82
  if (offsets && mode !== MessagesStreamModes.MANUAL) {
@@ -101,8 +103,10 @@ export class MessagesStream extends Readable {
101
103
  this.#maxFetches = maxFetches ?? 0;
102
104
  this.#topics = structuredClone(options.topics);
103
105
  this.#inflightNodes = new Set();
104
- this.#keyDeserializer = deserializers?.key ?? noopDeserializer;
105
- this.#valueDeserializer = deserializers?.value ?? noopDeserializer;
106
+ this.#keyDeserializer =
107
+ deserializers?.key ?? noopDeserializer;
108
+ this.#valueDeserializer =
109
+ deserializers?.value ?? noopDeserializer;
106
110
  this.#headerKeyDeserializer = deserializers?.headerKey ?? noopDeserializer;
107
111
  this.#headerValueDeserializer = deserializers?.headerValue ?? noopDeserializer;
108
112
  this.#autocommitEnabled = !!options.autocommit;
@@ -110,6 +114,15 @@ export class MessagesStream extends Readable {
110
114
  this.#closed = false;
111
115
  this.#closeCallbacks = [];
112
116
  this.#corruptedMessageHandler = onCorruptedMessage ?? defaultCorruptedMessageHandler;
117
+ if (registry) {
118
+ this.#pushRecordsOperation = this.#beforeDeserialization.bind(this, registry.getBeforeDeserializationHook());
119
+ }
120
+ else if (beforeDeserialization) {
121
+ this.#pushRecordsOperation = this.#beforeDeserialization.bind(this, beforeDeserialization);
122
+ }
123
+ else {
124
+ this.#pushRecordsOperation = this.#pushRecords.bind(this);
125
+ }
113
126
  // Restore offsets
114
127
  this.#offsetsToFetch = new Map();
115
128
  if (offsets) {
@@ -362,10 +375,7 @@ export class MessagesStream extends Readable {
362
375
  }
363
376
  return;
364
377
  }
365
- this.#pushRecords(metadata, topicIds, response, requestedOffsets);
366
- if (this.#maxFetches > 0 && ++this.#fetches >= this.#maxFetches) {
367
- this.push(null);
368
- }
378
+ this.#pushRecordsOperation(metadata, topicIds, response, requestedOffsets);
369
379
  });
370
380
  }
371
381
  });
@@ -419,6 +429,7 @@ export class MessagesStream extends Readable {
419
429
  }
420
430
  // Process messages
421
431
  for (const record of batch.records) {
432
+ const messageToConsume = { ...record, topic, partition };
422
433
  const offset = batch.firstOffset + BigInt(record.offsetDelta);
423
434
  if (offset < requestedOffsets.get(`${topic}:${partition}`)) {
424
435
  // This is a duplicate message, ignore it
@@ -437,10 +448,10 @@ export class MessagesStream extends Readable {
437
448
  try {
438
449
  const headers = new Map();
439
450
  for (const [headerKey, headerValue] of record.headers) {
440
- headers.set(headerKeyDeserializer(headerKey), headerValueDeserializer(headerValue));
451
+ headers.set(headerKeyDeserializer(headerKey, messageToConsume), headerValueDeserializer(headerValue, messageToConsume));
441
452
  }
442
- const key = keyDeserializer(record.key, headers);
443
- const value = valueDeserializer(record.value, headers);
453
+ const key = keyDeserializer(record.key, headers, messageToConsume);
454
+ const value = valueDeserializer(record.value, headers, messageToConsume);
444
455
  this.#metricsConsumedMessages?.inc();
445
456
  const message = {
446
457
  key,
@@ -483,6 +494,9 @@ export class MessagesStream extends Readable {
483
494
  this.#fetch();
484
495
  });
485
496
  }
497
+ if (this.#maxFetches > 0 && ++this.#fetches >= this.#maxFetches) {
498
+ this.push(null);
499
+ }
486
500
  }
487
501
  #updateCommittedOffset(topic, partition, offset) {
488
502
  const key = `${topic}:${partition}`;
@@ -637,4 +651,46 @@ export class MessagesStream extends Readable {
637
651
  [kInspect](...args) {
638
652
  this.#consumer[kInspect](...args);
639
653
  }
654
+ #beforeDeserialization(hook, metadata, topicIds, response, requestedOffsets) {
655
+ const requests = [];
656
+ // Create the pre-deserialization requests
657
+ for (const topicResponse of response.responses) {
658
+ for (const { records: recordsBatches, partitionIndex: partition } of topicResponse.partitions) {
659
+ /* c8 ignore next 3 - Hard to test */
660
+ if (!recordsBatches) {
661
+ continue;
662
+ }
663
+ for (const batch of recordsBatches) {
664
+ // Filter control markers
665
+ /* c8 ignore next 3 - Hard to test */
666
+ if (batch.attributes & IS_CONTROL) {
667
+ continue;
668
+ }
669
+ for (const message of batch.records) {
670
+ message.topic = topicIds.get(topicResponse.topicId);
671
+ message.partition = partition;
672
+ requests.push([message.key, 'key', message]);
673
+ requests.push([message.value, 'value', message]);
674
+ for (const [headerKey, headerValue] of message.headers) {
675
+ requests.push([headerKey, 'headerKey', message]);
676
+ requests.push([headerValue, 'headerValue', message]);
677
+ }
678
+ }
679
+ }
680
+ }
681
+ }
682
+ runAsyncSeries((request, cb) => {
683
+ const [data, type, message] = request;
684
+ const result = hook(data, type, message, cb);
685
+ if (typeof result?.then === 'function') {
686
+ result.then(() => cb(null), cb);
687
+ }
688
+ }, requests, 0, error => {
689
+ if (error) {
690
+ this.destroy(error);
691
+ return;
692
+ }
693
+ this.#pushRecords(metadata, topicIds, response, requestedOffsets);
694
+ });
695
+ }
640
696
  }
@@ -127,6 +127,12 @@ export declare const consumeOptionsProperties: {
127
127
  type: string;
128
128
  minimum: number;
129
129
  };
130
+ beforeDeserialization: {
131
+ function: boolean;
132
+ };
133
+ registry: {
134
+ type: string;
135
+ };
130
136
  };
131
137
  export declare const groupOptionsSchema: {
132
138
  type: string;
@@ -241,6 +247,12 @@ export declare const consumeOptionsSchema: {
241
247
  type: string;
242
248
  minimum: number;
243
249
  };
250
+ beforeDeserialization: {
251
+ function: boolean;
252
+ };
253
+ registry: {
254
+ type: string;
255
+ };
244
256
  groupInstanceId: {
245
257
  type: string;
246
258
  pattern: string;
@@ -396,6 +408,12 @@ export declare const consumerOptionsSchema: {
396
408
  type: string;
397
409
  minimum: number;
398
410
  };
411
+ beforeDeserialization: {
412
+ function: boolean;
413
+ };
414
+ registry: {
415
+ type: string;
416
+ };
399
417
  groupInstanceId: {
400
418
  type: string;
401
419
  pattern: string;
@@ -511,6 +529,12 @@ export declare const fetchOptionsSchema: {
511
529
  type: string;
512
530
  minimum: number;
513
531
  };
532
+ beforeDeserialization: {
533
+ function: boolean;
534
+ };
535
+ registry: {
536
+ type: string;
537
+ };
514
538
  groupInstanceId: {
515
539
  type: string;
516
540
  pattern: string;
@@ -61,7 +61,9 @@ export const consumeOptionsProperties = {
61
61
  maxWaitTime: { type: 'number', minimum: 0 },
62
62
  isolationLevel: { type: 'number', enum: allowedFetchIsolationLevels },
63
63
  deserializers: serdeProperties,
64
- highWaterMark: { type: 'number', minimum: 1 }
64
+ highWaterMark: { type: 'number', minimum: 1 },
65
+ beforeDeserialization: { function: true },
66
+ registry: { type: 'object' }
65
67
  };
66
68
  export const groupOptionsSchema = {
67
69
  type: 'object',
@@ -2,8 +2,9 @@ import { type FetchRequestTopic } from '../../apis/consumer/fetch-v17.ts';
2
2
  import { type GroupProtocols } from '../../apis/enumerations.ts';
3
3
  import { type ConnectionPool } from '../../network/connection-pool.ts';
4
4
  import { type KafkaRecord, type Message } from '../../protocol/records.ts';
5
+ import { type SchemaRegistry } from '../../registries/abstract.ts';
5
6
  import { type BaseOptions, type ClusterMetadata, type TopicWithPartitionAndOffset } from '../base/types.ts';
6
- import { type Deserializers } from '../serde.ts';
7
+ import { type BeforeDeserializationHook, type Deserializers } from '../serde.ts';
7
8
  export interface GroupProtocolSubscription {
8
9
  name: string;
9
10
  version: number;
@@ -68,6 +69,8 @@ export interface ConsumeBaseOptions<Key, Value, HeaderKey, HeaderValue> {
68
69
  isolationLevel?: number;
69
70
  deserializers?: Partial<Deserializers<Key, Value, HeaderKey, HeaderValue>>;
70
71
  highWaterMark?: number;
72
+ beforeDeserialization?: BeforeDeserializationHook;
73
+ registry?: SchemaRegistry<unknown, unknown, Key, Value, HeaderKey, HeaderValue>;
71
74
  }
72
75
  export interface StreamOptions {
73
76
  topics: string[];
@@ -121,24 +121,8 @@ export declare const sendOptionsSchema: {
121
121
  items: {
122
122
  type: string;
123
123
  properties: {
124
- key: {
125
- oneOf: ({
126
- type: string;
127
- buffer?: undefined;
128
- } | {
129
- buffer: boolean;
130
- type?: undefined;
131
- })[];
132
- };
133
- value: {
134
- oneOf: ({
135
- type: string;
136
- buffer?: undefined;
137
- } | {
138
- buffer: boolean;
139
- type?: undefined;
140
- })[];
141
- };
124
+ key: boolean;
125
+ value: boolean;
142
126
  headers: {
143
127
  anyOf: ({
144
128
  map: boolean;
@@ -36,7 +36,9 @@ export const producerOptionsValidator = ajv.compile({
36
36
  type: 'object',
37
37
  properties: {
38
38
  ...produceOptionsProperties,
39
- serializers: serdeProperties
39
+ serializers: serdeProperties,
40
+ beforeSerialization: { function: true },
41
+ registry: { type: 'object' }
40
42
  },
41
43
  additionalProperties: true
42
44
  });
@@ -4,6 +4,7 @@ import { FindCoordinatorKeyTypes, ProduceAcks } from "../../apis/enumerations.js
4
4
  import { createDiagnosticContext, producerInitIdempotentChannel, producerSendsChannel, producerTransactionsChannel } from "../../diagnostic.js";
5
5
  import { UserError } from "../../errors.js";
6
6
  import { murmur2 } from "../../protocol/murmur2.js";
7
+ import { runAsyncSeries } from "../../registries/abstract.js";
7
8
  import { kInstance, kTransaction, kTransactionAddOffsets, kTransactionAddPartitions, kTransactionCancel, kTransactionCommitOffset, kTransactionEnd, kTransactionFindCoordinator, kTransactionPrepare } from "../../symbols.js";
8
9
  import { NumericMap } from "../../utils.js";
9
10
  import { Base, kAfterCreate, kCheckNotClosed, kClosed, kGetApi, kGetBootstrapConnection, kGetConnection, kMetadata, kOptions, kPerformDeduplicated, kPerformWithRetry, kPrometheus, kValidateOptions } from "../base/base.js";
@@ -28,6 +29,7 @@ export class Producer extends Base {
28
29
  #metricsProducedMessages;
29
30
  #coordinatorId;
30
31
  #transaction;
32
+ #sendOperation;
31
33
  constructor(options) {
32
34
  if (options.idempotent) {
33
35
  options.maxInflights = 1;
@@ -39,18 +41,37 @@ export class Producer extends Base {
39
41
  }
40
42
  options.repeatOnStaleMetadata ??= true;
41
43
  super(options);
44
+ this[kValidateOptions](options, producerOptionsValidator, '/options');
45
+ let serializers = options.serializers;
46
+ if (options.registry) {
47
+ if (options.beforeSerialization) {
48
+ throw new UserError('/options/beforeSerialization cannot be provided when /options/registry is provided.');
49
+ }
50
+ else if (options.serializers) {
51
+ throw new UserError('/options/serializers cannot be provided when /options/registry is provided.');
52
+ }
53
+ serializers = options.registry.getSerializers();
54
+ }
42
55
  this.#partitionsRoundRobin = new NumericMap();
43
56
  this.#sequences = new NumericMap();
44
- this.#keySerializer = options.serializers?.key ?? noopSerializer;
45
- this.#valueSerializer = options.serializers?.value ?? noopSerializer;
46
- this.#headerKeySerializer = options.serializers?.headerKey ?? noopSerializer;
47
- this.#headerValueSerializer = options.serializers?.headerValue ?? noopSerializer;
57
+ this.#keySerializer = serializers?.key ?? noopSerializer;
58
+ this.#valueSerializer = serializers?.value ?? noopSerializer;
59
+ this.#headerKeySerializer = serializers?.headerKey ?? noopSerializer;
60
+ this.#headerValueSerializer = serializers?.headerValue ?? noopSerializer;
48
61
  this[kOptions].transactionalId ??= randomUUID();
49
- this[kValidateOptions](options, producerOptionsValidator, '/options');
50
62
  if (this[kPrometheus]) {
51
63
  ensureMetric(this[kPrometheus], 'Gauge', 'kafka_producers', 'Number of active Kafka producers').inc();
52
64
  this.#metricsProducedMessages = ensureMetric(this[kPrometheus], 'Counter', 'kafka_produced_messages', 'Number of produced Kafka messages');
53
65
  }
66
+ if (options.registry) {
67
+ this.#sendOperation = this.#beforeSerialization.bind(this, options.registry.getBeforeSerializationHook());
68
+ }
69
+ else if (options.beforeSerialization) {
70
+ this.#sendOperation = this.#beforeSerialization.bind(this, options.beforeSerialization);
71
+ }
72
+ else {
73
+ this.#sendOperation = this.#send.bind(this);
74
+ }
54
75
  this[kAfterCreate]('producer');
55
76
  }
56
77
  get producerId() {
@@ -132,7 +153,7 @@ export class Producer extends Base {
132
153
  }
133
154
  }
134
155
  options.acks ??= idempotent ? ProduceAcks.ALL : ProduceAcks.LEADER;
135
- producerSendsChannel.traceCallback(this.#send, 1, createDiagnosticContext({ client: this, operation: 'send', options }), this, options, callback);
156
+ producerSendsChannel.traceCallback(this.#sendOperation, 1, createDiagnosticContext({ client: this, operation: 'send', options }), this, options, callback);
136
157
  return callback[kCallbackPromise];
137
158
  }
138
159
  beginTransaction(options, callback) {
@@ -430,19 +451,28 @@ export class Producer extends Base {
430
451
  const messages = [];
431
452
  for (const message of options.messages) {
432
453
  const topic = message.topic;
454
+ let key;
455
+ let value;
433
456
  let headers = new Map();
434
457
  const serializedHeaders = new Map();
435
- if (message.headers) {
436
- headers =
437
- message.headers instanceof Map
438
- ? message.headers
439
- : new Map(Object.entries(message.headers));
440
- for (const [key, value] of headers) {
441
- serializedHeaders.set(this.#headerKeySerializer(key), this.#headerValueSerializer(value));
458
+ const metadata = message.metadata;
459
+ try {
460
+ if (message.headers) {
461
+ headers =
462
+ message.headers instanceof Map
463
+ ? message.headers
464
+ : new Map(Object.entries(message.headers));
465
+ for (const [key, value] of headers) {
466
+ serializedHeaders.set(this.#headerKeySerializer(key, metadata), this.#headerValueSerializer(value, metadata));
467
+ }
442
468
  }
469
+ key = this.#keySerializer(message.key, headers, message);
470
+ value = this.#valueSerializer(message.value, headers, message);
471
+ }
472
+ catch (error) {
473
+ callback(new UserError('Failed to serialize a message.', { cause: error }));
474
+ return;
443
475
  }
444
- const key = this.#keySerializer(message.key, headers);
445
- const value = this.#valueSerializer(message.value, headers);
446
476
  let partition = 0;
447
477
  if (typeof message.partition !== 'number') {
448
478
  if (partitioner) {
@@ -622,4 +652,34 @@ export class Producer extends Base {
622
652
  this.#transaction = undefined;
623
653
  }
624
654
  }
655
+ #beforeSerialization(hook, options, callback) {
656
+ // Create the pre-serialization requests
657
+ const requests = [];
658
+ for (const message of options.messages) {
659
+ requests.push([message.key, 'key', message]);
660
+ requests.push([message.value, 'value', message]);
661
+ if (typeof message.headers !== 'undefined') {
662
+ const headers = message.headers instanceof Map
663
+ ? message.headers
664
+ : new Map(Object.entries(message.headers));
665
+ for (const [headerKey, headerValue] of headers) {
666
+ requests.push([headerKey, 'headerKey', message]);
667
+ requests.push([headerValue, 'headerValue', message]);
668
+ }
669
+ }
670
+ }
671
+ runAsyncSeries((request, cb) => {
672
+ const [data, type, message] = request;
673
+ const result = hook(data, type, message, cb);
674
+ if (typeof result?.then === 'function') {
675
+ result.then(() => cb(null), cb);
676
+ }
677
+ }, requests, 0, error => {
678
+ if (error) {
679
+ callback(error);
680
+ return;
681
+ }
682
+ this.#send(options, callback);
683
+ });
684
+ }
625
685
  }
@@ -1,7 +1,8 @@
1
1
  import { type CompressionAlgorithmValue } from '../../protocol/compression.ts';
2
2
  import { type MessageToProduce } from '../../protocol/records.ts';
3
+ import { type SchemaRegistry } from '../../registries/abstract.ts';
3
4
  import { type BaseOptions, type TopicWithPartitionAndOffset } from '../base/types.ts';
4
- import { type Serializers } from '../serde.ts';
5
+ import { type BeforeSerializationHook, type Serializers } from '../serde.ts';
5
6
  export interface ProducerInfo {
6
7
  producerId: bigint;
7
8
  producerEpoch: number;
@@ -25,6 +26,8 @@ export interface ProduceOptions<Key, Value, HeaderKey, HeaderValue> {
25
26
  export type ProducerOptions<Key, Value, HeaderKey, HeaderValue> = BaseOptions & ProduceOptions<Key, Value, HeaderKey, HeaderValue> & {
26
27
  transactionalId?: string;
27
28
  serializers?: Partial<Serializers<Key, Value, HeaderKey, HeaderValue>>;
29
+ beforeSerialization?: BeforeSerializationHook<Key, Value, HeaderKey, HeaderValue>;
30
+ registry?: SchemaRegistry<unknown, unknown, Key, Value, HeaderKey, HeaderValue>;
28
31
  };
29
32
  export type SendOptions<Key, Value, HeaderKey, HeaderValue> = {
30
33
  messages: MessageToProduce<Key, Value, HeaderKey, HeaderValue>[];
@@ -1,7 +1,9 @@
1
- export type Serializer<InputType = unknown> = (data?: InputType) => Buffer | undefined;
2
- export type Deserializer<OutputType = unknown> = (data?: Buffer) => OutputType | undefined;
3
- export type SerializerWithHeaders<InputType = unknown, HeaderKey = unknown, HeaderValue = unknown> = (data?: InputType, headers?: Map<HeaderKey, HeaderValue>) => Buffer | undefined;
4
- export type DeserializerWithHeaders<OutputType = unknown, HeaderKey = unknown, HeaderValue = unknown> = (data?: Buffer, headers?: Map<HeaderKey, HeaderValue>) => OutputType | undefined;
1
+ import { type Callback } from '../apis/definitions.ts';
2
+ import { type MessageToConsume, type MessageToProduce } from '../protocol/records.ts';
3
+ export type Serializer<InputType = unknown> = (data?: InputType, metadata?: unknown) => Buffer | undefined;
4
+ export type SerializerWithHeaders<InputType = unknown, HeaderKey = unknown, HeaderValue = unknown> = (data?: InputType, headers?: Map<HeaderKey, HeaderValue>, message?: MessageToProduce<unknown, unknown, unknown, unknown>) => Buffer | undefined;
5
+ export type Deserializer<OutputType = unknown> = (data?: Buffer, message?: MessageToConsume) => OutputType | undefined;
6
+ export type DeserializerWithHeaders<OutputType = unknown, HeaderKey = unknown, HeaderValue = unknown> = (data?: Buffer, headers?: Map<HeaderKey, HeaderValue>, message?: MessageToConsume) => OutputType | undefined;
5
7
  export interface Serializers<Key, Value, HeaderKey, HeaderValue> {
6
8
  key: SerializerWithHeaders<Key, HeaderKey, HeaderValue>;
7
9
  value: SerializerWithHeaders<Value, HeaderKey, HeaderValue>;
@@ -9,11 +11,14 @@ export interface Serializers<Key, Value, HeaderKey, HeaderValue> {
9
11
  headerValue: Serializer<HeaderValue>;
10
12
  }
11
13
  export interface Deserializers<Key, Value, HeaderKey, HeaderValue> {
12
- key: DeserializerWithHeaders<Key>;
13
- value: DeserializerWithHeaders<Value>;
14
+ key: DeserializerWithHeaders<Key, HeaderKey, HeaderValue>;
15
+ value: DeserializerWithHeaders<Value, HeaderKey, HeaderValue>;
14
16
  headerKey: Deserializer<HeaderKey>;
15
17
  headerValue: Deserializer<HeaderValue>;
16
18
  }
19
+ export type BeforeHookPayloadType = 'key' | 'value' | 'headerKey' | 'headerValue';
20
+ export type BeforeDeserializationHook = (payload: Buffer, type: BeforeHookPayloadType, message: MessageToConsume, callback: Callback<void>) => void | Promise<void>;
21
+ export type BeforeSerializationHook<Key, Value, HeaderKey, HeaderValue> = (payload: unknown, type: BeforeHookPayloadType, message: MessageToProduce<Key, Value, HeaderKey, HeaderValue>, callback: Callback<void>) => void | Promise<void>;
17
22
  export declare function stringSerializer(data?: string): Buffer | undefined;
18
23
  export declare function stringDeserializer(data?: string | Buffer): string | undefined;
19
24
  export declare function jsonSerializer<T = Record<string, any>>(data?: T): Buffer | undefined;
package/dist/errors.d.ts CHANGED
@@ -2,7 +2,7 @@ import { type NullableString } from './protocol/definitions.ts';
2
2
  declare const kGenericError: unique symbol;
3
3
  declare const kMultipleErrors: unique symbol;
4
4
  export declare const ERROR_PREFIX = "PLT_KFK_";
5
- export declare const errorCodes: readonly ["PLT_KFK_AUTHENTICATION", "PLT_KFK_MULTIPLE", "PLT_KFK_NETWORK", "PLT_KFK_OUT_OF_BOUNDS", "PLT_KFK_PROTOCOL", "PLT_KFK_RESPONSE", "PLT_KFK_TIMEOUT", "PLT_KFK_UNEXPECTED_CORRELATION_ID", "PLT_KFK_UNFINISHED_WRITE_BUFFER", "PLT_KFK_UNSUPPORTED_API", "PLT_KFK_UNSUPPORTED_COMPRESSION", "PLT_KFK_UNSUPPORTED", "PLT_KFK_USER"];
5
+ export declare const errorCodes: readonly ["PLT_KFK_AUTHENTICATION", "PLT_KFK_MULTIPLE", "PLT_KFK_NETWORK", "PLT_KFK_OUT_OF_BOUNDS", "PLT_KFK_PROTOCOL", "PLT_KFK_RESPONSE", "PLT_KFK_TIMEOUT", "PLT_KFK_UNEXPECTED_CORRELATION_ID", "PLT_KFK_UNFINISHED_WRITE_BUFFER", "PLT_KFK_UNSUPPORTED_API", "PLT_KFK_UNSUPPORTED_COMPRESSION", "PLT_KFK_UNSUPPORTED_FORMAT", "PLT_KFK_UNSUPPORTED", "PLT_KFK_USER"];
6
6
  export type ErrorCode = (typeof errorCodes)[number];
7
7
  export type ErrorProperties = {
8
8
  cause?: Error;
@@ -67,6 +67,10 @@ export declare class UnsupportedCompressionError extends GenericError {
67
67
  static code: ErrorCode;
68
68
  constructor(message: string, properties?: ErrorProperties);
69
69
  }
70
+ export declare class UnsupportedFormatError extends GenericError {
71
+ static code: ErrorCode;
72
+ constructor(message: string, properties?: ErrorProperties);
73
+ }
70
74
  export declare class UnsupportedError extends GenericError {
71
75
  static code: ErrorCode;
72
76
  constructor(message: string, properties?: ErrorProperties);
package/dist/errors.js CHANGED
@@ -15,6 +15,7 @@ export const errorCodes = [
15
15
  'PLT_KFK_UNFINISHED_WRITE_BUFFER',
16
16
  'PLT_KFK_UNSUPPORTED_API',
17
17
  'PLT_KFK_UNSUPPORTED_COMPRESSION',
18
+ 'PLT_KFK_UNSUPPORTED_FORMAT',
18
19
  'PLT_KFK_UNSUPPORTED',
19
20
  'PLT_KFK_USER'
20
21
  ];
@@ -166,6 +167,13 @@ export class UnsupportedCompressionError extends GenericError {
166
167
  super(UnsupportedCompressionError.code, message, { canRetry: false, ...properties });
167
168
  }
168
169
  }
170
+ /* c8 ignore next 7 - Format libraries are always available in tests */
171
+ export class UnsupportedFormatError extends GenericError {
172
+ static code = 'PLT_KFK_UNSUPPORTED_FORMAT';
173
+ constructor(message, properties = {}) {
174
+ super(UnsupportedFormatError.code, message, { canRetry: false, ...properties });
175
+ }
176
+ }
169
177
  export class UnsupportedError extends GenericError {
170
178
  static code = 'PLT_KFK_UNSUPPORTED';
171
179
  constructor(message, properties = {}) {
package/dist/index.d.ts CHANGED
@@ -6,3 +6,4 @@ export * from './network/index.ts';
6
6
  export * from './protocol/index.ts';
7
7
  export * from './apis/index.ts';
8
8
  export * from './clients/index.ts';
9
+ export * from './registries/index.ts';
package/dist/index.js CHANGED
@@ -10,3 +10,5 @@ export * from "./protocol/index.js";
10
10
  export * from "./apis/index.js";
11
11
  // Clients
12
12
  export * from "./clients/index.js";
13
+ // Registries
14
+ export * from "./registries/index.js";
@@ -20,18 +20,19 @@ export interface ConnectionEvents extends TypedEvents {
20
20
  'sasl:authentication:extended': (authBytes?: Buffer) => void;
21
21
  drain: () => void;
22
22
  }
23
- export type SASLCredentialProvider<T = string> = () => T | Promise<T>;
23
+ export type CredentialProvider<T = string> = () => T | Promise<T>;
24
+ export type SASLCredentialProvider<T> = CredentialProvider<T>;
24
25
  export interface Broker {
25
26
  host: string;
26
27
  port: number;
27
28
  }
28
- export type SASLCustomAuthenticator = (mechanism: SASLMechanismValue, connection: Connection, authenticate: SASLAuthenticationAPI, usernameProvider: string | SASLCredentialProvider | undefined, passwordProvider: string | SASLCredentialProvider | undefined, tokenProvider: string | SASLCredentialProvider | undefined, callback: CallbackWithPromise<SaslAuthenticateResponse>) => void;
29
+ export type SASLCustomAuthenticator = (mechanism: SASLMechanismValue, connection: Connection, authenticate: SASLAuthenticationAPI, usernameProvider: string | CredentialProvider | undefined, passwordProvider: string | CredentialProvider | undefined, tokenProvider: string | CredentialProvider | undefined, callback: CallbackWithPromise<SaslAuthenticateResponse>) => void;
29
30
  export interface SASLOptions {
30
31
  mechanism: SASLMechanismValue;
31
- username?: string | SASLCredentialProvider;
32
- password?: string | SASLCredentialProvider;
33
- token?: string | SASLCredentialProvider;
34
- oauthBearerExtensions?: Record<string, string> | SASLCredentialProvider<Record<string, string>>;
32
+ username?: string | CredentialProvider;
33
+ password?: string | CredentialProvider;
34
+ token?: string | CredentialProvider;
35
+ oauthBearerExtensions?: Record<string, string> | CredentialProvider<Record<string, string>>;
35
36
  authenticate?: SASLCustomAuthenticator;
36
37
  authBytesValidator?: (authBytes: Buffer, callback: CallbackWithPromise<Buffer>) => void;
37
38
  }