kafka-ts 0.0.6-beta.4 → 0.0.6-beta.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/broker.d.ts CHANGED
@@ -21,7 +21,7 @@ export declare class Broker {
     sendRequest: SendRequest;
     constructor(options: BrokerOptions);
     connect(): Promise<this>;
-    ensureConnected: () => Promise<this>;
+    ensureConnected(): Promise<void>;
     disconnect(): Promise<void>;
     private validateApiVersions;
     private saslHandshake;
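
Note that the declared return type also changes here, from Promise<this> to Promise<void>. The sketch below is illustrative only — BrokerLikeV4 and BrokerLikeV6 are stand-in interfaces written for this example, not types exported by kafka-ts — and shows the caller-side difference:

// Stand-in interfaces modeled on the two declarations above.
interface BrokerLikeV4 {
    ensureConnected: () => Promise<BrokerLikeV4>;
    disconnect(): Promise<void>;
}

interface BrokerLikeV6 {
    ensureConnected(): Promise<void>;
    disconnect(): Promise<void>;
}

// beta.4-style signature: the resolved value was the broker itself, so chaining worked.
const useV4 = async (broker: BrokerLikeV4) => {
    const connected = await broker.ensureConnected(); // typed as BrokerLikeV4
    await connected.disconnect();
};

// beta.6-style signature: the promise resolves to void, so keep the reference you already hold.
const useV6 = async (broker: BrokerLikeV6) => {
    await broker.ensureConnected();
    await broker.disconnect();
};
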
package/dist/broker.js CHANGED
@@ -4,7 +4,6 @@ exports.Broker = void 0;
 const api_1 = require("./api");
 const connection_1 = require("./connection");
 const error_1 = require("./utils/error");
-const memo_1 = require("./utils/memo");
 class Broker {
     options;
     connection;
@@ -25,7 +24,11 @@ class Broker {
         await this.saslAuthenticate();
         return this;
     }
-    ensureConnected = (0, memo_1.memo)(() => this.connect());
+    async ensureConnected() {
+        if (!this.connection.isConnected()) {
+            await this.connect();
+        }
+    }
     async disconnect() {
         await this.connection.disconnect();
     }
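
The implementation change above replaces a memoized ensureConnected, which cached the first connect() promise for the lifetime of the broker, with a check against the live connection state. A minimal sketch of why that matters — FakeConnection is a stand-in written for this example, not kafka-ts's Connection class; the memo helper mirrors the one removed from dist/utils/memo.js at the end of this diff:

// Stand-in connection with an explicit "dropped" state for the demo.
class FakeConnection {
    private open = false;
    isConnected(): boolean {
        return this.open;
    }
    async connect(): Promise<void> {
        this.open = true;
    }
    drop(): void {
        this.open = false; // simulate a socket that has gone away
    }
}

// Same shape as the removed memo helper: cache results keyed by stringified arguments.
const memo = <T extends (...args: any[]) => any>(fn: T) => {
    const cache: Record<string, ReturnType<T>> = {};
    return (...args: Parameters<T>): ReturnType<T> => {
        const key = JSON.stringify(args);
        if (cache[key]) {
            return cache[key];
        }
        const result = fn(...args);
        cache[key] = result;
        return result;
    };
};

const connection = new FakeConnection();

// beta.4 style: the first connect() promise is cached forever.
const ensureConnectedMemoized = memo(() => connection.connect());

// beta.6 style: consult the connection state and reconnect when needed.
const ensureConnected = async (): Promise<void> => {
    if (!connection.isConnected()) {
        await connection.connect();
    }
};

const demo = async (): Promise<void> => {
    await ensureConnectedMemoized();
    connection.drop();
    await ensureConnectedMemoized(); // returns the cached promise; no reconnect happens
    console.log('memoized:', connection.isConnected()); // false

    await ensureConnected(); // state check triggers a reconnect
    console.log('state-checked:', connection.isConnected()); // true
};

demo().catch(console.error);
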
package/dist/cluster.d.ts CHANGED
@@ -17,6 +17,7 @@ export declare class Cluster {
     private brokerMetadata;
     constructor(options: ClusterOptions);
     connect(): Promise<void>;
+    ensureConnected(): Promise<void>;
     disconnect(): Promise<void>;
     setSeedBroker: (nodeId: number) => Promise<void>;
     sendRequest: SendRequest;
package/dist/cluster.js CHANGED
@@ -23,6 +23,9 @@ class Cluster {
         });
         this.brokerMetadata = Object.fromEntries(metadata.brokers.map((options) => [options.nodeId, options]));
     }
+    async ensureConnected() {
+        await Promise.all([this.seedBroker, ...Object.values(this.brokerById)].map((x) => x.ensureConnected()));
+    }
     async disconnect() {
         await Promise.all([this.seedBroker.disconnect(), ...Object.values(this.brokerById).map((x) => x.disconnect())]);
     }
package/dist/connection.d.ts CHANGED
@@ -15,6 +15,7 @@ export declare class Connection {
     private lastCorrelationId;
     private chunks;
     constructor(options: ConnectionOptions);
+    isConnected(): boolean;
     connect(): Promise<void>;
     disconnect(): Promise<void>;
     sendRequest<Request, Response>(api: Api<Request, Response>, body: Request): Promise<Response>;
package/dist/connection.js CHANGED
@@ -55,6 +55,9 @@ class Connection {
     constructor(options) {
         this.options = options;
     }
+    isConnected() {
+        return !this.socket.pending && !this.socket.destroyed;
+    }
     async connect() {
         this.queue = {};
         this.chunks = [];
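
The new isConnected() is built on two documented net.Socket flags: pending (the socket has not finished connecting yet) and destroyed. Below is a small standalone sketch of how those flags behave over a socket's lifetime — plain Node code written for illustration, not part of kafka-ts; the local server and ephemeral port exist only to give the socket something to connect to:

import * as net from 'net';

// Same predicate as the new Connection.isConnected().
const isConnected = (socket: net.Socket): boolean => !socket.pending && !socket.destroyed;

const server = net.createServer();
server.listen(0, () => {
    const { port } = server.address() as net.AddressInfo;
    const socket = new net.Socket();

    console.log('before connect:', isConnected(socket)); // false: socket.pending is still true

    socket.connect(port, '127.0.0.1', () => {
        console.log('connected:', isConnected(socket)); // true

        socket.destroy();
        console.log('after destroy:', isConnected(socket)); // false: socket.destroyed is true

        server.close();
    });
});
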
@@ -15,7 +15,7 @@ const messages_to_topic_partition_leaders_1 = require("../distributors/messages-
 const partitioner_1 = require("../distributors/partitioner");
 const metadata_1 = require("../metadata");
 const delay_1 = require("../utils/delay");
-const memo_1 = require("../utils/memo");
+const error_1 = require("../utils/error");
 const tracer_1 = require("../utils/tracer");
 const trace = (0, tracer_1.createTracer)('Producer');
 class Producer {
@@ -43,63 +43,73 @@ class Producer {
         const topics = Array.from(new Set(messages.map((message) => message.topic)));
         await this.metadata.fetchMetadataIfNecessary({ topics, allowTopicAutoCreation });
         const nodeTopicPartitionMessages = (0, messages_to_topic_partition_leaders_1.distributeMessagesToTopicPartitionLeaders)(messages.map((message) => ({ ...message, partition: this.partition(message) })), this.metadata.getTopicPartitionLeaderIds());
-        await Promise.all(Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
-            const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
-                name: topic,
-                partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
-                    const partitionIndex = parseInt(partition);
-                    let baseTimestamp;
-                    let maxTimestamp;
-                    messages.forEach(({ timestamp = defaultTimestamp }) => {
-                        if (!baseTimestamp || timestamp < baseTimestamp) {
-                            baseTimestamp = timestamp;
-                        }
-                        if (!maxTimestamp || timestamp > maxTimestamp) {
-                            maxTimestamp = timestamp;
-                        }
-                    });
-                    return {
-                        index: partitionIndex,
-                        baseOffset: 0n,
-                        partitionLeaderEpoch: -1,
-                        attributes: 0,
-                        lastOffsetDelta: messages.length - 1,
-                        baseTimestamp: baseTimestamp ?? 0n,
-                        maxTimestamp: maxTimestamp ?? 0n,
-                        producerId: this.producerId,
-                        producerEpoch: 0,
-                        baseSequence: this.getSequence(topic, partitionIndex),
-                        records: messages.map((message, index) => ({
+        try {
+            await Promise.all(Object.entries(nodeTopicPartitionMessages).map(async ([nodeId, topicPartitionMessages]) => {
+                const topicData = Object.entries(topicPartitionMessages).map(([topic, partitionMessages]) => ({
+                    name: topic,
+                    partitionData: Object.entries(partitionMessages).map(([partition, messages]) => {
+                        const partitionIndex = parseInt(partition);
+                        let baseTimestamp;
+                        let maxTimestamp;
+                        messages.forEach(({ timestamp = defaultTimestamp }) => {
+                            if (!baseTimestamp || timestamp < baseTimestamp) {
+                                baseTimestamp = timestamp;
+                            }
+                            if (!maxTimestamp || timestamp > maxTimestamp) {
+                                maxTimestamp = timestamp;
+                            }
+                        });
+                        return {
+                            index: partitionIndex,
+                            baseOffset: 0n,
+                            partitionLeaderEpoch: -1,
                             attributes: 0,
-                            timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
-                            offsetDelta: index,
-                            key: message.key ?? null,
-                            value: message.value,
-                            headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({ key, value })),
-                        })),
-                    };
-                }),
-            }));
-            await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
-                transactionalId: null,
-                acks,
-                timeoutMs: 5000,
-                topicData,
-            });
-            topicData.forEach(({ name, partitionData }) => {
-                partitionData.forEach(({ index, records }) => {
-                    this.updateSequence(name, index, records.length);
+                            lastOffsetDelta: messages.length - 1,
+                            baseTimestamp: baseTimestamp ?? 0n,
+                            maxTimestamp: maxTimestamp ?? 0n,
+                            producerId: this.producerId,
+                            producerEpoch: 0,
+                            baseSequence: this.getSequence(topic, partitionIndex),
+                            records: messages.map((message, index) => ({
+                                attributes: 0,
+                                timestampDelta: (message.timestamp ?? defaultTimestamp) - (baseTimestamp ?? 0n),
+                                offsetDelta: index,
+                                key: message.key ?? null,
+                                value: message.value,
+                                headers: Object.entries(message.headers ?? {}).map(([key, value]) => ({ key, value })),
+                            })),
+                        };
+                    }),
+                }));
+                await this.cluster.sendRequestToNode(parseInt(nodeId))(api_1.API.PRODUCE, {
+                    transactionalId: null,
+                    acks,
+                    timeoutMs: 5000,
+                    topicData,
                 });
-            });
-        }));
+                topicData.forEach(({ name, partitionData }) => {
+                    partitionData.forEach(({ index, records }) => {
+                        this.updateSequence(name, index, records.length);
+                    });
+                });
+            }));
+        }
+        catch (error) {
+            if ((error instanceof error_1.KafkaTSApiError) && error.errorCode === api_1.API_ERROR.OUT_OF_ORDER_SEQUENCE_NUMBER) {
+                await this.initProducerId();
+            }
+            throw error;
+        }
     }
     async close() {
         await this.cluster.disconnect();
     }
-    ensureConnected = (0, memo_1.memo)(async () => {
-        await this.cluster.connect();
-        await this.initProducerId();
-    });
+    async ensureConnected() {
+        await this.cluster.ensureConnected();
+        if (!this.producerId) {
+            await this.initProducerId();
+        }
+    }
     async initProducerId() {
         try {
             const result = await this.cluster.sendRequest(api_1.API.INIT_PRODUCER_ID, {
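
With the catch block above, an OUT_OF_ORDER_SEQUENCE_NUMBER error now refreshes the producer id before being rethrown, so the failed batch can be retried by the caller against a clean sequence. A hedged sketch of such a caller-side retry — the Message shape and the send() method name are minimal assumptions made for this example, not the full kafka-ts producer API:

// Minimal assumed shapes for the sketch.
type Message = {
    topic: string;
    key?: Buffer | null;
    value: Buffer | null;
    headers?: Record<string, string>;
};

interface SendsMessages {
    send(messages: Message[]): Promise<void>;
}

// Retry a batch a few times; after a sequence error the producer has already
// re-initialized its producer id and rethrown, so the next attempt starts clean.
const sendWithRetry = async (producer: SendsMessages, messages: Message[], attempts = 3): Promise<void> => {
    let lastError: unknown;
    for (let attempt = 1; attempt <= attempts; attempt++) {
        try {
            await producer.send(messages);
            return;
        } catch (error) {
            lastError = error;
        }
    }
    throw lastError;
};

A retry like this only helps with transient failures; errors that keep recurring still surface to the caller after the final attempt.
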
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "kafka-ts",
-    "version": "0.0.6-beta.4",
+    "version": "0.0.6-beta.6",
     "main": "dist/index.js",
     "author": "Priit Käärd",
     "license": "MIT",
package/dist/utils/memo.d.ts DELETED
@@ -1 +0,0 @@
-export declare const memo: <T extends (...args: any[]) => any>(fn: T) => (...args: Parameters<T>) => ReturnType<T>;
package/dist/utils/memo.js DELETED
@@ -1,16 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.memo = void 0;
-const memo = (fn) => {
-    const cache = {};
-    return (...args) => {
-        const key = JSON.stringify(args);
-        if (cache[key]) {
-            return cache[key];
-        }
-        const result = fn(...args);
-        cache[key] = result;
-        return result;
-    };
-};
-exports.memo = memo;