kafka-ts 0.0.6-beta.0 → 0.0.6-beta.1

@@ -112,9 +112,17 @@ class Connection {
             }
         });
         clearTimeout(timeout);
-        const response = await api.response(responseDecoder);
-        (0, assert_1.default)(responseDecoder.getOffset() - 4 === responseSize, `Buffer not correctly consumed: ${responseDecoder.getOffset() - 4} !== ${responseSize}`);
-        return response;
+        try {
+            const response = await api.response(responseDecoder);
+            (0, assert_1.default)(responseDecoder.getOffset() - 4 === responseSize, `Buffer not correctly consumed: ${responseDecoder.getOffset() - 4} !== ${responseSize}`);
+            return response;
+        }
+        catch (error) {
+            if (error instanceof error_1.KafkaTSApiError) {
+                error.request = JSON.stringify(body, logger_1.jsonSerializer);
+            }
+            throw error;
+        }
     }
     write(buffer) {
         return new Promise((resolve, reject) => {
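
Effect of this hunk: when decoding or handling a response throws a KafkaTSApiError, the connection now attaches a JSON snapshot of the request body (serialized with jsonSerializer, see the logger hunk below) to the error before rethrowing it. A minimal caller-side sketch in TypeScript; it assumes KafkaTSApiError is reachable from the package root, and doKafkaCall is a hypothetical stand-in for any library call that goes through this request path:

import { KafkaTSApiError } from 'kafka-ts'; // assumption: the error class is re-exported from the package root

// Hypothetical stand-in for any call (produce, fetch, metadata, ...) that ends up
// in the Connection request path patched above and may reject with KafkaTSApiError.
declare function doKafkaCall(): Promise<void>;

async function example(): Promise<void> {
    try {
        await doKafkaCall();
    } catch (error) {
        if (error instanceof KafkaTSApiError) {
            // New in 0.0.6-beta.1: the serialized request body that triggered the failure.
            console.error(error.errorCode, error.errorMessage, error.request);
        }
        throw error;
    }
}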
@@ -49,7 +49,13 @@ class OffsetManager {
             .flatMap(([topic, partitions]) => partitions.map((partition) => ({ topic, partition })))
             .map(({ topic, partition }) => ({
                 name: topic,
-                partitions: [{ partitionIndex: partition, currentLeaderEpoch: -1, timestamp: -1n }],
+                partitions: [
+                    {
+                        partitionIndex: partition,
+                        currentLeaderEpoch: -1,
+                        timestamp: fromBeginning ? -2n : -1n,
+                    },
+                ],
             })),
         });
         const topicPartitions = {};
@@ -57,7 +63,7 @@ class OffsetManager {
             topicPartitions[name] ??= new Set();
             partitions.forEach(({ partitionIndex, offset }) => {
                 topicPartitions[name].add(partitionIndex);
-                this.resolve(name, partitionIndex, fromBeginning ? 0n : offset);
+                this.resolve(name, partitionIndex, offset);
             });
         });
         this.flush(topicPartitions);
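
Both OffsetManager hunks change how fromBeginning is honoured. In the Kafka ListOffsets API, the timestamp field accepts protocol-defined sentinels: -1 resolves to the latest (log-end) offset and -2 to the earliest offset still retained. The old code always requested -1 and then overrode the result with 0n when fromBeginning was set; the new code asks the broker for the earliest offset directly and uses the returned value as-is, which stays correct when a partition's first retained offset is no longer 0 (for example after retention has deleted old segments). A small TypeScript sketch of the sentinels; the constant names are mine, not identifiers from the library:

// Protocol-defined special timestamps for ListOffsets requests.
const LATEST_TIMESTAMP = -1n;   // resolve to the log-end offset (next offset to be written)
const EARLIEST_TIMESTAMP = -2n; // resolve to the log-start offset (first offset still retained)

// The new behaviour, restated: let the broker report where "the beginning" is
// instead of assuming it is offset 0.
const timestampFor = (fromBeginning: boolean): bigint =>
    fromBeginning ? EARLIEST_TIMESTAMP : LATEST_TIMESTAMP;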
@@ -5,6 +5,7 @@ export declare class KafkaTSApiError<T = any> extends KafkaTSError {
     errorCode: number;
     errorMessage: string | null;
     response: T;
+    request: string | undefined;
     constructor(errorCode: number, errorMessage: string | null, response: T);
 }
 export declare class ConnectionError extends KafkaTSError {
@@ -13,6 +13,7 @@ class KafkaTSApiError extends KafkaTSError {
     errorCode;
     errorMessage;
     response;
+    request;
     constructor(errorCode, errorMessage, response) {
         const [errorName] = Object.entries(api_1.API_ERROR).find(([, value]) => value === errorCode) ?? ['UNKNOWN'];
         super(`${errorName}${errorMessage ? `: ${errorMessage}` : ''}`);
@@ -3,7 +3,13 @@ Object.defineProperty(exports, "__esModule", { value: true });
 exports.setLogger = exports.log = exports.jsonSerializer = void 0;
 const jsonSerializer = (_, v) => {
     if (v instanceof Error) {
-        return { name: v.name, message: v.message, stack: v.stack, cause: v.cause };
+        return Object.getOwnPropertyNames(v).reduce((acc, key) => {
+            acc[key] = v[key];
+            return acc;
+        }, {});
+    }
+    if (Buffer.isBuffer(v)) {
+        return v.toString();
     }
     if (typeof v === 'bigint') {
         return v.toString();
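
The jsonSerializer replacer now copies every own property of an Error (message, stack, and custom fields such as errorCode, response and the new request on KafkaTSApiError) instead of a fixed subset, and renders Buffers as text; bigints were already stringified. A self-contained TypeScript restatement for illustration, not the library source:

// Standalone restatement of the replacer above.
const replacer = (_: string, v: unknown) => {
    if (v instanceof Error) {
        // Copy every own property, including custom fields added to the error instance.
        return Object.getOwnPropertyNames(v).reduce<Record<string, unknown>>((acc, key) => {
            acc[key] = (v as unknown as Record<string, unknown>)[key];
            return acc;
        }, {});
    }
    if (Buffer.isBuffer(v)) return v.toString();    // raw payloads become readable text
    if (typeof v === 'bigint') return v.toString(); // bigint is not valid JSON
    return v;
};

const err = Object.assign(new Error('boom'), { errorCode: 42 });
const out = JSON.parse(JSON.stringify({ err, offset: 10n }, replacer));
console.log(out.err.message, out.err.errorCode, out.offset); // boom 42 10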
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
     "name": "kafka-ts",
-    "version": "0.0.6-beta.0",
+    "version": "0.0.6-beta.1",
     "main": "dist/index.js",
     "author": "Priit Käärd",
     "license": "MIT",
@@ -10,7 +10,7 @@
     },
     "scripts": {
         "start": "docker-compose down && KAFKA_VERSION=3.7.1 docker-compose up -d && sleep 5 && bash ./scripts/create-scram-user.sh",
-        "version:beta": "npm version prerelease --preid=beta",
+        "version:prerelease": "npm version prerelease",
         "version:patch": "npm version patch",
         "format": "prettier --write .",
         "build": "tsc",