kafka-ts 0.0.1-beta.3 → 0.0.1-beta.6

This diff shows the changes between package versions as they were published to their public registries. It is provided for informational purposes only.
Files changed (164)
  1. package/.github/workflows/release.yml +19 -6
  2. package/README.md +15 -21
  3. package/dist/api/api-versions.d.ts +9 -0
  4. package/dist/api/api-versions.js +24 -0
  5. package/dist/api/create-topics.d.ts +38 -0
  6. package/dist/api/create-topics.js +53 -0
  7. package/dist/api/delete-topics.d.ts +18 -0
  8. package/dist/api/delete-topics.js +33 -0
  9. package/dist/api/fetch.d.ts +84 -0
  10. package/dist/api/fetch.js +142 -0
  11. package/dist/api/find-coordinator.d.ts +21 -0
  12. package/dist/api/find-coordinator.js +39 -0
  13. package/dist/api/heartbeat.d.ts +11 -0
  14. package/dist/api/heartbeat.js +27 -0
  15. package/dist/api/index.d.ts +578 -0
  16. package/dist/api/index.js +165 -0
  17. package/dist/api/init-producer-id.d.ts +13 -0
  18. package/dist/api/init-producer-id.js +29 -0
  19. package/dist/api/join-group.d.ts +34 -0
  20. package/dist/api/join-group.js +51 -0
  21. package/dist/api/leave-group.d.ts +19 -0
  22. package/dist/api/leave-group.js +39 -0
  23. package/dist/api/list-offsets.d.ts +29 -0
  24. package/dist/api/list-offsets.js +48 -0
  25. package/dist/api/metadata.d.ts +40 -0
  26. package/dist/api/metadata.js +58 -0
  27. package/dist/api/offset-commit.d.ts +28 -0
  28. package/dist/api/offset-commit.js +48 -0
  29. package/dist/api/offset-fetch.d.ts +33 -0
  30. package/dist/api/offset-fetch.js +57 -0
  31. package/dist/api/produce.d.ts +54 -0
  32. package/dist/api/produce.js +126 -0
  33. package/dist/api/sasl-authenticate.d.ts +11 -0
  34. package/dist/api/sasl-authenticate.js +23 -0
  35. package/dist/api/sasl-handshake.d.ts +6 -0
  36. package/dist/api/sasl-handshake.js +19 -0
  37. package/dist/api/sync-group.d.ts +24 -0
  38. package/dist/api/sync-group.js +36 -0
  39. package/dist/auth/index.d.ts +2 -0
  40. package/dist/auth/index.js +8 -0
  41. package/dist/auth/plain.d.ts +5 -0
  42. package/dist/auth/plain.js +12 -0
  43. package/dist/auth/scram.d.ts +9 -0
  44. package/dist/auth/scram.js +40 -0
  45. package/dist/broker.d.ts +30 -0
  46. package/dist/broker.js +55 -0
  47. package/dist/client.d.ts +23 -0
  48. package/dist/client.js +36 -0
  49. package/dist/cluster.d.ts +27 -0
  50. package/dist/cluster.js +70 -0
  51. package/dist/cluster.test.d.ts +1 -0
  52. package/dist/cluster.test.js +345 -0
  53. package/dist/codecs/gzip.d.ts +2 -0
  54. package/dist/codecs/gzip.js +8 -0
  55. package/dist/codecs/index.d.ts +2 -0
  56. package/dist/codecs/index.js +17 -0
  57. package/dist/codecs/none.d.ts +2 -0
  58. package/dist/codecs/none.js +7 -0
  59. package/dist/codecs/types.d.ts +5 -0
  60. package/dist/codecs/types.js +2 -0
  61. package/dist/connection.d.ts +26 -0
  62. package/dist/connection.js +175 -0
  63. package/dist/consumer/consumer-group.d.ts +41 -0
  64. package/dist/consumer/consumer-group.js +217 -0
  65. package/dist/consumer/consumer-metadata.d.ts +7 -0
  66. package/dist/consumer/consumer-metadata.js +14 -0
  67. package/dist/consumer/consumer.d.ts +44 -0
  68. package/dist/consumer/consumer.js +225 -0
  69. package/dist/consumer/fetch-manager.d.ts +33 -0
  70. package/dist/consumer/fetch-manager.js +140 -0
  71. package/dist/consumer/fetcher.d.ts +25 -0
  72. package/dist/consumer/fetcher.js +64 -0
  73. package/dist/consumer/offset-manager.d.ts +22 -0
  74. package/dist/consumer/offset-manager.js +66 -0
  75. package/dist/consumer/processor.d.ts +19 -0
  76. package/dist/consumer/processor.js +59 -0
  77. package/dist/distributors/assignments-to-replicas.d.ts +16 -0
  78. package/dist/distributors/assignments-to-replicas.js +59 -0
  79. package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
  80. package/dist/distributors/assignments-to-replicas.test.js +40 -0
  81. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  82. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  83. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  84. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  85. package/dist/distributors/partitioner.d.ts +7 -0
  86. package/dist/distributors/partitioner.js +23 -0
  87. package/dist/index.d.ts +9 -0
  88. package/dist/index.js +26 -0
  89. package/dist/metadata.d.ts +24 -0
  90. package/dist/metadata.js +106 -0
  91. package/dist/producer/producer.d.ts +24 -0
  92. package/dist/producer/producer.js +131 -0
  93. package/dist/types.d.ts +11 -0
  94. package/dist/types.js +2 -0
  95. package/dist/utils/api.d.ts +9 -0
  96. package/dist/utils/api.js +5 -0
  97. package/dist/utils/crypto.d.ts +8 -0
  98. package/dist/utils/crypto.js +18 -0
  99. package/dist/utils/decoder.d.ts +30 -0
  100. package/dist/utils/decoder.js +152 -0
  101. package/dist/utils/delay.d.ts +1 -0
  102. package/dist/utils/delay.js +5 -0
  103. package/dist/utils/encoder.d.ts +28 -0
  104. package/dist/utils/encoder.js +125 -0
  105. package/dist/utils/error.d.ts +11 -0
  106. package/dist/utils/error.js +27 -0
  107. package/dist/utils/logger.d.ts +9 -0
  108. package/dist/utils/logger.js +32 -0
  109. package/dist/utils/memo.d.ts +1 -0
  110. package/dist/utils/memo.js +16 -0
  111. package/dist/utils/murmur2.d.ts +3 -0
  112. package/dist/utils/murmur2.js +40 -0
  113. package/dist/utils/retrier.d.ts +10 -0
  114. package/dist/utils/retrier.js +22 -0
  115. package/dist/utils/tracer.d.ts +5 -0
  116. package/dist/utils/tracer.js +39 -0
  117. package/docker-compose.yml +3 -3
  118. package/examples/package-lock.json +3501 -3
  119. package/examples/package.json +8 -1
  120. package/examples/src/benchmark/common.ts +98 -0
  121. package/examples/src/benchmark/kafka-ts.ts +67 -0
  122. package/examples/src/benchmark/kafkajs.ts +51 -0
  123. package/examples/src/client.ts +4 -1
  124. package/examples/src/consumer.ts +7 -1
  125. package/examples/src/create-topic.ts +3 -3
  126. package/examples/src/opentelemetry.ts +46 -0
  127. package/examples/src/producer.ts +11 -11
  128. package/examples/src/replicator.ts +2 -1
  129. package/package.json +4 -2
  130. package/scripts/create-scram-user.sh +4 -2
  131. package/scripts/generate-certs.sh +2 -0
  132. package/src/__snapshots__/cluster.test.ts.snap +160 -53
  133. package/src/api/fetch.ts +83 -28
  134. package/src/api/index.ts +3 -1
  135. package/src/api/metadata.ts +1 -1
  136. package/src/api/produce.ts +7 -10
  137. package/src/cluster.test.ts +10 -7
  138. package/src/cluster.ts +36 -38
  139. package/src/codecs/gzip.ts +9 -0
  140. package/src/codecs/index.ts +16 -0
  141. package/src/codecs/none.ts +6 -0
  142. package/src/codecs/types.ts +4 -0
  143. package/src/connection.ts +31 -17
  144. package/src/consumer/consumer-group.ts +43 -21
  145. package/src/consumer/consumer.ts +58 -37
  146. package/src/consumer/fetch-manager.ts +36 -46
  147. package/src/consumer/fetcher.ts +20 -13
  148. package/src/consumer/offset-manager.ts +18 -7
  149. package/src/consumer/processor.ts +14 -8
  150. package/src/distributors/assignments-to-replicas.ts +1 -3
  151. package/src/index.ts +2 -0
  152. package/src/metadata.ts +4 -0
  153. package/src/producer/producer.ts +14 -9
  154. package/src/utils/api.ts +1 -1
  155. package/src/utils/decoder.ts +9 -3
  156. package/src/utils/encoder.ts +26 -19
  157. package/src/utils/logger.ts +37 -0
  158. package/src/utils/tracer.ts +40 -22
  159. package/certs/ca.key +0 -52
  160. package/certs/ca.srl +0 -1
  161. package/certs/kafka.crt +0 -29
  162. package/certs/kafka.csr +0 -26
  163. package/certs/kafka.key +0 -52
  164. package/src/utils/debug.ts +0 -9
package/examples/package.json CHANGED
@@ -5,10 +5,17 @@
   "main": "dist/replicator.js",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1",
+    "start:jaeger": "docker run --rm --name jaeger -e COLLECTOR_OTLP_ENABLED=true -p 16686:16686 -p 4317:4317 -p 4318:4318 -d jaegertracing/all-in-one:1.62.0",
     "build": "tsc"
   },
   "dependencies": {
-    "kafka-ts": "file:../"
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.51.0",
+    "@opentelemetry/context-async-hooks": "^1.27.0",
+    "@opentelemetry/exporter-trace-otlp-grpc": "^0.54.0",
+    "@opentelemetry/sdk-node": "^0.54.0",
+    "kafka-ts": "file:../",
+    "kafkajs": "^2.2.4"
   },
   "author": "",
   "license": "ISC"
package/examples/src/benchmark/common.ts ADDED
@@ -0,0 +1,98 @@
+import { delay } from '../utils/delay';
+
+export const startBenchmarker = async ({
+    createTopic,
+    connectProducer,
+    startConsumer,
+    produce,
+}: {
+    createTopic: (opts: { topic: string; partitions: number; replicationFactor: number }) => Promise<void>;
+    connectProducer: () => Promise<() => unknown>;
+    startConsumer: (
+        opts: {
+            groupId: string;
+            topic: string;
+            concurrency: number;
+            incrementCount: (key: string, value: number) => void;
+        },
+        callback: (timestamp: number) => void,
+    ) => Promise<() => unknown>;
+    produce: (opts: { topic: string; length: number; timestamp: number; acks: -1 | 1 }) => Promise<void>;
+}) => {
+    const benchmarkId = `benchmark-${Date.now()}`;
+    const {
+        TOPIC = benchmarkId,
+        PRODUCER = 'true',
+        CONSUMER = 'true',
+        PARTITIONS = '10',
+        REPLICATION_FACTOR = '3',
+        CONCURRENCY = '1',
+        PRODUCE_BATCH_SIZE = '10',
+        PRODUCE_DELAY_MS = '0',
+    } = process.env;
+    const enableProducer = PRODUCER === 'true';
+    const enableConsumer = CONSUMER === 'true';
+    const partitions = parseInt(PARTITIONS);
+    const replicationFactor = parseInt(REPLICATION_FACTOR);
+    const concurrency = parseInt(CONCURRENCY);
+    const produceBatchSize = parseInt(PRODUCE_BATCH_SIZE);
+    const produceDelayMs = parseInt(PRODUCE_DELAY_MS);
+
+    await createTopic({ topic: TOPIC, partitions, replicationFactor }).catch(console.error);
+    await delay(2500);
+
+    let counts: Record<string, number> = {};
+    let sums: Record<string, number> = {};
+
+    const incrementCount = (key: string, value: number) => {
+        counts[key] = (counts[key] || 0) + value;
+    };
+
+    const incrementSum = (key: string, value: number) => {
+        sums[key] = (sums[key] || 0) + value;
+    };
+
+    const stopProducer = await connectProducer();
+
+    const stopConsumer =
+        enableConsumer &&
+        (await startConsumer({ groupId: benchmarkId, topic: TOPIC, concurrency, incrementCount }, (timestamp) => {
+            incrementCount('CONSUMER', 1);
+            incrementSum('CONSUMER', Date.now() - timestamp);
+        }));
+
+    const interval = setInterval(() => {
+        const latencies = Object.entries(sums)
+            .map(([key, sum]) => `${key} ${(sum / counts[key]).toFixed(2)}ms`)
+            .sort()
+            .join(', ');
+
+        const counters = Object.entries(counts)
+            .map(([key, count]) => `${key} ${count}`)
+            .sort()
+            .join(', ');
+
+        console.log(`Latency: ${latencies} | Counters: ${counters}`);
+        counts = {};
+        sums = {};
+    }, 1000);
+
+    let isRunning = true;
+    const produceLoop = async () => {
+        if (!isRunning) return;
+        const start = Date.now();
+        await produce({ topic: TOPIC, length: produceBatchSize, timestamp: Date.now(), acks: -1 });
+        incrementCount('PRODUCER', 1);
+        incrementSum('PRODUCER', Date.now() - start);
+        produceDelayMs && (await delay(produceDelayMs));
+        produceLoop();
+    };
+    enableProducer && produceLoop();
+
+    process.once('SIGINT', async () => {
+        isRunning = false;
+        stopConsumer && (await stopConsumer());
+        await stopProducer();
+        clearInterval(interval);
+    });
+};
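Editor's note: common.ts keeps the harness client-agnostic — all client specifics live behind the four adapter callbacks, and the workload knobs (TOPIC, PARTITIONS, CONCURRENCY, PRODUCE_BATCH_SIZE, PRODUCE_DELAY_MS, …) come from environment variables. A minimal sketch of the contract an adapter must satisfy; these no-op implementations are illustrative stand-ins, the real adapters follow in kafka-ts.ts and kafkajs.ts:

```typescript
import { startBenchmarker } from './common';

// Hypothetical no-op adapter: it type-checks against the contract above so
// the harness runs, but produces and consumes nothing.
startBenchmarker({
    createTopic: async ({ topic, partitions, replicationFactor }) => {
        console.log(`would create ${topic} (${partitions} partitions, RF ${replicationFactor})`);
    },
    // connectProducer resolves to a "stop" function, invoked on SIGINT.
    connectProducer: async () => () => undefined,
    // startConsumer also resolves to a stop function; the callback reports each
    // message's producer-side timestamp so the harness can compute latency.
    startConsumer: async (_opts, _callback) => () => undefined,
    produce: async ({ length }) => {
        // a real adapter sends `length` messages per call
    },
});
```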
package/examples/src/benchmark/kafka-ts.ts ADDED
@@ -0,0 +1,67 @@
+import { readFileSync } from 'fs';
+import { API, createKafkaClient, saslScramSha512 } from 'kafka-ts';
+import { startBenchmarker } from './common';
+
+// setTracer(new OpenTelemetryTracer());
+
+const kafka = createKafkaClient({
+    bootstrapServers: [{ host: 'localhost', port: 9092 }],
+    clientId: 'kafka-ts',
+    sasl: saslScramSha512({ username: 'admin', password: 'admin' }),
+    ssl: { ca: readFileSync('../certs/ca.crt').toString() },
+});
+
+const producer = kafka.createProducer({ allowTopicAutoCreation: false });
+
+startBenchmarker({
+    createTopic: async ({ topic, partitions, replicationFactor }) => {
+        const cluster = kafka.createCluster();
+        await cluster.connect();
+
+        const { controllerId } = await cluster.sendRequest(API.METADATA, {
+            allowTopicAutoCreation: false,
+            includeTopicAuthorizedOperations: false,
+            topics: [],
+        });
+        await cluster.setSeedBroker(controllerId);
+        await cluster.sendRequest(API.CREATE_TOPICS, {
+            validateOnly: false,
+            timeoutMs: 10_000,
+            topics: [
+                {
+                    name: topic,
+                    numPartitions: partitions,
+                    replicationFactor,
+                    assignments: [],
+                    configs: [],
+                },
+            ],
+        });
+        await cluster.disconnect();
+    },
+    connectProducer: async () => () => producer.close(),
+    startConsumer: async ({ groupId, topic, concurrency, incrementCount }, callback) => {
+        const consumer = await kafka.startConsumer({
+            groupId,
+            topics: [topic],
+            onBatch: async (messages) => {
+                for (const message of messages) {
+                    callback(parseInt(message.timestamp.toString()));
+                }
+            },
+            concurrency,
+        });
+        consumer.on('offsetCommit', () => incrementCount('OFFSET_COMMIT', 1));
+        return () => consumer.close();
+    },
+    produce: async ({ topic, length, timestamp, acks }) => {
+        await producer.send(
+            Array.from({ length }).map(() => ({
+                topic: topic,
+                value: Buffer.from('hello'),
+                timestamp: BigInt(timestamp),
+            })),
+            { acks },
+        );
+    },
+});
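Editor's note: worth flagging in the createTopic adapter above — Kafka only accepts CreateTopics on the cluster controller, so the adapter first fetches metadata from any broker, then re-seeds the connection with the returned controllerId before sending the request. A condensed sketch of that routing step; the helper name and loose typing are ours, not kafka-ts API:

```typescript
import { API } from 'kafka-ts';

// Hypothetical helper distilling the controller-routing step above.
// `cluster` is the object returned by kafka.createCluster().
const pointAtController = async (cluster: {
    sendRequest: (api: unknown, body: unknown) => Promise<any>;
    setSeedBroker: (nodeId: number) => Promise<void>;
}) => {
    const { controllerId } = await cluster.sendRequest(API.METADATA, {
        allowTopicAutoCreation: false,
        includeTopicAuthorizedOperations: false,
        topics: [],
    });
    await cluster.setSeedBroker(controllerId); // subsequent requests hit the controller
};
```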
package/examples/src/benchmark/kafkajs.ts ADDED
@@ -0,0 +1,51 @@
+import { readFileSync } from 'fs';
+import { Kafka } from 'kafkajs';
+import { startBenchmarker } from './common';
+
+const kafkajs = new Kafka({
+    brokers: ['localhost:9092'],
+    clientId: 'kafkajs',
+    sasl: { username: 'admin', password: 'admin', mechanism: 'plain' },
+    ssl: { ca: readFileSync('../certs/ca.crt').toString() },
+});
+
+const producer = kafkajs.producer({ allowAutoTopicCreation: false });
+
+startBenchmarker({
+    createTopic: async ({ topic, partitions, replicationFactor }) => {
+        const admin = kafkajs.admin();
+        await admin.connect();
+        await admin.createTopics({ topics: [{ topic, numPartitions: partitions, replicationFactor }] });
+        await admin.disconnect();
+    },
+    connectProducer: async () => {
+        await producer.connect();
+        return () => producer.disconnect();
+    },
+    startConsumer: async ({ groupId, topic, concurrency, incrementCount }, callback) => {
+        const consumer = kafkajs.consumer({ groupId, allowAutoTopicCreation: false });
+        await consumer.connect();
+        await consumer.subscribe({ topic });
+        await consumer.run({
+            eachBatch: async ({ batch }) => {
+                for (const message of batch.messages) {
+                    callback(parseInt(message.timestamp));
+                }
+            },
+            partitionsConsumedConcurrently: concurrency,
+            autoCommit: true,
+        });
+        consumer.on(consumer.events.COMMIT_OFFSETS, () => incrementCount('OFFSET_COMMIT', 1));
+        return () => consumer.disconnect();
+    },
+    produce: async ({ topic, length, timestamp, acks }) => {
+        await producer.send({
+            topic,
+            messages: Array.from({ length }).map(() => ({
+                value: Buffer.from(timestamp.toString()),
+                timestamp: timestamp.toString(),
+            })),
+            acks,
+        });
+    },
+});
package/examples/src/client.ts CHANGED
@@ -1,5 +1,8 @@
 import { readFileSync } from 'fs';
-import { createKafkaClient, saslScramSha512 } from 'kafka-ts';
+import { createKafkaClient, saslScramSha512, setTracer } from 'kafka-ts';
+import { OpenTelemetryTracer } from './opentelemetry';
+
+setTracer(new OpenTelemetryTracer());
 
 export const kafka = createKafkaClient({
     clientId: 'examples',
package/examples/src/consumer.ts CHANGED
@@ -1,12 +1,18 @@
+import { jsonSerializer, log } from 'kafka-ts';
 import { kafka } from './client';
+import { delay } from '../../dist/utils/delay';
 
 (async () => {
     const consumer = await kafka.startConsumer({
         groupId: 'example-group',
         groupInstanceId: 'example-group-instance',
         topics: ['my-topic'],
+        allowTopicAutoCreation: true,
         onBatch: (batch) => {
-            console.log(batch);
+            log.info(
+                `Received batch: ${JSON.stringify(batch.map((message) => ({ ...message, value: message.value?.toString() })), jsonSerializer)}`,
+            );
+            log.info(`Latency: ${Date.now() - parseInt(batch[0].timestamp.toString())}ms`)
         },
         batchGranularity: 'broker',
         concurrency: 10,
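Editor's note: the move from console.log to log.info with jsonSerializer is not cosmetic — kafka-ts exposes message timestamps (and offsets) as bigint, and plain JSON.stringify throws on bigint values. Assuming jsonSerializer is a JSON.stringify replacer that renders bigints as strings, the relevant behavior is:

```typescript
// Hypothetical replacer equivalent to what jsonSerializer is assumed to do;
// without it, JSON.stringify throws "Do not know how to serialize a BigInt".
const bigintReplacer = (_key: string, value: unknown) =>
    typeof value === 'bigint' ? value.toString() : value;

JSON.stringify({ offset: 0n, timestamp: 1700000000000n }, bigintReplacer);
// → '{"offset":"0","timestamp":"1700000000000"}'
```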
package/examples/src/create-topic.ts CHANGED
@@ -1,4 +1,4 @@
-import { API, API_ERROR, KafkaTSApiError } from 'kafka-ts';
+import { API, API_ERROR, KafkaTSApiError, log } from 'kafka-ts';
 import { kafka } from './client';
 
 (async () => {
@@ -19,7 +19,7 @@ import { kafka } from './client';
                 {
                     name: 'my-topic',
                     numPartitions: 10,
-                    replicationFactor: 3,
+                    replicationFactor: 1,
                     assignments: [],
                     configs: [],
                 },
@@ -37,7 +37,7 @@ import { kafka } from './client';
        topics: [{ id: null, name: 'my-topic' }],
    });
 
-    console.log(metadata);
+    log.info('Metadata', metadata);
 
    await cluster.disconnect();
 })();
package/examples/src/opentelemetry.ts ADDED
@@ -0,0 +1,46 @@
+import { context, ROOT_CONTEXT, trace } from '@opentelemetry/api';
+import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
+import { AsyncHooksContextManager } from '@opentelemetry/context-async-hooks';
+import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-grpc';
+import { NodeSDK } from '@opentelemetry/sdk-node';
+import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
+import { Tracer } from 'kafka-ts';
+
+const contextManager = new AsyncHooksContextManager();
+contextManager.enable();
+context.setGlobalContextManager(contextManager);
+
+const exporter = new OTLPTraceExporter({ url: 'http://localhost:4317' });
+
+const sdk = new NodeSDK({
+    serviceName: 'kafka-ts',
+    traceExporter: exporter,
+    spanProcessors: [new BatchSpanProcessor(exporter)],
+    instrumentations: [getNodeAutoInstrumentations()],
+});
+
+sdk.start();
+
+process.once('SIGINT', () => {
+    sdk.shutdown();
+});
+
+const tracer = trace.getTracer('kafka-ts');
+
+export class OpenTelemetryTracer implements Tracer {
+    startActiveSpan(module, method, { body, ...metadata } = {} as any, callback) {
+        return tracer.startActiveSpan(
+            `${module}.${method} ${metadata?.message ?? ''}`,
+            { attributes: metadata },
+            metadata?.root ? ROOT_CONTEXT : context.active(),
+            (span) => {
+                const result = callback();
+                if (result instanceof Promise) {
+                    return result.finally(() => span.end());
+                }
+                span.end();
+                return result;
+            },
+        );
+    }
+}
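Editor's note: this class is what client.ts passes to setTracer above. Any object with a compatible startActiveSpan(module, method, metadata, callback) method should satisfy the Tracer interface, so tracing can be stubbed without the OpenTelemetry stack — a minimal console-backed sketch under that assumption (the timing logic is ours):

```typescript
import { setTracer, Tracer } from 'kafka-ts';

// Minimal dependency-free Tracer sketch mirroring the startActiveSpan
// signature used by OpenTelemetryTracer above.
class ConsoleTracer implements Tracer {
    startActiveSpan(module: string, method: string, metadata: any, callback: () => unknown) {
        const start = Date.now();
        const done = () => console.log(`${module}.${method} took ${Date.now() - start}ms`);
        const result = callback();
        if (result instanceof Promise) {
            return result.finally(done); // end the "span" when the async call settles
        }
        done();
        return result;
    }
}

setTracer(new ConsoleTracer());
```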
package/examples/src/producer.ts CHANGED
@@ -4,19 +4,19 @@ import { kafka } from './client';
 const producer = kafka.createProducer({ allowTopicAutoCreation: true });
 
 const rl = createInterface({ input: process.stdin, output: process.stdout });
+process.once('SIGINT', rl.close);
 
 process.stdout.write('> ');
 rl.on('line', async (line) => {
-    await producer.send([
-        {
-            topic: 'example-topic-f',
-            value: Buffer.from(line),
-        },
-    ]);
+    await producer.send(
+        [
+            {
+                topic: 'my-topic',
+                value: Buffer.from(line),
+            },
+        ],
+        { acks: -1 },
+    );
     process.stdout.write('> ');
 });
-
-process.on('SIGINT', async () => {
-    rl.close();
-    await producer.close();
-});
+rl.once('close', () => producer.close());
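Editor's note: the rewritten send call also makes the acknowledgement level explicit. With acks: -1 ("all") the broker replies only once every in-sync replica has the record; with acks: 1 it replies as soon as the partition leader has written it, trading durability for latency — hence the acks: -1 | 1 knob in the benchmark harness above. A short sketch of choosing per call, using the same send() options shape as the diff:

```typescript
import { kafka } from './client';

const producer = kafka.createProducer({ allowTopicAutoCreation: true });

(async () => {
    // acks: -1 → acknowledged only after all in-sync replicas have the record
    // (safest); acks: 1 → leader-only acknowledgement (faster, can lose data
    // if the leader fails before replication).
    await producer.send([{ topic: 'my-topic', value: Buffer.from('audit-event') }], { acks: -1 });
    await producer.send([{ topic: 'my-topic', value: Buffer.from('metric-tick') }], { acks: 1 });
    await producer.close();
})();
```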
package/examples/src/replicator.ts CHANGED
@@ -1,3 +1,4 @@
+import { log } from 'kafka-ts';
 import { kafka } from './client';
 
 (async () => {
@@ -15,7 +16,7 @@ import { kafka } from './client';
                offset: 0n,
            })),
        );
-        console.log(`Replicated ${messages.length} messages`);
+        log.info(`Replicated ${messages.length} messages`);
     },
 });
 process.on('SIGINT', async () => {
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "kafka-ts",
-  "version": "0.0.1-beta.3",
+  "version": "0.0.1-beta.6",
   "main": "dist/index.js",
   "author": "Priit Käärd",
   "license": "MIT",
@@ -9,7 +9,9 @@
     "url": "https://github.com/priitkaard/kafka-ts.git"
   },
   "scripts": {
-    "version:bump": "npm version prerelease --preid=beta",
+    "start": "docker-compose down && KAFKA_VERSION=3.7.1 docker-compose up -d && sleep 5 && bash ./scripts/create-scram-user.sh",
+    "version:beta": "npm version prerelease --preid=beta",
+    "version:patch": "npm version patch",
     "format": "prettier --write .",
     "build": "tsc",
     "watch": "tsc -w",
package/scripts/create-scram-user.sh CHANGED
@@ -1,5 +1,7 @@
 #!/bin/bash
 set -e
 
-kafka-configs --bootstrap-server localhost:9092 --command-config kafka-local.properties --alter --add-config 'SCRAM-SHA-256=[password=admin]' --entity-type users --entity-name admin
-kafka-configs --bootstrap-server localhost:9092 --command-config kafka-local.properties --alter --add-config 'SCRAM-SHA-512=[password=admin]' --entity-type users --entity-name admin
+SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
+
+kafka-configs --bootstrap-server localhost:9092 --command-config "$SCRIPT_DIR/kafka-local.properties" --alter --add-config 'SCRAM-SHA-256=[password=admin]' --entity-type users --entity-name admin
+kafka-configs --bootstrap-server localhost:9092 --command-config "$SCRIPT_DIR/kafka-local.properties" --alter --add-config 'SCRAM-SHA-512=[password=admin]' --entity-type users --entity-name admin
package/scripts/generate-certs.sh CHANGED
@@ -23,3 +23,5 @@ keytool -importkeystore -srckeystore certs/kafka.keystore.jks \
     -deststorepass password \
     -destkeystore certs/kafka.truststore.jks \
     -noprompt
+
+rm certs/{ca.key,ca.srl,kafka.crt,kafka.csr,kafka.key}