kafka-ts 0.0.1-beta.3 → 0.0.1-beta.6
- package/.github/workflows/release.yml +19 -6
- package/README.md +15 -21
- package/dist/api/api-versions.d.ts +9 -0
- package/dist/api/api-versions.js +24 -0
- package/dist/api/create-topics.d.ts +38 -0
- package/dist/api/create-topics.js +53 -0
- package/dist/api/delete-topics.d.ts +18 -0
- package/dist/api/delete-topics.js +33 -0
- package/dist/api/fetch.d.ts +84 -0
- package/dist/api/fetch.js +142 -0
- package/dist/api/find-coordinator.d.ts +21 -0
- package/dist/api/find-coordinator.js +39 -0
- package/dist/api/heartbeat.d.ts +11 -0
- package/dist/api/heartbeat.js +27 -0
- package/dist/api/index.d.ts +578 -0
- package/dist/api/index.js +165 -0
- package/dist/api/init-producer-id.d.ts +13 -0
- package/dist/api/init-producer-id.js +29 -0
- package/dist/api/join-group.d.ts +34 -0
- package/dist/api/join-group.js +51 -0
- package/dist/api/leave-group.d.ts +19 -0
- package/dist/api/leave-group.js +39 -0
- package/dist/api/list-offsets.d.ts +29 -0
- package/dist/api/list-offsets.js +48 -0
- package/dist/api/metadata.d.ts +40 -0
- package/dist/api/metadata.js +58 -0
- package/dist/api/offset-commit.d.ts +28 -0
- package/dist/api/offset-commit.js +48 -0
- package/dist/api/offset-fetch.d.ts +33 -0
- package/dist/api/offset-fetch.js +57 -0
- package/dist/api/produce.d.ts +54 -0
- package/dist/api/produce.js +126 -0
- package/dist/api/sasl-authenticate.d.ts +11 -0
- package/dist/api/sasl-authenticate.js +23 -0
- package/dist/api/sasl-handshake.d.ts +6 -0
- package/dist/api/sasl-handshake.js +19 -0
- package/dist/api/sync-group.d.ts +24 -0
- package/dist/api/sync-group.js +36 -0
- package/dist/auth/index.d.ts +2 -0
- package/dist/auth/index.js +8 -0
- package/dist/auth/plain.d.ts +5 -0
- package/dist/auth/plain.js +12 -0
- package/dist/auth/scram.d.ts +9 -0
- package/dist/auth/scram.js +40 -0
- package/dist/broker.d.ts +30 -0
- package/dist/broker.js +55 -0
- package/dist/client.d.ts +23 -0
- package/dist/client.js +36 -0
- package/dist/cluster.d.ts +27 -0
- package/dist/cluster.js +70 -0
- package/dist/cluster.test.d.ts +1 -0
- package/dist/cluster.test.js +345 -0
- package/dist/codecs/gzip.d.ts +2 -0
- package/dist/codecs/gzip.js +8 -0
- package/dist/codecs/index.d.ts +2 -0
- package/dist/codecs/index.js +17 -0
- package/dist/codecs/none.d.ts +2 -0
- package/dist/codecs/none.js +7 -0
- package/dist/codecs/types.d.ts +5 -0
- package/dist/codecs/types.js +2 -0
- package/dist/connection.d.ts +26 -0
- package/dist/connection.js +175 -0
- package/dist/consumer/consumer-group.d.ts +41 -0
- package/dist/consumer/consumer-group.js +217 -0
- package/dist/consumer/consumer-metadata.d.ts +7 -0
- package/dist/consumer/consumer-metadata.js +14 -0
- package/dist/consumer/consumer.d.ts +44 -0
- package/dist/consumer/consumer.js +225 -0
- package/dist/consumer/fetch-manager.d.ts +33 -0
- package/dist/consumer/fetch-manager.js +140 -0
- package/dist/consumer/fetcher.d.ts +25 -0
- package/dist/consumer/fetcher.js +64 -0
- package/dist/consumer/offset-manager.d.ts +22 -0
- package/dist/consumer/offset-manager.js +66 -0
- package/dist/consumer/processor.d.ts +19 -0
- package/dist/consumer/processor.js +59 -0
- package/dist/distributors/assignments-to-replicas.d.ts +16 -0
- package/dist/distributors/assignments-to-replicas.js +59 -0
- package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
- package/dist/distributors/assignments-to-replicas.test.js +40 -0
- package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
- package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
- package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
- package/dist/distributors/partitioner.d.ts +7 -0
- package/dist/distributors/partitioner.js +23 -0
- package/dist/index.d.ts +9 -0
- package/dist/index.js +26 -0
- package/dist/metadata.d.ts +24 -0
- package/dist/metadata.js +106 -0
- package/dist/producer/producer.d.ts +24 -0
- package/dist/producer/producer.js +131 -0
- package/dist/types.d.ts +11 -0
- package/dist/types.js +2 -0
- package/dist/utils/api.d.ts +9 -0
- package/dist/utils/api.js +5 -0
- package/dist/utils/crypto.d.ts +8 -0
- package/dist/utils/crypto.js +18 -0
- package/dist/utils/decoder.d.ts +30 -0
- package/dist/utils/decoder.js +152 -0
- package/dist/utils/delay.d.ts +1 -0
- package/dist/utils/delay.js +5 -0
- package/dist/utils/encoder.d.ts +28 -0
- package/dist/utils/encoder.js +125 -0
- package/dist/utils/error.d.ts +11 -0
- package/dist/utils/error.js +27 -0
- package/dist/utils/logger.d.ts +9 -0
- package/dist/utils/logger.js +32 -0
- package/dist/utils/memo.d.ts +1 -0
- package/dist/utils/memo.js +16 -0
- package/dist/utils/murmur2.d.ts +3 -0
- package/dist/utils/murmur2.js +40 -0
- package/dist/utils/retrier.d.ts +10 -0
- package/dist/utils/retrier.js +22 -0
- package/dist/utils/tracer.d.ts +5 -0
- package/dist/utils/tracer.js +39 -0
- package/docker-compose.yml +3 -3
- package/examples/package-lock.json +3501 -3
- package/examples/package.json +8 -1
- package/examples/src/benchmark/common.ts +98 -0
- package/examples/src/benchmark/kafka-ts.ts +67 -0
- package/examples/src/benchmark/kafkajs.ts +51 -0
- package/examples/src/client.ts +4 -1
- package/examples/src/consumer.ts +7 -1
- package/examples/src/create-topic.ts +3 -3
- package/examples/src/opentelemetry.ts +46 -0
- package/examples/src/producer.ts +11 -11
- package/examples/src/replicator.ts +2 -1
- package/package.json +4 -2
- package/scripts/create-scram-user.sh +4 -2
- package/scripts/generate-certs.sh +2 -0
- package/src/__snapshots__/cluster.test.ts.snap +160 -53
- package/src/api/fetch.ts +83 -28
- package/src/api/index.ts +3 -1
- package/src/api/metadata.ts +1 -1
- package/src/api/produce.ts +7 -10
- package/src/cluster.test.ts +10 -7
- package/src/cluster.ts +36 -38
- package/src/codecs/gzip.ts +9 -0
- package/src/codecs/index.ts +16 -0
- package/src/codecs/none.ts +6 -0
- package/src/codecs/types.ts +4 -0
- package/src/connection.ts +31 -17
- package/src/consumer/consumer-group.ts +43 -21
- package/src/consumer/consumer.ts +58 -37
- package/src/consumer/fetch-manager.ts +36 -46
- package/src/consumer/fetcher.ts +20 -13
- package/src/consumer/offset-manager.ts +18 -7
- package/src/consumer/processor.ts +14 -8
- package/src/distributors/assignments-to-replicas.ts +1 -3
- package/src/index.ts +2 -0
- package/src/metadata.ts +4 -0
- package/src/producer/producer.ts +14 -9
- package/src/utils/api.ts +1 -1
- package/src/utils/decoder.ts +9 -3
- package/src/utils/encoder.ts +26 -19
- package/src/utils/logger.ts +37 -0
- package/src/utils/tracer.ts +40 -22
- package/certs/ca.key +0 -52
- package/certs/ca.srl +0 -1
- package/certs/kafka.crt +0 -29
- package/certs/kafka.csr +0 -26
- package/certs/kafka.key +0 -52
- package/src/utils/debug.ts +0 -9
package/examples/package.json
CHANGED
@@ -5,10 +5,17 @@
   "main": "dist/replicator.js",
   "scripts": {
     "test": "echo \"Error: no test specified\" && exit 1",
+    "start:jaeger": "docker run --rm --name jaeger -e COLLECTOR_OTLP_ENABLED=true -p 16686:16686 -p 4317:4317 -p 4318:4318 -d jaegertracing/all-in-one:1.62.0",
     "build": "tsc"
   },
   "dependencies": {
-    "
+    "@opentelemetry/api": "^1.9.0",
+    "@opentelemetry/auto-instrumentations-node": "^0.51.0",
+    "@opentelemetry/context-async-hooks": "^1.27.0",
+    "@opentelemetry/exporter-trace-otlp-grpc": "^0.54.0",
+    "@opentelemetry/sdk-node": "^0.54.0",
+    "kafka-ts": "file:../",
+    "kafkajs": "^2.2.4"
   },
   "author": "",
   "license": "ISC"
package/examples/src/benchmark/common.ts
ADDED
@@ -0,0 +1,98 @@
+import { delay } from '../utils/delay';
+
+export const startBenchmarker = async ({
+    createTopic,
+    connectProducer,
+    startConsumer,
+    produce,
+}: {
+    createTopic: (opts: { topic: string; partitions: number; replicationFactor: number }) => Promise<void>;
+    connectProducer: () => Promise<() => unknown>;
+    startConsumer: (
+        opts: {
+            groupId: string;
+            topic: string;
+            concurrency: number;
+            incrementCount: (key: string, value: number) => void;
+        },
+        callback: (timestamp: number) => void,
+    ) => Promise<() => unknown>;
+    produce: (opts: { topic: string; length: number; timestamp: number; acks: -1 | 1 }) => Promise<void>;
+}) => {
+    const benchmarkId = `benchmark-${Date.now()}`;
+    const {
+        TOPIC = benchmarkId,
+        PRODUCER = 'true',
+        CONSUMER = 'true',
+        PARTITIONS = '10',
+        REPLICATION_FACTOR = '3',
+        CONCURRENCY = '1',
+        PRODUCE_BATCH_SIZE = '10',
+        PRODUCE_DELAY_MS = '0',
+    } = process.env;
+    const enableProducer = PRODUCER === 'true';
+    const enableConsumer = CONSUMER === 'true';
+    const partitions = parseInt(PARTITIONS);
+    const replicationFactor = parseInt(REPLICATION_FACTOR);
+    const concurrency = parseInt(CONCURRENCY);
+    const produceBatchSize = parseInt(PRODUCE_BATCH_SIZE);
+    const produceDelayMs = parseInt(PRODUCE_DELAY_MS);
+
+    await createTopic({ topic: TOPIC, partitions, replicationFactor }).catch(console.error);
+    await delay(2500);
+
+    let counts: Record<string, number> = {};
+    let sums: Record<string, number> = {};
+
+    const incrementCount = (key: string, value: number) => {
+        counts[key] = (counts[key] || 0) + value;
+    };
+
+    const incrementSum = (key: string, value: number) => {
+        sums[key] = (sums[key] || 0) + value;
+    };
+
+    const stopProducer = await connectProducer();
+
+    const stopConsumer =
+        enableConsumer &&
+        (await startConsumer({ groupId: benchmarkId, topic: TOPIC, concurrency, incrementCount }, (timestamp) => {
+            incrementCount('CONSUMER', 1);
+            incrementSum('CONSUMER', Date.now() - timestamp);
+        }));
+
+    const interval = setInterval(() => {
+        const latencies = Object.entries(sums)
+            .map(([key, sum]) => `${key} ${(sum / counts[key]).toFixed(2)}ms`)
+            .sort()
+            .join(', ');
+
+        const counters = Object.entries(counts)
+            .map(([key, count]) => `${key} ${count}`)
+            .sort()
+            .join(', ');
+
+        console.log(`Latency: ${latencies} | Counters: ${counters}`);
+        counts = {};
+        sums = {};
+    }, 1000);
+
+    let isRunning = true;
+    const produceLoop = async () => {
+        if (!isRunning) return;
+        const start = Date.now();
+        await produce({ topic: TOPIC, length: produceBatchSize, timestamp: Date.now(), acks: -1 });
+        incrementCount('PRODUCER', 1);
+        incrementSum('PRODUCER', Date.now() - start);
+        produceDelayMs && (await delay(produceDelayMs));
+        produceLoop();
+    };
+    enableProducer && produceLoop();
+
+    process.once('SIGINT', async () => {
+        isRunning = false;
+        stopConsumer && (await stopConsumer());
+        await stopProducer();
+        clearInterval(interval);
+    });
+};
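The harness above is client-agnostic: each adapter supplies createTopic, connectProducer, startConsumer, and produce, and the run is tuned through environment variables (TOPIC, PRODUCER, CONSUMER, PARTITIONS, REPLICATION_FACTOR, CONCURRENCY, PRODUCE_BATCH_SIZE, PRODUCE_DELAY_MS). A minimal sketch of that contract, using hypothetical no-op stubs rather than a real Kafka client (the two real adapters follow below):

    import { startBenchmarker } from './common';

    // Hypothetical no-op adapters, only to illustrate the expected contract;
    // the real kafka-ts and kafkajs adapters are in the next two files.
    startBenchmarker({
        createTopic: async () => {}, // pretend the topic already exists
        connectProducer: async () => () => {}, // resolves to a "close the producer" function
        startConsumer: async (_opts, callback) => {
            const timer = setInterval(() => callback(Date.now()), 10); // fake one consumed message every 10ms
            return () => clearInterval(timer); // the returned function stops the consumer
        },
        produce: async () => {}, // messages go nowhere
    });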
package/examples/src/benchmark/kafka-ts.ts
ADDED
@@ -0,0 +1,67 @@
+import { readFileSync } from 'fs';
+import { API, createKafkaClient, saslScramSha512 } from 'kafka-ts';
+import { startBenchmarker } from './common';
+
+// setTracer(new OpenTelemetryTracer());
+
+const kafka = createKafkaClient({
+    bootstrapServers: [{ host: 'localhost', port: 9092 }],
+    clientId: 'kafka-ts',
+    sasl: saslScramSha512({ username: 'admin', password: 'admin' }),
+    ssl: { ca: readFileSync('../certs/ca.crt').toString() },
+});
+
+const producer = kafka.createProducer({ allowTopicAutoCreation: false });
+
+startBenchmarker({
+    createTopic: async ({ topic, partitions, replicationFactor }) => {
+        const cluster = kafka.createCluster();
+        await cluster.connect();
+
+        const { controllerId } = await cluster.sendRequest(API.METADATA, {
+            allowTopicAutoCreation: false,
+            includeTopicAuthorizedOperations: false,
+            topics: [],
+        });
+        await cluster.setSeedBroker(controllerId);
+        await cluster.sendRequest(API.CREATE_TOPICS, {
+            validateOnly: false,
+            timeoutMs: 10_000,
+            topics: [
+                {
+                    name: topic,
+                    numPartitions: partitions,
+                    replicationFactor,
+                    assignments: [],
+                    configs: [],
+                },
+            ],
+        });
+        await cluster.disconnect();
+    },
+    connectProducer: async () => () => producer.close(),
+    startConsumer: async ({ groupId, topic, concurrency, incrementCount }, callback) => {
+        const consumer = await kafka.startConsumer({
+            groupId,
+            topics: [topic],
+            onBatch: async (messages) => {
+                for (const message of messages) {
+                    callback(parseInt(message.timestamp.toString()));
+                }
+            },
+            concurrency,
+        });
+        consumer.on('offsetCommit', () => incrementCount('OFFSET_COMMIT', 1));
+        return () => consumer.close();
+    },
+    produce: async ({ topic, length, timestamp, acks }) => {
+        await producer.send(
+            Array.from({ length }).map(() => ({
+                topic: topic,
+                value: Buffer.from('hello'),
+                timestamp: BigInt(timestamp),
+            })),
+            { acks },
+        );
+    },
+});
package/examples/src/benchmark/kafkajs.ts
ADDED
@@ -0,0 +1,51 @@
+import { readFileSync } from 'fs';
+import { Kafka } from 'kafkajs';
+import { startBenchmarker } from './common';
+
+const kafkajs = new Kafka({
+    brokers: ['localhost:9092'],
+    clientId: 'kafkajs',
+    sasl: { username: 'admin', password: 'admin', mechanism: 'plain' },
+    ssl: { ca: readFileSync('../certs/ca.crt').toString() },
+});
+
+const producer = kafkajs.producer({ allowAutoTopicCreation: false });
+
+startBenchmarker({
+    createTopic: async ({ topic, partitions, replicationFactor }) => {
+        const admin = kafkajs.admin();
+        await admin.connect();
+        await admin.createTopics({ topics: [{ topic, numPartitions: partitions, replicationFactor }] });
+        await admin.disconnect();
+    },
+    connectProducer: async () => {
+        await producer.connect();
+        return () => producer.disconnect();
+    },
+    startConsumer: async ({ groupId, topic, concurrency, incrementCount }, callback) => {
+        const consumer = kafkajs.consumer({ groupId, allowAutoTopicCreation: false });
+        await consumer.connect();
+        await consumer.subscribe({ topic });
+        await consumer.run({
+            eachBatch: async ({ batch }) => {
+                for (const message of batch.messages) {
+                    callback(parseInt(message.timestamp));
+                }
+            },
+            partitionsConsumedConcurrently: concurrency,
+            autoCommit: true,
+        });
+        consumer.on(consumer.events.COMMIT_OFFSETS, () => incrementCount('OFFSET_COMMIT', 1));
+        return () => consumer.disconnect();
+    },
+    produce: async ({ topic, length, timestamp, acks }) => {
+        await producer.send({
+            topic,
+            messages: Array.from({ length }).map(() => ({
+                value: Buffer.from(timestamp.toString()),
+                timestamp: timestamp.toString(),
+            })),
+            acks,
+        });
+    },
+});
package/examples/src/client.ts
CHANGED
@@ -1,5 +1,8 @@
 import { readFileSync } from 'fs';
-import { createKafkaClient, saslScramSha512 } from 'kafka-ts';
+import { createKafkaClient, saslScramSha512, setTracer } from 'kafka-ts';
+import { OpenTelemetryTracer } from './opentelemetry';
+
+setTracer(new OpenTelemetryTracer());
 
 export const kafka = createKafkaClient({
     clientId: 'examples',
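The Tracer interface itself does not appear in this diff. Judging from the OpenTelemetryTracer added in examples/src/opentelemetry.ts further down, its single method is startActiveSpan(module, method, metadata, callback), which must return the callback's result. A minimal console tracer under that assumption might look like:

    import { setTracer, Tracer } from 'kafka-ts';

    // Hypothetical console tracer; the startActiveSpan signature is inferred from
    // the OpenTelemetryTracer example below, not from documented kafka-ts API.
    class ConsoleTracer implements Tracer {
        startActiveSpan(module: string, method: string, metadata: Record<string, unknown> | undefined, callback: () => unknown) {
            console.log(`${module}.${method}`, metadata ?? {});
            return callback(); // pass the result through so async operations keep working
        }
    }

    setTracer(new ConsoleTracer());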
package/examples/src/consumer.ts
CHANGED
@@ -1,12 +1,18 @@
+import { jsonSerializer, log } from 'kafka-ts';
 import { kafka } from './client';
+import { delay } from '../../dist/utils/delay';
 
 (async () => {
     const consumer = await kafka.startConsumer({
         groupId: 'example-group',
         groupInstanceId: 'example-group-instance',
         topics: ['my-topic'],
+        allowTopicAutoCreation: true,
         onBatch: (batch) => {
-
+            log.info(
+                `Received batch: ${JSON.stringify(batch.map((message) => ({ ...message, value: message.value?.toString() })), jsonSerializer)}`,
+            );
+            log.info(`Latency: ${Date.now() - parseInt(batch[0].timestamp.toString())}ms`)
         },
         batchGranularity: 'broker',
         concurrency: 10,
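jsonSerializer is used here as the replacer argument of JSON.stringify. Its implementation is not part of this diff, but since kafka-ts exposes offsets and timestamps as bigint (which JSON.stringify rejects with a TypeError), it is presumably a replacer along these lines (hypothetical sketch):

    // Assumed shape of kafka-ts's jsonSerializer: a JSON.stringify replacer
    // that converts bigint values (offsets, timestamps) into strings.
    const jsonSerializer = (_key: string, value: unknown) =>
        typeof value === 'bigint' ? value.toString() : value;

    JSON.stringify({ offset: 42n, timestamp: 1700000000000n }, jsonSerializer);
    // → '{"offset":"42","timestamp":"1700000000000"}'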
package/examples/src/create-topic.ts
CHANGED
@@ -1,4 +1,4 @@
-import { API, API_ERROR, KafkaTSApiError } from 'kafka-ts';
+import { API, API_ERROR, KafkaTSApiError, log } from 'kafka-ts';
 import { kafka } from './client';
 
 (async () => {
@@ -19,7 +19,7 @@ import { kafka } from './client';
             {
                 name: 'my-topic',
                 numPartitions: 10,
-                replicationFactor:
+                replicationFactor: 1,
                 assignments: [],
                 configs: [],
             },
@@ -37,7 +37,7 @@ import { kafka } from './client';
         topics: [{ id: null, name: 'my-topic' }],
     });
 
-
+    log.info('Metadata', metadata);
 
    await cluster.disconnect();
 })();
package/examples/src/opentelemetry.ts
ADDED
@@ -0,0 +1,46 @@
+import { context, ROOT_CONTEXT, trace } from '@opentelemetry/api';
+import { getNodeAutoInstrumentations } from '@opentelemetry/auto-instrumentations-node';
+import { AsyncHooksContextManager } from '@opentelemetry/context-async-hooks';
+import { OTLPTraceExporter } from '@opentelemetry/exporter-trace-otlp-grpc';
+import { NodeSDK } from '@opentelemetry/sdk-node';
+import { BatchSpanProcessor } from '@opentelemetry/sdk-trace-base';
+import { Tracer } from 'kafka-ts';
+
+const contextManager = new AsyncHooksContextManager();
+contextManager.enable();
+context.setGlobalContextManager(contextManager);
+
+const exporter = new OTLPTraceExporter({ url: 'http://localhost:4317' });
+
+const sdk = new NodeSDK({
+    serviceName: 'kafka-ts',
+    traceExporter: exporter,
+    spanProcessors: [new BatchSpanProcessor(exporter)],
+    instrumentations: [getNodeAutoInstrumentations()],
+});
+
+sdk.start();
+
+process.once('SIGINT', () => {
+    sdk.shutdown();
+});
+
+const tracer = trace.getTracer('kafka-ts');
+
+export class OpenTelemetryTracer implements Tracer {
+    startActiveSpan(module, method, { body, ...metadata } = {} as any, callback) {
+        return tracer.startActiveSpan(
+            `${module}.${method} ${metadata?.message ?? ''}`,
+            { attributes: metadata },
+            metadata?.root ? ROOT_CONTEXT : context.active(),
+            (span) => {
+                const result = callback();
+                if (result instanceof Promise) {
+                    return result.finally(() => span.end());
+                }
+                span.end();
+                return result;
+            },
+        );
+    }
+}
package/examples/src/producer.ts
CHANGED
|
@@ -4,19 +4,19 @@ import { kafka } from './client';
|
|
|
4
4
|
const producer = kafka.createProducer({ allowTopicAutoCreation: true });
|
|
5
5
|
|
|
6
6
|
const rl = createInterface({ input: process.stdin, output: process.stdout });
|
|
7
|
+
process.once('SIGINT', rl.close);
|
|
7
8
|
|
|
8
9
|
process.stdout.write('> ');
|
|
9
10
|
rl.on('line', async (line) => {
|
|
10
|
-
await producer.send(
|
|
11
|
-
|
|
12
|
-
|
|
13
|
-
|
|
14
|
-
|
|
15
|
-
|
|
11
|
+
await producer.send(
|
|
12
|
+
[
|
|
13
|
+
{
|
|
14
|
+
topic: 'my-topic',
|
|
15
|
+
value: Buffer.from(line),
|
|
16
|
+
},
|
|
17
|
+
],
|
|
18
|
+
{ acks: -1 },
|
|
19
|
+
);
|
|
16
20
|
process.stdout.write('> ');
|
|
17
21
|
});
|
|
18
|
-
|
|
19
|
-
process.on('SIGINT', async () => {
|
|
20
|
-
rl.close();
|
|
21
|
-
await producer.close();
|
|
22
|
-
});
|
|
22
|
+
rl.once('close', () => producer.close());
|
|
package/examples/src/replicator.ts
CHANGED
@@ -1,3 +1,4 @@
+import { log } from 'kafka-ts';
 import { kafka } from './client';
 
 (async () => {
@@ -15,7 +16,7 @@ import { kafka } from './client';
                 offset: 0n,
             })),
         );
-
+        log.info(`Replicated ${messages.length} messages`);
     },
 });
 process.on('SIGINT', async () => {
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "kafka-ts",
-  "version": "0.0.1-beta.3",
+  "version": "0.0.1-beta.6",
   "main": "dist/index.js",
   "author": "Priit Käärd",
   "license": "MIT",
@@ -9,7 +9,9 @@
     "url": "https://github.com/priitkaard/kafka-ts.git"
   },
   "scripts": {
-    "
+    "start": "docker-compose down && KAFKA_VERSION=3.7.1 docker-compose up -d && sleep 5 && bash ./scripts/create-scram-user.sh",
+    "version:beta": "npm version prerelease --preid=beta",
+    "version:patch": "npm version patch",
     "format": "prettier --write .",
     "build": "tsc",
     "watch": "tsc -w",
package/scripts/create-scram-user.sh
CHANGED
@@ -1,5 +1,7 @@
 #!/bin/bash
 set -e
 
-
-
+SCRIPT_DIR=$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" &>/dev/null && pwd)
+
+kafka-configs --bootstrap-server localhost:9092 --command-config "$SCRIPT_DIR/kafka-local.properties" --alter --add-config 'SCRAM-SHA-256=[password=admin]' --entity-type users --entity-name admin
+kafka-configs --bootstrap-server localhost:9092 --command-config "$SCRIPT_DIR/kafka-local.properties" --alter --add-config 'SCRAM-SHA-512=[password=admin]' --entity-type users --entity-name admin