kafka-ts 0.0.1-beta

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (196)
  1. package/.prettierrc +7 -0
  2. package/LICENSE +24 -0
  3. package/README.md +88 -0
  4. package/certs/ca.crt +29 -0
  5. package/certs/ca.key +52 -0
  6. package/certs/ca.srl +1 -0
  7. package/certs/kafka.crt +29 -0
  8. package/certs/kafka.csr +26 -0
  9. package/certs/kafka.key +52 -0
  10. package/certs/kafka.keystore.jks +0 -0
  11. package/certs/kafka.truststore.jks +0 -0
  12. package/dist/api/api-versions.d.ts +9 -0
  13. package/dist/api/api-versions.js +24 -0
  14. package/dist/api/create-topics.d.ts +38 -0
  15. package/dist/api/create-topics.js +53 -0
  16. package/dist/api/delete-topics.d.ts +18 -0
  17. package/dist/api/delete-topics.js +33 -0
  18. package/dist/api/fetch.d.ts +77 -0
  19. package/dist/api/fetch.js +106 -0
  20. package/dist/api/find-coordinator.d.ts +21 -0
  21. package/dist/api/find-coordinator.js +39 -0
  22. package/dist/api/heartbeat.d.ts +11 -0
  23. package/dist/api/heartbeat.js +27 -0
  24. package/dist/api/index.d.ts +573 -0
  25. package/dist/api/index.js +164 -0
  26. package/dist/api/init-producer-id.d.ts +13 -0
  27. package/dist/api/init-producer-id.js +29 -0
  28. package/dist/api/join-group.d.ts +34 -0
  29. package/dist/api/join-group.js +51 -0
  30. package/dist/api/leave-group.d.ts +19 -0
  31. package/dist/api/leave-group.js +39 -0
  32. package/dist/api/list-offsets.d.ts +29 -0
  33. package/dist/api/list-offsets.js +48 -0
  34. package/dist/api/metadata.d.ts +40 -0
  35. package/dist/api/metadata.js +58 -0
  36. package/dist/api/offset-commit.d.ts +28 -0
  37. package/dist/api/offset-commit.js +48 -0
  38. package/dist/api/offset-fetch.d.ts +33 -0
  39. package/dist/api/offset-fetch.js +57 -0
  40. package/dist/api/produce.d.ts +53 -0
  41. package/dist/api/produce.js +129 -0
  42. package/dist/api/sasl-authenticate.d.ts +11 -0
  43. package/dist/api/sasl-authenticate.js +23 -0
  44. package/dist/api/sasl-handshake.d.ts +6 -0
  45. package/dist/api/sasl-handshake.js +19 -0
  46. package/dist/api/sync-group.d.ts +24 -0
  47. package/dist/api/sync-group.js +36 -0
  48. package/dist/broker.d.ts +29 -0
  49. package/dist/broker.js +60 -0
  50. package/dist/client.d.ts +23 -0
  51. package/dist/client.js +36 -0
  52. package/dist/cluster.d.ts +24 -0
  53. package/dist/cluster.js +72 -0
  54. package/dist/connection.d.ts +25 -0
  55. package/dist/connection.js +155 -0
  56. package/dist/consumer/consumer-group.d.ts +36 -0
  57. package/dist/consumer/consumer-group.js +182 -0
  58. package/dist/consumer/consumer-metadata.d.ts +7 -0
  59. package/dist/consumer/consumer-metadata.js +14 -0
  60. package/dist/consumer/consumer.d.ts +37 -0
  61. package/dist/consumer/consumer.js +178 -0
  62. package/dist/consumer/metadata.d.ts +24 -0
  63. package/dist/consumer/metadata.js +64 -0
  64. package/dist/consumer/offset-manager.d.ts +22 -0
  65. package/dist/consumer/offset-manager.js +56 -0
  66. package/dist/distributors/assignments-to-replicas.d.ts +17 -0
  67. package/dist/distributors/assignments-to-replicas.js +60 -0
  68. package/dist/distributors/assignments-to-replicas.test.d.ts +1 -0
  69. package/dist/distributors/assignments-to-replicas.test.js +40 -0
  70. package/dist/distributors/messages-to-topic-partition-leaders.d.ts +17 -0
  71. package/dist/distributors/messages-to-topic-partition-leaders.js +15 -0
  72. package/dist/distributors/messages-to-topic-partition-leaders.test.d.ts +1 -0
  73. package/dist/distributors/messages-to-topic-partition-leaders.test.js +30 -0
  74. package/dist/examples/src/replicator.js +34 -0
  75. package/dist/examples/src/utils/json.js +5 -0
  76. package/dist/index.d.ts +3 -0
  77. package/dist/index.js +19 -0
  78. package/dist/metadata.d.ts +24 -0
  79. package/dist/metadata.js +89 -0
  80. package/dist/producer/producer.d.ts +19 -0
  81. package/dist/producer/producer.js +111 -0
  82. package/dist/request-handler.d.ts +16 -0
  83. package/dist/request-handler.js +67 -0
  84. package/dist/request-handler.test.d.ts +1 -0
  85. package/dist/request-handler.test.js +340 -0
  86. package/dist/src/api/api-versions.js +18 -0
  87. package/dist/src/api/create-topics.js +46 -0
  88. package/dist/src/api/delete-topics.js +26 -0
  89. package/dist/src/api/fetch.js +95 -0
  90. package/dist/src/api/find-coordinator.js +34 -0
  91. package/dist/src/api/heartbeat.js +22 -0
  92. package/dist/src/api/index.js +38 -0
  93. package/dist/src/api/init-producer-id.js +24 -0
  94. package/dist/src/api/join-group.js +48 -0
  95. package/dist/src/api/leave-group.js +30 -0
  96. package/dist/src/api/list-offsets.js +39 -0
  97. package/dist/src/api/metadata.js +47 -0
  98. package/dist/src/api/offset-commit.js +39 -0
  99. package/dist/src/api/offset-fetch.js +44 -0
  100. package/dist/src/api/produce.js +119 -0
  101. package/dist/src/api/sync-group.js +31 -0
  102. package/dist/src/broker.js +35 -0
  103. package/dist/src/connection.js +21 -0
  104. package/dist/src/consumer/consumer-group.js +131 -0
  105. package/dist/src/consumer/consumer.js +103 -0
  106. package/dist/src/consumer/metadata.js +52 -0
  107. package/dist/src/consumer/offset-manager.js +23 -0
  108. package/dist/src/index.js +19 -0
  109. package/dist/src/producer/producer.js +84 -0
  110. package/dist/src/request-handler.js +57 -0
  111. package/dist/src/request-handler.test.js +321 -0
  112. package/dist/src/types.js +2 -0
  113. package/dist/src/utils/api.js +5 -0
  114. package/dist/src/utils/decoder.js +161 -0
  115. package/dist/src/utils/encoder.js +137 -0
  116. package/dist/src/utils/error.js +10 -0
  117. package/dist/types.d.ts +9 -0
  118. package/dist/types.js +2 -0
  119. package/dist/utils/api.d.ts +9 -0
  120. package/dist/utils/api.js +5 -0
  121. package/dist/utils/debug.d.ts +2 -0
  122. package/dist/utils/debug.js +11 -0
  123. package/dist/utils/decoder.d.ts +29 -0
  124. package/dist/utils/decoder.js +147 -0
  125. package/dist/utils/delay.d.ts +1 -0
  126. package/dist/utils/delay.js +5 -0
  127. package/dist/utils/encoder.d.ts +28 -0
  128. package/dist/utils/encoder.js +122 -0
  129. package/dist/utils/error.d.ts +11 -0
  130. package/dist/utils/error.js +27 -0
  131. package/dist/utils/memo.d.ts +1 -0
  132. package/dist/utils/memo.js +16 -0
  133. package/dist/utils/retrier.d.ts +10 -0
  134. package/dist/utils/retrier.js +22 -0
  135. package/dist/utils/tracer.d.ts +1 -0
  136. package/dist/utils/tracer.js +26 -0
  137. package/docker-compose.yml +104 -0
  138. package/examples/node_modules/.package-lock.json +22 -0
  139. package/examples/package-lock.json +30 -0
  140. package/examples/package.json +14 -0
  141. package/examples/src/client.ts +9 -0
  142. package/examples/src/consumer.ts +17 -0
  143. package/examples/src/create-topic.ts +37 -0
  144. package/examples/src/producer.ts +24 -0
  145. package/examples/src/replicator.ts +25 -0
  146. package/examples/src/utils/json.ts +1 -0
  147. package/examples/tsconfig.json +7 -0
  148. package/log4j.properties +95 -0
  149. package/package.json +17 -0
  150. package/scripts/generate-certs.sh +24 -0
  151. package/src/__snapshots__/request-handler.test.ts.snap +1687 -0
  152. package/src/api/api-versions.ts +21 -0
  153. package/src/api/create-topics.ts +78 -0
  154. package/src/api/delete-topics.ts +42 -0
  155. package/src/api/fetch.ts +143 -0
  156. package/src/api/find-coordinator.ts +39 -0
  157. package/src/api/heartbeat.ts +33 -0
  158. package/src/api/index.ts +164 -0
  159. package/src/api/init-producer-id.ts +35 -0
  160. package/src/api/join-group.ts +67 -0
  161. package/src/api/leave-group.ts +48 -0
  162. package/src/api/list-offsets.ts +65 -0
  163. package/src/api/metadata.ts +66 -0
  164. package/src/api/offset-commit.ts +67 -0
  165. package/src/api/offset-fetch.ts +74 -0
  166. package/src/api/produce.ts +173 -0
  167. package/src/api/sasl-authenticate.ts +21 -0
  168. package/src/api/sasl-handshake.ts +16 -0
  169. package/src/api/sync-group.ts +54 -0
  170. package/src/broker.ts +74 -0
  171. package/src/client.ts +47 -0
  172. package/src/cluster.ts +87 -0
  173. package/src/connection.ts +141 -0
  174. package/src/consumer/consumer-group.ts +209 -0
  175. package/src/consumer/consumer-metadata.ts +14 -0
  176. package/src/consumer/consumer.ts +229 -0
  177. package/src/consumer/offset-manager.ts +93 -0
  178. package/src/distributors/assignments-to-replicas.test.ts +43 -0
  179. package/src/distributors/assignments-to-replicas.ts +85 -0
  180. package/src/distributors/messages-to-topic-partition-leaders.test.ts +32 -0
  181. package/src/distributors/messages-to-topic-partition-leaders.ts +19 -0
  182. package/src/index.ts +3 -0
  183. package/src/metadata.ts +122 -0
  184. package/src/producer/producer.ts +132 -0
  185. package/src/request-handler.test.ts +366 -0
  186. package/src/types.ts +9 -0
  187. package/src/utils/api.ts +11 -0
  188. package/src/utils/debug.ts +9 -0
  189. package/src/utils/decoder.ts +168 -0
  190. package/src/utils/delay.ts +1 -0
  191. package/src/utils/encoder.ts +141 -0
  192. package/src/utils/error.ts +21 -0
  193. package/src/utils/memo.ts +12 -0
  194. package/src/utils/retrier.ts +39 -0
  195. package/src/utils/tracer.ts +28 -0
  196. package/tsconfig.json +17 -0
package/dist/utils/error.d.ts ADDED
@@ -0,0 +1,11 @@
+ export declare class KafkaTSError extends Error {
+ constructor(message: string);
+ }
+ export declare class KafkaTSApiError<T = any> extends KafkaTSError {
+ errorCode: number;
+ errorMessage: string | null;
+ response: T;
+ constructor(errorCode: number, errorMessage: string | null, response: T);
+ }
+ export declare class ConnectionError extends KafkaTSError {
+ }
package/dist/utils/error.js ADDED
@@ -0,0 +1,27 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.ConnectionError = exports.KafkaTSApiError = exports.KafkaTSError = void 0;
+ const api_1 = require("../api");
+ class KafkaTSError extends Error {
+ constructor(message) {
+ super(message);
+ this.name = this.constructor.name;
+ }
+ }
+ exports.KafkaTSError = KafkaTSError;
+ class KafkaTSApiError extends KafkaTSError {
+ errorCode;
+ errorMessage;
+ response;
+ constructor(errorCode, errorMessage, response) {
+ const [errorName] = Object.entries(api_1.API_ERROR).find(([, value]) => value === errorCode) ?? ["UNKNOWN"];
+ super(`${errorName}${errorMessage ? `: ${errorMessage}` : ""}`);
+ this.errorCode = errorCode;
+ this.errorMessage = errorMessage;
+ this.response = response;
+ }
+ }
+ exports.KafkaTSApiError = KafkaTSApiError;
+ class ConnectionError extends KafkaTSError {
+ }
+ exports.ConnectionError = ConnectionError;
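For reference, a minimal sketch (not part of the package) of how a caller might branch on the error classes declared above. It assumes they are re-exported from the `kafka-ts` entry point, which this diff does not show.

```ts
import { ConnectionError, KafkaTSApiError, KafkaTSError } from "kafka-ts"; // assumed re-export

const handleError = (error: unknown) => {
    if (error instanceof KafkaTSApiError) {
        // Broker replied with a non-zero error code; the decoded response is attached.
        console.error(`API error ${error.errorCode}: ${error.errorMessage ?? "unknown"}`, error.response);
    } else if (error instanceof ConnectionError) {
        // Socket-level failure, typically worth retrying against another broker.
        console.error("Connection failed:", error.message);
    } else if (error instanceof KafkaTSError) {
        console.error("kafka-ts error:", error.message);
    } else {
        throw error;
    }
};
```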
package/dist/utils/memo.d.ts ADDED
@@ -0,0 +1 @@
+ export declare const memo: <T extends (...args: any[]) => any>(fn: T) => (...args: Parameters<T>) => ReturnType<T>;
package/dist/utils/memo.js ADDED
@@ -0,0 +1,16 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.memo = void 0;
+ const memo = (fn) => {
+ const cache = {};
+ return (...args) => {
+ const key = JSON.stringify(args);
+ if (cache[key]) {
+ return cache[key];
+ }
+ const result = fn(...args);
+ cache[key] = result;
+ return result;
+ };
+ };
+ exports.memo = memo;
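The helper above memoizes by `JSON.stringify`-ing the argument list, so it suits cheap, serializable arguments (and, because of the truthiness check, non-falsy results). A small usage sketch; the wrapped function is hypothetical and the deep-import path is an assumption, not a documented entry point.

```ts
import { memo } from "kafka-ts/dist/utils/memo"; // assumed deep import

// Hypothetical computation keyed by its (serializable) arguments.
const partitionsFor = (topic: string, numPartitions: number) =>
    Array.from({ length: numPartitions }, (_, partition) => ({ topic, partition }));

const memoizedPartitionsFor = memo(partitionsFor);
memoizedPartitionsFor("example-topic", 3); // computed, cached under the key '["example-topic",3]'
memoizedPartitionsFor("example-topic", 3); // served from the cache
```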
package/dist/utils/retrier.d.ts ADDED
@@ -0,0 +1,10 @@
+ export type Retrier = (func: () => unknown) => Promise<void>;
+ export declare const createExponentialBackoffRetrier: (options: {
+ onFailure?: (error: unknown) => Promise<void>;
+ maxRetries?: number;
+ initialDelayMs?: number;
+ maxDelayMs?: number;
+ multiplier?: number;
+ retry?: number;
+ }) => Retrier;
+ export declare const defaultRetrier: Retrier;
package/dist/utils/retrier.js ADDED
@@ -0,0 +1,22 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.defaultRetrier = exports.createExponentialBackoffRetrier = void 0;
+ const delay_1 = require("./delay");
+ const createExponentialBackoffRetrier = (options) => async (func) => {
+ try {
+ await func();
+ }
+ catch (error) {
+ const { retry = 0, maxRetries = 3, onFailure = (error) => {
+ throw error;
+ }, initialDelayMs = 100, maxDelayMs = 3000, multiplier = 2, } = options;
+ const isMaxRetriesExceeded = retry > maxRetries;
+ if (isMaxRetriesExceeded)
+ return onFailure(error);
+ const delayMs = Math.min(maxDelayMs, initialDelayMs * multiplier ** retry);
+ await (0, delay_1.delay)(delayMs);
+ return (0, exports.createExponentialBackoffRetrier)({ ...options, retry: retry + 1 })(func);
+ }
+ };
+ exports.createExponentialBackoffRetrier = createExponentialBackoffRetrier;
+ exports.defaultRetrier = (0, exports.createExponentialBackoffRetrier)({});
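A sketch of how the retrier above might be used. Each failed attempt waits `initialDelayMs * multiplier ** retry` milliseconds (capped at `maxDelayMs`) before retrying, and `onFailure` receives the last error once the retry budget is spent. The deep-import path and the wrapped operation are assumptions for illustration.

```ts
import { createExponentialBackoffRetrier } from "kafka-ts/dist/utils/retrier"; // assumed deep import

const retrier = createExponentialBackoffRetrier({
    maxRetries: 5,
    initialDelayMs: 200,
    maxDelayMs: 5000,
    onFailure: async (error) => {
        console.error("giving up after retries", error);
        throw error;
    },
});

// Hypothetical flaky operation, e.g. a produce or fetch call that fails transiently.
const flakyOperation = async () => {
    if (Math.random() < 0.5) throw new Error("transient failure");
};

(async () => {
    await retrier(flakyOperation);
})();
```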
package/dist/utils/tracer.d.ts ADDED
@@ -0,0 +1 @@
+ export declare const trace: (fn?: (...args: any[]) => Record<string, unknown> | undefined) => (target: any, propertyKey: string, descriptor: PropertyDescriptor) => void;
package/dist/utils/tracer.js ADDED
@@ -0,0 +1,26 @@
+ "use strict";
+ Object.defineProperty(exports, "__esModule", { value: true });
+ exports.trace = void 0;
+ const debug_1 = require("./debug");
+ const trace = (fn) => (target, propertyKey, descriptor) => {
+ if (!process.env.DEBUG?.includes("kafkats"))
+ return;
+ const original = descriptor.value;
+ descriptor.value = function (...args) {
+ const startTime = Date.now();
+ const metadata = fn?.(...args);
+ const onEnd = (result) => {
+ console.log(`[${propertyKey}] +${Date.now() - startTime}ms ${JSON.stringify({ ...metadata, result }, debug_1.serializer)}`);
+ return result;
+ };
+ const result = original.apply(this, args);
+ if (result instanceof Promise) {
+ return result.then(onEnd);
+ }
+ else {
+ onEnd(result);
+ return result;
+ }
+ };
+ };
+ exports.trace = trace;
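The decorator above only instruments a method when `DEBUG` contains `kafkats`; otherwise it leaves the descriptor untouched. A hypothetical class showing the intended shape, assuming legacy decorators (`experimentalDecorators`) are enabled and using the same assumed deep-import path as above.

```ts
import { trace } from "kafka-ts/dist/utils/tracer"; // assumed deep import

class ExampleFetcher {
    // With DEBUG=kafkats set, each call logs e.g. `[fetchPage] +12ms {"page":1,"result":{...}}`.
    @trace((page: number) => ({ page }))
    public async fetchPage(page: number) {
        return { page, items: [] as string[] };
    }
}

(async () => {
    await new ExampleFetcher().fetchPage(1);
})();
```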
package/docker-compose.yml ADDED
@@ -0,0 +1,104 @@
+ # kafka with raft:
+ services:
+ kafka-0:
+ container_name: kafka-0
+ image: apache/kafka:3.7.1
+ ports:
+ - "9092:9092"
+ - "29092:29092"
+ environment:
+ KAFKA_NODE_ID: 0
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9092,INTERBROKER://kafka-0:19092'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9092,INTERBROKER://:19092,CONTROLLER://:29092'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-1:
+ container_name: kafka-1
+ image: apache/kafka:3.7.1
+ ports:
+ - "9093:9093"
+ - "29093:29093"
+ environment:
+ KAFKA_NODE_ID: 1
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9093,INTERBROKER://kafka-1:19093'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9093,INTERBROKER://:19093,CONTROLLER://:29093'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
+ kafka-2:
+ container_name: kafka-2
+ image: apache/kafka:3.7.1
+ ports:
+ - "9094:9094"
+ - "29094:29094"
+ environment:
+ KAFKA_NODE_ID: 2
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'CONTROLLER:PLAINTEXT,INTERBROKER:PLAINTEXT,EXTERNAL:SASL_SSL'
+ KAFKA_LISTENER_NAME_EXTERNAL_PLAIN_SASL_JAAS_CONFIG: 'org.apache.kafka.common.security.plain.PlainLoginModule required user_admin="admin";'
+ KAFKA_SASL_ENABLED_MECHANISMS: 'PLAIN'
+ KAFKA_ADVERTISED_LISTENERS: 'EXTERNAL://localhost:9094,INTERBROKER://kafka-2:19094'
+ KAFKA_PROCESS_ROLES: 'broker,controller'
+ KAFKA_CONTROLLER_QUORUM_VOTERS: '0@kafka-0:29092,1@kafka-1:29093,2@kafka-2:29094'
+ KAFKA_LISTENERS: 'EXTERNAL://:9094,INTERBROKER://:19094,CONTROLLER://:29094'
+ KAFKA_INTER_BROKER_LISTENER_NAME: 'INTERBROKER'
+ KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
+ CLUSTER_ID: '4L6g3nShT-eMCtK--X86sw'
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+ KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+ KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
+ KAFKA_SSL_KEYSTORE_LOCATION: '/run/secrets/kafka.server.keystore.jks'
+ KAFKA_SSL_KEYSTORE_PASSWORD: 'password'
+ KAFKA_SSL_KEY_PASSWORD: 'password'
+ KAFKA_SSL_TRUSTSTORE_LOCATION: '/run/secrets/kafka.server.truststore.jks'
+ KAFKA_SSL_TRUSTSTORE_PASSWORD: 'password'
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION.algorithm: ''
+ KAFKA_SSL_CLIENT_AUTH: 'required'
+ volumes:
+ - ./log4j.properties:/etc/kafka/docker/log4j.properties
+ - ./certs/kafka.truststore.jks:/run/secrets/kafka.server.truststore.jks
+ - ./certs/kafka.keystore.jks:/run/secrets/kafka.server.keystore.jks
package/examples/node_modules/.package-lock.json ADDED
@@ -0,0 +1,22 @@
+ {
+ "name": "examples",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "..": {
+ "version": "1.0.0",
+ "license": "MIT",
+ "devDependencies": {
+ "@types/node": "^20.12.12",
+ "prettier": "^3.2.5",
+ "typescript": "^5.4.5",
+ "vitest": "^1.6.0"
+ }
+ },
+ "node_modules/kafkats": {
+ "resolved": "..",
+ "link": true
+ }
+ }
+ }
package/examples/package-lock.json ADDED
@@ -0,0 +1,30 @@
+ {
+ "name": "examples",
+ "version": "1.0.0",
+ "lockfileVersion": 3,
+ "requires": true,
+ "packages": {
+ "": {
+ "name": "examples",
+ "version": "1.0.0",
+ "license": "ISC",
+ "dependencies": {
+ "kafkats": "file:../"
+ }
+ },
+ "..": {
+ "version": "1.0.0",
+ "license": "MIT",
+ "devDependencies": {
+ "@types/node": "^20.12.12",
+ "prettier": "^3.2.5",
+ "typescript": "^5.4.5",
+ "vitest": "^1.6.0"
+ }
+ },
+ "node_modules/kafkats": {
+ "resolved": "..",
+ "link": true
+ }
+ }
+ }
package/examples/package.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "name": "examples",
+ "version": "1.0.0",
+ "description": "",
+ "main": "dist/replicator.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "dependencies": {
+ "kafkats": "file:../"
+ },
+ "author": "",
+ "license": "ISC"
+ }
package/examples/src/client.ts ADDED
@@ -0,0 +1,9 @@
+ import { readFileSync } from "fs";
+ import { createKafkaClient } from "kafkats";
+
+ export const kafka = createKafkaClient({
+ clientId: "examples",
+ bootstrapServers: [{ host: "localhost", port: 9092 }],
+ sasl: { mechanism: "PLAIN", username: "admin", password: "admin" },
+ ssl: { ca: readFileSync("../certs/ca.crt").toString() },
+ });
package/examples/src/consumer.ts ADDED
@@ -0,0 +1,17 @@
+ import { kafka } from "./client";
+
+ (async () => {
+ const consumer = await kafka.startConsumer({
+ groupId: "example-group",
+ groupInstanceId: "example-group-instance",
+ topics: ["example-topic-f"],
+ allowTopicAutoCreation: true,
+ onMessage: (message) => {
+ console.log(message);
+ },
+ });
+
+ process.on("SIGINT", async () => {
+ await consumer.close();
+ });
+ })();
package/examples/src/create-topic.ts ADDED
@@ -0,0 +1,37 @@
+ import { kafka } from "./client";
+ import { API } from "kafkats";
+
+ (async () => {
+ const cluster = kafka.createCluster();
+ await cluster.connect();
+
+ const { controllerId } = await cluster.sendRequest(API.METADATA, {
+ allowTopicAutoCreation: false,
+ includeTopicAuthorizedOperations: false,
+ topics: [],
+ });
+
+ await cluster.sendRequestToNode(controllerId)(API.CREATE_TOPICS, {
+ validateOnly: false,
+ timeoutMs: 10_000,
+ topics: [
+ {
+ name: "my-topic",
+ numPartitions: 10,
+ replicationFactor: 3,
+ assignments: [],
+ configs: [],
+ },
+ ],
+ });
+
+ const metadata = await cluster.sendRequestToNode(controllerId)(API.METADATA, {
+ allowTopicAutoCreation: false,
+ includeTopicAuthorizedOperations: false,
+ topics: [{ id: null, name: "my-topic" }],
+ });
+
+ console.log(metadata);
+
+ await cluster.disconnect();
+ })();
package/examples/src/producer.ts ADDED
@@ -0,0 +1,24 @@
+ import { createInterface } from "readline";
+ import { kafka } from "./client";
+
+ const producer = kafka.createProducer({ allowTopicAutoCreation: true });
+
+ const rl = createInterface({ input: process.stdin, output: process.stdout });
+
+ process.stdout.write("> ");
+ rl.on("line", async (line) => {
+ await producer.send([
+ {
+ topic: "example-topic-f",
+ key: null,
+ value: line,
+ partition: 0,
+ },
+ ]);
+ process.stdout.write("> ");
+ });
+
+ process.on("SIGINT", async () => {
+ rl.close();
+ await producer.close();
+ });
package/examples/src/replicator.ts ADDED
@@ -0,0 +1,25 @@
+ import { kafka } from "./client";
+
+ (async () => {
+ const topic = "example-topic";
+
+ const producer = kafka.createProducer({ allowTopicAutoCreation: true });
+ const consumer = await kafka.startConsumer({
+ topics: [topic],
+ onBatch: async (messages) => {
+ await producer.send(
+ messages.map((message) => ({
+ ...message,
+ headers: { "X-Replicated": "true" },
+ topic: `${message.topic}-replicated`,
+ offset: 0n,
+ })),
+ );
+ console.log(`Replicated ${messages.length} messages`);
+ },
+ });
+ process.on("SIGINT", async () => {
+ await consumer.close();
+ await producer.close();
+ });
+ })();
package/examples/src/utils/json.ts ADDED
@@ -0,0 +1 @@
+ export const serializer = (_: string, value: unknown) => (typeof value === "bigint" ? value.toString() : value);
package/examples/tsconfig.json ADDED
@@ -0,0 +1,7 @@
+ {
+ "extends": "../tsconfig.json",
+ "compilerOptions": {
+ "outDir": "dist",
+ "inlineSourceMap": true
+ },
+ }
package/log4j.properties ADDED
@@ -0,0 +1,95 @@
+ # Licensed to the Apache Software Foundation (ASF) under one or more
+ # contributor license agreements. See the NOTICE file distributed with
+ # this work for additional information regarding copyright ownership.
+ # The ASF licenses this file to You under the Apache License, Version 2.0
+ # (the "License"); you may not use this file except in compliance with
+ # the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+
+ # Unspecified loggers and loggers with additivity=true output to server.log and stdout
+ # Note that INFO only applies to unspecified loggers, the log level of the child logger is used otherwise
+ log4j.rootLogger=INFO, stdout, kafkaAppender
+
+ log4j.appender.stdout=org.apache.log4j.ConsoleAppender
+ log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
+ log4j.appender.stdout.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+ log4j.appender.kafkaAppender=org.apache.log4j.DailyRollingFileAppender
+ log4j.appender.kafkaAppender.DatePattern='.'yyyy-MM-dd-HH
+ log4j.appender.kafkaAppender.File=${kafka.logs.dir}/server.log
+ log4j.appender.kafkaAppender.layout=org.apache.log4j.PatternLayout
+ log4j.appender.kafkaAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+ log4j.appender.stateChangeAppender=org.apache.log4j.DailyRollingFileAppender
+ log4j.appender.stateChangeAppender.DatePattern='.'yyyy-MM-dd-HH
+ log4j.appender.stateChangeAppender.File=${kafka.logs.dir}/state-change.log
+ log4j.appender.stateChangeAppender.layout=org.apache.log4j.PatternLayout
+ log4j.appender.stateChangeAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+ log4j.appender.requestAppender=org.apache.log4j.DailyRollingFileAppender
+ log4j.appender.requestAppender.DatePattern='.'yyyy-MM-dd-HH
+ log4j.appender.requestAppender.File=${kafka.logs.dir}/kafka-request.log
+ log4j.appender.requestAppender.layout=org.apache.log4j.PatternLayout
+ log4j.appender.requestAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+ log4j.appender.cleanerAppender=org.apache.log4j.DailyRollingFileAppender
+ log4j.appender.cleanerAppender.DatePattern='.'yyyy-MM-dd-HH
+ log4j.appender.cleanerAppender.File=${kafka.logs.dir}/log-cleaner.log
+ log4j.appender.cleanerAppender.layout=org.apache.log4j.PatternLayout
+ log4j.appender.cleanerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+ log4j.appender.controllerAppender=org.apache.log4j.DailyRollingFileAppender
+ log4j.appender.controllerAppender.DatePattern='.'yyyy-MM-dd-HH
+ log4j.appender.controllerAppender.File=${kafka.logs.dir}/controller.log
+ log4j.appender.controllerAppender.layout=org.apache.log4j.PatternLayout
+ log4j.appender.controllerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+ log4j.appender.authorizerAppender=org.apache.log4j.DailyRollingFileAppender
+ log4j.appender.authorizerAppender.DatePattern='.'yyyy-MM-dd-HH
+ log4j.appender.authorizerAppender.File=${kafka.logs.dir}/kafka-authorizer.log
+ log4j.appender.authorizerAppender.layout=org.apache.log4j.PatternLayout
+ log4j.appender.authorizerAppender.layout.ConversionPattern=[%d] %p %m (%c)%n
+
+ # Change the line below to adjust ZK client logging
+ log4j.logger.org.apache.zookeeper=INFO
+
+ # Change the two lines below to adjust the general broker logging level (output to server.log and stdout)
+ log4j.logger.kafka=INFO
+ log4j.logger.org.apache.kafka=INFO
+
+ # Change to INFO or TRACE to enable request logging
+ # log4j.logger.kafka.request.logger=TRACE, requestAppender
+ # log4j.additivity.kafka.request.logger=false
+
+ # Uncomment the lines below and change log4j.logger.kafka.network.RequestChannel$ to TRACE for additional output
+ # related to the handling of requests
+ # log4j.logger.kafka.network.Processor=TRACE, requestAppender
+ log4j.logger.kafka.server.KafkaApis=TRACE, requestAppender
+ # log4j.additivity.kafka.server.KafkaApis=false
+ # log4j.logger.kafka.network.RequestChannel$=TRACE, requestAppender
+ # log4j.additivity.kafka.network.RequestChannel$=false
+
+ # Change the line below to adjust KRaft mode controller logging
+ log4j.logger.org.apache.kafka.controller=INFO, controllerAppender
+ log4j.additivity.org.apache.kafka.controller=false
+
+ # Change the line below to adjust ZK mode controller logging
+ log4j.logger.kafka.controller=INFO, controllerAppender
+ log4j.additivity.kafka.controller=false
+
+ log4j.logger.kafka.log.LogCleaner=INFO, cleanerAppender
+ log4j.additivity.kafka.log.LogCleaner=false
+
+ log4j.logger.state.change.logger=INFO, stateChangeAppender
+ log4j.additivity.state.change.logger=false
+
+ # Access denials are logged at INFO level, change to INFO to also log allowed accesses
+ log4j.logger.kafka.authorizer.logger=INFO, authorizerAppender
+ log4j.additivity.kafka.authorizer.logger=false
package/package.json ADDED
@@ -0,0 +1,17 @@
+ {
+ "name": "kafka-ts",
+ "version": "0.0.1-beta",
+ "main": "dist/index.js",
+ "author": "Priit Käärd",
+ "license": "MIT",
+ "scripts": {
+ "build": "tsc",
+ "test": "vitest --testTimeout 60000 --bail 1"
+ },
+ "devDependencies": {
+ "@types/node": "^20.12.12",
+ "prettier": "^3.2.5",
+ "typescript": "^5.4.5",
+ "vitest": "^1.6.0"
+ }
+ }
package/scripts/generate-certs.sh ADDED
@@ -0,0 +1,24 @@
+ #!/bin/bash
+
+ # 1. Generating a x509 (CA) cert from a private key:
+ openssl genrsa -out certs/ca.key 4096
+ openssl req -new -x509 -key certs/ca.key -days 87660 -subj "/CN=kafka-ca" -out certs/ca.crt
+
+ # 2. Generating a private key for kafka server and csr:
+ openssl genrsa -out certs/kafka.key 4096
+ openssl req -new -nodes -key certs/kafka.key -out certs/kafka.csr -subj "/CN=kafka"
+ openssl x509 -req -in certs/kafka.csr -CA certs/ca.crt -CAkey certs/ca.key -CAcreateserial -out certs/kafka.crt -days 3650 -extensions SAN -extfile <(printf "[SAN]\nsubjectAltName=DNS:localhost")
+
+ # 3. Generating keystore for kafka server:
+ openssl pkcs12 -export -in certs/kafka.crt \
+ -passout pass:password \
+ -inkey certs/kafka.key \
+ -out certs/kafka.keystore.jks
+
+ # 4. Generating truststore for kafka server:
+ keytool -importkeystore -srckeystore certs/kafka.keystore.jks \
+ -srcstoretype PKCS12 \
+ -srcstorepass password \
+ -deststorepass password \
+ -destkeystore certs/kafka.truststore.jks \
+ -noprompt