@514labs/moose-lib 0.6.276-ci-6-g278c5539 → 0.6.276-ci-1-gfe86cd2c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.mjs CHANGED
@@ -18,6 +18,8 @@ var __export = (target, all) => {
18
18
  var commons_exports = {};
19
19
  __export(commons_exports, {
20
20
  ACKs: () => ACKs,
21
+ CODES: () => CODES,
22
+ KafkaConsumer: () => KafkaConsumer,
21
23
  MAX_RETRIES: () => MAX_RETRIES,
22
24
  MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
23
25
  MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
@@ -26,6 +28,7 @@ __export(commons_exports, {
26
28
  antiCachePath: () => antiCachePath,
27
29
  cliLog: () => cliLog,
28
30
  compilerLog: () => compilerLog,
31
+ createNativeKafkaConsumer: () => createNativeKafkaConsumer,
29
32
  createProducerConfig: () => createProducerConfig,
30
33
  getClickhouseClient: () => getClickhouseClient,
31
34
  getFileName: () => getFileName,
@@ -36,7 +39,11 @@ __export(commons_exports, {
36
39
  });
37
40
  import http from "http";
38
41
  import { createClient } from "@clickhouse/client";
39
- import { KafkaJS } from "@confluentinc/kafka-javascript";
42
+ import {
43
+ KafkaConsumer,
44
+ KafkaJS,
45
+ CODES
46
+ } from "@confluentinc/kafka-javascript";
40
47
  function isTruthy(value) {
41
48
  if (!value) return false;
42
49
  switch (value.trim().toLowerCase()) {
@@ -74,7 +81,7 @@ async function getKafkaProducer(cfg, logger, maxMessageBytes) {
74
81
  await producer.connect();
75
82
  return producer;
76
83
  }
77
- var Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient;
84
+ var Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient, buildNativeSaslConfig, createNativeKafkaConsumer;
78
85
  var init_commons = __esm({
79
86
  "src/commons.ts"() {
80
87
  "use strict";
@@ -178,6 +185,59 @@ var init_commons = __esm({
178
185
  }
179
186
  });
180
187
  };
188
buildNativeSaslConfig = (logger, cfg) => {
  // SASL is opt-in: without all three credential fields, return an empty
  // config fragment so the consumer falls back to unauthenticated access.
  const { saslMechanism, saslUsername, saslPassword } = cfg;
  if (!saslMechanism || !saslUsername || !saslPassword) {
    return {};
  }
  // librdkafka expects the mechanism name in upper case.
  const mechanism = saslMechanism.toUpperCase();
  const supported = new Set(["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"]);
  if (!supported.has(mechanism)) {
    // Warn (do not throw) and skip SASL entirely on an unknown mechanism.
    logger.warn(`Unsupported SASL mechanism: ${saslMechanism}`);
    return {};
  }
  // Native (librdkafka-style) dotted property keys.
  return {
    "sasl.mechanisms": mechanism,
    "sasl.username": saslUsername,
    "sasl.password": saslPassword
  };
};
204
createNativeKafkaConsumer = (cfg, logger, rebalanceCb) => {
  // Resolve and validate the broker list up front; fail fast when empty.
  const brokerList = parseBrokerString(cfg.broker || "");
  if (!brokerList.length) {
    throw new Error(`No valid broker addresses found in: "${cfg.broker}"`);
  }
  logger.log(
    `Creating native KafkaConsumer with brokers: ${brokerList.join(", ")}`
  );
  logger.log(`Security protocol: ${cfg.securityProtocol || "plaintext"}`);
  logger.log(`Client ID: ${cfg.clientId}`);
  logger.log(`Group ID: ${cfg.groupId}`);
  // Base librdkafka-style consumer settings; timeouts are caller-overridable
  // via cfg with the defaults shown here.
  const globalConfig = {
    // Connection
    "bootstrap.servers": brokerList.join(","),
    "client.id": cfg.clientId,
    // Group management
    "group.id": cfg.groupId,
    "session.timeout.ms": cfg.sessionTimeoutMs ?? 30000,
    "heartbeat.interval.ms": cfg.heartbeatIntervalMs ?? 3000,
    "max.poll.interval.ms": cfg.maxPollIntervalMs ?? 300000,
    // Offset management
    "enable.auto.commit": cfg.autoCommit ?? true,
    "auto.commit.interval.ms": cfg.autoCommitIntervalMs ?? 5000
  };
  // Security: only the exact "SASL_SSL" value switches protocols here —
  // NOTE(review): comparison is case-sensitive; confirm callers always pass
  // the upper-case form.
  if (cfg.securityProtocol === "SASL_SSL") {
    globalConfig["security.protocol"] = "sasl_ssl";
  }
  // SASL credentials (empty fragment when SASL is not configured).
  Object.assign(globalConfig, buildNativeSaslConfig(logger, cfg));
  // Rebalance callback is attached only when provided.
  if (rebalanceCb) {
    globalConfig.rebalance_cb = rebalanceCb;
  }
  const topicConfig = {
    "auto.offset.reset": cfg.autoOffsetReset ?? "earliest"
  };
  return new KafkaConsumer(globalConfig, topicConfig);
};
181
241
  }
182
242
  });
183
243
 
@@ -3334,6 +3394,7 @@ export {
3334
3394
  ACKs,
3335
3395
  Api,
3336
3396
  ApiHelpers,
3397
+ CODES,
3337
3398
  CSV_DELIMITERS,
3338
3399
  ClickHouseEngines,
3339
3400
  ConsumptionApi,
@@ -3345,6 +3406,7 @@ export {
3345
3406
  ETLPipeline,
3346
3407
  IngestApi,
3347
3408
  IngestPipeline,
3409
+ KafkaConsumer,
3348
3410
  LifeCycle,
3349
3411
  MAX_RETRIES,
3350
3412
  MAX_RETRIES_PRODUCER,
@@ -3372,6 +3434,7 @@ export {
3372
3434
  createClickhouseParameter,
3373
3435
  createConsumptionApi,
3374
3436
  createMaterializedView,
3437
+ createNativeKafkaConsumer,
3375
3438
  createProducerConfig,
3376
3439
  dropView,
3377
3440
  expressMiddleware,