@514labs/moose-lib 0.6.276-ci-6-g278c5539 → 0.6.276-ci-1-gfe86cd2c

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -2,7 +2,8 @@ export { C as ClickHouseByteSize, q as ClickHouseCodec, j as ClickHouseDecimal,
 import { K as ApiUtil, a4 as MooseClient } from './index-BtkwFbT9.mjs';
 export { A as Aggregated, h as Api, i as ApiConfig, ad as ApiHelpers, a5 as Blocks, a6 as ClickHouseEngines, C as ConsumptionApi, ae as ConsumptionHelpers, N as ConsumptionUtil, e as DeadLetter, D as DeadLetterModel, f as DeadLetterQueue, l as ETLPipeline, m as ETLPipelineConfig, E as EgressConfig, F as FrameworkApp, Q as IdentifierBrandedString, I as IngestApi, g as IngestConfig, j as IngestPipeline, L as LifeCycle, M as MaterializedView, R as NonIdentifierBrandedString, a as OlapConfig, O as OlapTable, aa as QueryClient, X as RawValue, b as S3QueueTableSettings, S as SimpleAggregated, Z as Sql, k as SqlResource, c as Stream, d as StreamConfig, T as Task, U as Value, V as View, n as WebApp, o as WebAppConfig, p as WebAppHandler, W as Workflow, ab as WorkflowClient, a2 as createClickhouseParameter, a8 as createMaterializedView, a7 as dropView, x as getApi, w as getApis, v as getIngestApi, u as getIngestApis, z as getSqlResource, y as getSqlResources, t as getStream, s as getStreams, r as getTable, q as getTables, ac as getTemporalClient, a1 as getValueFromParameter, J as getWebApp, H as getWebApps, G as getWorkflow, B as getWorkflows, af as joinQueries, a3 as mapToClickHouseType, a9 as populateTable, P as quoteIdentifier, Y as sql, $ as toQuery, a0 as toQueryPreview, _ as toStaticQuery } from './index-BtkwFbT9.mjs';
 import * as _clickhouse_client from '@clickhouse/client';
-import { KafkaJS } from '@confluentinc/kafka-javascript';
+import { KafkaJS, LibrdKafkaError, TopicPartition, KafkaConsumer } from '@confluentinc/kafka-javascript';
+export { CODES, ConsumerGlobalConfig, KafkaConsumer, LibrdKafkaError, Message as NativeKafkaMessage, TopicPartition, TopicPartitionOffset } from '@confluentinc/kafka-javascript';
 import http from 'http';
 import { IsTuple } from 'typia/lib/typings/IsTuple';
 import { Readable } from 'node:stream';
@@ -15,6 +16,7 @@ import 'jose';
 declare const Kafka: typeof KafkaJS.Kafka;
 type Kafka = KafkaJS.Kafka;
 type Producer = KafkaJS.Producer;
+
 declare const compilerLog: (message: string) => void;
 declare const antiCachePath: (path: string) => string;
 declare const getFileName: (filePath: string) => string;
@@ -95,6 +97,29 @@ declare const logError: (logger: Logger, e: Error) => void;
  * Use this to construct producers/consumers with custom options.
  */
 declare const getKafkaClient: (cfg: KafkaClientConfig, logger: Logger) => Promise<Kafka>;
+/**
+ * Configuration for native KafkaConsumer
+ */
+interface NativeConsumerConfig extends KafkaClientConfig {
+  groupId: string;
+  sessionTimeoutMs?: number;
+  heartbeatIntervalMs?: number;
+  maxPollIntervalMs?: number;
+  autoCommit?: boolean;
+  autoCommitIntervalMs?: number;
+  autoOffsetReset?: "smallest" | "earliest" | "largest" | "latest" | "error";
+  maxBatchSize?: number;
+}
+/**
+ * Creates a native KafkaConsumer instance (using librdkafka directly).
+ * This provides lower-level control and potentially better performance than the KafkaJS wrapper.
+ *
+ * @param cfg - Consumer configuration
+ * @param logger - Logger instance
+ * @param rebalanceCb - Optional callback for rebalance events
+ * @returns Configured but not yet connected KafkaConsumer
+ */
+declare const createNativeKafkaConsumer: (cfg: NativeConsumerConfig, logger: Logger, rebalanceCb?: (err: LibrdKafkaError, assignments: TopicPartition[]) => void) => KafkaConsumer;
 
 /**
  * @module secrets
@@ -554,4 +579,4 @@ type DataModelConfig<T> = Partial<{
   parallelism?: number;
 }>;
 
-export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type NativeConsumerConfig, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createNativeKafkaConsumer, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
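
The index.d.mts changes above define the entire new public surface for this version: a NativeConsumerConfig interface that extends KafkaClientConfig with group and offset settings, and a createNativeKafkaConsumer factory that returns a not-yet-connected librdkafka KafkaConsumer. The sketch below shows how it might be called; it assumes a local plaintext broker, that broker and clientId are the only required KafkaClientConfig fields, and that a console-backed object can stand in for the package's Logger type (whose exact shape this diff does not show).

import { createNativeKafkaConsumer, type NativeConsumerConfig, type Logger } from "@514labs/moose-lib";

// Console-backed stand-in; the Logger shape is an assumption, hence the cast.
const logger = { log: console.log, warn: console.warn, error: console.error } as unknown as Logger;

const config: NativeConsumerConfig = {
  broker: "localhost:9092",    // hypothetical local broker
  clientId: "example-client",
  groupId: "example-group",    // the only field NativeConsumerConfig adds as required
  autoCommit: false,           // commit offsets manually after processing
  autoOffsetReset: "earliest",
};

// Optional third argument is a rebalance callback: (LibrdKafkaError, TopicPartition[]) => void.
const consumer = createNativeKafkaConsumer(config, logger, (_err, assignments) => {
  logger.log(`rebalance: ${assignments.length} partition(s) assigned or revoked`);
});
// Per the JSDoc, the consumer is configured but not yet connected at this point.
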
package/dist/index.d.ts CHANGED
@@ -2,7 +2,8 @@ export { C as ClickHouseByteSize, q as ClickHouseCodec, j as ClickHouseDecimal,
 import { K as ApiUtil, a4 as MooseClient } from './index-BtkwFbT9.js';
 export { A as Aggregated, h as Api, i as ApiConfig, ad as ApiHelpers, a5 as Blocks, a6 as ClickHouseEngines, C as ConsumptionApi, ae as ConsumptionHelpers, N as ConsumptionUtil, e as DeadLetter, D as DeadLetterModel, f as DeadLetterQueue, l as ETLPipeline, m as ETLPipelineConfig, E as EgressConfig, F as FrameworkApp, Q as IdentifierBrandedString, I as IngestApi, g as IngestConfig, j as IngestPipeline, L as LifeCycle, M as MaterializedView, R as NonIdentifierBrandedString, a as OlapConfig, O as OlapTable, aa as QueryClient, X as RawValue, b as S3QueueTableSettings, S as SimpleAggregated, Z as Sql, k as SqlResource, c as Stream, d as StreamConfig, T as Task, U as Value, V as View, n as WebApp, o as WebAppConfig, p as WebAppHandler, W as Workflow, ab as WorkflowClient, a2 as createClickhouseParameter, a8 as createMaterializedView, a7 as dropView, x as getApi, w as getApis, v as getIngestApi, u as getIngestApis, z as getSqlResource, y as getSqlResources, t as getStream, s as getStreams, r as getTable, q as getTables, ac as getTemporalClient, a1 as getValueFromParameter, J as getWebApp, H as getWebApps, G as getWorkflow, B as getWorkflows, af as joinQueries, a3 as mapToClickHouseType, a9 as populateTable, P as quoteIdentifier, Y as sql, $ as toQuery, a0 as toQueryPreview, _ as toStaticQuery } from './index-BtkwFbT9.js';
 import * as _clickhouse_client from '@clickhouse/client';
-import { KafkaJS } from '@confluentinc/kafka-javascript';
+import { KafkaJS, LibrdKafkaError, TopicPartition, KafkaConsumer } from '@confluentinc/kafka-javascript';
+export { CODES, ConsumerGlobalConfig, KafkaConsumer, LibrdKafkaError, Message as NativeKafkaMessage, TopicPartition, TopicPartitionOffset } from '@confluentinc/kafka-javascript';
 import http from 'http';
 import { IsTuple } from 'typia/lib/typings/IsTuple';
 import { Readable } from 'node:stream';
@@ -15,6 +16,7 @@ import 'jose';
 declare const Kafka: typeof KafkaJS.Kafka;
 type Kafka = KafkaJS.Kafka;
 type Producer = KafkaJS.Producer;
+
 declare const compilerLog: (message: string) => void;
 declare const antiCachePath: (path: string) => string;
 declare const getFileName: (filePath: string) => string;
@@ -95,6 +97,29 @@ declare const logError: (logger: Logger, e: Error) => void;
  * Use this to construct producers/consumers with custom options.
  */
 declare const getKafkaClient: (cfg: KafkaClientConfig, logger: Logger) => Promise<Kafka>;
+/**
+ * Configuration for native KafkaConsumer
+ */
+interface NativeConsumerConfig extends KafkaClientConfig {
+  groupId: string;
+  sessionTimeoutMs?: number;
+  heartbeatIntervalMs?: number;
+  maxPollIntervalMs?: number;
+  autoCommit?: boolean;
+  autoCommitIntervalMs?: number;
+  autoOffsetReset?: "smallest" | "earliest" | "largest" | "latest" | "error";
+  maxBatchSize?: number;
+}
+/**
+ * Creates a native KafkaConsumer instance (using librdkafka directly).
+ * This provides lower-level control and potentially better performance than the KafkaJS wrapper.
+ *
+ * @param cfg - Consumer configuration
+ * @param logger - Logger instance
+ * @param rebalanceCb - Optional callback for rebalance events
+ * @returns Configured but not yet connected KafkaConsumer
+ */
+declare const createNativeKafkaConsumer: (cfg: NativeConsumerConfig, logger: Logger, rebalanceCb?: (err: LibrdKafkaError, assignments: TopicPartition[]) => void) => KafkaConsumer;
 
 /**
  * @module secrets
@@ -554,4 +579,4 @@ type DataModelConfig<T> = Partial<{
   parallelism?: number;
 }>;
 
-export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type NativeConsumerConfig, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createNativeKafkaConsumer, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
package/dist/index.js CHANGED
@@ -34,6 +34,8 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var commons_exports = {};
 __export(commons_exports, {
   ACKs: () => ACKs,
+  CODES: () => import_kafka_javascript.CODES,
+  KafkaConsumer: () => import_kafka_javascript.KafkaConsumer,
   MAX_RETRIES: () => MAX_RETRIES,
   MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
   MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
@@ -42,6 +44,7 @@ __export(commons_exports, {
   antiCachePath: () => antiCachePath,
   cliLog: () => cliLog,
   compilerLog: () => compilerLog,
+  createNativeKafkaConsumer: () => createNativeKafkaConsumer,
   createProducerConfig: () => createProducerConfig,
   getClickhouseClient: () => getClickhouseClient,
   getFileName: () => getFileName,
@@ -87,7 +90,7 @@ async function getKafkaProducer(cfg, logger, maxMessageBytes) {
   await producer.connect();
   return producer;
 }
-var import_http, import_client, import_kafka_javascript, Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient;
+var import_http, import_client, import_kafka_javascript, Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient, buildNativeSaslConfig, createNativeKafkaConsumer;
 var init_commons = __esm({
   "src/commons.ts"() {
     "use strict";
@@ -194,6 +197,59 @@ var init_commons = __esm({
         }
       });
     };
+    buildNativeSaslConfig = (logger, cfg) => {
+      if (!cfg.saslMechanism || !cfg.saslUsername || !cfg.saslPassword) {
+        return {};
+      }
+      const mechanism = cfg.saslMechanism.toUpperCase();
+      const validMechanisms = ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"];
+      if (!validMechanisms.includes(mechanism)) {
+        logger.warn(`Unsupported SASL mechanism: ${cfg.saslMechanism}`);
+        return {};
+      }
+      return {
+        "sasl.mechanisms": mechanism,
+        "sasl.username": cfg.saslUsername,
+        "sasl.password": cfg.saslPassword
+      };
+    };
+    createNativeKafkaConsumer = (cfg, logger, rebalanceCb) => {
+      const brokers = parseBrokerString(cfg.broker || "");
+      if (brokers.length === 0) {
+        throw new Error(`No valid broker addresses found in: "${cfg.broker}"`);
+      }
+      logger.log(
+        `Creating native KafkaConsumer with brokers: ${brokers.join(", ")}`
+      );
+      logger.log(`Security protocol: ${cfg.securityProtocol || "plaintext"}`);
+      logger.log(`Client ID: ${cfg.clientId}`);
+      logger.log(`Group ID: ${cfg.groupId}`);
+      const saslConfig = buildNativeSaslConfig(logger, cfg);
+      const consumerConfig = {
+        // Connection
+        "bootstrap.servers": brokers.join(","),
+        "client.id": cfg.clientId,
+        // Group management
+        "group.id": cfg.groupId,
+        "session.timeout.ms": cfg.sessionTimeoutMs ?? 3e4,
+        "heartbeat.interval.ms": cfg.heartbeatIntervalMs ?? 3e3,
+        "max.poll.interval.ms": cfg.maxPollIntervalMs ?? 3e5,
+        // Offset management
+        "enable.auto.commit": cfg.autoCommit ?? true,
+        "auto.commit.interval.ms": cfg.autoCommitIntervalMs ?? 5e3,
+        // Security
+        ...cfg.securityProtocol === "SASL_SSL" && {
+          "security.protocol": "sasl_ssl"
+        },
+        ...saslConfig,
+        // Rebalance callback
+        ...rebalanceCb && { rebalance_cb: rebalanceCb }
+      };
+      const topicConfig = {
+        "auto.offset.reset": cfg.autoOffsetReset ?? "earliest"
+      };
+      return new import_kafka_javascript.KafkaConsumer(consumerConfig, topicConfig);
+    };
   }
 });
 
@@ -387,6 +443,7 @@ __export(index_exports, {
   ACKs: () => ACKs,
   Api: () => Api,
   ApiHelpers: () => ApiHelpers,
+  CODES: () => import_kafka_javascript.CODES,
   CSV_DELIMITERS: () => CSV_DELIMITERS,
   ClickHouseEngines: () => ClickHouseEngines,
   ConsumptionApi: () => ConsumptionApi,
@@ -398,6 +455,7 @@ __export(index_exports, {
   ETLPipeline: () => ETLPipeline,
   IngestApi: () => IngestApi,
   IngestPipeline: () => IngestPipeline,
+  KafkaConsumer: () => import_kafka_javascript.KafkaConsumer,
   LifeCycle: () => LifeCycle,
   MAX_RETRIES: () => MAX_RETRIES,
   MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
@@ -425,6 +483,7 @@ __export(index_exports, {
   createClickhouseParameter: () => createClickhouseParameter,
   createConsumptionApi: () => createConsumptionApi,
   createMaterializedView: () => createMaterializedView,
+  createNativeKafkaConsumer: () => createNativeKafkaConsumer,
   createProducerConfig: () => createProducerConfig,
   dropView: () => dropView,
   expressMiddleware: () => expressMiddleware,
@@ -3436,6 +3495,7 @@ var DataSource = class {
   ACKs,
   Api,
   ApiHelpers,
+  CODES,
   CSV_DELIMITERS,
   ClickHouseEngines,
   ConsumptionApi,
@@ -3447,6 +3507,7 @@ var DataSource = class {
   ETLPipeline,
   IngestApi,
   IngestPipeline,
+  KafkaConsumer,
   LifeCycle,
   MAX_RETRIES,
   MAX_RETRIES_PRODUCER,
@@ -3474,6 +3535,7 @@ var DataSource = class {
   createClickhouseParameter,
   createConsumptionApi,
   createMaterializedView,
+  createNativeKafkaConsumer,
   createProducerConfig,
   dropView,
   expressMiddleware,
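
The index.js hunks above show how the factory maps NativeConsumerConfig onto raw librdkafka keys (bootstrap.servers, group.id, enable.auto.commit, auto.offset.reset, sasl.*) and passes them to the native KafkaConsumer constructor; connecting and polling are left to the caller. A lifecycle sketch follows, assuming the node-rdkafka-style event API (connect, ready, subscribe, consume, data, commitMessage) that @confluentinc/kafka-javascript exposes for its native client, and reusing the hypothetical broker and topic names from the earlier sketch.

import { createNativeKafkaConsumer, type Logger, type NativeKafkaMessage } from "@514labs/moose-lib";

const logger = { log: console.log, warn: console.warn, error: console.error } as unknown as Logger; // Logger shape assumed

const consumer = createNativeKafkaConsumer(
  { broker: "localhost:9092", clientId: "example-client", groupId: "example-group", autoCommit: false },
  logger,
);

consumer.on("ready", () => {
  consumer.subscribe(["example-topic"]); // hypothetical topic
  consumer.consume();                    // start flowing mode; messages arrive via the "data" event
});

consumer.on("data", (message: NativeKafkaMessage) => {
  logger.log(`${message.topic}[${message.partition}]@${message.offset}`);
  consumer.commitMessage(message);       // manual commit, since enable.auto.commit was turned off above
});

consumer.connect();
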