@514labs/moose-lib 0.6.256-ci-1-g6ca86038 → 0.6.256-ci-4-g0ca62054

This diff shows the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
package/dist/index.d.mts CHANGED
@@ -44,6 +44,25 @@ declare const RETRY_INITIAL_TIME_MS = 100;
 declare const MAX_RETRIES_PRODUCER = 150;
 declare const RETRY_FACTOR_PRODUCER = 0.2;
 declare const ACKs = -1;
+/**
+ * Creates the base producer configuration for Kafka.
+ * Used by both the SDK stream publishing and streaming function workers.
+ *
+ * @param maxMessageBytes - Optional max message size in bytes (synced with topic config)
+ * @returns Producer configuration object for the Confluent Kafka client
+ */
+declare function createProducerConfig(maxMessageBytes?: number): {
+    "message.max.bytes"?: number | undefined;
+    kafkaJS: {
+        idempotent: boolean;
+        acks: number;
+        retry: {
+            retries: number;
+            maxRetryTime: number;
+        };
+    };
+    "linger.ms": number;
+};
 type KafkaClientConfig = {
     clientId: string;
     broker: string;
@@ -55,8 +74,12 @@ type KafkaClientConfig = {
 /**
  * Dynamically creates and connects a KafkaJS producer using the provided configuration.
  * Returns a connected producer instance.
+ *
+ * @param cfg - Kafka client configuration
+ * @param logger - Logger instance
+ * @param maxMessageBytes - Optional max message size in bytes (synced with topic config)
  */
-declare function getKafkaProducer(cfg: KafkaClientConfig, logger: Logger): Promise<Producer>;
+declare function getKafkaProducer(cfg: KafkaClientConfig, logger: Logger, maxMessageBytes?: number): Promise<Producer>;
 /**
  * Interface for logging functionality
  */
@@ -531,4 +554,4 @@ type DataModelConfig<T> = Partial<{
     parallelism?: number;
 }>;
 
-export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
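The return type above mixes the Confluent client's two configuration layers: the nested kafkaJS object holds KafkaJS-style options (idempotence, acks, retries), while the dotted top-level keys ("linger.ms", "message.max.bytes") are raw librdkafka properties. A minimal sketch of a call against this declaration; the byte value is an arbitrary example:

    import { createProducerConfig } from "@514labs/moose-lib";

    const config = createProducerConfig(1024 * 1024); // optional 1 MiB cap (example value)
    config["linger.ms"];          // number: always present, 0 in this version
    config["message.max.bytes"];  // number | undefined: set only when a cap was passed
    config.kafkaJS.retry.retries; // number: KafkaJS-style retry setting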
package/dist/index.d.ts CHANGED
@@ -44,6 +44,25 @@ declare const RETRY_INITIAL_TIME_MS = 100;
 declare const MAX_RETRIES_PRODUCER = 150;
 declare const RETRY_FACTOR_PRODUCER = 0.2;
 declare const ACKs = -1;
+/**
+ * Creates the base producer configuration for Kafka.
+ * Used by both the SDK stream publishing and streaming function workers.
+ *
+ * @param maxMessageBytes - Optional max message size in bytes (synced with topic config)
+ * @returns Producer configuration object for the Confluent Kafka client
+ */
+declare function createProducerConfig(maxMessageBytes?: number): {
+    "message.max.bytes"?: number | undefined;
+    kafkaJS: {
+        idempotent: boolean;
+        acks: number;
+        retry: {
+            retries: number;
+            maxRetryTime: number;
+        };
+    };
+    "linger.ms": number;
+};
 type KafkaClientConfig = {
     clientId: string;
     broker: string;
@@ -55,8 +74,12 @@ type KafkaClientConfig = {
 /**
  * Dynamically creates and connects a KafkaJS producer using the provided configuration.
  * Returns a connected producer instance.
+ *
+ * @param cfg - Kafka client configuration
+ * @param logger - Logger instance
+ * @param maxMessageBytes - Optional max message size in bytes (synced with topic config)
  */
-declare function getKafkaProducer(cfg: KafkaClientConfig, logger: Logger): Promise<Producer>;
+declare function getKafkaProducer(cfg: KafkaClientConfig, logger: Logger, maxMessageBytes?: number): Promise<Producer>;
 /**
  * Interface for logging functionality
  */
@@ -531,4 +554,4 @@ type DataModelConfig<T> = Partial<{
     parallelism?: number;
 }>;
 
-export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
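The .d.ts file carries the identical change, so both ESM and CommonJS consumers see the new optional parameter. A hedged usage sketch; the broker address is a placeholder and the casts are illustrative, since KafkaClientConfig and Logger have more members than this excerpt shows:

    import {
      getKafkaProducer,
      type KafkaClientConfig,
      type Logger,
    } from "@514labs/moose-lib";

    // Placeholder values; the real KafkaClientConfig likely includes auth/TLS
    // fields beyond the clientId and broker shown in the excerpt above.
    const cfg = { clientId: "my-service", broker: "localhost:9092" } as KafkaClientConfig;
    const logger = console as unknown as Logger;

    // Passing maxMessageBytes keeps the producer limit in sync with the topic's
    // max.message.bytes; omitting it preserves the old two-argument behavior.
    const producer = await getKafkaProducer(cfg, logger, 10 * 1024 * 1024);
    await producer.send({ topic: "events", messages: [{ value: "hello" }] });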
package/dist/index.js CHANGED
@@ -42,6 +42,7 @@ __export(commons_exports, {
   antiCachePath: () => antiCachePath,
   cliLog: () => cliLog,
   compilerLog: () => compilerLog,
+  createProducerConfig: () => createProducerConfig,
   getClickhouseClient: () => getClickhouseClient,
   getFileName: () => getFileName,
   getKafkaClient: () => getKafkaClient,
@@ -64,18 +65,25 @@ function isTruthy(value) {
 function mapTstoJs(filePath) {
   return filePath.replace(/\.ts$/, ".js").replace(/\.cts$/, ".cjs").replace(/\.mts$/, ".mjs");
 }
-async function getKafkaProducer(cfg, logger) {
-  const kafka = await getKafkaClient(cfg, logger);
-  const producer = kafka.producer({
+function createProducerConfig(maxMessageBytes) {
+  return {
     kafkaJS: {
-      idempotent: true,
+      idempotent: false,
+      // Not needed for at-least-once delivery
       acks: ACKs,
       retry: {
         retries: MAX_RETRIES_PRODUCER,
         maxRetryTime: MAX_RETRY_TIME_MS
       }
-    }
-  });
+    },
+    "linger.ms": 0,
+    // This is to make sure at least once delivery with immediate feedback on the send
+    ...maxMessageBytes && { "message.max.bytes": maxMessageBytes }
+  };
+}
+async function getKafkaProducer(cfg, logger, maxMessageBytes) {
+  const kafka = await getKafkaClient(cfg, logger);
+  const producer = kafka.producer(createProducerConfig(maxMessageBytes));
   await producer.connect();
   return producer;
 }
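Two things to note in this hunk. First, the producer no longer requests idempotence (the inline comment deems it unnecessary for at-least-once delivery, so retries may now produce duplicates), and "linger.ms": 0 disables batching delay so each send is dispatched and acknowledged immediately; acks stays at ACKs = -1, i.e. wait for all in-sync replicas. Second, the `...maxMessageBytes && { ... }` line is the conditional-property idiom: spreading a falsy value into an object literal is a no-op, so "message.max.bytes" only appears when a positive byte count was passed. A standalone illustration (withOptionalCap is a hypothetical name, not part of the package):

    // Hypothetical helper demonstrating the conditional-spread idiom used above.
    function withOptionalCap(maxMessageBytes?: number) {
      return {
        "linger.ms": 0,
        // `maxMessageBytes && {...}` is falsy (undefined or 0) when no cap is
        // given, and spreading a falsy value into an object literal adds nothing.
        ...(maxMessageBytes && { "message.max.bytes": maxMessageBytes }),
      };
    }

    withOptionalCap();        // { "linger.ms": 0 }
    withOptionalCap(1048576); // { "linger.ms": 0, "message.max.bytes": 1048576 }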
@@ -417,6 +425,7 @@ __export(index_exports, {
   createClickhouseParameter: () => createClickhouseParameter,
   createConsumptionApi: () => createConsumptionApi,
   createMaterializedView: () => createMaterializedView,
+  createProducerConfig: () => createProducerConfig,
   dropView: () => dropView,
   expressMiddleware: () => expressMiddleware,
   getApi: () => getApi,
@@ -3405,6 +3414,7 @@ var DataSource = class {
   createClickhouseParameter,
   createConsumptionApi,
   createMaterializedView,
+  createProducerConfig,
   dropView,
   expressMiddleware,
   getApi,
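With createProducerConfig exported from the package root, the same producer defaults can also be reused with a hand-built client. A sketch assuming the KafkaJS-compatible wrapper in @confluentinc/kafka-javascript (the "Confluent Kafka client" the declaration comment refers to); the broker address and topic are placeholders:

    import { KafkaJS } from "@confluentinc/kafka-javascript";
    import { createProducerConfig } from "@514labs/moose-lib";

    const kafka = new KafkaJS.Kafka({ kafkaJS: { brokers: ["localhost:9092"] } });

    // Reuse the library's defaults (acks, retries, linger.ms) and cap message
    // size to match a topic configured with max.message.bytes = 1 MiB.
    const producer = kafka.producer(createProducerConfig(1024 * 1024));
    await producer.connect();
    await producer.send({ topic: "events", messages: [{ value: "ping" }] });
    await producer.disconnect();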