@514labs/moose-lib 0.6.260-ci-3-g63948580 → 0.6.260-ci-2-g1b93253f

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.d.mts CHANGED
@@ -15,7 +15,23 @@ import 'jose';
  declare const Kafka: typeof KafkaJS.Kafka;
  type Kafka = KafkaJS.Kafka;
  type Producer = KafkaJS.Producer;
- declare const compilerLog: (message: string) => void;
+ /**
+  * Log levels matching Rust CLI logger levels
+  */
+ declare enum LogLevel {
+     Debug = "Debug",
+     Info = "Info",
+     Warn = "Warn",
+     Error = "Error"
+ }
+ /**
+  * Compiler logging with level support.
+  * Respects both MOOSE_DISABLE_COMPILER_LOGS (legacy) and MOOSE_LOGGER__LEVEL.
+  *
+  * @param message - Log message to output
+  * @param level - Log level (defaults to Debug for backward compatibility)
+  */
+ declare const compilerLog: (message: string, level?: LogLevel) => void;
  declare const antiCachePath: (path: string) => string;
  declare const getFileName: (filePath: string) => string;
  interface ClientConfig {
@@ -554,4 +570,4 @@ type DataModelConfig<T> = Partial<{
      parallelism?: number;
  }>;
 
- export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+ export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, LogLevel, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
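
With the new signature, existing call sites keep compiling (the level defaults to Debug), and callers can opt into louder levels. One behavioral caveat: when MOOSE_LOGGER__LEVEL is unset, the effective level defaults to Info (see the parseLogLevel hunk in dist/index.js below), so bare Debug-level calls are suppressed unless the variable is explicitly set to Debug. A minimal usage sketch, with invented messages, assuming the exports resolve as declared:

  import { compilerLog, LogLevel } from "@514labs/moose-lib";

  // Legacy call: defaults to LogLevel.Debug, which is below the default
  // Info threshold, so it prints only when MOOSE_LOGGER__LEVEL=Debug.
  compilerLog("resolved data model imports");

  // Explicit levels: printed when at or above MOOSE_LOGGER__LEVEL.
  compilerLog("starting compilation pass", LogLevel.Info);
  compilerLog("deprecated config key found", LogLevel.Warn);
  compilerLog("failed to parse schema", LogLevel.Error);
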
package/dist/index.d.ts CHANGED
@@ -15,7 +15,23 @@ import 'jose';
  declare const Kafka: typeof KafkaJS.Kafka;
  type Kafka = KafkaJS.Kafka;
  type Producer = KafkaJS.Producer;
- declare const compilerLog: (message: string) => void;
+ /**
+  * Log levels matching Rust CLI logger levels
+  */
+ declare enum LogLevel {
+     Debug = "Debug",
+     Info = "Info",
+     Warn = "Warn",
+     Error = "Error"
+ }
+ /**
+  * Compiler logging with level support.
+  * Respects both MOOSE_DISABLE_COMPILER_LOGS (legacy) and MOOSE_LOGGER__LEVEL.
+  *
+  * @param message - Log message to output
+  * @param level - Log level (defaults to Debug for backward compatibility)
+  */
+ declare const compilerLog: (message: string, level?: LogLevel) => void;
  declare const antiCachePath: (path: string) => string;
  declare const getFileName: (filePath: string) => string;
  interface ClientConfig {
@@ -554,4 +570,4 @@ type DataModelConfig<T> = Partial<{
      parallelism?: number;
  }>;
 
- export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+ export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, LogLevel, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
package/dist/index.js CHANGED
@@ -34,6 +34,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
  var commons_exports = {};
  __export(commons_exports, {
    ACKs: () => ACKs,
+   LogLevel: () => LogLevel,
    MAX_RETRIES: () => MAX_RETRIES,
    MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
    MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
@@ -62,6 +63,36 @@ function isTruthy(value) {
      return false;
    }
  }
+ function parseLogLevel(value) {
+   if (!value) return "Info" /* Info */;
+   const normalized = value.trim();
+   switch (normalized) {
+     case "Debug":
+     case "debug":
+     case "DEBUG":
+       return "Debug" /* Debug */;
+     case "Info":
+     case "info":
+     case "INFO":
+       return "Info" /* Info */;
+     case "Warn":
+     case "warn":
+     case "WARN":
+       return "Warn" /* Warn */;
+     case "Error":
+     case "error":
+     case "ERROR":
+       return "Error" /* Error */;
+     default:
+       return "Info" /* Info */;
+   }
+ }
+ function getLogLevel() {
+   if (cachedLogLevel === null) {
+     cachedLogLevel = parseLogLevel(process.env.MOOSE_LOGGER__LEVEL);
+   }
+   return cachedLogLevel;
+ }
  function mapTstoJs(filePath) {
    return filePath.replace(/\.ts$/, ".js").replace(/\.cts$/, ".cjs").replace(/\.mts$/, ".mjs");
  }
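
The parser accepts only the three exact spellings per level shown above (capitalized, lowercase, uppercase), trims surrounding whitespace, and falls back to Info for anything else, including an unset variable. The result is memoized in cachedLogLevel, so changing MOOSE_LOGGER__LEVEL after the first log call in a process has no effect. A standalone TypeScript sketch of the same rules, not the library code itself:

  type Level = "Debug" | "Info" | "Warn" | "Error";

  // Mirrors parseLogLevel above: exact-case matches only, Info as fallback.
  function parseLevel(value?: string): Level {
    if (!value) return "Info";
    switch (value.trim()) {
      case "Debug": case "debug": case "DEBUG": return "Debug";
      case "Info": case "info": case "INFO": return "Info";
      case "Warn": case "warn": case "WARN": return "Warn";
      case "Error": case "error": case "ERROR": return "Error";
      default: return "Info"; // e.g. "trace" or "dEbUg" fall back to Info
    }
  }

  console.log(parseLevel("WARN"));   // "Warn"
  console.log(parseLevel(" info ")); // "Info" (whitespace trimmed)
  console.log(parseLevel("trace"));  // "Info" (unrecognized value)
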
@@ -87,7 +118,7 @@ async function getKafkaProducer(cfg, logger, maxMessageBytes) {
    await producer.connect();
    return producer;
  }
- var import_http, import_client, import_kafka_javascript, Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient;
+ var import_http, import_client, import_kafka_javascript, Kafka, LogLevel, cachedLogLevel, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient;
  var init_commons = __esm({
    "src/commons.ts"() {
      "use strict";
@@ -95,8 +126,26 @@ var init_commons = __esm({
      import_client = require("@clickhouse/client");
      import_kafka_javascript = require("@confluentinc/kafka-javascript");
      ({ Kafka } = import_kafka_javascript.KafkaJS);
-     compilerLog = (message) => {
-       if (!isTruthy(process.env.MOOSE_DISABLE_COMPILER_LOGS)) {
+     LogLevel = /* @__PURE__ */ ((LogLevel2) => {
+       LogLevel2["Debug"] = "Debug";
+       LogLevel2["Info"] = "Info";
+       LogLevel2["Warn"] = "Warn";
+       LogLevel2["Error"] = "Error";
+       return LogLevel2;
+     })(LogLevel || {});
+     cachedLogLevel = null;
+     compilerLog = (message, level = "Debug" /* Debug */) => {
+       if (isTruthy(process.env.MOOSE_DISABLE_COMPILER_LOGS)) {
+         return;
+       }
+       const currentLevel = getLogLevel();
+       const levelPriority = {
+         ["Debug" /* Debug */]: 0,
+         ["Info" /* Info */]: 1,
+         ["Warn" /* Warn */]: 2,
+         ["Error" /* Error */]: 3
+       };
+       if (levelPriority[level] >= levelPriority[currentLevel]) {
          console.log(message);
        }
      };
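
The level check is a simple threshold: a message prints when its priority is at least that of the configured level, and the legacy MOOSE_DISABLE_COMPILER_LOGS kill switch short-circuits before any level comparison. An illustration of the resulting behavior, with invented messages:

  import { compilerLog, LogLevel } from "@514labs/moose-lib";

  // Assume MOOSE_LOGGER__LEVEL=Warn for this process.
  compilerLog("verbose detail");                 // Debug (0) < Warn (2): suppressed
  compilerLog("pipeline built", LogLevel.Info);  // Info (1) < Warn (2): suppressed
  compilerLog("slow template", LogLevel.Warn);   // Warn (2) >= Warn (2): printed
  compilerLog("codegen failed", LogLevel.Error); // Error (3) >= Warn (2): printed

  // A truthy MOOSE_DISABLE_COMPILER_LOGS silences all four calls,
  // regardless of level.
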
@@ -124,7 +173,17 @@ var init_commons = __esm({
        username,
        password,
        database,
-       application: "moose"
+       application: "moose",
+       // Connection pool configuration for high load (100+ concurrent users)
+       max_open_connections: 50,
+       // Increased from default 10 to handle 100 concurrent users
+       request_timeout: 6e4,
+       // 60s timeout for HTTP requests (queries and inserts)
+       keep_alive: {
+         enabled: true,
+         idle_socket_ttl: 2e3
+         // 2s idle time (lower than default to prevent socket hang-ups)
+       }
        // Note: wait_end_of_query is configured per operation type, not globally
        // to preserve SELECT query performance while ensuring INSERT/DDL reliability
      });
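
These are standard @clickhouse/client options: max_open_connections caps the HTTP socket pool, request_timeout bounds each query or insert request, and keep_alive.idle_socket_ttl closes idle sockets before the server side can drop them mid-request. A standalone sketch of the same tuning for a hand-built client, with a placeholder URL and database:

  import { createClient } from "@clickhouse/client";

  const client = createClient({
    url: "http://localhost:8123", // placeholder endpoint
    database: "default",          // placeholder database
    max_open_connections: 50,     // pool sized for ~100 concurrent users
    request_timeout: 60_000,      // 60s per HTTP request
    keep_alive: {
      enabled: true,
      idle_socket_ttl: 2_000,     // close sockets idle for 2s
    },
  });
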
@@ -399,6 +458,7 @@ __export(index_exports, {
    IngestApi: () => IngestApi,
    IngestPipeline: () => IngestPipeline,
    LifeCycle: () => LifeCycle,
+   LogLevel: () => LogLevel,
    MAX_RETRIES: () => MAX_RETRIES,
    MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
    MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
@@ -2931,7 +2991,21 @@ async function getTemporalClient(temporalUrl, namespace, clientCert, clientKey,
    );
    let connectionOptions = {
      address: temporalUrl,
-     connectTimeout: "3s"
+     connectTimeout: "30s",
+     // Increased from 3s to handle high load
+     // Add gRPC keepalive to prevent connection drops
+     channelArgs: {
+       "grpc.keepalive_time_ms": 3e4,
+       // Send keepalive every 30s
+       "grpc.keepalive_timeout_ms": 15e3,
+       // Wait 15s for keepalive response
+       "grpc.keepalive_permit_without_calls": 1,
+       // Allow keepalive without active calls
+       "grpc.http2.max_pings_without_data": 0,
+       // No limit on pings without data
+       "grpc.http2.min_time_between_pings_ms": 1e4
+       // Min 10s between pings
+     }
    };
    if (clientCert && clientKey) {
      console.log("Using TLS for secure Temporal");
@@ -3388,6 +3462,7 @@ var DataSource = class {
    IngestApi,
    IngestPipeline,
    LifeCycle,
+   LogLevel,
    MAX_RETRIES,
    MAX_RETRIES_PRODUCER,
    MAX_RETRY_TIME_MS,