@514labs/moose-lib 0.6.260-ci-5-g3b5261dd → 0.6.260-ci-2-g1b93253f
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browserCompatible.js +52 -3
- package/dist/browserCompatible.js.map +1 -1
- package/dist/browserCompatible.mjs +52 -3
- package/dist/browserCompatible.mjs.map +1 -1
- package/dist/compilerPlugin.js +73 -14
- package/dist/compilerPlugin.js.map +1 -1
- package/dist/compilerPlugin.mjs +73 -14
- package/dist/compilerPlugin.mjs.map +1 -1
- package/dist/dmv2/index.js +52 -3
- package/dist/dmv2/index.js.map +1 -1
- package/dist/dmv2/index.mjs +52 -3
- package/dist/dmv2/index.mjs.map +1 -1
- package/dist/index.d.mts +18 -2
- package/dist/index.d.ts +18 -2
- package/dist/index.js +54 -3
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +53 -3
- package/dist/index.mjs.map +1 -1
- package/dist/moose-runner.js +44 -3
- package/dist/moose-runner.js.map +1 -1
- package/dist/moose-runner.mjs +44 -3
- package/dist/moose-runner.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.d.mts
CHANGED
@@ -15,7 +15,23 @@ import 'jose';
 declare const Kafka: typeof KafkaJS.Kafka;
 type Kafka = KafkaJS.Kafka;
 type Producer = KafkaJS.Producer;
-
+/**
+ * Log levels matching Rust CLI logger levels
+ */
+declare enum LogLevel {
+    Debug = "Debug",
+    Info = "Info",
+    Warn = "Warn",
+    Error = "Error"
+}
+/**
+ * Compiler logging with level support.
+ * Respects both MOOSE_DISABLE_COMPILER_LOGS (legacy) and MOOSE_LOGGER__LEVEL.
+ *
+ * @param message - Log message to output
+ * @param level - Log level (defaults to Debug for backward compatibility)
+ */
+declare const compilerLog: (message: string, level?: LogLevel) => void;
 declare const antiCachePath: (path: string) => string;
 declare const getFileName: (filePath: string) => string;
 interface ClientConfig {
@@ -554,4 +570,4 @@ type DataModelConfig<T> = Partial<{
     parallelism?: number;
 }>;

-export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, LogLevel, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
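The declarations added above give compilerLog an optional LogLevel argument and re-export the enum from the package entry point. A minimal usage sketch, assuming the package is installed and exports exactly what the .d.mts above declares (the message strings are illustrative only):

    import { LogLevel, compilerLog } from "@514labs/moose-lib";

    // Without a level argument, compilerLog falls back to LogLevel.Debug
    // (the backward-compatibility default called out in the JSDoc above).
    compilerLog("resolved data model imports");

    // An explicit level lets higher-severity messages through even when
    // MOOSE_LOGGER__LEVEL raises the threshold to Info or Warn.
    compilerLog("schema field has no type annotation", LogLevel.Warn);
    compilerLog("failed to compile data model", LogLevel.Error);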
package/dist/index.d.ts
CHANGED
@@ -15,7 +15,23 @@ import 'jose';
 declare const Kafka: typeof KafkaJS.Kafka;
 type Kafka = KafkaJS.Kafka;
 type Producer = KafkaJS.Producer;
-
+/**
+ * Log levels matching Rust CLI logger levels
+ */
+declare enum LogLevel {
+    Debug = "Debug",
+    Info = "Info",
+    Warn = "Warn",
+    Error = "Error"
+}
+/**
+ * Compiler logging with level support.
+ * Respects both MOOSE_DISABLE_COMPILER_LOGS (legacy) and MOOSE_LOGGER__LEVEL.
+ *
+ * @param message - Log message to output
+ * @param level - Log level (defaults to Debug for backward compatibility)
+ */
+declare const compilerLog: (message: string, level?: LogLevel) => void;
 declare const antiCachePath: (path: string) => string;
 declare const getFileName: (filePath: string) => string;
 interface ClientConfig {
@@ -554,4 +570,4 @@ type DataModelConfig<T> = Partial<{
     parallelism?: number;
 }>;

-export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
+export { ACKs, ApiUtil, type CSVParsingConfig, CSV_DELIMITERS, type CliLogData, DEFAULT_CSV_CONFIG, DEFAULT_JSON_CONFIG, type DataModelConfig, DataSource, type DataSourceConfig, type ExpressRequestWithMoose, type ExtractionResult, type JSONParsingConfig, type KafkaClientConfig, LogLevel, type Logger, MAX_RETRIES, MAX_RETRIES_PRODUCER, MAX_RETRY_TIME_MS, MOOSE_RUNTIME_ENV_PREFIX, MooseCache, MooseClient, type Producer, RETRY_FACTOR_PRODUCER, RETRY_INITIAL_TIME_MS, type StripDateIntersection, type TaskConfig, type TaskDefinition, type TaskFunction, antiCachePath, cliLog, compilerLog, createApi, createConsumptionApi, createProducerConfig, expressMiddleware, getClickhouseClient, getFileName, getKafkaClient, getKafkaProducer, getMooseClients, getMooseUtils, isValidCSVDelimiter, logError, mapTstoJs, mooseEnvSecrets, mooseRuntimeEnv, parseCSV, parseJSON, parseJSONWithDates };
package/dist/index.js
CHANGED
@@ -34,6 +34,7 @@ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: tru
 var commons_exports = {};
 __export(commons_exports, {
   ACKs: () => ACKs,
+  LogLevel: () => LogLevel,
   MAX_RETRIES: () => MAX_RETRIES,
   MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
   MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
@@ -62,6 +63,36 @@ function isTruthy(value) {
     return false;
   }
 }
+function parseLogLevel(value) {
+  if (!value) return "Info" /* Info */;
+  const normalized = value.trim();
+  switch (normalized) {
+    case "Debug":
+    case "debug":
+    case "DEBUG":
+      return "Debug" /* Debug */;
+    case "Info":
+    case "info":
+    case "INFO":
+      return "Info" /* Info */;
+    case "Warn":
+    case "warn":
+    case "WARN":
+      return "Warn" /* Warn */;
+    case "Error":
+    case "error":
+    case "ERROR":
+      return "Error" /* Error */;
+    default:
+      return "Info" /* Info */;
+  }
+}
+function getLogLevel() {
+  if (cachedLogLevel === null) {
+    cachedLogLevel = parseLogLevel(process.env.MOOSE_LOGGER__LEVEL);
+  }
+  return cachedLogLevel;
+}
 function mapTstoJs(filePath) {
   return filePath.replace(/\.ts$/, ".js").replace(/\.cts$/, ".cjs").replace(/\.mts$/, ".mjs");
 }
@@ -87,7 +118,7 @@ async function getKafkaProducer(cfg, logger, maxMessageBytes) {
   await producer.connect();
   return producer;
 }
-var import_http, import_client, import_kafka_javascript, Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient;
+var import_http, import_client, import_kafka_javascript, Kafka, LogLevel, cachedLogLevel, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient;
 var init_commons = __esm({
   "src/commons.ts"() {
     "use strict";
@@ -95,8 +126,26 @@ var init_commons = __esm({
     import_client = require("@clickhouse/client");
     import_kafka_javascript = require("@confluentinc/kafka-javascript");
     ({ Kafka } = import_kafka_javascript.KafkaJS);
-
-
+    LogLevel = /* @__PURE__ */ ((LogLevel2) => {
+      LogLevel2["Debug"] = "Debug";
+      LogLevel2["Info"] = "Info";
+      LogLevel2["Warn"] = "Warn";
+      LogLevel2["Error"] = "Error";
+      return LogLevel2;
+    })(LogLevel || {});
+    cachedLogLevel = null;
+    compilerLog = (message, level = "Debug" /* Debug */) => {
+      if (isTruthy(process.env.MOOSE_DISABLE_COMPILER_LOGS)) {
+        return;
+      }
+      const currentLevel = getLogLevel();
+      const levelPriority = {
+        ["Debug" /* Debug */]: 0,
+        ["Info" /* Info */]: 1,
+        ["Warn" /* Warn */]: 2,
+        ["Error" /* Error */]: 3
+      };
+      if (levelPriority[level] >= levelPriority[currentLevel]) {
         console.log(message);
       }
     };
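The hunk above parses MOOSE_LOGGER__LEVEL once, caches it, and only prints messages whose level is at or above the configured threshold (unless MOOSE_DISABLE_COMPILER_LOGS suppresses output entirely). A standalone TypeScript sketch of that filtering rule, with the priority table copied from the compiled output above and hypothetical example values:

    type Level = "Debug" | "Info" | "Warn" | "Error";

    // Same ordering as the levelPriority table inside compilerLog above.
    const priority: Record<Level, number> = { Debug: 0, Info: 1, Warn: 2, Error: 3 };

    // A message is emitted only when its priority is >= the configured threshold.
    function shouldLog(messageLevel: Level, configuredLevel: Level): boolean {
      return priority[messageLevel] >= priority[configuredLevel];
    }

    // MOOSE_LOGGER__LEVEL unset: parseLogLevel falls back to "Info",
    // so Debug-level compiler logs are suppressed by default.
    shouldLog("Debug", "Info");  // false
    shouldLog("Warn", "Info");   // true

    // MOOSE_LOGGER__LEVEL=error (any casing is accepted): only errors are printed.
    shouldLog("Warn", "Error");  // false
    shouldLog("Error", "Error"); // true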
@@ -409,6 +458,7 @@ __export(index_exports, {
   IngestApi: () => IngestApi,
   IngestPipeline: () => IngestPipeline,
   LifeCycle: () => LifeCycle,
+  LogLevel: () => LogLevel,
   MAX_RETRIES: () => MAX_RETRIES,
   MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
   MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
@@ -3412,6 +3462,7 @@ var DataSource = class {
   IngestApi,
   IngestPipeline,
   LifeCycle,
+  LogLevel,
   MAX_RETRIES,
   MAX_RETRIES_PRODUCER,
   MAX_RETRY_TIME_MS,