@514labs/moose-lib 0.6.276-ci-1-gfe86cd2c → 0.6.276-ci-1-gb608fe53
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browserCompatible.js +1 -57
- package/dist/browserCompatible.js.map +1 -1
- package/dist/browserCompatible.mjs +2 -62
- package/dist/browserCompatible.mjs.map +1 -1
- package/dist/compilerPlugin.js.map +1 -1
- package/dist/compilerPlugin.mjs +1 -5
- package/dist/compilerPlugin.mjs.map +1 -1
- package/dist/dmv2/index.js +1 -57
- package/dist/dmv2/index.js.map +1 -1
- package/dist/dmv2/index.mjs +2 -62
- package/dist/dmv2/index.mjs.map +1 -1
- package/dist/index.d.mts +2 -27
- package/dist/index.d.ts +2 -27
- package/dist/index.js +1 -63
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2 -65
- package/dist/index.mjs.map +1 -1
- package/dist/moose-runner.js +75 -191
- package/dist/moose-runner.js.map +1 -1
- package/dist/moose-runner.mjs +77 -197
- package/dist/moose-runner.mjs.map +1 -1
- package/package.json +1 -1
package/dist/index.mjs
CHANGED
@@ -18,8 +18,6 @@ var __export = (target, all) => {
 var commons_exports = {};
 __export(commons_exports, {
   ACKs: () => ACKs,
-  CODES: () => CODES,
-  KafkaConsumer: () => KafkaConsumer,
   MAX_RETRIES: () => MAX_RETRIES,
   MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
   MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
@@ -28,7 +26,6 @@ __export(commons_exports, {
   antiCachePath: () => antiCachePath,
   cliLog: () => cliLog,
   compilerLog: () => compilerLog,
-  createNativeKafkaConsumer: () => createNativeKafkaConsumer,
   createProducerConfig: () => createProducerConfig,
   getClickhouseClient: () => getClickhouseClient,
   getFileName: () => getFileName,
@@ -39,11 +36,7 @@ __export(commons_exports, {
 });
 import http from "http";
 import { createClient } from "@clickhouse/client";
-import {
-  KafkaConsumer,
-  KafkaJS,
-  CODES
-} from "@confluentinc/kafka-javascript";
+import { KafkaJS } from "@confluentinc/kafka-javascript";
 function isTruthy(value) {
   if (!value) return false;
   switch (value.trim().toLowerCase()) {
@@ -81,7 +74,7 @@ async function getKafkaProducer(cfg, logger, maxMessageBytes) {
   await producer.connect();
   return producer;
 }
-var Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient
+var Kafka, compilerLog, antiCachePath, getFileName, getClickhouseClient, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, parseBrokerString, logError, buildSaslConfig, getKafkaClient;
 var init_commons = __esm({
   "src/commons.ts"() {
     "use strict";
@@ -185,59 +178,6 @@ var init_commons = __esm({
         }
       });
     };
-    buildNativeSaslConfig = (logger, cfg) => {
-      if (!cfg.saslMechanism || !cfg.saslUsername || !cfg.saslPassword) {
-        return {};
-      }
-      const mechanism = cfg.saslMechanism.toUpperCase();
-      const validMechanisms = ["PLAIN", "SCRAM-SHA-256", "SCRAM-SHA-512"];
-      if (!validMechanisms.includes(mechanism)) {
-        logger.warn(`Unsupported SASL mechanism: ${cfg.saslMechanism}`);
-        return {};
-      }
-      return {
-        "sasl.mechanisms": mechanism,
-        "sasl.username": cfg.saslUsername,
-        "sasl.password": cfg.saslPassword
-      };
-    };
-    createNativeKafkaConsumer = (cfg, logger, rebalanceCb) => {
-      const brokers = parseBrokerString(cfg.broker || "");
-      if (brokers.length === 0) {
-        throw new Error(`No valid broker addresses found in: "${cfg.broker}"`);
-      }
-      logger.log(
-        `Creating native KafkaConsumer with brokers: ${brokers.join(", ")}`
-      );
-      logger.log(`Security protocol: ${cfg.securityProtocol || "plaintext"}`);
-      logger.log(`Client ID: ${cfg.clientId}`);
-      logger.log(`Group ID: ${cfg.groupId}`);
-      const saslConfig = buildNativeSaslConfig(logger, cfg);
-      const consumerConfig = {
-        // Connection
-        "bootstrap.servers": brokers.join(","),
-        "client.id": cfg.clientId,
-        // Group management
-        "group.id": cfg.groupId,
-        "session.timeout.ms": cfg.sessionTimeoutMs ?? 3e4,
-        "heartbeat.interval.ms": cfg.heartbeatIntervalMs ?? 3e3,
-        "max.poll.interval.ms": cfg.maxPollIntervalMs ?? 3e5,
-        // Offset management
-        "enable.auto.commit": cfg.autoCommit ?? true,
-        "auto.commit.interval.ms": cfg.autoCommitIntervalMs ?? 5e3,
-        // Security
-        ...cfg.securityProtocol === "SASL_SSL" && {
-          "security.protocol": "sasl_ssl"
-        },
-        ...saslConfig,
-        // Rebalance callback
-        ...rebalanceCb && { rebalance_cb: rebalanceCb }
-      };
-      const topicConfig = {
-        "auto.offset.reset": cfg.autoOffsetReset ?? "earliest"
-      };
-      return new KafkaConsumer(consumerConfig, topicConfig);
-    };
   }
 });
 
@@ -3394,7 +3334,6 @@ export {
   ACKs,
   Api,
   ApiHelpers,
-  CODES,
   CSV_DELIMITERS,
   ClickHouseEngines,
   ConsumptionApi,
@@ -3406,7 +3345,6 @@ export {
   ETLPipeline,
   IngestApi,
   IngestPipeline,
-  KafkaConsumer,
   LifeCycle,
   MAX_RETRIES,
   MAX_RETRIES_PRODUCER,
@@ -3434,7 +3372,6 @@ export {
   createClickhouseParameter,
   createConsumptionApi,
   createMaterializedView,
-  createNativeKafkaConsumer,
   createProducerConfig,
   dropView,
   expressMiddleware,