@bayoudhi/moose-lib-serverless 0.1.0

This diff shows the content of a publicly available package version as released to a supported registry. It is provided for informational purposes only and reflects the package exactly as it appears in its public registry.
@@ -0,0 +1,3084 @@
+ "use strict";
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf;
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __esm = (fn, res) => function __init() {
+   return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
+ };
+ var __export = (target, all) => {
+   for (var name in all)
+     __defProp(target, name, { get: all[name], enumerable: true });
+ };
+ var __copyProps = (to, from, except, desc) => {
+   if (from && typeof from === "object" || typeof from === "function") {
+     for (let key of __getOwnPropNames(from))
+       if (!__hasOwnProp.call(to, key) && key !== except)
+         __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+   }
+   return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+   // If the importer is in node compatibility mode or this is not an ESM
+   // file that has been converted to a CommonJS file using a Babel-
+   // compatible transform (i.e. "__esModule" has not been set), then set
+   // "default" to the CommonJS "module.exports" for node compatibility.
+   isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
+   mod
+ ));
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
+
+ // src/commons-types.ts
+ function isTruthy(value) {
+   if (!value) return false;
+   switch (value.trim().toLowerCase()) {
+     case "1":
+     case "true":
+     case "yes":
+     case "on":
+       return true;
+     default:
+       return false;
+   }
+ }
+ function mapTstoJs(filePath) {
+   return filePath.replace(/\.ts$/, ".js").replace(/\.cts$/, ".cjs").replace(/\.mts$/, ".mjs");
+ }
+ var compilerLog, antiCachePath, getFileName, cliLog, MAX_RETRIES, MAX_RETRY_TIME_MS, RETRY_INITIAL_TIME_MS, MAX_RETRIES_PRODUCER, RETRY_FACTOR_PRODUCER, ACKs, logError;
+ var init_commons_types = __esm({
+   "src/commons-types.ts"() {
+     "use strict";
+     compilerLog = (message) => {
+       if (!isTruthy(process.env.MOOSE_DISABLE_COMPILER_LOGS)) {
+         console.log(message);
+       }
+     };
+     antiCachePath = (path2) => `${path2}?num=${Math.random().toString()}&time=${Date.now()}`;
+     getFileName = (filePath) => {
+       const regex = /\/([^/]+)\.ts/;
+       const matches = filePath.match(regex);
+       if (matches && matches.length > 1) {
+         return matches[1];
+       }
+       return "";
+     };
+     cliLog = (log) => {
+       const level = log.message_type === "Error" ? "error" : log.message_type === "Warning" ? "warn" : "info";
+       const structuredLog = {
+         __moose_structured_log__: true,
+         level,
+         message: log.message,
+         resource_type: "runtime",
+         cli_action: log.action,
+         cli_message_type: log.message_type ?? "Info",
+         timestamp: (/* @__PURE__ */ new Date()).toISOString()
+       };
+       process.stderr.write(JSON.stringify(structuredLog) + "\n");
+     };
+     MAX_RETRIES = 150;
+     MAX_RETRY_TIME_MS = 1e3;
+     RETRY_INITIAL_TIME_MS = 100;
+     MAX_RETRIES_PRODUCER = 150;
+     RETRY_FACTOR_PRODUCER = 0.2;
+     ACKs = -1;
+     logError = (logger, e) => {
+       logger.error(e.message);
+       const stack = e.stack;
+       if (stack) {
+         logger.error(stack);
+       }
+     };
+   }
+ });
+
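A minimal TypeScript sketch of the env-flag semantics implemented by isTruthy above (the helper is internal to the bundle; `truthy` below is an illustrative stand-in):

    const truthy = (v?: string): boolean =>
      !!v && ["1", "true", "yes", "on"].includes(v.trim().toLowerCase());

    truthy(" YES ");   // true: input is trimmed and lower-cased first
    truthy("0");       // false: anything outside the allow-list is falsy
    truthy(undefined); // false: unset env vars short-circuit immediately
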
+ // src/config/configFile.ts
+ async function findConfigFile(startDir = process.cwd()) {
+   const fs = await import("fs");
+   let currentDir = import_node_path.default.resolve(startDir);
+   while (true) {
+     const configPath = import_node_path.default.join(currentDir, "moose.config.toml");
+     if (fs.existsSync(configPath)) {
+       return configPath;
+     }
+     const parentDir = import_node_path.default.dirname(currentDir);
+     if (parentDir === currentDir) {
+       break;
+     }
+     currentDir = parentDir;
+   }
+   return null;
+ }
+ async function readProjectConfig() {
+   const fs = await import("fs");
+   const configPath = await findConfigFile();
+   if (!configPath) {
+     throw new ConfigError(
+       "moose.config.toml not found in current directory or any parent directory"
+     );
+   }
+   try {
+     const configContent = fs.readFileSync(configPath, "utf-8");
+     const config = toml.parse(configContent);
+     return config;
+   } catch (error) {
+     throw new ConfigError(`Failed to parse moose.config.toml: ${error}`);
+   }
+ }
+ var import_node_path, toml, ConfigError;
+ var init_configFile = __esm({
+   "src/config/configFile.ts"() {
+     "use strict";
+     import_node_path = __toESM(require("path"));
+     toml = __toESM(require("toml"));
+     ConfigError = class extends Error {
+       constructor(message) {
+         super(message);
+         this.name = "ConfigError";
+       }
+     };
+   }
+ });
+
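The loop above terminates because path.dirname is a fixed point at the filesystem root. A standalone sketch of the same walk-up, assuming only Node's fs/path built-ins:

    import path from "node:path";
    import { existsSync } from "node:fs";

    // Walk from a start directory toward the root, returning the first hit.
    function findUp(file: string, start = process.cwd()): string | null {
      let dir = path.resolve(start);
      for (;;) {
        const candidate = path.join(dir, file);
        if (existsSync(candidate)) return candidate;
        const parent = path.dirname(dir);
        if (parent === dir) return null; // dirname("/") === "/", so we stop here
        dir = parent;
      }
    }

    findUp("moose.config.toml");
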
+ // src/config/runtime.ts
+ var runtime_exports = {};
+ var ConfigurationRegistry;
+ var init_runtime = __esm({
+   "src/config/runtime.ts"() {
+     "use strict";
+     init_configFile();
+     ConfigurationRegistry = class _ConfigurationRegistry {
+       static instance;
+       clickhouseConfig;
+       kafkaConfig;
+       static getInstance() {
+         if (!_ConfigurationRegistry.instance) {
+           _ConfigurationRegistry.instance = new _ConfigurationRegistry();
+         }
+         return _ConfigurationRegistry.instance;
+       }
+       setClickHouseConfig(config) {
+         this.clickhouseConfig = config;
+       }
+       setKafkaConfig(config) {
+         this.kafkaConfig = config;
+       }
+       _env(name) {
+         const value = process.env[name];
+         if (value === void 0) return void 0;
+         const trimmed = value.trim();
+         return trimmed.length > 0 ? trimmed : void 0;
+       }
+       _parseBool(value) {
+         if (value === void 0) return void 0;
+         switch (value.trim().toLowerCase()) {
+           case "1":
+           case "true":
+           case "yes":
+           case "on":
+             return true;
+           case "0":
+           case "false":
+           case "no":
+           case "off":
+             return false;
+           default:
+             return void 0;
+         }
+       }
+       async getClickHouseConfig() {
+         if (this.clickhouseConfig) {
+           return this.clickhouseConfig;
+         }
+         const projectConfig = await readProjectConfig();
+         const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
+         const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
+         const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
+         const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
+         const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
+         const envUseSSL = this._parseBool(
+           this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
+         );
+         return {
+           host: envHost ?? projectConfig.clickhouse_config.host,
+           port: envPort ?? projectConfig.clickhouse_config.host_port.toString(),
+           username: envUser ?? projectConfig.clickhouse_config.user,
+           password: envPassword ?? projectConfig.clickhouse_config.password,
+           database: envDb ?? projectConfig.clickhouse_config.db_name,
+           useSSL: envUseSSL !== void 0 ? envUseSSL : projectConfig.clickhouse_config.use_ssl || false
+         };
+       }
+       async getStandaloneClickhouseConfig(overrides) {
+         if (this.clickhouseConfig) {
+           return { ...this.clickhouseConfig, ...overrides };
+         }
+         const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
+         const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
+         const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
+         const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
+         const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
+         const envUseSSL = this._parseBool(
+           this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
+         );
+         let projectConfig;
+         try {
+           projectConfig = await readProjectConfig();
+         } catch (error) {
+           projectConfig = null;
+         }
+         const defaults = {
+           host: "localhost",
+           port: "18123",
+           username: "default",
+           password: "",
+           database: "local",
+           useSSL: false
+         };
+         return {
+           host: overrides?.host ?? envHost ?? projectConfig?.clickhouse_config.host ?? defaults.host,
+           port: overrides?.port ?? envPort ?? projectConfig?.clickhouse_config.host_port.toString() ?? defaults.port,
+           username: overrides?.username ?? envUser ?? projectConfig?.clickhouse_config.user ?? defaults.username,
+           password: overrides?.password ?? envPassword ?? projectConfig?.clickhouse_config.password ?? defaults.password,
+           database: overrides?.database ?? envDb ?? projectConfig?.clickhouse_config.db_name ?? defaults.database,
+           useSSL: overrides?.useSSL ?? envUseSSL ?? projectConfig?.clickhouse_config.use_ssl ?? defaults.useSSL
+         };
+       }
+       async getKafkaConfig() {
+         if (this.kafkaConfig) {
+           return this.kafkaConfig;
+         }
+         const projectConfig = await readProjectConfig();
+         const envBroker = this._env("MOOSE_REDPANDA_CONFIG__BROKER") ?? this._env("MOOSE_KAFKA_CONFIG__BROKER");
+         const envMsgTimeout = this._env("MOOSE_REDPANDA_CONFIG__MESSAGE_TIMEOUT_MS") ?? this._env("MOOSE_KAFKA_CONFIG__MESSAGE_TIMEOUT_MS");
+         const envSaslUsername = this._env("MOOSE_REDPANDA_CONFIG__SASL_USERNAME") ?? this._env("MOOSE_KAFKA_CONFIG__SASL_USERNAME");
+         const envSaslPassword = this._env("MOOSE_REDPANDA_CONFIG__SASL_PASSWORD") ?? this._env("MOOSE_KAFKA_CONFIG__SASL_PASSWORD");
+         const envSaslMechanism = this._env("MOOSE_REDPANDA_CONFIG__SASL_MECHANISM") ?? this._env("MOOSE_KAFKA_CONFIG__SASL_MECHANISM");
+         const envSecurityProtocol = this._env("MOOSE_REDPANDA_CONFIG__SECURITY_PROTOCOL") ?? this._env("MOOSE_KAFKA_CONFIG__SECURITY_PROTOCOL");
+         const envNamespace = this._env("MOOSE_REDPANDA_CONFIG__NAMESPACE") ?? this._env("MOOSE_KAFKA_CONFIG__NAMESPACE");
+         const envSchemaRegistryUrl = this._env("MOOSE_REDPANDA_CONFIG__SCHEMA_REGISTRY_URL") ?? this._env("MOOSE_KAFKA_CONFIG__SCHEMA_REGISTRY_URL");
+         const fileKafka = projectConfig.kafka_config ?? projectConfig.redpanda_config;
+         return {
+           broker: envBroker ?? fileKafka?.broker ?? "localhost:19092",
+           messageTimeoutMs: envMsgTimeout ? parseInt(envMsgTimeout, 10) : fileKafka?.message_timeout_ms ?? 1e3,
+           saslUsername: envSaslUsername ?? fileKafka?.sasl_username,
+           saslPassword: envSaslPassword ?? fileKafka?.sasl_password,
+           saslMechanism: envSaslMechanism ?? fileKafka?.sasl_mechanism,
+           securityProtocol: envSecurityProtocol ?? fileKafka?.security_protocol,
+           namespace: envNamespace ?? fileKafka?.namespace,
+           schemaRegistryUrl: envSchemaRegistryUrl ?? fileKafka?.schema_registry_url
+         };
+       }
+       hasRuntimeConfig() {
+         return !!this.clickhouseConfig || !!this.kafkaConfig;
+       }
+     };
+     globalThis._mooseConfigRegistry = ConfigurationRegistry.getInstance();
+   }
+ });
+
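getStandaloneClickhouseConfig above resolves each field as explicit override ?? MOOSE_CLICKHOUSE_CONFIG__* env var ?? moose.config.toml ?? built-in default. A hedged usage sketch (the singleton is reachable via globalThis._mooseConfigRegistry; the cast is illustrative):

    // Env vars beat the config file; explicit overrides beat both.
    process.env.MOOSE_CLICKHOUSE_CONFIG__HOST = "ch.internal";
    process.env.MOOSE_CLICKHOUSE_CONFIG__USE_SSL = "yes"; // one of _parseBool's truthy spellings

    const registry = (globalThis as any)._mooseConfigRegistry;
    const cfg = await registry.getStandaloneClickhouseConfig({ database: "analytics" });
    // cfg.host     -> "ch.internal" (env), cfg.useSSL -> true (env, parsed)
    // cfg.database -> "analytics"  (override); other fields fall back to
    // moose.config.toml when present, else to localhost:18123/default/local.
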
+ // src/commons.ts
+ var commons_exports = {};
+ __export(commons_exports, {
+   ACKs: () => ACKs,
+   MAX_RETRIES: () => MAX_RETRIES,
+   MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
+   MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
+   RETRY_FACTOR_PRODUCER: () => RETRY_FACTOR_PRODUCER,
+   RETRY_INITIAL_TIME_MS: () => RETRY_INITIAL_TIME_MS,
+   antiCachePath: () => antiCachePath,
+   cliLog: () => cliLog,
+   compilerLog: () => compilerLog,
+   createProducerConfig: () => createProducerConfig,
+   getClickhouseClient: () => getClickhouseClient,
+   getFileName: () => getFileName,
+   getKafkaClient: () => getKafkaClient,
+   getKafkaProducer: () => getKafkaProducer,
+   logError: () => logError,
+   mapTstoJs: () => mapTstoJs,
+   rewriteImportExtensions: () => rewriteImportExtensions
+ });
+ function walkDirectory(dir, extensions) {
+   const results = [];
+   if (!(0, import_fs.existsSync)(dir)) {
+     return results;
+   }
+   try {
+     const entries = (0, import_fs.readdirSync)(dir, { withFileTypes: true });
+     for (const entry of entries) {
+       const fullPath = import_path.default.join(dir, entry.name);
+       if (entry.isDirectory()) {
+         if (entry.name !== "node_modules") {
+           results.push(...walkDirectory(fullPath, extensions));
+         }
+       } else if (entry.isFile()) {
+         const ext = import_path.default.extname(entry.name);
+         if (extensions.includes(ext)) {
+           results.push(fullPath);
+         }
+       }
+     }
+   } catch (e) {
+     console.debug(`[moose] Failed to read directory ${dir}:`, e);
+   }
+   return results;
+ }
+ function addJsExtensionToImports(content, fileDir) {
+   const fromPattern = /(from\s+['"])(\.\.?\/[^'"]*?)(['"])/g;
+   const bareImportPattern = /(import\s+['"])(\.\.?\/[^'"]*?)(['"])/g;
+   const dynamicPattern = /(import\s*\(\s*['"])(\.\.?\/[^'"]*?)(['"])/g;
+   let result = content;
+   result = result.replace(fromPattern, (match, prefix, importPath, quote) => {
+     return rewriteImportPath(match, prefix, importPath, quote, fileDir);
+   });
+   result = result.replace(bareImportPattern, (match, prefix, importPath, quote) => {
+     return rewriteImportPath(match, prefix, importPath, quote, fileDir);
+   });
+   result = result.replace(dynamicPattern, (match, prefix, importPath, quote) => {
+     return rewriteImportPath(match, prefix, importPath, quote, fileDir);
+   });
+   return result;
+ }
+ function rewriteImportPath(match, prefix, importPath, quote, fileDir) {
+   if (/\.[cm]?js$/.test(importPath)) {
+     return match;
+   }
+   if (/\.json$/.test(importPath)) {
+     return match;
+   }
+   if (fileDir) {
+     const resolvedPath = import_path.default.resolve(fileDir, importPath);
+     if ((0, import_fs.existsSync)(`${resolvedPath}.js`)) {
+       return `${prefix}${importPath}.js${quote}`;
+     }
+     if ((0, import_fs.existsSync)(import_path.default.join(resolvedPath, "index.js"))) {
+       return `${prefix}${importPath}/index.js${quote}`;
+     }
+     if ((0, import_fs.existsSync)(`${resolvedPath}.mjs`)) {
+       return `${prefix}${importPath}.mjs${quote}`;
+     }
+     if ((0, import_fs.existsSync)(import_path.default.join(resolvedPath, "index.mjs"))) {
+       return `${prefix}${importPath}/index.mjs${quote}`;
+     }
+     if ((0, import_fs.existsSync)(`${resolvedPath}.cjs`)) {
+       return `${prefix}${importPath}.cjs${quote}`;
+     }
+     if ((0, import_fs.existsSync)(import_path.default.join(resolvedPath, "index.cjs"))) {
+       return `${prefix}${importPath}/index.cjs${quote}`;
+     }
+   }
+   return `${prefix}${importPath}.js${quote}`;
+ }
+ function rewriteImportExtensions(outDir) {
+   const files = walkDirectory(outDir, [".js", ".mjs"]);
+   for (const filePath of files) {
+     const content = (0, import_fs.readFileSync)(filePath, "utf-8");
+     const fileDir = import_path.default.dirname(filePath);
+     const rewritten = addJsExtensionToImports(content, fileDir);
+     if (content !== rewritten) {
+       (0, import_fs.writeFileSync)(filePath, rewritten, "utf-8");
+     }
+   }
+ }
+ function createProducerConfig(maxMessageBytes) {
+   return {
+     kafkaJS: {
+       idempotent: false,
+       // Not needed for at-least-once delivery
+       acks: ACKs,
+       retry: {
+         retries: MAX_RETRIES_PRODUCER,
+         maxRetryTime: MAX_RETRY_TIME_MS
+       }
+     },
+     "linger.ms": 0,
+     // This is to make sure at least once delivery with immediate feedback on the send
+     ...maxMessageBytes && { "message.max.bytes": maxMessageBytes }
+   };
+ }
+ async function getKafkaProducer(cfg, logger, maxMessageBytes) {
+   const kafka = await getKafkaClient(cfg, logger);
+   const producer = kafka.producer(createProducerConfig(maxMessageBytes));
+   await producer.connect();
+   return producer;
+ }
+ var import_kafka_javascript, import_client, import_fs, import_path, Kafka, getClickhouseClient, parseBrokerString, buildSaslConfig, getKafkaClient;
+ var init_commons = __esm({
+   "src/commons.ts"() {
+     "use strict";
+     init_commons_types();
+     import_kafka_javascript = require("@514labs/kafka-javascript");
+     import_client = require("@clickhouse/client");
+     import_fs = require("fs");
+     import_path = __toESM(require("path"));
+     init_commons_types();
+     ({ Kafka } = import_kafka_javascript.KafkaJS);
+     getClickhouseClient = ({
+       username,
+       password,
+       database,
+       useSSL,
+       host,
+       port
+     }) => {
+       const protocol = useSSL === "1" || useSSL.toLowerCase() === "true" ? "https" : "http";
+       console.log(`Connecting to Clickhouse at ${protocol}://${host}:${port}`);
+       return (0, import_client.createClient)({
+         url: `${protocol}://${host}:${port}`,
+         username,
+         password,
+         database,
+         application: "moose"
+         // Note: wait_end_of_query is configured per operation type, not globally
+         // to preserve SELECT query performance while ensuring INSERT/DDL reliability
+       });
+     };
+     parseBrokerString = (brokerString) => brokerString.split(",").map((b) => b.trim()).filter((b) => b.length > 0);
+     buildSaslConfig = (logger, args) => {
+       const mechanism = args.saslMechanism ? args.saslMechanism.toLowerCase() : "";
+       switch (mechanism) {
+         case "plain":
+         case "scram-sha-256":
+         case "scram-sha-512":
+           return {
+             mechanism,
+             username: args.saslUsername || "",
+             password: args.saslPassword || ""
+           };
+         default:
+           logger.warn(`Unsupported SASL mechanism: ${args.saslMechanism}`);
+           return void 0;
+       }
+     };
+     getKafkaClient = async (cfg, logger) => {
+       const brokers = parseBrokerString(cfg.broker || "");
+       if (brokers.length === 0) {
+         throw new Error(`No valid broker addresses found in: "${cfg.broker}"`);
+       }
+       logger.log(`Creating Kafka client with brokers: ${brokers.join(", ")}`);
+       logger.log(`Security protocol: ${cfg.securityProtocol || "plaintext"}`);
+       logger.log(`Client ID: ${cfg.clientId}`);
+       const saslConfig = buildSaslConfig(logger, cfg);
+       return new Kafka({
+         kafkaJS: {
+           clientId: cfg.clientId,
+           brokers,
+           ssl: cfg.securityProtocol === "SASL_SSL",
+           ...saslConfig && { sasl: saslConfig },
+           retry: {
+             initialRetryTime: RETRY_INITIAL_TIME_MS,
+             maxRetryTime: MAX_RETRY_TIME_MS,
+             retries: MAX_RETRIES
+           }
+         }
+       });
+     };
+   }
+ });
+
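getKafkaClient above takes its broker list as one comma-separated string; a standalone sketch of the parseBrokerString normalization it relies on:

    // Split on commas, trim whitespace, and drop empties; an empty result
    // makes getKafkaClient throw before any connection is attempted.
    const parseBrokers = (s: string): string[] =>
      s.split(",").map((b) => b.trim()).filter((b) => b.length > 0);

    parseBrokers("localhost:19092, broker-2:9092 ,");
    // -> ["localhost:19092", "broker-2:9092"]
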
+ // src/serverless.ts
+ var serverless_exports = {};
+ __export(serverless_exports, {
+   ACKs: () => ACKs,
+   Api: () => Api,
+   CSV_DELIMITERS: () => CSV_DELIMITERS,
+   ClickHouseEngines: () => ClickHouseEngines,
+   ConsumptionApi: () => ConsumptionApi,
+   DEFAULT_CSV_CONFIG: () => DEFAULT_CSV_CONFIG,
+   DEFAULT_JSON_CONFIG: () => DEFAULT_JSON_CONFIG,
+   DataSource: () => DataSource,
+   DeadLetterQueue: () => DeadLetterQueue,
+   ETLPipeline: () => ETLPipeline,
+   IngestApi: () => IngestApi,
+   IngestPipeline: () => IngestPipeline,
+   LifeCycle: () => LifeCycle,
+   MAX_RETRIES: () => MAX_RETRIES,
+   MAX_RETRIES_PRODUCER: () => MAX_RETRIES_PRODUCER,
+   MAX_RETRY_TIME_MS: () => MAX_RETRY_TIME_MS,
+   MOOSE_RUNTIME_ENV_PREFIX: () => MOOSE_RUNTIME_ENV_PREFIX,
+   MaterializedView: () => MaterializedView,
+   OlapTable: () => OlapTable,
+   RETRY_FACTOR_PRODUCER: () => RETRY_FACTOR_PRODUCER,
+   RETRY_INITIAL_TIME_MS: () => RETRY_INITIAL_TIME_MS,
+   Sql: () => Sql,
+   SqlResource: () => SqlResource,
+   Stream: () => Stream,
+   Task: () => Task,
+   View: () => View,
+   WebApp: () => WebApp,
+   Workflow: () => Workflow,
+   antiCachePath: () => antiCachePath,
+   cliLog: () => cliLog,
+   compilerLog: () => compilerLog,
+   createClickhouseParameter: () => createClickhouseParameter,
+   getApi: () => getApi,
+   getApis: () => getApis,
+   getFileName: () => getFileName,
+   getIngestApi: () => getIngestApi,
+   getIngestApis: () => getIngestApis,
+   getMaterializedView: () => getMaterializedView,
+   getMaterializedViews: () => getMaterializedViews,
+   getSqlResource: () => getSqlResource,
+   getSqlResources: () => getSqlResources,
+   getStream: () => getStream,
+   getStreams: () => getStreams,
+   getTable: () => getTable,
+   getTables: () => getTables,
+   getValueFromParameter: () => getValueFromParameter,
+   getView: () => getView,
+   getViews: () => getViews,
+   getWebApp: () => getWebApp,
+   getWebApps: () => getWebApps,
+   getWorkflow: () => getWorkflow,
+   getWorkflows: () => getWorkflows,
+   isValidCSVDelimiter: () => isValidCSVDelimiter,
+   logError: () => logError,
+   mapToClickHouseType: () => mapToClickHouseType,
+   mapTstoJs: () => mapTstoJs,
+   mooseEnvSecrets: () => mooseEnvSecrets,
+   mooseRuntimeEnv: () => mooseRuntimeEnv,
+   parseCSV: () => parseCSV,
+   parseJSON: () => parseJSON,
+   parseJSONWithDates: () => parseJSONWithDates,
+   quoteIdentifier: () => quoteIdentifier,
+   sql: () => sql,
+   toQuery: () => toQuery,
+   toQueryPreview: () => toQueryPreview,
+   toStaticQuery: () => toStaticQuery
+ });
+ module.exports = __toCommonJS(serverless_exports);
+
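The entry point is CommonJS (module.exports via __toCommonJS), so the names listed above are reachable from either module system. A hedged consumption sketch:

    // Either form resolves to the same export object.
    import { sql, OlapTable } from "@bayoudhi/moose-lib-serverless"; // TS/ESM interop
    // const { sql, OlapTable } = require("@bayoudhi/moose-lib-serverless"); // plain CJS
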
+ // src/dataModels/types.ts
+ var ClickHouseEngines = /* @__PURE__ */ ((ClickHouseEngines2) => {
+   ClickHouseEngines2["MergeTree"] = "MergeTree";
+   ClickHouseEngines2["ReplacingMergeTree"] = "ReplacingMergeTree";
+   ClickHouseEngines2["SummingMergeTree"] = "SummingMergeTree";
+   ClickHouseEngines2["AggregatingMergeTree"] = "AggregatingMergeTree";
+   ClickHouseEngines2["CollapsingMergeTree"] = "CollapsingMergeTree";
+   ClickHouseEngines2["VersionedCollapsingMergeTree"] = "VersionedCollapsingMergeTree";
+   ClickHouseEngines2["GraphiteMergeTree"] = "GraphiteMergeTree";
+   ClickHouseEngines2["S3Queue"] = "S3Queue";
+   ClickHouseEngines2["S3"] = "S3";
+   ClickHouseEngines2["Buffer"] = "Buffer";
+   ClickHouseEngines2["Distributed"] = "Distributed";
+   ClickHouseEngines2["IcebergS3"] = "IcebergS3";
+   ClickHouseEngines2["Kafka"] = "Kafka";
+   ClickHouseEngines2["ReplicatedMergeTree"] = "ReplicatedMergeTree";
+   ClickHouseEngines2["ReplicatedReplacingMergeTree"] = "ReplicatedReplacingMergeTree";
+   ClickHouseEngines2["ReplicatedAggregatingMergeTree"] = "ReplicatedAggregatingMergeTree";
+   ClickHouseEngines2["ReplicatedSummingMergeTree"] = "ReplicatedSummingMergeTree";
+   ClickHouseEngines2["ReplicatedCollapsingMergeTree"] = "ReplicatedCollapsingMergeTree";
+   ClickHouseEngines2["ReplicatedVersionedCollapsingMergeTree"] = "ReplicatedVersionedCollapsingMergeTree";
+   return ClickHouseEngines2;
+ })(ClickHouseEngines || {});
+
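ClickHouseEngines compiles to a plain string-valued object, so members and their literals are interchangeable at runtime; OlapTable (below) defaults to MergeTree when no engine is configured. A minimal sketch:

    import { ClickHouseEngines } from "@bayoudhi/moose-lib-serverless";

    // String enums compare equal to their literal values.
    console.log(ClickHouseEngines.ReplacingMergeTree === "ReplacingMergeTree"); // true
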
+ // src/dmv2/utils/stackTrace.ts
+ function shouldSkipStackLine(line) {
+   return line.includes("node_modules") || // Skip npm installed packages (prod)
+   line.includes("node:internal") || // Skip Node.js internals (modern format)
+   line.includes("internal/modules") || // Skip Node.js internals (older format)
+   line.includes("ts-node") || // Skip TypeScript execution
+   line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
+   line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
+   line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
+   line.includes("\\ts-moose-lib\\dist\\");
+ }
+ function parseStackLine(line) {
+   const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
+   if (match && match[1]) {
+     return {
+       file: match[1],
+       line: match[2]
+     };
+   }
+   return void 0;
+ }
+ function getSourceFileInfo(stack) {
+   if (!stack) return {};
+   const lines = stack.split("\n");
+   for (const line of lines) {
+     if (shouldSkipStackLine(line)) continue;
+     const info = parseStackLine(line);
+     if (info) return info;
+   }
+   return {};
+ }
+ function getSourceLocationFromStack(stack) {
+   if (!stack) return void 0;
+   const lines = stack.split("\n");
+   for (const line of lines.slice(1)) {
+     if (shouldSkipStackLine(line)) {
+       continue;
+     }
+     const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
+     if (v8Match) {
+       return {
+         file: v8Match[1],
+         line: parseInt(v8Match[2], 10),
+         column: parseInt(v8Match[3], 10)
+       };
+     }
+     const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
+     if (smMatch) {
+       return {
+         file: smMatch[1],
+         line: parseInt(smMatch[2], 10),
+         column: parseInt(smMatch[3], 10)
+       };
+     }
+   }
+   return void 0;
+ }
+ function getSourceFileFromStack(stack) {
+   const location = getSourceLocationFromStack(stack);
+   return location?.file;
+ }
+
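parseStackLine above accepts both V8 frame shapes: named frames with a parenthesized location and bare anonymous frames. A standalone sketch with the same two regexes:

    const parse = (line: string) => {
      const m =
        line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
      return m && m[1] ? { file: m[1], line: m[2] } : undefined;
    };

    parse("    at doWork (/app/src/index.ts:10:5)"); // { file: "/app/src/index.ts", line: "10" }
    parse("    at /app/src/index.ts:10:5");          // same result for anonymous frames
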
+ // src/dmv2/typedBase.ts
+ var TypedBase = class {
+   /** The JSON schema representation of type T. Injected by the compiler plugin. */
+   schema;
+   /** The name assigned to this resource instance. */
+   name;
+   /** A dictionary mapping column names (keys of T) to their Column definitions. */
+   columns;
+   /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
+   columnArray;
+   /** The configuration object specific to this resource type. */
+   config;
+   /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
+   validators;
+   /** Optional metadata for the resource, always present as an object. */
+   metadata;
+   /**
+    * Whether this resource allows extra fields beyond the defined columns.
+    * When true, extra fields in payloads are passed through to streaming functions.
+    * Injected by the compiler plugin when the type has an index signature.
+    */
+   allowExtraFields;
+   /**
+    * @internal Constructor intended for internal use by subclasses and the compiler plugin.
+    * It expects the schema and columns to be provided, typically injected by the compiler.
+    *
+    * @param name The name for the resource instance.
+    * @param config The configuration object for the resource.
+    * @param schema The JSON schema for the resource's data type T (injected).
+    * @param columns The array of Column definitions for T (injected).
+    * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
+    */
+   constructor(name, config, schema, columns, validators, allowExtraFields) {
+     if (schema === void 0 || columns === void 0) {
+       throw new Error(
+         "Supply the type param T so that the schema is inserted by the compiler plugin."
+       );
+     }
+     this.schema = schema;
+     this.columnArray = columns;
+     const columnsObj = {};
+     columns.forEach((column) => {
+       columnsObj[column.name] = column;
+     });
+     this.columns = columnsObj;
+     this.name = name;
+     this.config = config;
+     this.validators = validators;
+     this.allowExtraFields = allowExtraFields ?? false;
+     this.metadata = config?.metadata ? { ...config.metadata } : {};
+     if (!this.metadata.source) {
+       const stack = new Error().stack;
+       if (stack) {
+         const info = getSourceFileInfo(stack);
+         this.metadata.source = { file: info.file, line: info.line };
+       }
+     }
+   }
+ };
+
+ // src/dataModels/dataModelTypes.ts
+ function isArrayNestedType(dt) {
+   return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
+ }
+ function isNestedType(dt) {
+   return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
+ }
+
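The two guards above discriminate Column data_type values purely by shape. Illustrative inputs (field names per the checks above):

    const nested = { columns: [{ name: "x" }] };                       // isNestedType -> true
    const arrayNested = { elementType: { columns: [{ name: "x" }] } }; // isArrayNestedType -> true
    const scalar = "String";                                           // both guards -> false
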
+ // src/dmv2/internal.ts
+ var import_process = __toESM(require("process"));
+ init_commons_types();
+ var isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
+ var moose_internal = {
+   tables: /* @__PURE__ */ new Map(),
+   streams: /* @__PURE__ */ new Map(),
+   ingestApis: /* @__PURE__ */ new Map(),
+   apis: /* @__PURE__ */ new Map(),
+   sqlResources: /* @__PURE__ */ new Map(),
+   workflows: /* @__PURE__ */ new Map(),
+   webApps: /* @__PURE__ */ new Map(),
+   materializedViews: /* @__PURE__ */ new Map(),
+   views: /* @__PURE__ */ new Map()
+ };
+ var defaultRetentionPeriod = 60 * 60 * 24 * 7;
+ var getMooseInternal = () => globalThis.moose_internal;
+ if (getMooseInternal() === void 0) {
+   globalThis.moose_internal = moose_internal;
+ }
+ var dlqSchema = {
+   version: "3.1",
+   components: {
+     schemas: {
+       DeadLetterModel: {
+         type: "object",
+         properties: {
+           originalRecord: {
+             $ref: "#/components/schemas/Recordstringany"
+           },
+           errorMessage: {
+             type: "string"
+           },
+           errorType: {
+             type: "string"
+           },
+           failedAt: {
+             type: "string",
+             format: "date-time"
+           },
+           source: {
+             oneOf: [
+               {
+                 const: "api"
+               },
+               {
+                 const: "transform"
+               },
+               {
+                 const: "table"
+               }
+             ]
+           }
+         },
+         required: ["originalRecord", "errorMessage", "errorType", "failedAt", "source"]
+       },
+       Recordstringany: {
+         type: "object",
+         properties: {},
+         required: [],
+         description: "Construct a type with a set of properties K of type T",
+         additionalProperties: {}
+       }
+     }
+   },
+   schemas: [
+     {
+       $ref: "#/components/schemas/DeadLetterModel"
+     }
+   ]
+ };
+ var dlqColumns = [
+   {
+     name: "originalRecord",
+     data_type: "Json",
+     primary_key: false,
+     required: true,
+     unique: false,
+     default: null,
+     annotations: [],
+     ttl: null,
+     codec: null,
+     materialized: null,
+     comment: null
+   },
+   {
+     name: "errorMessage",
+     data_type: "String",
+     primary_key: false,
+     required: true,
+     unique: false,
+     default: null,
+     annotations: [],
+     ttl: null,
+     codec: null,
+     materialized: null,
+     comment: null
+   },
+   {
+     name: "errorType",
+     data_type: "String",
+     primary_key: false,
+     required: true,
+     unique: false,
+     default: null,
+     annotations: [],
+     ttl: null,
+     codec: null,
+     materialized: null,
+     comment: null
+   },
+   {
+     name: "failedAt",
+     data_type: "DateTime",
+     primary_key: false,
+     required: true,
+     unique: false,
+     default: null,
+     annotations: [],
+     ttl: null,
+     codec: null,
+     materialized: null,
+     comment: null
+   },
+   {
+     name: "source",
+     data_type: "String",
+     primary_key: false,
+     required: true,
+     unique: false,
+     default: null,
+     annotations: [],
+     ttl: null,
+     codec: null,
+     materialized: null,
+     comment: null
+   }
+ ];
+
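dlqSchema and dlqColumns above describe the same DeadLetterModel shape twice, once as JSON schema and once as ClickHouse columns. An illustrative record matching that shape:

    const deadLetter = {
      originalRecord: { id: 42, payload: "raw input" }, // any JSON object
      errorMessage: "value out of range",
      errorType: "ValidationError",
      failedAt: new Date().toISOString(),               // date-time string
      source: "transform" as const,                     // "api" | "transform" | "table"
    };
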
+ // src/dmv2/sdk/olapTable.ts
+ var import_node_stream = require("stream");
+ var import_node_crypto = require("crypto");
+
+ // src/sqlHelpers.ts
+ var quoteIdentifier = (name) => {
+   return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
+ };
+ var isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
+ var isView = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "View";
+ var isColumn = (value) => typeof value === "object" && value !== null && !("kind" in value) && "name" in value && "annotations" in value;
+ function sqlImpl(strings, ...values) {
+   return new Sql(strings, values);
+ }
+ var sql = sqlImpl;
+ var instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
+ var Sql = class _Sql {
+   values;
+   strings;
+   constructor(rawStrings, rawValues) {
+     if (rawStrings.length - 1 !== rawValues.length) {
+       if (rawStrings.length === 0) {
+         throw new TypeError("Expected at least 1 string");
+       }
+       throw new TypeError(
+         `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
+       );
+     }
+     const valuesLength = rawValues.reduce(
+       (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) || isView(value) ? 0 : 1),
+       0
+     );
+     this.values = new Array(valuesLength);
+     this.strings = new Array(valuesLength + 1);
+     this.strings[0] = rawStrings[0];
+     let i = 0, pos = 0;
+     while (i < rawValues.length) {
+       const child = rawValues[i++];
+       const rawString = rawStrings[i];
+       if (instanceofSql(child)) {
+         this.strings[pos] += child.strings[0];
+         let childIndex = 0;
+         while (childIndex < child.values.length) {
+           this.values[pos++] = child.values[childIndex++];
+           this.strings[pos] = child.strings[childIndex];
+         }
+         this.strings[pos] += rawString;
+       } else if (isColumn(child)) {
+         const aggregationFunction = child.annotations.find(
+           ([k, _]) => k === "aggregationFunction"
+         );
+         if (aggregationFunction !== void 0) {
+           this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
+         } else {
+           this.strings[pos] += `\`${child.name}\``;
+         }
+         this.strings[pos] += rawString;
+       } else if (isTable(child)) {
+         if (child.config.database) {
+           this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
+         } else {
+           this.strings[pos] += `\`${child.name}\``;
+         }
+         this.strings[pos] += rawString;
+       } else if (isView(child)) {
+         this.strings[pos] += `\`${child.name}\``;
+         this.strings[pos] += rawString;
+       } else {
+         this.values[pos++] = child;
+         this.strings[pos] = rawString;
+       }
+     }
+   }
+   /**
+    * Append another Sql fragment, returning a new Sql instance.
+    */
+   append(other) {
+     return new _Sql([...this.strings, ""], [...this.values, other]);
+   }
+ };
+ sql.join = function(fragments, separator) {
+   if (fragments.length === 0) return new Sql([""], []);
+   if (fragments.length === 1) return fragments[0];
+   const sep = separator ?? ", ";
+   const normalized = sep.includes(" ") ? sep : ` ${sep} `;
+   const strings = ["", ...Array(fragments.length - 1).fill(normalized), ""];
+   return new Sql(strings, fragments);
+ };
+ sql.raw = function(text) {
+   return new Sql([text], []);
+ };
+ var toStaticQuery = (sql2) => {
+   const [query, params] = toQuery(sql2);
+   if (Object.keys(params).length !== 0) {
+     throw new Error(
+       "Dynamic SQL is not allowed in the select statement in view creation."
+     );
+   }
+   return query;
+ };
+ var toQuery = (sql2) => {
+   const parameterizedStubs = sql2.values.map(
+     (v, i) => createClickhouseParameter(i, v)
+   );
+   const query = sql2.strings.map(
+     (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
+   ).join("");
+   const query_params = sql2.values.reduce(
+     (acc, v, i) => ({
+       ...acc,
+       [`p${i}`]: getValueFromParameter(v)
+     }),
+     {}
+   );
+   return [query, query_params];
+ };
+ var toQueryPreview = (sql2) => {
+   try {
+     const formatValue = (v) => {
+       if (Array.isArray(v)) {
+         const [type, val] = v;
+         if (type === "Identifier") {
+           return `\`${String(val)}\``;
+         }
+         return `[${v.map((x) => formatValue(x)).join(", ")}]`;
+       }
+       if (v === null || v === void 0) return "NULL";
+       if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
+       if (typeof v === "number") return String(v);
+       if (typeof v === "boolean") return v ? "true" : "false";
+       if (v instanceof Date)
+         return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
+       try {
+         return JSON.stringify(v);
+       } catch {
+         return String(v);
+       }
+     };
+     let out = sql2.strings[0] ?? "";
+     for (let i = 0; i < sql2.values.length; i++) {
+       const val = getValueFromParameter(sql2.values[i]);
+       out += formatValue(val);
+       out += sql2.strings[i + 1] ?? "";
+     }
+     return out.replace(/\s+/g, " ").trim();
+   } catch (error) {
+     console.log(`toQueryPreview error: ${error}`);
+     return "/* query preview unavailable */";
+   }
+ };
+ var getValueFromParameter = (value) => {
+   if (Array.isArray(value)) {
+     const [type, val] = value;
+     if (type === "Identifier") return val;
+   }
+   return value;
+ };
+ function createClickhouseParameter(parameterIndex, value) {
+   return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
+ }
+ var mapToClickHouseType = (value) => {
+   if (typeof value === "number") {
+     return Number.isInteger(value) ? "Int" : "Float";
+   }
+   if (typeof value === "boolean") return "Bool";
+   if (value instanceof Date) return "DateTime";
+   if (Array.isArray(value)) {
+     const [type, _] = value;
+     return type;
+   }
+   return "String";
+ };
+ function emptyIfUndefined(value) {
+   return value === void 0 ? "" : value;
+ }
+
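The sql tag above inlines tables, views, and columns as quoted identifiers while turning every other interpolated value into a ClickHouse {pN:Type} parameter; toQuery then returns the text plus its parameter map. A hedged usage sketch:

    import { sql, toQuery } from "@bayoudhi/moose-lib-serverless";

    const minAge = 21;
    const q = sql`SELECT count(*) FROM users WHERE age >= ${minAge}`;
    const [text, params] = toQuery(q);
    // text   -> "SELECT count(*) FROM users WHERE age >= {p0:Int}"
    // params -> { p0: 21 }
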
1020
+ // src/dmv2/sdk/olapTable.ts
1021
+ var OlapTable = class extends TypedBase {
1022
+ name;
1023
+ /** @internal */
1024
+ kind = "OlapTable";
1025
+ /** @internal Memoized ClickHouse client for reusing connections across insert calls */
1026
+ _memoizedClient;
1027
+ /** @internal Hash of the configuration used to create the memoized client */
1028
+ _configHash;
1029
+ /** @internal Cached table name to avoid repeated generation */
1030
+ _cachedTableName;
1031
+ constructor(name, config, schema, columns, validators) {
1032
+ const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
1033
+ const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
1034
+ const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
1035
+ if (hasFields && hasExpr) {
1036
+ throw new Error(
1037
+ `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
1038
+ );
1039
+ }
1040
+ const hasCluster = typeof resolvedConfig.cluster === "string";
1041
+ const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
1042
+ const hasReplicaName = typeof resolvedConfig.replicaName === "string";
1043
+ if (hasCluster && (hasKeeperPath || hasReplicaName)) {
1044
+ throw new Error(
1045
+ `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
1046
+ );
1047
+ }
1048
+ super(name, resolvedConfig, schema, columns, validators);
1049
+ this.name = name;
1050
+ const tables = getMooseInternal().tables;
1051
+ const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
1052
+ if (!isClientOnlyMode() && tables.has(registryKey)) {
1053
+ throw new Error(
1054
+ `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
1055
+ );
1056
+ }
1057
+ tables.set(registryKey, this);
1058
+ }
1059
+ /**
1060
+ * Generates the versioned table name following Moose's naming convention
1061
+ * Format: {tableName}_{version_with_dots_replaced_by_underscores}
1062
+ */
1063
+ generateTableName() {
1064
+ if (this._cachedTableName) {
1065
+ return this._cachedTableName;
1066
+ }
1067
+ const tableVersion = this.config.version;
1068
+ if (!tableVersion) {
1069
+ this._cachedTableName = this.name;
1070
+ } else {
1071
+ const versionSuffix = tableVersion.replace(/\./g, "_");
1072
+ this._cachedTableName = `${this.name}_${versionSuffix}`;
1073
+ }
1074
+ return this._cachedTableName;
1075
+ }
1076
+ /**
1077
+ * Creates a fast hash of the ClickHouse configuration.
1078
+ * Uses crypto.createHash for better performance than JSON.stringify.
1079
+ *
1080
+ * @private
1081
+ */
1082
+ createConfigHash(clickhouseConfig) {
1083
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1084
+ const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
1085
+ return (0, import_node_crypto.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1086
+ }
1087
+ /**
1088
+ * Gets or creates a memoized ClickHouse client.
1089
+ * The client is cached and reused across multiple insert calls for better performance.
1090
+ * If the configuration changes, a new client will be created.
1091
+ *
1092
+ * @private
1093
+ */
1094
+ async getMemoizedClient() {
1095
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1096
+ const configRegistry = globalThis._mooseConfigRegistry;
1097
+ const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1098
+ const clickhouseConfig = await configRegistry.getClickHouseConfig();
1099
+ const currentConfigHash = this.createConfigHash(clickhouseConfig);
1100
+ if (this._memoizedClient && this._configHash === currentConfigHash) {
1101
+ return { client: this._memoizedClient, config: clickhouseConfig };
1102
+ }
1103
+ if (this._memoizedClient && this._configHash !== currentConfigHash) {
1104
+ try {
1105
+ await this._memoizedClient.close();
1106
+ } catch (error) {
1107
+ }
1108
+ }
1109
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1110
+ const client = getClickhouseClient2({
1111
+ username: clickhouseConfig.username,
1112
+ password: clickhouseConfig.password,
1113
+ database: effectiveDatabase,
1114
+ useSSL: clickhouseConfig.useSSL ? "true" : "false",
1115
+ host: clickhouseConfig.host,
1116
+ port: clickhouseConfig.port
1117
+ });
1118
+ this._memoizedClient = client;
1119
+ this._configHash = currentConfigHash;
1120
+ return { client, config: clickhouseConfig };
1121
+ }
1122
+ /**
1123
+ * Closes the memoized ClickHouse client if it exists.
1124
+ * This is useful for cleaning up connections when the table instance is no longer needed.
1125
+ * The client will be automatically recreated on the next insert call if needed.
1126
+ */
1127
+ async closeClient() {
1128
+ if (this._memoizedClient) {
1129
+ try {
1130
+ await this._memoizedClient.close();
1131
+ } catch (error) {
1132
+ } finally {
1133
+ this._memoizedClient = void 0;
1134
+ this._configHash = void 0;
1135
+ }
1136
+ }
1137
+ }
1138
+ /**
1139
+ * Validates a single record using typia's comprehensive type checking.
1140
+ * This provides the most accurate validation as it uses the exact TypeScript type information.
1141
+ *
1142
+ * @param record The record to validate
1143
+ * @returns Validation result with detailed error information
1144
+ */
1145
+ validateRecord(record) {
1146
+ if (this.validators?.validate) {
1147
+ try {
1148
+ const result = this.validators.validate(record);
1149
+ return {
1150
+ success: result.success,
1151
+ data: result.data,
1152
+ errors: result.errors?.map(
1153
+ (err) => typeof err === "string" ? err : JSON.stringify(err)
1154
+ )
1155
+ };
1156
+ } catch (error) {
1157
+ return {
1158
+ success: false,
1159
+ errors: [error instanceof Error ? error.message : String(error)]
1160
+ };
1161
+ }
1162
+ }
1163
+ throw new Error("No typia validator found");
1164
+ }
1165
+ /**
1166
+ * Type guard function using typia's is() function.
1167
+ * Provides compile-time type narrowing for TypeScript.
1168
+ *
1169
+ * @param record The record to check
1170
+ * @returns True if record matches type T, with type narrowing
1171
+ */
1172
+ isValidRecord(record) {
1173
+ if (this.validators?.is) {
1174
+ return this.validators.is(record);
1175
+ }
1176
+ throw new Error("No typia validator found");
1177
+ }
1178
+ /**
1179
+ * Assert that a record matches type T, throwing detailed errors if not.
1180
+ * Uses typia's assert() function for the most detailed error reporting.
1181
+ *
1182
+ * @param record The record to assert
1183
+ * @returns The validated and typed record
1184
+ * @throws Detailed validation error if record doesn't match type T
1185
+ */
1186
+ assertValidRecord(record) {
1187
+ if (this.validators?.assert) {
1188
+ return this.validators.assert(record);
1189
+ }
1190
+ throw new Error("No typia validator found");
1191
+ }
1192
+ /**
1193
+ * Validates an array of records with comprehensive error reporting.
1194
+ * Uses the most appropriate validation method available (typia or basic).
1195
+ *
1196
+ * @param data Array of records to validate
1197
+ * @returns Detailed validation results
1198
+ */
1199
+ async validateRecords(data) {
1200
+ const valid = [];
1201
+ const invalid = [];
1202
+ valid.length = 0;
1203
+ invalid.length = 0;
1204
+ const dataLength = data.length;
1205
+ for (let i = 0; i < dataLength; i++) {
1206
+ const record = data[i];
1207
+ try {
1208
+ if (this.isValidRecord(record)) {
1209
+ valid.push(this.mapToClickhouseRecord(record));
1210
+ } else {
1211
+ const result = this.validateRecord(record);
1212
+ if (result.success) {
1213
+ valid.push(this.mapToClickhouseRecord(record));
1214
+ } else {
1215
+ invalid.push({
1216
+ record,
1217
+ error: result.errors?.join(", ") || "Validation failed",
1218
+ index: i,
1219
+ path: "root"
1220
+ });
1221
+ }
1222
+ }
1223
+ } catch (error) {
1224
+ invalid.push({
1225
+ record,
1226
+ error: error instanceof Error ? error.message : String(error),
1227
+ index: i,
1228
+ path: "root"
1229
+ });
1230
+ }
1231
+ }
1232
+ return {
1233
+ valid,
1234
+ invalid,
1235
+ total: dataLength
1236
+ };
1237
+ }
1238
+ /**
1239
+ * Optimized batch retry that minimizes individual insert operations.
1240
+ * Groups records into smaller batches to reduce round trips while still isolating failures.
1241
+ *
1242
+ * @private
1243
+ */
1244
+ async retryIndividualRecords(client, tableName, records) {
1245
+ const successful = [];
1246
+ const failed = [];
1247
+ const RETRY_BATCH_SIZE = 10;
1248
+ const totalRecords = records.length;
1249
+ for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
1250
+ const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
1251
+ const batch = records.slice(i, batchEnd);
1252
+ try {
1253
+ await client.insert({
1254
+ table: quoteIdentifier(tableName),
1255
+ values: batch,
1256
+ format: "JSONEachRow",
1257
+ clickhouse_settings: {
1258
+ date_time_input_format: "best_effort",
1259
+ // Add performance settings for retries
1260
+ max_insert_block_size: RETRY_BATCH_SIZE,
1261
+ max_block_size: RETRY_BATCH_SIZE
1262
+ }
1263
+ });
1264
+ successful.push(...batch);
1265
+ } catch (batchError) {
1266
+ for (let j = 0; j < batch.length; j++) {
1267
+ const record = batch[j];
1268
+ try {
1269
+ await client.insert({
1270
+ table: quoteIdentifier(tableName),
1271
+ values: [record],
1272
+ format: "JSONEachRow",
1273
+ clickhouse_settings: {
1274
+ date_time_input_format: "best_effort"
1275
+ }
1276
+ });
1277
+ successful.push(record);
1278
+ } catch (error) {
1279
+ failed.push({
1280
+ record,
1281
+ error: error instanceof Error ? error.message : String(error),
1282
+ index: i + j
1283
+ });
1284
+ }
1285
+ }
1286
+ }
1287
+ }
1288
+ return { successful, failed };
1289
+ }
1290
+ /**
1291
+ * Validates input parameters and strategy compatibility
1292
+ * @private
1293
+ */
1294
+ validateInsertParameters(data, options) {
1295
+ const isStream = data instanceof import_node_stream.Readable;
1296
+ const strategy = options?.strategy || "fail-fast";
1297
+ const shouldValidate = options?.validate !== false;
1298
+ if (isStream && strategy === "isolate") {
1299
+ throw new Error(
1300
+ "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
1301
+ );
1302
+ }
1303
+ if (isStream && shouldValidate) {
1304
+ console.warn(
1305
+ "Validation is not supported with stream input. Validation will be skipped."
1306
+ );
1307
+ }
1308
+ return { isStream, strategy, shouldValidate };
1309
+ }
1310
+ /**
1311
+ * Handles early return cases for empty data
1312
+ * @private
1313
+ */
1314
+ handleEmptyData(data, isStream) {
1315
+ if (isStream && !data) {
1316
+ return {
1317
+ successful: 0,
1318
+ failed: 0,
1319
+ total: 0
1320
+ };
1321
+ }
1322
+ if (!isStream && (!data || data.length === 0)) {
1323
+ return {
1324
+ successful: 0,
1325
+ failed: 0,
1326
+ total: 0
1327
+ };
1328
+ }
1329
+ return null;
1330
+ }
1331
+ /**
1332
+ * Performs pre-insertion validation for array data
1333
+ * @private
1334
+ */
1335
+ async performPreInsertionValidation(data, shouldValidate, strategy, options) {
1336
+ if (!shouldValidate) {
1337
+ return { validatedData: data, validationErrors: [] };
1338
+ }
1339
+ try {
1340
+ const validationResult = await this.validateRecords(data);
1341
+ const validatedData = validationResult.valid;
1342
+ const validationErrors = validationResult.invalid;
1343
+ if (validationErrors.length > 0) {
1344
+ this.handleValidationErrors(validationErrors, strategy, data, options);
1345
+ switch (strategy) {
1346
+ case "discard":
1347
+ return { validatedData, validationErrors };
1348
+ case "isolate":
1349
+ return { validatedData: data, validationErrors };
1350
+ default:
1351
+ return { validatedData, validationErrors };
1352
+ }
1353
+ }
1354
+ return { validatedData, validationErrors };
1355
+ } catch (validationError) {
1356
+ if (strategy === "fail-fast") {
1357
+ throw validationError;
1358
+ }
1359
+ console.warn("Validation error:", validationError);
1360
+ return { validatedData: data, validationErrors: [] };
1361
+ }
1362
+ }
1363
+ /**
1364
+ * Handles validation errors based on the specified strategy
1365
+ * @private
1366
+ */
1367
+ handleValidationErrors(validationErrors, strategy, data, options) {
1368
+ switch (strategy) {
1369
+ case "fail-fast":
1370
+ const firstError = validationErrors[0];
1371
+ throw new Error(
1372
+ `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1373
+ );
1374
+ case "discard":
1375
+ this.checkValidationThresholds(validationErrors, data.length, options);
1376
+ break;
1377
+ case "isolate":
1378
+ break;
1379
+ }
1380
+ }
1381
+ /**
1382
+ * Checks if validation errors exceed configured thresholds
1383
+ * @private
1384
+ */
1385
+ checkValidationThresholds(validationErrors, totalRecords, options) {
1386
+ const validationFailedCount = validationErrors.length;
1387
+ const validationFailedRatio = validationFailedCount / totalRecords;
1388
+ if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1389
+ throw new Error(
1390
+ `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1391
+ );
1392
+ }
1393
+ if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1394
+ throw new Error(
1395
+ `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1396
+ );
1397
+ }
1398
+ }
1399
+ /**
1400
+ * Optimized insert options preparation with better memory management
1401
+ * @private
1402
+ */
1403
+ prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1404
+ const insertOptions = {
1405
+ table: quoteIdentifier(tableName),
1406
+ format: "JSONEachRow",
1407
+ clickhouse_settings: {
1408
+ date_time_input_format: "best_effort",
1409
+ wait_end_of_query: 1,
1410
+ // Ensure at least once delivery for INSERT operations
1411
+ // Performance optimizations
1412
+ max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1413
+ max_block_size: 65536,
1414
+ // Use async inserts for better performance with large datasets
1415
+ async_insert: validatedData.length > 1e3 ? 1 : 0,
1416
+ wait_for_async_insert: 1
1417
+ // For at least once delivery
1418
+ }
1419
+ };
1420
+ if (isStream) {
1421
+ insertOptions.values = data;
1422
+ } else {
1423
+ insertOptions.values = validatedData;
1424
+ }
1425
+ if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1426
+ if (options.allowErrors !== void 0) {
1427
+ insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1428
+ }
1429
+ if (options.allowErrorsRatio !== void 0) {
1430
+ insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
1431
+ }
1432
+ }
1433
+ return insertOptions;
1434
+ }
1435
+ /**
1436
+ * Creates success result for completed insertions
1437
+ * @private
1438
+ */
1439
+ createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1440
+ if (isStream) {
1441
+ return {
1442
+ successful: -1,
1443
+ // -1 indicates stream mode where count is unknown
1444
+ failed: 0,
1445
+ total: -1
1446
+ };
1447
+ }
1448
+ const insertedCount = validatedData.length;
1449
+ const totalProcessed = shouldValidate ? data.length : insertedCount;
1450
+ const result = {
1451
+ successful: insertedCount,
1452
+ failed: shouldValidate ? validationErrors.length : 0,
1453
+ total: totalProcessed
1454
+ };
1455
+ if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1456
+ result.failedRecords = validationErrors.map((ve) => ({
1457
+ record: ve.record,
1458
+ error: `Validation error: ${ve.error}`,
1459
+ index: ve.index
1460
+ }));
1461
+ }
1462
+ return result;
1463
+ }
1464
+ /**
1465
+ * Handles insertion errors based on the specified strategy
1466
+ * @private
1467
+ */
1468
+ async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1469
+ switch (strategy) {
1470
+ case "fail-fast":
1471
+ throw new Error(
1472
+ `Failed to insert data into table ${tableName}: ${batchError}`
1473
+ );
1474
+ case "discard":
1475
+ throw new Error(
1476
+ `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1477
+ );
1478
+ case "isolate":
1479
+ return await this.handleIsolateStrategy(
1480
+ batchError,
1481
+ tableName,
1482
+ data,
1483
+ validatedData,
1484
+ validationErrors,
1485
+ isStream,
1486
+ shouldValidate,
1487
+ options
1488
+ );
1489
+ default:
1490
+ throw new Error(`Unknown error strategy: ${strategy}`);
1491
+ }
1492
+ }
1493
+ /**
1494
+ * Handles the isolate strategy for insertion errors
1495
+ * @private
1496
+ */
1497
+ async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1498
+ if (isStream) {
1499
+ throw new Error(
1500
+ `Isolate strategy is not supported with stream input: ${batchError}`
1501
+ );
1502
+ }
1503
+ try {
1504
+ const { client } = await this.getMemoizedClient();
1505
+ const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1506
+ const retryData = skipValidationOnRetry ? data : validatedData;
1507
+ const { successful, failed } = await this.retryIndividualRecords(
1508
+ client,
1509
+ tableName,
1510
+ retryData
1511
+ );
1512
+ const allFailedRecords = [
1513
+ // Validation errors (if any and not skipping validation on retry)
1514
+ ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1515
+ record: ve.record,
1516
+ error: `Validation error: ${ve.error}`,
1517
+ index: ve.index
1518
+ })) : [],
1519
+ // Insertion errors
1520
+ ...failed
1521
+ ];
1522
+ this.checkInsertionThresholds(
1523
+ allFailedRecords,
1524
+ data.length,
1525
+ options
1526
+ );
1527
+ return {
1528
+ successful: successful.length,
1529
+ failed: allFailedRecords.length,
1530
+ total: data.length,
1531
+ failedRecords: allFailedRecords
1532
+ };
1533
+ } catch (isolationError) {
1534
+ throw new Error(
1535
+ `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1536
+ );
1537
+ }
1538
+ }
1539
+ /**
1540
+ * Checks if insertion errors exceed configured thresholds
1541
+ * @private
1542
+ */
1543
+ checkInsertionThresholds(failedRecords, totalRecords, options) {
1544
+ const totalFailed = failedRecords.length;
1545
+ const failedRatio = totalFailed / totalRecords;
1546
+ if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1547
+ throw new Error(
1548
+ `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1549
+ );
1550
+ }
1551
+ if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1552
+ throw new Error(
1553
+ `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1554
+ );
1555
+ }
1556
+ }
1557
+ /**
1558
+ * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1559
+ *
1560
+ * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1561
+ * - For every Nested struct (not array), it recurses into the struct.
1562
+ * - This ensures compatibility with kafka_clickhouse_sync.
1563
+ *
1564
+ * @param record The input record to transform (may be deeply nested)
1565
+ * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1566
+ * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1567
+ */
1568
+ mapToClickhouseRecord(record, columns = this.columnArray) {
1569
+ const result = { ...record };
1570
+ for (const col of columns) {
1571
+ const value = record[col.name];
1572
+ const dt = col.data_type;
1573
+ if (isArrayNestedType(dt)) {
1574
+ if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1575
+ result[col.name] = value.map((item) => [
1576
+ this.mapToClickhouseRecord(item, dt.elementType.columns)
1577
+ ]);
1578
+ }
1579
+ } else if (isNestedType(dt)) {
1580
+ if (value && typeof value === "object") {
1581
+ result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1582
+ }
1583
+ }
1584
+ }
1585
+ return result;
1586
+ }
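+ // Worked example of the transformation above (hedged sketch; the `events`
+ // column is hypothetical and assumed to be an Array(Nested(...)) field):
+ //   input:  { id: 1, events: [{ ts: 1 }, { ts: 2 }] }
+ //   output: { id: 1, events: [[{ ts: 1 }], [{ ts: 2 }]] }
+ // Each nested item is wrapped in its own single-element array via
+ // value.map((item) => [...]), which is the JSONEachRow shape ClickHouse
+ // expects for Array(Nested(...)) columns.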
1587
+ /**
1588
+ * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1589
+ * This method establishes a direct connection to ClickHouse using the project configuration
1590
+ * and inserts the provided data into the versioned table.
1591
+ *
1592
+ * PERFORMANCE OPTIMIZATIONS:
1593
+ * - Memoized client connections with fast config hashing
1594
+ * - Single-pass validation with pre-allocated arrays
1595
+ * - Batch-optimized retry strategy (batches of 10, then individual)
1596
+ * - Optimized ClickHouse settings for large datasets
1597
+ * - Reduced memory allocations and object creation
1598
+ *
1599
+ * Uses advanced typia validation when available for comprehensive type checking,
1600
+ * with fallback to basic validation for compatibility.
1601
+ *
1602
+ * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1603
+ * If the configuration changes, a new client will be automatically created.
1604
+ *
1605
+ * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1606
+ * @param options Optional configuration for error handling, validation, and insertion behavior
1607
+ * @returns Promise resolving to detailed insertion results
1608
+ * @throws {ConfigError} When configuration cannot be read or parsed
1609
+ * @throws {ClickHouseError} When insertion fails based on the error strategy
1610
+ * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1611
+ *
1612
+ * @example
1613
+ * ```typescript
1614
+ * // Create an OlapTable instance (typia validators auto-injected)
1615
+ * const userTable = new OlapTable<User>('users');
1616
+ *
1617
+ * // Insert with comprehensive typia validation
1618
+ * const result1 = await userTable.insert([
1619
+ * { id: 1, name: 'John', email: 'john@example.com' },
1620
+ * { id: 2, name: 'Jane', email: 'jane@example.com' }
1621
+ * ]);
1622
+ *
1623
+ * // Insert data with stream input (validation not available for streams)
1624
+ * const dataStream = new Readable({
1625
+ * objectMode: true,
1626
+ * read() {} // supply your stream implementation here
1627
+ * });
1628
+ * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1629
+ *
1630
+ * // Insert with validation disabled for performance
1631
+ * const result3 = await userTable.insert(data, { validate: false });
1632
+ *
1633
+ * // Insert with error handling strategies
1634
+ * const result4 = await userTable.insert(mixedData, {
1635
+ * strategy: 'isolate',
1636
+ * allowErrorsRatio: 0.1,
1637
+ * validate: true // Use typia validation (default)
1638
+ * });
1639
+ *
1640
+ * // Optional: Clean up connection when completely done
1641
+ * await userTable.closeClient();
1642
+ * ```
1643
+ */
1644
+ async insert(data, options) {
1645
+ const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1646
+ const emptyResult = this.handleEmptyData(data, isStream);
1647
+ if (emptyResult) {
1648
+ return emptyResult;
1649
+ }
1650
+ let validatedData = [];
1651
+ let validationErrors = [];
1652
+ if (!isStream && shouldValidate) {
1653
+ const validationResult = await this.performPreInsertionValidation(
1654
+ data,
1655
+ shouldValidate,
1656
+ strategy,
1657
+ options
1658
+ );
1659
+ validatedData = validationResult.validatedData;
1660
+ validationErrors = validationResult.validationErrors;
1661
+ } else {
1662
+ validatedData = isStream ? [] : data;
1663
+ }
1664
+ const { client } = await this.getMemoizedClient();
1665
+ const tableName = this.generateTableName();
1666
+ try {
1667
+ const insertOptions = this.prepareInsertOptions(
1668
+ tableName,
1669
+ data,
1670
+ validatedData,
1671
+ isStream,
1672
+ strategy,
1673
+ options
1674
+ );
1675
+ await client.insert(insertOptions);
1676
+ return this.createSuccessResult(
1677
+ data,
1678
+ validatedData,
1679
+ validationErrors,
1680
+ isStream,
1681
+ shouldValidate,
1682
+ strategy
1683
+ );
1684
+ } catch (batchError) {
1685
+ return await this.handleInsertionError(
1686
+ batchError,
1687
+ strategy,
1688
+ tableName,
1689
+ data,
1690
+ validatedData,
1691
+ validationErrors,
1692
+ isStream,
1693
+ shouldValidate,
1694
+ options
1695
+ );
1696
+ }
1697
+ }
1698
+ // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1699
+ // were removed in ENG-856. Use direct configuration instead, e.g.:
1700
+ // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1701
+ };
1702
+
1703
+ // src/dmv2/sdk/stream.ts
1704
+ var import_node_crypto2 = require("crypto");
1705
+ var RoutedMessage = class {
1706
+ /** The destination stream for the message */
1707
+ destination;
1708
+ /** The message value(s) to send */
1709
+ values;
1710
+ /**
1711
+ * Creates a new routed message.
1712
+ *
1713
+ * @param destination The target stream
1714
+ * @param values The message(s) to route
1715
+ */
1716
+ constructor(destination, values) {
1717
+ this.destination = destination;
1718
+ this.values = values;
1719
+ }
1720
+ };
1721
+ var Stream = class extends TypedBase {
1722
+ defaultDeadLetterQueue;
1723
+ /** @internal Memoized KafkaJS producer for reusing connections across sends */
1724
+ _memoizedProducer;
1725
+ /** @internal Hash of the configuration used to create the memoized Kafka producer */
1726
+ _kafkaConfigHash;
1727
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
1728
+ super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1729
+ const streams = getMooseInternal().streams;
1730
+ if (streams.has(name)) {
1731
+ throw new Error(`Stream with name ${name} already exists`);
1732
+ }
1733
+ streams.set(name, this);
1734
+ this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1735
+ }
1736
+ /**
1737
+ * Internal map storing transformation configurations.
1738
+ * Maps destination stream names to arrays of transformation functions and their configs.
1739
+ *
1740
+ * @internal
1741
+ */
1742
+ _transformations = /* @__PURE__ */ new Map();
1743
+ /**
1744
+ * Internal function for multi-stream transformations.
1745
+ * Allows a single transformation to route messages to multiple destinations.
1746
+ *
1747
+ * @internal
1748
+ */
1749
+ _multipleTransformations;
1750
+ /**
1751
+ * Internal array storing consumer configurations.
1752
+ *
1753
+ * @internal
1754
+ */
1755
+ _consumers = [];
1756
+ /**
1757
+ * Builds the full Kafka topic name including optional namespace and version suffix.
1758
+ * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
1759
+ */
1760
+ buildFullTopicName(namespace) {
1761
+ const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1762
+ const base = `${this.name}${versionSuffix}`;
1763
+ return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1764
+ }
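+ // Worked example (illustrative values): with name = "pageViews",
+ // config.version = "1.2.0" and namespace = "prod", versionSuffix becomes
+ // "_1_2_0", base becomes "pageViews_1_2_0", and the returned topic is
+ // "prod.pageViews_1_2_0". Without a namespace it is just "pageViews_1_2_0".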
1765
+ /**
1766
+ * Creates a fast hash string from relevant Kafka configuration fields.
1767
+ */
1768
+ createConfigHash(kafkaConfig) {
1769
+ const configString = [
1770
+ kafkaConfig.broker,
1771
+ kafkaConfig.messageTimeoutMs,
1772
+ kafkaConfig.saslUsername,
1773
+ kafkaConfig.saslPassword,
1774
+ kafkaConfig.saslMechanism,
1775
+ kafkaConfig.securityProtocol,
1776
+ kafkaConfig.namespace
1777
+ ].join(":");
1778
+ return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1779
+ }
1780
+ /**
1781
+ * Gets or creates a memoized KafkaJS producer using runtime configuration.
1782
+ */
1783
+ async getMemoizedProducer() {
1784
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1785
+ const configRegistry = globalThis._mooseConfigRegistry;
1786
+ const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1787
+ const kafkaConfig = await configRegistry.getKafkaConfig();
1788
+ const currentHash = this.createConfigHash(kafkaConfig);
1789
+ if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1790
+ return { producer: this._memoizedProducer, kafkaConfig };
1791
+ }
1792
+ if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1793
+ try {
1794
+ await this._memoizedProducer.disconnect();
1795
+ } catch {
1796
+ }
1797
+ this._memoizedProducer = void 0;
1798
+ }
1799
+ const clientId = `moose-sdk-stream-${this.name}`;
1800
+ const logger = {
1801
+ logPrefix: clientId,
1802
+ log: (message) => {
1803
+ console.log(`${clientId}: ${message}`);
1804
+ },
1805
+ error: (message) => {
1806
+ console.error(`${clientId}: ${message}`);
1807
+ },
1808
+ warn: (message) => {
1809
+ console.warn(`${clientId}: ${message}`);
1810
+ }
1811
+ };
1812
+ const producer = await getKafkaProducer2(
1813
+ {
1814
+ clientId,
1815
+ broker: kafkaConfig.broker,
1816
+ securityProtocol: kafkaConfig.securityProtocol,
1817
+ saslUsername: kafkaConfig.saslUsername,
1818
+ saslPassword: kafkaConfig.saslPassword,
1819
+ saslMechanism: kafkaConfig.saslMechanism
1820
+ },
1821
+ logger
1822
+ );
1823
+ this._memoizedProducer = producer;
1824
+ this._kafkaConfigHash = currentHash;
1825
+ return { producer, kafkaConfig };
1826
+ }
1827
+ /**
1828
+ * Closes the memoized Kafka producer if it exists.
1829
+ */
1830
+ async closeProducer() {
1831
+ if (this._memoizedProducer) {
1832
+ try {
1833
+ await this._memoizedProducer.disconnect();
1834
+ } catch {
1835
+ } finally {
1836
+ this._memoizedProducer = void 0;
1837
+ this._kafkaConfigHash = void 0;
1838
+ }
1839
+ }
1840
+ }
1841
+ /**
1842
+ * Sends one or more records to this stream's Kafka topic.
1843
+ * Values are JSON-serialized as message values.
1844
+ */
1845
+ async send(values) {
1846
+ const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1847
+ if (flat.length === 0) return;
1848
+ const { producer, kafkaConfig } = await this.getMemoizedProducer();
1849
+ const topic = this.buildFullTopicName(kafkaConfig.namespace);
1850
+ const sr = this.config.schemaConfig;
1851
+ if (sr && sr.kind === "JSON") {
1852
+ const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1853
+ if (!schemaRegistryUrl) {
1854
+ throw new Error("Schema Registry URL not configured");
1855
+ }
1856
+ const {
1857
+ default: { SchemaRegistry }
1858
+ } = await import("@kafkajs/confluent-schema-registry");
1859
+ const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1860
+ let schemaId;
1861
+ if ("id" in sr.reference) {
1862
+ schemaId = sr.reference.id;
1863
+ } else if ("subjectLatest" in sr.reference) {
1864
+ schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1865
+ } else if ("subject" in sr.reference) {
1866
+ schemaId = await registry.getRegistryId(sr.reference.subject, sr.reference.version);
1867
+ }
1868
+ if (schemaId === void 0) {
1869
+ throw new Error("Malformed schema reference.");
1870
+ }
1871
+ const encoded = await Promise.all(
1872
+ flat.map((v) => registry.encode(schemaId, v))
1873
+ );
1874
+ await producer.send({
1875
+ topic,
1876
+ messages: encoded.map((value) => ({ value }))
1877
+ });
1878
+ return;
1879
+ } else if (sr !== void 0) {
1880
+ throw new Error("Currently only JSON Schema is supported.");
1881
+ }
1882
+ await producer.send({
1883
+ topic,
1884
+ messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1885
+ });
1886
+ }
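+ // Usage sketch for send() (hedged; assumes the compiler plugin injects the
+ // schema for the hypothetical Event type):
+ //   const events = new Stream<Event>("events");
+ //   await events.send({ id: 1 });               // single record
+ //   await events.send([{ id: 2 }, { id: 3 }]);  // batch in one produce call
+ //   await events.closeProducer();               // optional connection cleanup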
1887
+ /**
1888
+ * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1889
+ * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1890
+ *
1891
+ * @template U The data type of the messages in the destination stream.
1892
+ * @param destination The destination stream for the transformed messages.
1893
+ * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1894
+ * Return `null`, `undefined`, or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1895
+ * @param config Optional configuration for this specific transformation step, like a version.
1896
+ */
1897
+ addTransform(destination, transformation, config) {
1898
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1899
+ const transformConfig = {
1900
+ ...config ?? {},
1901
+ sourceFile
1902
+ };
1903
+ if (transformConfig.deadLetterQueue === void 0) {
1904
+ transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1905
+ }
1906
+ if (this._transformations.has(destination.name)) {
1907
+ const existingTransforms = this._transformations.get(destination.name);
1908
+ const hasVersion = existingTransforms.some(
1909
+ ([_, __, cfg]) => cfg.version === transformConfig.version
1910
+ );
1911
+ if (!hasVersion) {
1912
+ existingTransforms.push([destination, transformation, transformConfig]);
1913
+ }
1914
+ } else {
1915
+ this._transformations.set(destination.name, [[destination, transformation, transformConfig]]);
1916
+ }
1917
+ }
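+ // Hedged example: two transforms to the same destination coexist only when
+ // their `version` values differ (the raw/clean streams are hypothetical):
+ //   raw.addTransform(clean, (r) => (r.valid ? r : null), { version: "1.0" });
+ //   raw.addTransform(clean, (r) => [r, { ...r, dup: true }], { version: "2.0" });
+ // A third call reusing version "1.0" is silently ignored by the hasVersion
+ // check above.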
1918
+ /**
1919
+ * Adds a consumer function that processes messages from this stream.
1920
+ * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1921
+ *
1922
+ * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1923
+ * @param config Optional configuration for this specific consumer, like a version.
1924
+ */
1925
+ addConsumer(consumer, config) {
1926
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1927
+ const consumerConfig = {
1928
+ ...config ?? {},
1929
+ sourceFile
1930
+ };
1931
+ if (consumerConfig.deadLetterQueue === void 0) {
1932
+ consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1933
+ }
1934
+ const hasVersion = this._consumers.some(
1935
+ (existing) => existing.config.version === consumerConfig.version
1936
+ );
1937
+ if (!hasVersion) {
1938
+ this._consumers.push({ consumer, config: consumerConfig });
1939
+ }
1940
+ }
1941
+ /**
1942
+ * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1943
+ * @param values The value or values to send to this stream.
1944
+ * @returns A `RoutedMessage` object associating the values with this stream.
1945
+ *
1946
+ * @example
1947
+ * ```typescript
1948
+ * sourceStream.addMultiTransform((record) => [
1949
+ * destinationStream1.routed(transformedRecord1),
1950
+ * destinationStream2.routed([record2a, record2b])
1951
+ * ]);
1952
+ * ```
1953
+ */
1954
+ routed = (values) => new RoutedMessage(this, values);
1955
+ /**
1956
+ * Adds a single transformation function that can route messages to multiple destination streams.
1957
+ * This is an alternative to adding multiple individual `addTransform` calls.
1958
+ * Only one multi-transform function can be added per stream.
1959
+ *
1960
+ * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1961
+ * each specifying a destination stream and the message(s) to send to it.
1962
+ */
1963
+ addMultiTransform(transformation) {
1964
+ this._multipleTransformations = transformation;
1965
+ }
1966
+ };
1967
+ function attachTypeGuard(dl, typeGuard) {
1968
+ dl.asTyped = () => typeGuard(dl.originalRecord);
1969
+ }
1970
+ var DeadLetterQueue = class extends Stream {
1971
+ constructor(name, config, typeGuard) {
1972
+ if (typeGuard === void 0) {
1973
+ throw new Error(
1974
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
1975
+ );
1976
+ }
1977
+ super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1978
+ this.typeGuard = typeGuard;
1979
+ getMooseInternal().streams.set(name, this);
1980
+ }
1981
+ /**
1982
+ * Internal type guard function for validating and casting original records.
1983
+ *
1984
+ * @internal
1985
+ */
1986
+ typeGuard;
1987
+ /**
1988
+ * Adds a transformation step for dead letter records.
1989
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1990
+ *
1991
+ * @template U The output type for the transformation
1992
+ * @param destination The destination stream for transformed messages
1993
+ * @param transformation Function to transform dead letter records
1994
+ * @param config Optional transformation configuration
1995
+ */
1996
+ addTransform(destination, transformation, config) {
1997
+ const withValidate = (deadLetter) => {
1998
+ attachTypeGuard(deadLetter, this.typeGuard);
1999
+ return transformation(deadLetter);
2000
+ };
2001
+ super.addTransform(destination, withValidate, config);
2002
+ }
2003
+ /**
2004
+ * Adds a consumer for dead letter records.
2005
+ * The consumer function receives a DeadLetter<T> with type recovery capabilities.
2006
+ *
2007
+ * @param consumer Function to process dead letter records
2008
+ * @param config Optional consumer configuration
2009
+ */
2010
+ addConsumer(consumer, config) {
2011
+ const withValidate = (deadLetter) => {
2012
+ attachTypeGuard(deadLetter, this.typeGuard);
2013
+ return consumer(deadLetter);
2014
+ };
2015
+ super.addConsumer(withValidate, config);
2016
+ }
2017
+ /**
2018
+ * Adds a multi-stream transformation for dead letter records.
2019
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
2020
+ *
2021
+ * @param transformation Function to route dead letter records to multiple destinations
2022
+ */
2023
+ addMultiTransform(transformation) {
2024
+ const withValidate = (deadLetter) => {
2025
+ attachTypeGuard(deadLetter, this.typeGuard);
2026
+ return transformation(deadLetter);
2027
+ };
2028
+ super.addMultiTransform(withValidate);
2029
+ }
2030
+ };
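+ // DeadLetterQueue usage sketch (hedged; assumes the compiler plugin supplies
+ // the type guard for the hypothetical User type):
+ //   const dlq = new DeadLetterQueue<User>("userDLQ");
+ //   dlq.addConsumer((dl) => {
+ //     const user = dl.asTyped(); // re-validates dl.originalRecord via typeGuard
+ //     console.error("recovered failed record", user);
+ //   });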
2031
+
2032
+ // src/dmv2/sdk/workflow.ts
2033
+ var Task = class {
2034
+ /**
2035
+ * Creates a new Task instance.
2036
+ *
2037
+ * @param name - Unique identifier for the task
2038
+ * @param config - Configuration object defining the task behavior
2039
+ *
2040
+ * @example
2041
+ * ```typescript
2042
+ * // No input, no output
2043
+ * const task1 = new Task<null, void>("task1", {
2044
+ * run: async () => {
2045
+ * console.log("No input/output");
2046
+ * }
2047
+ * });
2048
+ *
2049
+ * // No input, but has output
2050
+ * const task2 = new Task<null, OutputType>("task2", {
2051
+ * run: async () => {
2052
+ * return someOutput;
2053
+ * }
2054
+ * });
2055
+ *
2056
+ * // Has input, no output
2057
+ * const task3 = new Task<InputType, void>("task3", {
2058
+ * run: async (input: InputType) => {
2059
+ * // process input but return nothing
2060
+ * }
2061
+ * });
2062
+ *
2063
+ * // Has both input and output
2064
+ * const task4 = new Task<InputType, OutputType>("task4", {
2065
+ * run: async (input: InputType) => {
2066
+ * return process(input);
2067
+ * }
2068
+ * });
2069
+ * ```
2070
+ */
2071
+ constructor(name, config) {
2072
+ this.name = name;
2073
+ this.config = config;
2074
+ }
2075
+ };
2076
+ var Workflow = class {
2077
+ /**
2078
+ * Creates a new Workflow instance and registers it with the Moose system.
2079
+ *
2080
+ * @param name - Unique identifier for the workflow
2081
+ * @param config - Configuration object defining the workflow behavior and task orchestration
2082
+ * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
2083
+ */
2084
+ constructor(name, config) {
2085
+ this.name = name;
2086
+ this.config = config;
2087
+ const workflows = getMooseInternal().workflows;
2088
+ if (workflows.has(name)) {
2089
+ throw new Error(`Workflow with name ${name} already exists`);
2090
+ }
2091
+ this.validateTaskGraph(config.startingTask, name);
2092
+ workflows.set(name, this);
2093
+ }
2094
+ /**
2095
+ * Validates the task graph to ensure there are no null tasks or infinite loops.
2096
+ *
2097
+ * @private
2098
+ * @param startingTask - The starting task to begin validation from
2099
+ * @param workflowName - The name of the workflow being validated (for error messages)
2100
+ * @throws {Error} When null/undefined tasks are found or infinite loops are detected
2101
+ */
2102
+ validateTaskGraph(startingTask, workflowName) {
2103
+ if (startingTask === null || startingTask === void 0) {
2104
+ throw new Error(
2105
+ `Workflow "${workflowName}" has a null or undefined starting task`
2106
+ );
2107
+ }
2108
+ const visited = /* @__PURE__ */ new Set();
2109
+ const recursionStack = /* @__PURE__ */ new Set();
2110
+ const validateTask = (task, currentPath) => {
2111
+ if (task === null || task === void 0) {
2112
+ const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
2113
+ throw new Error(
2114
+ `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
2115
+ );
2116
+ }
2117
+ const taskName = task.name;
2118
+ if (recursionStack.has(taskName)) {
2119
+ const cycleStartIndex = currentPath.indexOf(taskName);
2120
+ const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
2121
+ throw new Error(
2122
+ `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
2123
+ );
2124
+ }
2125
+ if (visited.has(taskName)) {
2126
+ return;
2127
+ }
2128
+ visited.add(taskName);
2129
+ recursionStack.add(taskName);
2130
+ if (task.config.onComplete) {
2131
+ for (const nextTask of task.config.onComplete) {
2132
+ validateTask(nextTask, [...currentPath, taskName]);
2133
+ }
2134
+ }
2135
+ recursionStack.delete(taskName);
2136
+ };
2137
+ validateTask(startingTask, []);
2138
+ }
2139
+ };
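+ // Minimal task graph accepted by validateTaskGraph (names illustrative; a
+ // task that re-enters its own chain would trigger the infinite-loop error):
+ //   const load = new Task<Data, void>("load", { run: async (d) => save(d) });
+ //   const extract = new Task<null, Data>("extract", {
+ //     run: async () => fetchData(),
+ //     onComplete: [load],
+ //   });
+ //   new Workflow("nightlySync", { startingTask: extract });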
2140
+
2141
+ // src/dmv2/sdk/ingestApi.ts
2142
+ var IngestApi = class extends TypedBase {
2143
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2144
+ super(name, config, schema, columns, void 0, allowExtraFields);
2145
+ const ingestApis = getMooseInternal().ingestApis;
2146
+ if (ingestApis.has(name)) {
2147
+ throw new Error(`Ingest API with name ${name} already exists`);
2148
+ }
2149
+ ingestApis.set(name, this);
2150
+ }
2151
+ };
2152
+
2153
+ // src/dmv2/sdk/consumptionApi.ts
2154
+ var Api = class extends TypedBase {
2155
+ /** @internal The handler function that processes requests and generates responses. */
2156
+ _handler;
2157
+ /** @internal The JSON schema definition for the response type R. */
2158
+ responseSchema;
2159
+ constructor(name, handler, config, schema, columns, responseSchema) {
2160
+ super(name, config ?? {}, schema, columns);
2161
+ this._handler = handler;
2162
+ this.responseSchema = responseSchema ?? {
2163
+ version: "3.1",
2164
+ schemas: [{ type: "array", items: { type: "object" } }],
2165
+ components: { schemas: {} }
2166
+ };
2167
+ const apis = getMooseInternal().apis;
2168
+ const key = `${name}${config?.version ? `:${config.version}` : ""}`;
2169
+ if (apis.has(key)) {
2170
+ throw new Error(
2171
+ `Consumption API with name ${name} and version ${config?.version} already exists`
2172
+ );
2173
+ }
2174
+ apis.set(key, this);
2175
+ if (config?.path) {
2176
+ if (config.version) {
2177
+ const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
2178
+ if (pathEndsWithVersion) {
2179
+ if (apis.has(config.path)) {
2180
+ const existing = apis.get(config.path);
2181
+ throw new Error(
2182
+ `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2183
+ );
2184
+ }
2185
+ apis.set(config.path, this);
2186
+ } else {
2187
+ const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
2188
+ if (apis.has(versionedPath)) {
2189
+ const existing = apis.get(versionedPath);
2190
+ throw new Error(
2191
+ `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
2192
+ );
2193
+ }
2194
+ apis.set(versionedPath, this);
2195
+ if (!apis.has(config.path)) {
2196
+ apis.set(config.path, this);
2197
+ }
2198
+ }
2199
+ } else {
2200
+ if (apis.has(config.path)) {
2201
+ const existing = apis.get(config.path);
2202
+ throw new Error(
2203
+ `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2204
+ );
2205
+ }
2206
+ apis.set(config.path, this);
2207
+ }
2208
+ }
2209
+ }
2210
+ /**
2211
+ * Retrieves the handler function associated with this Consumption API.
2212
+ * @returns The handler function.
2213
+ */
2214
+ getHandler = () => {
2215
+ return this._handler;
2216
+ };
2217
+ async call(baseUrl, queryParams) {
2218
+ let path2;
2219
+ if (this.config?.path) {
2220
+ if (this.config.version) {
2221
+ const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2222
+ if (pathEndsWithVersion) {
2223
+ path2 = this.config.path;
2224
+ } else {
2225
+ path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2226
+ }
2227
+ } else {
2228
+ path2 = this.config.path;
2229
+ }
2230
+ } else {
2231
+ path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2232
+ }
2233
+ const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2234
+ const searchParams = url.searchParams;
2235
+ for (const [key, value] of Object.entries(queryParams)) {
2236
+ if (Array.isArray(value)) {
2237
+ for (const item of value) {
2238
+ if (item !== null && item !== void 0) {
2239
+ searchParams.append(key, String(item));
2240
+ }
2241
+ }
2242
+ } else if (value !== null && value !== void 0) {
2243
+ searchParams.append(key, String(value));
2244
+ }
2245
+ }
2246
+ const response = await fetch(url, {
2247
+ method: "GET",
2248
+ headers: {
2249
+ Accept: "application/json"
2250
+ }
2251
+ });
2252
+ if (!response.ok) {
2253
+ throw new Error(`HTTP error! status: ${response.status}`);
2254
+ }
2255
+ const data = await response.json();
2256
+ return data;
2257
+ }
2258
+ };
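+ // Sketch of Api.call URL construction (handler and types hypothetical):
+ //   const stats = new Api<Params, Row[]>("dailyStats", handler, { version: "1.0" });
+ //   await stats.call("http://localhost:4000", { day: "2024-01-01" });
+ //   // -> GET http://localhost:4000/api/dailyStats/1.0?day=2024-01-01
+ // Array query values are appended once per element; null/undefined entries
+ // are skipped.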
2259
+ var ConsumptionApi = Api;
2260
+
2261
+ // src/dmv2/sdk/ingestPipeline.ts
2262
+ var IngestPipeline = class extends TypedBase {
2263
+ /**
2264
+ * The OLAP table component of the pipeline, if configured.
2265
+ * Provides analytical query capabilities for the ingested data.
2266
+ * Only present when `config.table` is not `false`.
2267
+ */
2268
+ table;
2269
+ /**
2270
+ * The stream component of the pipeline, if configured.
2271
+ * Handles real-time data flow and processing between components.
2272
+ * Only present when `config.stream` is not `false`.
2273
+ */
2274
+ stream;
2275
+ /**
2276
+ * The ingest API component of the pipeline, if configured.
2277
+ * Provides HTTP endpoints for data ingestion.
2278
+ * Only present when `config.ingestApi` is not `false`.
2279
+ */
2280
+ ingestApi;
2281
+ /** The dead letter queue of the pipeline, if configured. */
2282
+ deadLetterQueue;
2283
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2284
+ super(name, config, schema, columns, validators, allowExtraFields);
2285
+ if (config.ingest !== void 0) {
2286
+ console.warn(
2287
+ "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2288
+ );
2289
+ if (config.ingestApi === void 0) {
2290
+ config.ingestApi = config.ingest;
2291
+ }
2292
+ }
2293
+ if (config.table) {
2294
+ const tableConfig = typeof config.table === "object" ? {
2295
+ ...config.table,
2296
+ lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2297
+ ...config.version && { version: config.version }
2298
+ } : {
2299
+ lifeCycle: config.lifeCycle,
2300
+ engine: "MergeTree" /* MergeTree */,
2301
+ ...config.version && { version: config.version }
2302
+ };
2303
+ this.table = new OlapTable(
2304
+ name,
2305
+ tableConfig,
2306
+ this.schema,
2307
+ this.columnArray,
2308
+ this.validators
2309
+ );
2310
+ }
2311
+ if (config.deadLetterQueue) {
2312
+ const streamConfig = {
2313
+ destination: void 0,
2314
+ ...typeof config.deadLetterQueue === "object" ? {
2315
+ ...config.deadLetterQueue,
2316
+ lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2317
+ } : { lifeCycle: config.lifeCycle },
2318
+ ...config.version && { version: config.version }
2319
+ };
2320
+ this.deadLetterQueue = new DeadLetterQueue(
2321
+ `${name}DeadLetterQueue`,
2322
+ streamConfig,
2323
+ validators.assert
2324
+ );
2325
+ }
2326
+ if (config.stream) {
2327
+ const streamConfig = {
2328
+ destination: this.table,
2329
+ defaultDeadLetterQueue: this.deadLetterQueue,
2330
+ ...typeof config.stream === "object" ? {
2331
+ ...config.stream,
2332
+ lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2333
+ } : { lifeCycle: config.lifeCycle },
2334
+ ...config.version && { version: config.version }
2335
+ };
2336
+ this.stream = new Stream(
2337
+ name,
2338
+ streamConfig,
2339
+ this.schema,
2340
+ this.columnArray,
2341
+ void 0,
2342
+ this.allowExtraFields
2343
+ );
2344
+ this.stream.pipelineParent = this;
2345
+ }
2346
+ const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2347
+ if (effectiveIngestAPI) {
2348
+ if (!this.stream) {
2349
+ throw new Error("Ingest API needs a stream to write to.");
2350
+ }
2351
+ const ingestConfig = {
2352
+ destination: this.stream,
2353
+ deadLetterQueue: this.deadLetterQueue,
2354
+ ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2355
+ ...config.version && { version: config.version },
2356
+ ...config.path && { path: config.path }
2357
+ };
2358
+ this.ingestApi = new IngestApi(
2359
+ name,
2360
+ ingestConfig,
2361
+ this.schema,
2362
+ this.columnArray,
2363
+ void 0,
2364
+ this.allowExtraFields
2365
+ );
2366
+ this.ingestApi.pipelineParent = this;
2367
+ }
2368
+ }
2369
+ };
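+ // Typical IngestPipeline wiring (hedged; assumes the compiler plugin injects
+ // schema, columns and validators for the hypothetical Event type):
+ //   const pipeline = new IngestPipeline<Event>("events", {
+ //     table: true,           // OlapTable "events"
+ //     stream: true,          // topic "events" with the table as destination
+ //     ingestApi: true,       // HTTP ingest endpoint writing to the stream
+ //     deadLetterQueue: true, // "eventsDeadLetterQueue", used as default DLQ
+ //   });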
2370
+
2371
+ // src/dmv2/sdk/etlPipeline.ts
2372
+ var InternalBatcher = class {
2373
+ iterator;
2374
+ batchSize;
2375
+ constructor(asyncIterable, batchSize = 20) {
2376
+ this.iterator = asyncIterable[Symbol.asyncIterator]();
2377
+ this.batchSize = batchSize;
2378
+ }
2379
+ async getNextBatch() {
2380
+ const items = [];
2381
+ for (let i = 0; i < this.batchSize; i++) {
2382
+ const { value, done } = await this.iterator.next();
2383
+ if (done) {
2384
+ return { items, hasMore: false };
2385
+ }
2386
+ items.push(value);
2387
+ }
2388
+ return { items, hasMore: true };
2389
+ }
2390
+ };
2391
+ var ETLPipeline = class {
2392
+ constructor(name, config) {
2393
+ this.name = name;
2394
+ this.config = config;
2395
+ this.setupPipeline();
2396
+ }
2397
+ batcher;
2398
+ setupPipeline() {
2399
+ this.batcher = this.createBatcher();
2400
+ const tasks = this.createAllTasks();
2401
+ tasks.extract.config.onComplete = [tasks.transform];
2402
+ tasks.transform.config.onComplete = [tasks.load];
2403
+ new Workflow(this.name, {
2404
+ startingTask: tasks.extract,
2405
+ retries: 1,
2406
+ timeout: "30m"
2407
+ });
2408
+ }
2409
+ createBatcher() {
2410
+ const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2411
+ return new InternalBatcher(iterable);
2412
+ }
2413
+ getDefaultTaskConfig() {
2414
+ return {
2415
+ retries: 1,
2416
+ timeout: "30m"
2417
+ };
2418
+ }
2419
+ createAllTasks() {
2420
+ const taskConfig = this.getDefaultTaskConfig();
2421
+ return {
2422
+ extract: this.createExtractTask(taskConfig),
2423
+ transform: this.createTransformTask(taskConfig),
2424
+ load: this.createLoadTask(taskConfig)
2425
+ };
2426
+ }
2427
+ createExtractTask(taskConfig) {
2428
+ return new Task(`${this.name}_extract`, {
2429
+ run: async ({}) => {
2430
+ console.log(`Running extract task for ${this.name}...`);
2431
+ const batch = await this.batcher.getNextBatch();
2432
+ console.log(`Extract task completed with ${batch.items.length} items`);
2433
+ return batch;
2434
+ },
2435
+ retries: taskConfig.retries,
2436
+ timeout: taskConfig.timeout
2437
+ });
2438
+ }
2439
+ createTransformTask(taskConfig) {
2440
+ return new Task(
2441
+ `${this.name}_transform`,
2442
+ {
2443
+ // Use new single-parameter context API for handlers
2444
+ run: async ({ input }) => {
2445
+ const batch = input;
2446
+ console.log(
2447
+ `Running transform task for ${this.name} with ${batch.items.length} items...`
2448
+ );
2449
+ const transformedItems = [];
2450
+ for (const item of batch.items) {
2451
+ const transformed = await this.config.transform(item);
2452
+ transformedItems.push(transformed);
2453
+ }
2454
+ console.log(
2455
+ `Transform task completed with ${transformedItems.length} items`
2456
+ );
2457
+ return { items: transformedItems };
2458
+ },
2459
+ retries: taskConfig.retries,
2460
+ timeout: taskConfig.timeout
2461
+ }
2462
+ );
2463
+ }
2464
+ createLoadTask(taskConfig) {
2465
+ return new Task(`${this.name}_load`, {
2466
+ run: async ({ input: transformedItems }) => {
2467
+ console.log(
2468
+ `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2469
+ );
2470
+ if ("insert" in this.config.load) {
2471
+ await this.config.load.insert(transformedItems.items);
2472
+ } else {
2473
+ await this.config.load(transformedItems.items);
2474
+ }
2475
+ console.log(`Load task completed`);
2476
+ },
2477
+ retries: taskConfig.retries,
2478
+ timeout: taskConfig.timeout
2479
+ });
2480
+ }
2481
+ // Execute the entire ETL pipeline
2482
+ async run() {
2483
+ console.log(`Starting ETL Pipeline: ${this.name}`);
2484
+ let batchNumber = 1;
2485
+ do {
2486
+ console.log(`Processing batch ${batchNumber}...`);
2487
+ const batch = await this.batcher.getNextBatch();
2488
+ if (batch.items.length === 0) {
2489
+ break;
2490
+ }
2491
+ const transformedItems = [];
2492
+ for (const extractedData of batch.items) {
2493
+ const transformedData = await this.config.transform(extractedData);
2494
+ transformedItems.push(transformedData);
2495
+ }
2496
+ if ("insert" in this.config.load) {
2497
+ await this.config.load.insert(transformedItems);
2498
+ } else {
2499
+ await this.config.load(transformedItems);
2500
+ }
2501
+ console.log(
2502
+ `Completed batch ${batchNumber} with ${batch.items.length} items`
2503
+ );
2504
+ batchNumber++;
2505
+ if (!batch.hasMore) {
2506
+ break;
2507
+ }
2508
+ } while (true);
2509
+ console.log(`Completed ETL Pipeline: ${this.name}`);
2510
+ }
2511
+ };
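+ // ETLPipeline usage sketch (extract source and destination table are
+ // hypothetical; InternalBatcher pulls batches of 20 by default):
+ //   const etl = new ETLPipeline<Raw, Clean>("backfill", {
+ //     extract: async function* () { yield* fetchRows(); },
+ //     transform: async (row) => normalize(row),
+ //     load: cleanTable, // any object with .insert(), e.g. an OlapTable
+ //   });
+ //   await etl.run();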
2512
+
2513
+ // src/dmv2/sdk/materializedView.ts
2514
+ function formatTableReference(table) {
2515
+ const database = table instanceof OlapTable ? table.config.database : void 0;
2516
+ if (database) {
2517
+ return `\`${database}\`.\`${table.name}\``;
2518
+ }
2519
+ return `\`${table.name}\``;
2520
+ }
2521
+ var requireTargetTableName = (tableName) => {
2522
+ if (typeof tableName === "string") {
2523
+ return tableName;
2524
+ } else {
2525
+ throw new Error("Name of targetTable is not specified.");
2526
+ }
2527
+ };
2528
+ var MaterializedView = class {
2529
+ /** @internal */
2530
+ kind = "MaterializedView";
2531
+ /** The name of the materialized view */
2532
+ name;
2533
+ /** The target OlapTable instance where the materialized data is stored. */
2534
+ targetTable;
2535
+ /** The SELECT SQL statement */
2536
+ selectSql;
2537
+ /** Names of source tables that the SELECT reads from */
2538
+ sourceTables;
2539
+ /** Optional metadata for the materialized view */
2540
+ metadata;
2541
+ constructor(options, targetSchema, targetColumns) {
2542
+ let selectStatement = options.selectStatement;
2543
+ if (typeof selectStatement !== "string") {
2544
+ selectStatement = toStaticQuery(selectStatement);
2545
+ }
2546
+ if (targetSchema === void 0 || targetColumns === void 0) {
2547
+ throw new Error(
2548
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
2549
+ );
2550
+ }
2551
+ const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2552
+ requireTargetTableName(
2553
+ options.targetTable?.name ?? options.tableName
2554
+ ),
2555
+ {
2556
+ orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2557
+ engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2558
+ },
2559
+ targetSchema,
2560
+ targetColumns
2561
+ );
2562
+ if (targetTable.name === options.materializedViewName) {
2563
+ throw new Error(
2564
+ "Materialized view name cannot be the same as the target table name."
2565
+ );
2566
+ }
2567
+ this.name = options.materializedViewName;
2568
+ this.targetTable = targetTable;
2569
+ this.selectSql = selectStatement;
2570
+ this.sourceTables = options.selectTables.map(
2571
+ (t) => formatTableReference(t)
2572
+ );
2573
+ this.metadata = options.metadata ? { ...options.metadata } : {};
2574
+ if (!this.metadata.source) {
2575
+ const stack = new Error().stack;
2576
+ const sourceInfo = getSourceFileFromStack(stack);
2577
+ if (sourceInfo) {
2578
+ this.metadata.source = { file: sourceInfo };
2579
+ }
2580
+ }
2581
+ const materializedViews = getMooseInternal().materializedViews;
2582
+ if (!isClientOnlyMode() && materializedViews.has(this.name)) {
2583
+ throw new Error(`MaterializedView with name ${this.name} already exists`);
2584
+ }
2585
+ materializedViews.set(this.name, this);
2586
+ }
2587
+ };
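+ // MaterializedView sketch (table names and SQL illustrative; the target can
+ // be an existing OlapTable or an inline { name, orderByFields, engine } spec):
+ //   new MaterializedView<DailyCount>({
+ //     materializedViewName: "dailyCountsMV",
+ //     targetTable: { name: "dailyCounts", orderByFields: ["day"] },
+ //     selectStatement: sql`SELECT toDate(ts) AS day, count() AS n FROM ${events} GROUP BY day`,
+ //     selectTables: [events],
+ //   });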
2588
+
2589
+ // src/dmv2/sdk/sqlResource.ts
2590
+ var SqlResource = class {
2591
+ /** @internal */
2592
+ kind = "SqlResource";
2593
+ /** Array of SQL statements to execute for setting up the resource. */
2594
+ setup;
2595
+ /** Array of SQL statements to execute for tearing down the resource. */
2596
+ teardown;
2597
+ /** The name of the SQL resource (e.g., view name, materialized view name). */
2598
+ name;
2599
+ /** List of OlapTables or Views that this resource reads data from. */
2600
+ pullsDataFrom;
2601
+ /** List of OlapTables or Views that this resource writes data to. */
2602
+ pushesDataTo;
2603
+ /** @internal Source file path where this resource was defined */
2604
+ sourceFile;
2605
+ /** @internal Source line number where this resource was defined */
2606
+ sourceLine;
2607
+ /** @internal Source column number where this resource was defined */
2608
+ sourceColumn;
2609
+ /**
2610
+ * Creates a new SqlResource instance.
2611
+ * @param name The name of the resource.
2612
+ * @param setup An array of SQL DDL statements to create the resource.
2613
+ * @param teardown An array of SQL DDL statements to drop the resource.
2614
+ * @param options Optional configuration for specifying data dependencies.
2615
+ * @param options.pullsDataFrom Tables/Views this resource reads from.
2616
+ * @param options.pushesDataTo Tables/Views this resource writes to.
2617
+ */
2618
+ constructor(name, setup, teardown, options) {
2619
+ const sqlResources = getMooseInternal().sqlResources;
2620
+ if (!isClientOnlyMode() && sqlResources.has(name)) {
2621
+ throw new Error(`SqlResource with name ${name} already exists`);
2622
+ }
2623
+ sqlResources.set(name, this);
2624
+ this.name = name;
2625
+ this.setup = setup.map(
2626
+ (sql2) => typeof sql2 === "string" ? sql2 : toStaticQuery(sql2)
2627
+ );
2628
+ this.teardown = teardown.map(
2629
+ (sql2) => typeof sql2 === "string" ? sql2 : toStaticQuery(sql2)
2630
+ );
2631
+ this.pullsDataFrom = options?.pullsDataFrom ?? [];
2632
+ this.pushesDataTo = options?.pushesDataTo ?? [];
2633
+ const stack = new Error().stack;
2634
+ const location = getSourceLocationFromStack(stack);
2635
+ if (location) {
2636
+ this.sourceFile = location.file;
2637
+ this.sourceLine = location.line;
2638
+ this.sourceColumn = location.column;
2639
+ }
2640
+ }
2641
+ };
2642
+
2643
+ // src/dmv2/sdk/view.ts
2644
+ function formatTableReference2(table) {
2645
+ const database = table instanceof OlapTable ? table.config.database : void 0;
2646
+ if (database) {
2647
+ return `\`${database}\`.\`${table.name}\``;
2648
+ }
2649
+ return `\`${table.name}\``;
2650
+ }
2651
+ var View = class {
2652
+ /** @internal */
2653
+ kind = "View";
2654
+ /** The name of the view */
2655
+ name;
2656
+ /** The SELECT SQL statement that defines the view */
2657
+ selectSql;
2658
+ /** Names of source tables/views that the SELECT reads from */
2659
+ sourceTables;
2660
+ /** Optional metadata for the view */
2661
+ metadata;
2662
+ /**
2663
+ * Creates a new View instance.
2664
+ * @param name The name of the view to be created.
2665
+ * @param selectStatement The SQL SELECT statement that defines the view's logic.
2666
+ * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2667
+ * @param metadata Optional metadata for the view (e.g., description, source file).
2668
+ */
2669
+ constructor(name, selectStatement, baseTables, metadata) {
2670
+ if (typeof selectStatement !== "string") {
2671
+ selectStatement = toStaticQuery(selectStatement);
2672
+ }
2673
+ this.name = name;
2674
+ this.selectSql = selectStatement;
2675
+ this.sourceTables = baseTables.map((t) => formatTableReference2(t));
2676
+ this.metadata = metadata ? { ...metadata } : {};
2677
+ if (!this.metadata.source) {
2678
+ const stack = new Error().stack;
2679
+ const sourceInfo = getSourceFileFromStack(stack);
2680
+ if (sourceInfo) {
2681
+ this.metadata.source = { file: sourceInfo };
2682
+ }
2683
+ }
2684
+ const views = getMooseInternal().views;
2685
+ if (!isClientOnlyMode() && views.has(this.name)) {
2686
+ throw new Error(`View with name ${this.name} already exists`);
2687
+ }
2688
+ views.set(this.name, this);
2689
+ }
2690
+ };
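+ // View sketch: a plain SQL view with dependency tracking (names illustrative):
+ //   new View("activeUsers", sql`SELECT * FROM ${users} WHERE active = 1`, [users]);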
2691
+
2692
+ // src/dmv2/sdk/lifeCycle.ts
2693
+ var LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2694
+ LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2695
+ LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2696
+ LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2697
+ return LifeCycle2;
2698
+ })(LifeCycle || {});
2699
+
2700
+ // src/dmv2/sdk/webApp.ts
2701
+ var RESERVED_MOUNT_PATHS = [
2702
+ "/admin",
2703
+ "/api",
2704
+ "/consumption",
2705
+ "/health",
2706
+ "/ingest",
2707
+ "/liveness",
2708
+ "/moose",
2709
+ // reserved for future use
2710
+ "/ready",
2711
+ "/workflows"
2712
+ ];
2713
+ var WebApp = class {
2714
+ name;
2715
+ handler;
2716
+ config;
2717
+ _rawApp;
2718
+ constructor(name, appOrHandler, config) {
2719
+ this.name = name;
2720
+ this.config = config;
2721
+ if (!this.config.mountPath) {
2722
+ throw new Error(
2723
+ `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2724
+ );
2725
+ }
2726
+ const mountPath = this.config.mountPath;
2727
+ if (mountPath === "/") {
2728
+ throw new Error(
2729
+ `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2730
+ );
2731
+ }
2732
+ if (mountPath.endsWith("/")) {
2733
+ throw new Error(
2734
+ `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2735
+ );
2736
+ }
2737
+ for (const reserved of RESERVED_MOUNT_PATHS) {
2738
+ if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2739
+ throw new Error(
2740
+ `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2741
+ );
2742
+ }
2743
+ }
2744
+ this.handler = this.toHandler(appOrHandler);
2745
+ this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2746
+ const webApps = getMooseInternal().webApps;
2747
+ if (webApps.has(name)) {
2748
+ throw new Error(`WebApp with name ${name} already exists`);
2749
+ }
2750
+ if (this.config.mountPath) {
2751
+ for (const [existingName, existingApp] of webApps) {
2752
+ if (existingApp.config.mountPath === this.config.mountPath) {
2753
+ throw new Error(
2754
+ `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2755
+ );
2756
+ }
2757
+ }
2758
+ }
2759
+ webApps.set(name, this);
2760
+ }
2761
+ toHandler(appOrHandler) {
2762
+ if (typeof appOrHandler === "function") {
2763
+ return appOrHandler;
2764
+ }
2765
+ const app = appOrHandler;
2766
+ if (typeof app.handle === "function") {
2767
+ return (req, res) => {
2768
+ app.handle(req, res, (err) => {
2769
+ if (err) {
2770
+ console.error("WebApp handler error:", err);
2771
+ if (!res.headersSent) {
2772
+ res.writeHead(500, { "Content-Type": "application/json" });
2773
+ res.end(JSON.stringify({ error: "Internal Server Error" }));
2774
+ }
2775
+ }
2776
+ });
2777
+ };
2778
+ }
2779
+ if (typeof app.callback === "function") {
2780
+ return app.callback();
2781
+ }
2782
+ if (typeof app.routing === "function") {
2783
+ const routing = app.routing;
2784
+ const appWithReady = app;
2785
+ let readyPromise = null;
2786
+ return async (req, res) => {
2787
+ if (readyPromise === null) {
2788
+ readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2789
+ }
2790
+ await readyPromise;
2791
+ routing(req, res);
2792
+ };
2793
+ }
2794
+ throw new Error(
2795
+ `Unable to convert app to handler. The provided object must be:
2796
+ - A function (raw Node.js handler)
2797
+ - An object with .handle() method (Express, Connect)
2798
+ - An object with .callback() method (Koa)
2799
+ - An object with .routing function (Fastify)
2800
+
2801
+ Examples:
2802
+ Express: new WebApp("name", expressApp)
2803
+ Koa: new WebApp("name", koaApp)
2804
+ Fastify: new WebApp("name", fastifyApp)
2805
+ Raw: new WebApp("name", (req, res) => { ... })
2806
+ `
2807
+ );
2808
+ }
2809
+ getRawApp() {
2810
+ return this._rawApp;
2811
+ }
2812
+ };
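+ // mountPath validation in practice (the expressApp value is hypothetical):
+ //   new WebApp("dashboard", expressApp, { mountPath: "/dashboard" }); // ok
+ //   mountPath "/"       -> throws: routes would overlap the reserved paths
+ //   mountPath "/api/x"  -> throws: begins with a reserved path
+ //   mountPath "/x/"     -> throws: trailing slash not allowed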
2813
+
2814
+ // src/dmv2/registry.ts
2815
+ function getTables() {
2816
+ return getMooseInternal().tables;
2817
+ }
2818
+ function getTable(name) {
2819
+ return getMooseInternal().tables.get(name);
2820
+ }
2821
+ function getStreams() {
2822
+ return getMooseInternal().streams;
2823
+ }
2824
+ function getStream(name) {
2825
+ return getMooseInternal().streams.get(name);
2826
+ }
2827
+ function getIngestApis() {
2828
+ return getMooseInternal().ingestApis;
2829
+ }
2830
+ function getIngestApi(name) {
2831
+ return getMooseInternal().ingestApis.get(name);
2832
+ }
2833
+ function getApis() {
2834
+ return getMooseInternal().apis;
2835
+ }
2836
+ function getApi(nameOrPath) {
2837
+ const registry = getMooseInternal();
2838
+ const directMatch = registry.apis.get(nameOrPath);
2839
+ if (directMatch) {
2840
+ return directMatch;
2841
+ }
2842
+ const versionedApis = /* @__PURE__ */ new Map();
2843
+ const pathMap = /* @__PURE__ */ new Map();
2844
+ registry.apis.forEach((api, key) => {
2845
+ const baseName = api.name;
2846
+ if (!versionedApis.has(baseName)) {
2847
+ versionedApis.set(baseName, []);
2848
+ }
2849
+ versionedApis.get(baseName).push(api);
2850
+ if (api.config.path) {
2851
+ pathMap.set(api.config.path, api);
2852
+ }
2853
+ });
2854
+ const candidates = versionedApis.get(nameOrPath);
2855
+ if (candidates && candidates.length === 1) {
2856
+ return candidates[0];
2857
+ }
2858
+ return pathMap.get(nameOrPath);
2859
+ }
2860
+ function getSqlResources() {
2861
+ return getMooseInternal().sqlResources;
2862
+ }
2863
+ function getSqlResource(name) {
2864
+ return getMooseInternal().sqlResources.get(name);
2865
+ }
2866
+ function getWorkflows() {
2867
+ return getMooseInternal().workflows;
2868
+ }
2869
+ function getWorkflow(name) {
2870
+ return getMooseInternal().workflows.get(name);
2871
+ }
2872
+ function getWebApps() {
2873
+ return getMooseInternal().webApps;
2874
+ }
2875
+ function getWebApp(name) {
2876
+ return getMooseInternal().webApps.get(name);
2877
+ }
2878
+ function getMaterializedViews() {
2879
+ return getMooseInternal().materializedViews;
2880
+ }
2881
+ function getMaterializedView(name) {
2882
+ return getMooseInternal().materializedViews.get(name);
2883
+ }
2884
+ function getViews() {
2885
+ return getMooseInternal().views;
2886
+ }
2887
+ function getView(name) {
2888
+ return getMooseInternal().views.get(name);
2889
+ }
2890
+
2891
+ // src/serverless.ts
2892
+ init_commons_types();
2893
+
2894
+ // src/connectors/dataSource.ts
2895
+ var DataSource = class {
2896
+ name;
2897
+ supportsIncremental;
2898
+ constructor(config) {
2899
+ this.name = config.name;
2900
+ this.supportsIncremental = config.supportsIncremental ?? false;
2901
+ }
2902
+ };
2903
+
2904
+ // src/secrets.ts
2905
+ var MOOSE_RUNTIME_ENV_PREFIX = "__MOOSE_RUNTIME_ENV__:";
2906
+ var mooseRuntimeEnv = {
2907
+ /**
2908
+ * Gets a value from an environment variable, with behavior depending on context.
2909
+ *
2910
+ * When IS_LOADING_INFRA_MAP=true (infrastructure loading):
2911
+ * Returns a marker string that Moose CLI will resolve later
2912
+ *
2913
+ * When IS_LOADING_INFRA_MAP is unset (function/workflow runtime):
2914
+ * Returns the actual value from the environment variable
2915
+ *
2916
+ * @param envVarName - Name of the environment variable to resolve
2917
+ * @returns Either a marker string or the actual environment variable value
2918
+ * @throws {Error} If the environment variable name is empty
2919
+ * @throws {Error} If the environment variable is not set (runtime mode only)
2920
+ *
2921
+ * @example
2922
+ * ```typescript
2923
+ * // Instead of this (evaluated at build time):
2924
+ * awsAccessKeyId: process.env.AWS_ACCESS_KEY_ID
2925
+ *
2926
+ * // Use this (evaluated at runtime):
2927
+ * awsAccessKeyId: mooseRuntimeEnv.get("AWS_ACCESS_KEY_ID")
2928
+ * ```
2929
+ */
2930
+ get(envVarName) {
2931
+ if (!envVarName || envVarName.trim() === "") {
2932
+ throw new Error("Environment variable name cannot be empty");
2933
+ }
2934
+ const isLoadingInfraMap = process.env.IS_LOADING_INFRA_MAP === "true";
2935
+ if (isLoadingInfraMap) {
2936
+ return `${MOOSE_RUNTIME_ENV_PREFIX}${envVarName}`;
2937
+ } else {
2938
+ const value = process.env[envVarName];
2939
+ if (value === void 0) {
2940
+ throw new Error(
2941
+ `Environment variable '${envVarName}' is not set. This is required for runtime execution of functions/workflows.`
2942
+ );
2943
+ }
2944
+ return value;
2945
+ }
2946
+ }
2947
+ };
2948
+ var mooseEnvSecrets = mooseRuntimeEnv;
2949
+
2950
+ // src/utilities/dataParser.ts
2951
+ var import_csv_parse = require("csv-parse");
2952
+
2953
+ // src/utilities/json.ts
2954
+ function jsonDateReviver(key, value) {
2955
+ const iso8601Format = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24\:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)$/;
2956
+ if (typeof value === "string" && iso8601Format.test(value)) {
2957
+ return new Date(value);
2958
+ }
2959
+ return value;
2960
+ }
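+ // Usage sketch: pass the reviver to JSON.parse to get Date objects back:
+ //   const obj = JSON.parse('{"ts":"2024-01-01T00:00:00Z"}', jsonDateReviver);
+ //   obj.ts instanceof Date; // true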
2961
+
2962
+ // src/utilities/dataParser.ts
2963
+ function parseCSV(content, config) {
2964
+ return new Promise((resolve, reject) => {
2965
+ const results = [];
2966
+ (0, import_csv_parse.parse)(content, {
2967
+ delimiter: config.delimiter,
2968
+ columns: config.columns ?? true,
2969
+ skip_empty_lines: config.skipEmptyLines ?? true,
2970
+ trim: config.trim ?? true
2971
+ }).on("data", (row) => {
2972
+ results.push(row);
2973
+ }).on("end", () => {
2974
+ resolve(results);
2975
+ }).on("error", (error) => {
2976
+ reject(error);
2977
+ });
2978
+ });
2979
+ }
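+ // parseCSV usage sketch: with columns: true (the default here), the first
+ // row becomes the object keys:
+ //   const rows = await parseCSV("a,b\n1,2", { delimiter: "," });
+ //   // -> [{ a: "1", b: "2" }]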
2980
+ function parseJSON(content, config = {}) {
2981
+ try {
2982
+ const parsed = JSON.parse(content, config.reviver);
2983
+ if (Array.isArray(parsed)) {
2984
+ return parsed;
2985
+ } else {
2986
+ return [parsed];
2987
+ }
2988
+ } catch (error) {
2989
+ throw new Error(
2990
+ `Failed to parse JSON: ${error instanceof Error ? error.message : "Unknown error"}`
2991
+ );
2992
+ }
2993
+ }
2994
+ function parseJSONWithDates(content) {
2995
+ return parseJSON(content, { reviver: jsonDateReviver });
2996
+ }
2997
+ function isValidCSVDelimiter(delimiter) {
2998
+ return delimiter.length === 1 && !/\s/.test(delimiter);
2999
+ }
3000
+ var CSV_DELIMITERS = {
3001
+ COMMA: ",",
3002
+ TAB: "\t",
3003
+ SEMICOLON: ";",
3004
+ PIPE: "|"
3005
+ };
3006
+ var DEFAULT_CSV_CONFIG = {
3007
+ delimiter: CSV_DELIMITERS.COMMA,
3008
+ columns: true,
3009
+ skipEmptyLines: true,
3010
+ trim: true
3011
+ };
3012
+ var DEFAULT_JSON_CONFIG = {
3013
+ reviver: jsonDateReviver
3014
+ };
3015
+ // Annotate the CommonJS export names for ESM import in node:
3016
+ 0 && (module.exports = {
3017
+ ACKs,
3018
+ Api,
3019
+ CSV_DELIMITERS,
3020
+ ClickHouseEngines,
3021
+ ConsumptionApi,
3022
+ DEFAULT_CSV_CONFIG,
3023
+ DEFAULT_JSON_CONFIG,
3024
+ DataSource,
3025
+ DeadLetterQueue,
3026
+ ETLPipeline,
3027
+ IngestApi,
3028
+ IngestPipeline,
3029
+ LifeCycle,
3030
+ MAX_RETRIES,
3031
+ MAX_RETRIES_PRODUCER,
3032
+ MAX_RETRY_TIME_MS,
3033
+ MOOSE_RUNTIME_ENV_PREFIX,
3034
+ MaterializedView,
3035
+ OlapTable,
3036
+ RETRY_FACTOR_PRODUCER,
3037
+ RETRY_INITIAL_TIME_MS,
3038
+ Sql,
3039
+ SqlResource,
3040
+ Stream,
3041
+ Task,
3042
+ View,
3043
+ WebApp,
3044
+ Workflow,
3045
+ antiCachePath,
3046
+ cliLog,
3047
+ compilerLog,
3048
+ createClickhouseParameter,
3049
+ getApi,
3050
+ getApis,
3051
+ getFileName,
3052
+ getIngestApi,
3053
+ getIngestApis,
3054
+ getMaterializedView,
3055
+ getMaterializedViews,
3056
+ getSqlResource,
3057
+ getSqlResources,
3058
+ getStream,
3059
+ getStreams,
3060
+ getTable,
3061
+ getTables,
3062
+ getValueFromParameter,
3063
+ getView,
3064
+ getViews,
3065
+ getWebApp,
3066
+ getWebApps,
3067
+ getWorkflow,
3068
+ getWorkflows,
3069
+ isValidCSVDelimiter,
3070
+ logError,
3071
+ mapToClickHouseType,
3072
+ mapTstoJs,
3073
+ mooseEnvSecrets,
3074
+ mooseRuntimeEnv,
3075
+ parseCSV,
3076
+ parseJSON,
3077
+ parseJSONWithDates,
3078
+ quoteIdentifier,
3079
+ sql,
3080
+ toQuery,
3081
+ toQueryPreview,
3082
+ toStaticQuery
3083
+ });
3084
+ //# sourceMappingURL=serverless.js.map