@514labs/moose-lib 0.6.296 → 0.6.297-ci-28-g84f3192e

This diff shows the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -8,6 +8,325 @@ var __export = (target, all) => {
  __defProp(target, name, { get: all[name], enumerable: true });
  };

+ // src/dmv2/utils/stackTrace.ts
+ function shouldSkipStackLine(line) {
+ return line.includes("node_modules") || // Skip npm installed packages (prod)
+ line.includes("node:internal") || // Skip Node.js internals (modern format)
+ line.includes("internal/modules") || // Skip Node.js internals (older format)
+ line.includes("ts-node") || // Skip TypeScript execution
+ line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
+ line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
+ line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
+ line.includes("\\ts-moose-lib\\dist\\");
+ }
+ function parseStackLine(line) {
+ const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
+ if (match && match[1]) {
+ return {
+ file: match[1],
+ line: match[2]
+ };
+ }
+ return void 0;
+ }
+ function getSourceFileInfo(stack) {
+ if (!stack) return {};
+ const lines = stack.split("\n");
+ for (const line of lines) {
+ if (shouldSkipStackLine(line)) continue;
+ const info = parseStackLine(line);
+ if (info) return info;
+ }
+ return {};
+ }
+ function getSourceLocationFromStack(stack) {
+ if (!stack) return void 0;
+ const lines = stack.split("\n");
+ for (const line of lines.slice(1)) {
+ if (shouldSkipStackLine(line)) {
+ continue;
+ }
+ const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
+ if (v8Match) {
+ return {
+ file: v8Match[1],
+ line: parseInt(v8Match[2], 10),
+ column: parseInt(v8Match[3], 10)
+ };
+ }
+ const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
+ if (smMatch) {
+ return {
+ file: smMatch[1],
+ line: parseInt(smMatch[2], 10),
+ column: parseInt(smMatch[3], 10)
+ };
+ }
+ }
+ return void 0;
+ }
+ function getSourceFileFromStack(stack) {
+ const location = getSourceLocationFromStack(stack);
+ return location?.file;
+ }
+ var init_stackTrace = __esm({
+ "src/dmv2/utils/stackTrace.ts"() {
+ "use strict";
+ }
+ });
+
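Note on the new stackTrace helpers: they scan an Error stack for the first frame that is not moose-lib, node_modules, or a Node internal, so resource metadata can record where user code constructed a resource. A minimal illustrative sketch (the stack string and paths below are invented):

```ts
const stack = [
  "Error",
  "    at new OlapTable (/app/node_modules/@514labs/moose-lib/dist/index.js:10:5)",
  "    at Object.<anonymous> (/app/src/tables.ts:42:17)",
].join("\n");

// Skips the "Error" header and the node_modules frame, then matches the V8 pattern:
// => { file: "/app/src/tables.ts", line: 42, column: 17 }
getSourceLocationFromStack(stack);
```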
+ // src/dmv2/typedBase.ts
+ var TypedBase;
+ var init_typedBase = __esm({
+ "src/dmv2/typedBase.ts"() {
+ "use strict";
+ init_stackTrace();
+ TypedBase = class {
+ /** The JSON schema representation of type T. Injected by the compiler plugin. */
+ schema;
+ /** The name assigned to this resource instance. */
+ name;
+ /** A dictionary mapping column names (keys of T) to their Column definitions. */
+ columns;
+ /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
+ columnArray;
+ /** The configuration object specific to this resource type. */
+ config;
+ /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
+ validators;
+ /** Optional metadata for the resource, always present as an object. */
+ metadata;
+ /**
+ * Whether this resource allows extra fields beyond the defined columns.
+ * When true, extra fields in payloads are passed through to streaming functions.
+ * Injected by the compiler plugin when the type has an index signature.
+ */
+ allowExtraFields;
+ /**
+ * @internal Constructor intended for internal use by subclasses and the compiler plugin.
+ * It expects the schema and columns to be provided, typically injected by the compiler.
+ *
+ * @param name The name for the resource instance.
+ * @param config The configuration object for the resource.
+ * @param schema The JSON schema for the resource's data type T (injected).
+ * @param columns The array of Column definitions for T (injected).
+ * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
+ */
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
+ if (schema === void 0 || columns === void 0) {
+ throw new Error(
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
+ );
+ }
+ this.schema = schema;
+ this.columnArray = columns;
+ const columnsObj = {};
+ columns.forEach((column) => {
+ columnsObj[column.name] = column;
+ });
+ this.columns = columnsObj;
+ this.name = name;
+ this.config = config;
+ this.validators = validators;
+ this.allowExtraFields = allowExtraFields ?? false;
+ this.metadata = config?.metadata ? { ...config.metadata } : {};
+ if (!this.metadata.source) {
+ const stack = new Error().stack;
+ if (stack) {
+ const info = getSourceFileInfo(stack);
+ this.metadata.source = { file: info.file, line: info.line };
+ }
+ }
+ }
+ };
+ }
+ });
+
+ // src/dataModels/dataModelTypes.ts
+ function isArrayNestedType(dt) {
+ return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
+ }
+ function isNestedType(dt) {
+ return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
+ }
+ var init_dataModelTypes = __esm({
+ "src/dataModels/dataModelTypes.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/sqlHelpers.ts
+ function sql(strings, ...values) {
+ return new Sql(strings, values);
+ }
+ function createClickhouseParameter(parameterIndex, value) {
+ return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
+ }
+ function emptyIfUndefined(value) {
+ return value === void 0 ? "" : value;
+ }
+ var quoteIdentifier, isTable, isColumn, instanceofSql, Sql, toStaticQuery, toQuery, toQueryPreview, getValueFromParameter, mapToClickHouseType;
+ var init_sqlHelpers = __esm({
+ "src/sqlHelpers.ts"() {
+ "use strict";
+ quoteIdentifier = (name) => {
+ return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
+ };
+ isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
+ isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
+ instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
+ Sql = class {
+ values;
+ strings;
+ constructor(rawStrings, rawValues) {
+ if (rawStrings.length - 1 !== rawValues.length) {
+ if (rawStrings.length === 0) {
+ throw new TypeError("Expected at least 1 string");
+ }
+ throw new TypeError(
+ `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
+ );
+ }
+ const valuesLength = rawValues.reduce(
+ (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
+ 0
+ );
+ this.values = new Array(valuesLength);
+ this.strings = new Array(valuesLength + 1);
+ this.strings[0] = rawStrings[0];
+ let i = 0, pos = 0;
+ while (i < rawValues.length) {
+ const child = rawValues[i++];
+ const rawString = rawStrings[i];
+ if (instanceofSql(child)) {
+ this.strings[pos] += child.strings[0];
+ let childIndex = 0;
+ while (childIndex < child.values.length) {
+ this.values[pos++] = child.values[childIndex++];
+ this.strings[pos] = child.strings[childIndex];
+ }
+ this.strings[pos] += rawString;
+ } else if (isColumn(child)) {
+ const aggregationFunction = child.annotations.find(
+ ([k, _]) => k === "aggregationFunction"
+ );
+ if (aggregationFunction !== void 0) {
+ this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
+ } else {
+ this.strings[pos] += `\`${child.name}\``;
+ }
+ this.strings[pos] += rawString;
+ } else if (isTable(child)) {
+ if (child.config.database) {
+ this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
+ } else {
+ this.strings[pos] += `\`${child.name}\``;
+ }
+ this.strings[pos] += rawString;
+ } else {
+ this.values[pos++] = child;
+ this.strings[pos] = rawString;
+ }
+ }
+ }
+ };
+ toStaticQuery = (sql3) => {
+ const [query, params] = toQuery(sql3);
+ if (Object.keys(params).length !== 0) {
+ throw new Error(
+ "Dynamic SQL is not allowed in the select statement in view creation."
+ );
+ }
+ return query;
+ };
+ toQuery = (sql3) => {
+ const parameterizedStubs = sql3.values.map(
+ (v, i) => createClickhouseParameter(i, v)
+ );
+ const query = sql3.strings.map(
+ (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
+ ).join("");
+ const query_params = sql3.values.reduce(
+ (acc, v, i) => ({
+ ...acc,
+ [`p${i}`]: getValueFromParameter(v)
+ }),
+ {}
+ );
+ return [query, query_params];
+ };
+ toQueryPreview = (sql3) => {
+ try {
+ const formatValue = (v) => {
+ if (Array.isArray(v)) {
+ const [type, val] = v;
+ if (type === "Identifier") {
+ return `\`${String(val)}\``;
+ }
+ return `[${v.map((x) => formatValue(x)).join(", ")}]`;
+ }
+ if (v === null || v === void 0) return "NULL";
+ if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
+ if (typeof v === "number") return String(v);
+ if (typeof v === "boolean") return v ? "true" : "false";
+ if (v instanceof Date)
+ return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
+ try {
+ return JSON.stringify(v);
+ } catch {
+ return String(v);
+ }
+ };
+ let out = sql3.strings[0] ?? "";
+ for (let i = 0; i < sql3.values.length; i++) {
+ const val = getValueFromParameter(sql3.values[i]);
+ out += formatValue(val);
+ out += sql3.strings[i + 1] ?? "";
+ }
+ return out.replace(/\s+/g, " ").trim();
+ } catch (error) {
+ console.log(`toQueryPreview error: ${error}`);
+ return "/* query preview unavailable */";
+ }
+ };
+ getValueFromParameter = (value) => {
+ if (Array.isArray(value)) {
+ const [type, val] = value;
+ if (type === "Identifier") return val;
+ }
+ return value;
+ };
+ mapToClickHouseType = (value) => {
+ if (typeof value === "number") {
+ return Number.isInteger(value) ? "Int" : "Float";
+ }
+ if (typeof value === "boolean") return "Bool";
+ if (value instanceof Date) return "DateTime";
+ if (Array.isArray(value)) {
+ const [type, _] = value;
+ return type;
+ }
+ return "String";
+ };
+ }
+ });
+
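For context, the sql tagged template interpolates plain values as ClickHouse query parameters, while Column and OlapTable values are spliced in as backtick-quoted identifiers; toQuery then renders the parameterized text. A rough usage sketch (the table and column names are invented):

```ts
const q = sql`SELECT status, count() FROM events WHERE status = ${"active"} GROUP BY status`;
const [text, params] = toQuery(q);
// text   === "SELECT status, count() FROM events WHERE status = {p0:String} GROUP BY status"
// params === { p0: "active" }
```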
+ // src/blocks/helpers.ts
+ function dropView(name) {
+ return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
+ }
+ function createMaterializedView(options) {
+ return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
+ TO ${quoteIdentifier(options.destinationTable)}
+ AS ${options.select}`.trim();
+ }
+ var init_helpers = __esm({
+ "src/blocks/helpers.ts"() {
+ "use strict";
+ init_sqlHelpers();
+ }
+ });
+
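dropView and createMaterializedView only render DDL strings; executing them is up to the caller. A sketch of the output (names invented):

```ts
createMaterializedView({
  name: "daily_counts_mv",
  destinationTable: "daily_counts",
  select: "SELECT toDate(ts) AS day, count() AS n FROM events GROUP BY day",
});
// => "CREATE MATERIALIZED VIEW IF NOT EXISTS `daily_counts_mv`
//     TO `daily_counts`
//     AS SELECT toDate(ts) AS day, count() AS n FROM events GROUP BY day"
```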
  // src/commons.ts
  var commons_exports = {};
  __export(commons_exports, {
@@ -175,66 +494,365 @@ var init_commons = __esm({
  }
  });

- // src/config/configFile.ts
- import path from "path";
- import * as toml from "toml";
- async function findConfigFile(startDir = process.cwd()) {
- const fs = await import("fs");
- let currentDir = path.resolve(startDir);
- while (true) {
- const configPath = path.join(currentDir, "moose.config.toml");
- if (fs.existsSync(configPath)) {
- return configPath;
- }
- const parentDir = path.dirname(currentDir);
- if (parentDir === currentDir) {
- break;
- }
- currentDir = parentDir;
+ // src/secrets.ts
+ var init_secrets = __esm({
+ "src/secrets.ts"() {
+ "use strict";
  }
- return null;
- }
- async function readProjectConfig() {
- const fs = await import("fs");
- const configPath = await findConfigFile();
- if (!configPath) {
- throw new ConfigError(
- "moose.config.toml not found in current directory or any parent directory"
- );
+ });
+
+ // src/consumption-apis/helpers.ts
+ import {
+ Client as TemporalClient,
+ Connection
+ } from "@temporalio/client";
+ import { createHash, randomUUID } from "crypto";
+ var init_helpers2 = __esm({
+ "src/consumption-apis/helpers.ts"() {
+ "use strict";
+ init_internal();
+ init_sqlHelpers();
  }
- try {
- const configContent = fs.readFileSync(configPath, "utf-8");
- const config = toml.parse(configContent);
- return config;
- } catch (error) {
- throw new ConfigError(`Failed to parse moose.config.toml: ${error}`);
+ });
+
+ // src/consumption-apis/webAppHelpers.ts
+ var init_webAppHelpers = __esm({
+ "src/consumption-apis/webAppHelpers.ts"() {
+ "use strict";
  }
- }
- var ConfigError;
- var init_configFile = __esm({
- "src/config/configFile.ts"() {
+ });
+
+ // src/scripts/task.ts
+ var init_task = __esm({
+ "src/scripts/task.ts"() {
  "use strict";
- ConfigError = class extends Error {
- constructor(message) {
- super(message);
- this.name = "ConfigError";
- }
+ }
+ });
+
+ // src/cluster-utils.ts
+ import cluster from "cluster";
+ import { availableParallelism } from "os";
+ import { exit } from "process";
+ var init_cluster_utils = __esm({
+ "src/cluster-utils.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/consumption-apis/runner.ts
+ import * as jose from "jose";
+ var init_runner = __esm({
+ "src/consumption-apis/runner.ts"() {
+ "use strict";
+ init_commons();
+ init_helpers2();
+ init_cluster_utils();
+ init_sqlHelpers();
+ init_internal();
+ }
+ });
+
+ // src/clients/redisClient.ts
+ import { createClient as createClient2 } from "redis";
+ var init_redisClient = __esm({
+ "src/clients/redisClient.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/consumption-apis/standalone.ts
+ var init_standalone = __esm({
+ "src/consumption-apis/standalone.ts"() {
+ "use strict";
+ init_helpers2();
+ init_commons();
+ init_sqlHelpers();
+ }
+ });
+
+ // src/utilities/json.ts
+ var init_json = __esm({
+ "src/utilities/json.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/utilities/dataParser.ts
+ import { parse } from "csv-parse";
+ var CSV_DELIMITERS, DEFAULT_CSV_CONFIG;
+ var init_dataParser = __esm({
+ "src/utilities/dataParser.ts"() {
+ "use strict";
+ init_json();
+ CSV_DELIMITERS = {
+ COMMA: ",",
+ TAB: "\t",
+ SEMICOLON: ";",
+ PIPE: "|"
+ };
+ DEFAULT_CSV_CONFIG = {
+ delimiter: CSV_DELIMITERS.COMMA,
+ columns: true,
+ skipEmptyLines: true,
+ trim: true
  };
  }
  });
226
- // src/config/runtime.ts
227
- var runtime_exports = {};
228
- var ConfigurationRegistry;
229
- var init_runtime = __esm({
230
- "src/config/runtime.ts"() {
602
+ // src/utilities/index.ts
603
+ var init_utilities = __esm({
604
+ "src/utilities/index.ts"() {
231
605
  "use strict";
232
- init_configFile();
233
- ConfigurationRegistry = class _ConfigurationRegistry {
234
- static instance;
235
- clickhouseConfig;
236
- kafkaConfig;
237
- static getInstance() {
606
+ init_dataParser();
607
+ }
608
+ });
609
+
610
+ // src/connectors/dataSource.ts
611
+ var init_dataSource = __esm({
612
+ "src/connectors/dataSource.ts"() {
613
+ "use strict";
614
+ }
615
+ });
616
+
617
+ // src/dataModels/types.ts
618
+ var init_types = __esm({
619
+ "src/dataModels/types.ts"() {
620
+ "use strict";
621
+ }
622
+ });
623
+
624
+ // src/index.ts
625
+ var init_index = __esm({
626
+ "src/index.ts"() {
627
+ "use strict";
628
+ init_browserCompatible();
629
+ init_helpers();
630
+ init_commons();
631
+ init_secrets();
632
+ init_helpers2();
633
+ init_webAppHelpers();
634
+ init_task();
635
+ init_runner();
636
+ init_redisClient();
637
+ init_helpers2();
638
+ init_standalone();
639
+ init_sqlHelpers();
640
+ init_utilities();
641
+ init_dataSource();
642
+ init_types();
643
+ }
644
+ });
645
+
646
+ // src/dmv2/internal.ts
647
+ import process2 from "process";
648
+ var isClientOnlyMode, moose_internal, defaultRetentionPeriod, getMooseInternal, dlqSchema, dlqColumns;
649
+ var init_internal = __esm({
650
+ "src/dmv2/internal.ts"() {
651
+ "use strict";
652
+ init_index();
653
+ init_commons();
654
+ isClientOnlyMode = () => process2.env.MOOSE_CLIENT_ONLY === "true";
655
+ moose_internal = {
656
+ tables: /* @__PURE__ */ new Map(),
657
+ streams: /* @__PURE__ */ new Map(),
658
+ ingestApis: /* @__PURE__ */ new Map(),
659
+ apis: /* @__PURE__ */ new Map(),
660
+ sqlResources: /* @__PURE__ */ new Map(),
661
+ workflows: /* @__PURE__ */ new Map(),
662
+ webApps: /* @__PURE__ */ new Map()
663
+ };
664
+ defaultRetentionPeriod = 60 * 60 * 24 * 7;
665
+ getMooseInternal = () => globalThis.moose_internal;
666
+ if (getMooseInternal() === void 0) {
667
+ globalThis.moose_internal = moose_internal;
668
+ }
669
+ dlqSchema = {
670
+ version: "3.1",
671
+ components: {
672
+ schemas: {
673
+ DeadLetterModel: {
674
+ type: "object",
675
+ properties: {
676
+ originalRecord: {
677
+ $ref: "#/components/schemas/Recordstringany"
678
+ },
679
+ errorMessage: {
680
+ type: "string"
681
+ },
682
+ errorType: {
683
+ type: "string"
684
+ },
685
+ failedAt: {
686
+ type: "string",
687
+ format: "date-time"
688
+ },
689
+ source: {
690
+ oneOf: [
691
+ {
692
+ const: "api"
693
+ },
694
+ {
695
+ const: "transform"
696
+ },
697
+ {
698
+ const: "table"
699
+ }
700
+ ]
701
+ }
702
+ },
703
+ required: [
704
+ "originalRecord",
705
+ "errorMessage",
706
+ "errorType",
707
+ "failedAt",
708
+ "source"
709
+ ]
710
+ },
711
+ Recordstringany: {
712
+ type: "object",
713
+ properties: {},
714
+ required: [],
715
+ description: "Construct a type with a set of properties K of type T",
716
+ additionalProperties: {}
717
+ }
718
+ }
719
+ },
720
+ schemas: [
721
+ {
722
+ $ref: "#/components/schemas/DeadLetterModel"
723
+ }
724
+ ]
725
+ };
726
+ dlqColumns = [
727
+ {
728
+ name: "originalRecord",
729
+ data_type: "Json",
730
+ primary_key: false,
731
+ required: true,
732
+ unique: false,
733
+ default: null,
734
+ annotations: [],
735
+ ttl: null,
736
+ codec: null,
737
+ materialized: null,
738
+ comment: null
739
+ },
740
+ {
741
+ name: "errorMessage",
742
+ data_type: "String",
743
+ primary_key: false,
744
+ required: true,
745
+ unique: false,
746
+ default: null,
747
+ annotations: [],
748
+ ttl: null,
749
+ codec: null,
750
+ materialized: null,
751
+ comment: null
752
+ },
753
+ {
754
+ name: "errorType",
755
+ data_type: "String",
756
+ primary_key: false,
757
+ required: true,
758
+ unique: false,
759
+ default: null,
760
+ annotations: [],
761
+ ttl: null,
762
+ codec: null,
763
+ materialized: null,
764
+ comment: null
765
+ },
766
+ {
767
+ name: "failedAt",
768
+ data_type: "DateTime",
769
+ primary_key: false,
770
+ required: true,
771
+ unique: false,
772
+ default: null,
773
+ annotations: [],
774
+ ttl: null,
775
+ codec: null,
776
+ materialized: null,
777
+ comment: null
778
+ },
779
+ {
780
+ name: "source",
781
+ data_type: "String",
782
+ primary_key: false,
783
+ required: true,
784
+ unique: false,
785
+ default: null,
786
+ annotations: [],
787
+ ttl: null,
788
+ codec: null,
789
+ materialized: null,
790
+ comment: null
791
+ }
792
+ ];
793
+ }
794
+ });
795
+
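dlqSchema and dlqColumns describe the shape of dead-letter records; a record conforming to DeadLetterModel would look roughly like this (all values invented):

```ts
const deadLetter = {
  originalRecord: { id: 42 },              // arbitrary key/value payload
  errorMessage: "failedAt must be a date", // human-readable failure reason
  errorType: "ValidationError",
  failedAt: "2024-01-01T00:00:00Z",        // date-time string; DateTime column
  source: "transform",                     // one of "api" | "transform" | "table"
};
```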
+ // src/config/configFile.ts
+ import path from "path";
+ import * as toml from "toml";
+ async function findConfigFile(startDir = process.cwd()) {
+ const fs = await import("fs");
+ let currentDir = path.resolve(startDir);
+ while (true) {
+ const configPath = path.join(currentDir, "moose.config.toml");
+ if (fs.existsSync(configPath)) {
+ return configPath;
+ }
+ const parentDir = path.dirname(currentDir);
+ if (parentDir === currentDir) {
+ break;
+ }
+ currentDir = parentDir;
+ }
+ return null;
+ }
+ async function readProjectConfig() {
+ const fs = await import("fs");
+ const configPath = await findConfigFile();
+ if (!configPath) {
+ throw new ConfigError(
+ "moose.config.toml not found in current directory or any parent directory"
+ );
+ }
+ try {
+ const configContent = fs.readFileSync(configPath, "utf-8");
+ const config = toml.parse(configContent);
+ return config;
+ } catch (error) {
+ throw new ConfigError(`Failed to parse moose.config.toml: ${error}`);
+ }
+ }
+ var ConfigError;
+ var init_configFile = __esm({
+ "src/config/configFile.ts"() {
+ "use strict";
+ ConfigError = class extends Error {
+ constructor(message) {
+ super(message);
+ this.name = "ConfigError";
+ }
+ };
+ }
+ });
+
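findConfigFile walks from the start directory toward the filesystem root and stops at the first moose.config.toml; readProjectConfig then parses it, wrapping failures in ConfigError. A behavioral sketch (paths invented):

```ts
// With /home/app/project/moose.config.toml on disk:
await findConfigFile("/home/app/project/src/deep");
// => "/home/app/project/moose.config.toml"
await findConfigFile("/tmp/unrelated");
// => null (walked up to "/" without finding the file)
```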
+ // src/config/runtime.ts
+ var runtime_exports = {};
+ var ConfigurationRegistry;
+ var init_runtime = __esm({
+ "src/config/runtime.ts"() {
+ "use strict";
+ init_configFile();
+ ConfigurationRegistry = class _ConfigurationRegistry {
+ static instance;
+ clickhouseConfig;
+ kafkaConfig;
+ static getInstance() {
  if (!_ConfigurationRegistry.instance) {
  _ConfigurationRegistry.instance = new _ConfigurationRegistry();
  }
@@ -359,2224 +977,1849 @@ var init_runtime = __esm({
  }
  });

- // src/dmv2/utils/stackTrace.ts
- function shouldSkipStackLine(line) {
- return line.includes("node_modules") || // Skip npm installed packages (prod)
- line.includes("node:internal") || // Skip Node.js internals (modern format)
- line.includes("internal/modules") || // Skip Node.js internals (older format)
- line.includes("ts-node") || // Skip TypeScript execution
- line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
- line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
- line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
- line.includes("\\ts-moose-lib\\dist\\");
- }
- function parseStackLine(line) {
- const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
- if (match && match[1]) {
- return {
- file: match[1],
- line: match[2]
- };
- }
- return void 0;
- }
- function getSourceFileInfo(stack) {
- if (!stack) return {};
- const lines = stack.split("\n");
- for (const line of lines) {
- if (shouldSkipStackLine(line)) continue;
- const info = parseStackLine(line);
- if (info) return info;
- }
- return {};
- }
- function getSourceLocationFromStack(stack) {
- if (!stack) return void 0;
- const lines = stack.split("\n");
- for (const line of lines.slice(1)) {
- if (shouldSkipStackLine(line)) {
- continue;
- }
- const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
- if (v8Match) {
- return {
- file: v8Match[1],
- line: parseInt(v8Match[2], 10),
- column: parseInt(v8Match[3], 10)
- };
- }
- const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
- if (smMatch) {
- return {
- file: smMatch[1],
- line: parseInt(smMatch[2], 10),
- column: parseInt(smMatch[3], 10)
- };
- }
- }
- return void 0;
- }
- function getSourceFileFromStack(stack) {
- const location = getSourceLocationFromStack(stack);
- return location?.file;
- }
-
- // src/dmv2/typedBase.ts
- var TypedBase = class {
- /** The JSON schema representation of type T. Injected by the compiler plugin. */
- schema;
- /** The name assigned to this resource instance. */
- name;
- /** A dictionary mapping column names (keys of T) to their Column definitions. */
- columns;
- /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
- columnArray;
- /** The configuration object specific to this resource type. */
- config;
- /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
- validators;
- /** Optional metadata for the resource, always present as an object. */
- metadata;
- /**
- * Whether this resource allows extra fields beyond the defined columns.
- * When true, extra fields in payloads are passed through to streaming functions.
- * Injected by the compiler plugin when the type has an index signature.
- */
- allowExtraFields;
- /**
- * @internal Constructor intended for internal use by subclasses and the compiler plugin.
- * It expects the schema and columns to be provided, typically injected by the compiler.
- *
- * @param name The name for the resource instance.
- * @param config The configuration object for the resource.
- * @param schema The JSON schema for the resource's data type T (injected).
- * @param columns The array of Column definitions for T (injected).
- * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
- */
- constructor(name, config, schema, columns, validators, allowExtraFields) {
- if (schema === void 0 || columns === void 0) {
- throw new Error(
- "Supply the type param T so that the schema is inserted by the compiler plugin."
- );
- }
- this.schema = schema;
- this.columnArray = columns;
- const columnsObj = {};
- columns.forEach((column) => {
- columnsObj[column.name] = column;
- });
- this.columns = columnsObj;
- this.name = name;
- this.config = config;
- this.validators = validators;
- this.allowExtraFields = allowExtraFields ?? false;
- this.metadata = config?.metadata ? { ...config.metadata } : {};
- if (!this.metadata.source) {
- const stack = new Error().stack;
- if (stack) {
- const info = getSourceFileInfo(stack);
- this.metadata.source = { file: info.file, line: info.line };
+ // src/dmv2/sdk/olapTable.ts
+ import { Readable } from "stream";
+ import { createHash as createHash2 } from "crypto";
+ var OlapTable;
+ var init_olapTable = __esm({
+ "src/dmv2/sdk/olapTable.ts"() {
+ "use strict";
+ init_typedBase();
+ init_dataModelTypes();
+ init_helpers();
+ init_internal();
+ init_sqlHelpers();
+ OlapTable = class extends TypedBase {
+ name;
+ /** @internal */
+ kind = "OlapTable";
+ /** @internal Memoized ClickHouse client for reusing connections across insert calls */
+ _memoizedClient;
+ /** @internal Hash of the configuration used to create the memoized client */
+ _configHash;
+ /** @internal Cached table name to avoid repeated generation */
+ _cachedTableName;
+ constructor(name, config, schema, columns, validators) {
+ const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
+ const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
+ const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
+ if (hasFields && hasExpr) {
+ throw new Error(
+ `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
+ );
+ }
+ const hasCluster = typeof resolvedConfig.cluster === "string";
+ const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
+ const hasReplicaName = typeof resolvedConfig.replicaName === "string";
+ if (hasCluster && (hasKeeperPath || hasReplicaName)) {
+ throw new Error(
+ `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
+ );
+ }
+ super(name, resolvedConfig, schema, columns, validators);
+ this.name = name;
+ const tables = getMooseInternal().tables;
+ const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
+ if (!isClientOnlyMode() && tables.has(registryKey)) {
+ throw new Error(
+ `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
+ );
+ }
+ tables.set(registryKey, this);
  }
- }
- }
- };
-
- // src/dataModels/dataModelTypes.ts
- function isArrayNestedType(dt) {
- return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
- }
- function isNestedType(dt) {
- return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
- }
-
- // src/sqlHelpers.ts
- var quoteIdentifier = (name) => {
- return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
- };
- var isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
- var isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
- function sql(strings, ...values) {
- return new Sql(strings, values);
- }
- var instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
- var Sql = class {
- values;
- strings;
- constructor(rawStrings, rawValues) {
- if (rawStrings.length - 1 !== rawValues.length) {
- if (rawStrings.length === 0) {
- throw new TypeError("Expected at least 1 string");
- }
- throw new TypeError(
- `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
- );
- }
- const valuesLength = rawValues.reduce(
- (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
- 0
- );
- this.values = new Array(valuesLength);
- this.strings = new Array(valuesLength + 1);
- this.strings[0] = rawStrings[0];
- let i = 0, pos = 0;
- while (i < rawValues.length) {
- const child = rawValues[i++];
- const rawString = rawStrings[i];
- if (instanceofSql(child)) {
- this.strings[pos] += child.strings[0];
- let childIndex = 0;
- while (childIndex < child.values.length) {
- this.values[pos++] = child.values[childIndex++];
- this.strings[pos] = child.strings[childIndex];
- }
- this.strings[pos] += rawString;
- } else if (isColumn(child)) {
- const aggregationFunction = child.annotations.find(
- ([k, _]) => k === "aggregationFunction"
- );
- if (aggregationFunction !== void 0) {
- this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
- } else {
- this.strings[pos] += `\`${child.name}\``;
+ /**
+ * Generates the versioned table name following Moose's naming convention
+ * Format: {tableName}_{version_with_dots_replaced_by_underscores}
+ */
+ generateTableName() {
+ if (this._cachedTableName) {
+ return this._cachedTableName;
  }
- this.strings[pos] += rawString;
- } else if (isTable(child)) {
- if (child.config.database) {
- this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
+ const tableVersion = this.config.version;
+ if (!tableVersion) {
+ this._cachedTableName = this.name;
  } else {
- this.strings[pos] += `\`${child.name}\``;
+ const versionSuffix = tableVersion.replace(/\./g, "_");
+ this._cachedTableName = `${this.name}_${versionSuffix}`;
  }
- this.strings[pos] += rawString;
- } else {
- this.values[pos++] = child;
- this.strings[pos] = rawString;
+ return this._cachedTableName;
  }
- }
- }
- };
- var toStaticQuery = (sql3) => {
- const [query, params] = toQuery(sql3);
- if (Object.keys(params).length !== 0) {
- throw new Error(
- "Dynamic SQL is not allowed in the select statement in view creation."
- );
- }
- return query;
- };
- var toQuery = (sql3) => {
- const parameterizedStubs = sql3.values.map(
- (v, i) => createClickhouseParameter(i, v)
- );
- const query = sql3.strings.map(
- (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
- ).join("");
- const query_params = sql3.values.reduce(
- (acc, v, i) => ({
- ...acc,
- [`p${i}`]: getValueFromParameter(v)
- }),
- {}
- );
- return [query, query_params];
- };
- var toQueryPreview = (sql3) => {
- try {
- const formatValue = (v) => {
- if (Array.isArray(v)) {
- const [type, val] = v;
- if (type === "Identifier") {
- return `\`${String(val)}\``;
- }
- return `[${v.map((x) => formatValue(x)).join(", ")}]`;
- }
- if (v === null || v === void 0) return "NULL";
- if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
- if (typeof v === "number") return String(v);
- if (typeof v === "boolean") return v ? "true" : "false";
- if (v instanceof Date)
- return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
- try {
- return JSON.stringify(v);
- } catch {
- return String(v);
+ /**
+ * Creates a fast hash of the ClickHouse configuration.
+ * Uses crypto.createHash for better performance than JSON.stringify.
+ *
+ * @private
+ */
+ createConfigHash(clickhouseConfig) {
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
+ const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
+ return createHash2("sha256").update(configString).digest("hex").substring(0, 16);
  }
- };
- let out = sql3.strings[0] ?? "";
- for (let i = 0; i < sql3.values.length; i++) {
- const val = getValueFromParameter(sql3.values[i]);
- out += formatValue(val);
- out += sql3.strings[i + 1] ?? "";
- }
- return out.replace(/\s+/g, " ").trim();
- } catch (error) {
- console.log(`toQueryPreview error: ${error}`);
- return "/* query preview unavailable */";
- }
- };
- var getValueFromParameter = (value) => {
- if (Array.isArray(value)) {
- const [type, val] = value;
- if (type === "Identifier") return val;
- }
- return value;
- };
- function createClickhouseParameter(parameterIndex, value) {
- return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
- }
- var mapToClickHouseType = (value) => {
- if (typeof value === "number") {
- return Number.isInteger(value) ? "Int" : "Float";
- }
- if (typeof value === "boolean") return "Bool";
- if (value instanceof Date) return "DateTime";
- if (Array.isArray(value)) {
- const [type, _] = value;
- return type;
- }
- return "String";
- };
- function emptyIfUndefined(value) {
- return value === void 0 ? "" : value;
- }
-
- // src/blocks/helpers.ts
- function dropView(name) {
- return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
- }
- function createMaterializedView(options) {
- return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
- TO ${quoteIdentifier(options.destinationTable)}
- AS ${options.select}`.trim();
- }
-
- // src/dmv2/internal.ts
- import process2 from "process";
-
- // src/index.ts
- init_commons();
-
- // src/consumption-apis/helpers.ts
- import {
- Client as TemporalClient,
- Connection
- } from "@temporalio/client";
- import { createHash, randomUUID } from "crypto";
-
- // src/consumption-apis/runner.ts
- init_commons();
- import * as jose from "jose";
-
- // src/cluster-utils.ts
- import cluster from "cluster";
- import { availableParallelism } from "os";
- import { exit } from "process";
-
- // src/clients/redisClient.ts
- import { createClient as createClient2 } from "redis";
-
- // src/consumption-apis/standalone.ts
- init_commons();
-
- // src/utilities/dataParser.ts
- import { parse } from "csv-parse";
- var CSV_DELIMITERS = {
- COMMA: ",",
- TAB: "\t",
- SEMICOLON: ";",
- PIPE: "|"
- };
- var DEFAULT_CSV_CONFIG = {
- delimiter: CSV_DELIMITERS.COMMA,
- columns: true,
- skipEmptyLines: true,
- trim: true
- };
-
- // src/dmv2/internal.ts
- init_commons();
- var isClientOnlyMode = () => process2.env.MOOSE_CLIENT_ONLY === "true";
- var moose_internal = {
- tables: /* @__PURE__ */ new Map(),
- streams: /* @__PURE__ */ new Map(),
- ingestApis: /* @__PURE__ */ new Map(),
- apis: /* @__PURE__ */ new Map(),
- sqlResources: /* @__PURE__ */ new Map(),
- workflows: /* @__PURE__ */ new Map(),
- webApps: /* @__PURE__ */ new Map()
- };
- var defaultRetentionPeriod = 60 * 60 * 24 * 7;
- var getMooseInternal = () => globalThis.moose_internal;
- if (getMooseInternal() === void 0) {
- globalThis.moose_internal = moose_internal;
- }
- var dlqSchema = {
- version: "3.1",
- components: {
- schemas: {
- DeadLetterModel: {
- type: "object",
- properties: {
- originalRecord: {
- $ref: "#/components/schemas/Recordstringany"
- },
- errorMessage: {
- type: "string"
- },
- errorType: {
- type: "string"
- },
- failedAt: {
- type: "string",
- format: "date-time"
- },
- source: {
- oneOf: [
- {
- const: "api"
- },
- {
- const: "transform"
- },
- {
- const: "table"
- }
- ]
+ /**
1059
+ * Gets or creates a memoized ClickHouse client.
1060
+ * The client is cached and reused across multiple insert calls for better performance.
1061
+ * If the configuration changes, a new client will be created.
1062
+ *
1063
+ * @private
1064
+ */
1065
+ async getMemoizedClient() {
1066
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1067
+ const configRegistry = globalThis._mooseConfigRegistry;
1068
+ const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1069
+ const clickhouseConfig = await configRegistry.getClickHouseConfig();
1070
+ const currentConfigHash = this.createConfigHash(clickhouseConfig);
1071
+ if (this._memoizedClient && this._configHash === currentConfigHash) {
1072
+ return { client: this._memoizedClient, config: clickhouseConfig };
1073
+ }
1074
+ if (this._memoizedClient && this._configHash !== currentConfigHash) {
1075
+ try {
1076
+ await this._memoizedClient.close();
1077
+ } catch (error) {
744
1078
  }
745
- },
746
- required: [
747
- "originalRecord",
748
- "errorMessage",
749
- "errorType",
750
- "failedAt",
751
- "source"
752
- ]
753
- },
754
- Recordstringany: {
755
- type: "object",
756
- properties: {},
757
- required: [],
758
- description: "Construct a type with a set of properties K of type T",
759
- additionalProperties: {}
1079
+ }
1080
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1081
+ const client = getClickhouseClient2({
1082
+ username: clickhouseConfig.username,
1083
+ password: clickhouseConfig.password,
1084
+ database: effectiveDatabase,
1085
+ useSSL: clickhouseConfig.useSSL ? "true" : "false",
1086
+ host: clickhouseConfig.host,
1087
+ port: clickhouseConfig.port
1088
+ });
1089
+ this._memoizedClient = client;
1090
+ this._configHash = currentConfigHash;
1091
+ return { client, config: clickhouseConfig };
760
1092
  }
761
- }
762
- },
763
- schemas: [
764
- {
765
- $ref: "#/components/schemas/DeadLetterModel"
766
- }
767
- ]
768
- };
769
- var dlqColumns = [
770
- {
771
- name: "originalRecord",
772
- data_type: "Json",
773
- primary_key: false,
774
- required: true,
775
- unique: false,
776
- default: null,
777
- annotations: [],
778
- ttl: null,
779
- codec: null,
780
- materialized: null,
781
- comment: null
782
- },
783
- {
784
- name: "errorMessage",
785
- data_type: "String",
786
- primary_key: false,
787
- required: true,
788
- unique: false,
789
- default: null,
790
- annotations: [],
791
- ttl: null,
792
- codec: null,
793
- materialized: null,
794
- comment: null
795
- },
796
- {
797
- name: "errorType",
798
- data_type: "String",
799
- primary_key: false,
800
- required: true,
801
- unique: false,
802
- default: null,
803
- annotations: [],
804
- ttl: null,
805
- codec: null,
806
- materialized: null,
807
- comment: null
808
- },
809
- {
810
- name: "failedAt",
811
- data_type: "DateTime",
812
- primary_key: false,
813
- required: true,
814
- unique: false,
815
- default: null,
816
- annotations: [],
817
- ttl: null,
818
- codec: null,
819
- materialized: null,
820
- comment: null
821
- },
822
- {
823
- name: "source",
824
- data_type: "String",
825
- primary_key: false,
826
- required: true,
827
- unique: false,
828
- default: null,
829
- annotations: [],
830
- ttl: null,
831
- codec: null,
832
- materialized: null,
833
- comment: null
834
- }
835
- ];
836
-
837
- // src/dmv2/sdk/olapTable.ts
838
- import { Readable } from "stream";
839
- import { createHash as createHash2 } from "crypto";
840
- var OlapTable = class extends TypedBase {
841
- name;
842
- /** @internal */
843
- kind = "OlapTable";
844
- /** @internal Memoized ClickHouse client for reusing connections across insert calls */
845
- _memoizedClient;
846
- /** @internal Hash of the configuration used to create the memoized client */
847
- _configHash;
848
- /** @internal Cached table name to avoid repeated generation */
849
- _cachedTableName;
850
- constructor(name, config, schema, columns, validators) {
851
- const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
852
- const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
853
- const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
854
- if (hasFields && hasExpr) {
855
- throw new Error(
856
- `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
857
- );
858
- }
859
- const hasCluster = typeof resolvedConfig.cluster === "string";
860
- const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
861
- const hasReplicaName = typeof resolvedConfig.replicaName === "string";
862
- if (hasCluster && (hasKeeperPath || hasReplicaName)) {
863
- throw new Error(
864
- `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
865
- );
866
- }
867
- super(name, resolvedConfig, schema, columns, validators);
868
- this.name = name;
869
- const tables = getMooseInternal().tables;
870
- const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
871
- if (!isClientOnlyMode() && tables.has(registryKey)) {
872
- throw new Error(
873
- `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
874
- );
875
- }
876
- tables.set(registryKey, this);
877
- }
878
- /**
879
- * Generates the versioned table name following Moose's naming convention
880
- * Format: {tableName}_{version_with_dots_replaced_by_underscores}
881
- */
882
- generateTableName() {
883
- if (this._cachedTableName) {
884
- return this._cachedTableName;
885
- }
886
- const tableVersion = this.config.version;
887
- if (!tableVersion) {
888
- this._cachedTableName = this.name;
889
- } else {
890
- const versionSuffix = tableVersion.replace(/\./g, "_");
891
- this._cachedTableName = `${this.name}_${versionSuffix}`;
892
- }
893
- return this._cachedTableName;
894
- }
895
- /**
896
- * Creates a fast hash of the ClickHouse configuration.
897
- * Uses crypto.createHash for better performance than JSON.stringify.
898
- *
899
- * @private
900
- */
901
- createConfigHash(clickhouseConfig) {
902
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
903
- const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
904
- return createHash2("sha256").update(configString).digest("hex").substring(0, 16);
905
- }
906
- /**
907
- * Gets or creates a memoized ClickHouse client.
908
- * The client is cached and reused across multiple insert calls for better performance.
909
- * If the configuration changes, a new client will be created.
910
- *
911
- * @private
912
- */
913
- async getMemoizedClient() {
914
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
915
- const configRegistry = globalThis._mooseConfigRegistry;
916
- const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
917
- const clickhouseConfig = await configRegistry.getClickHouseConfig();
918
- const currentConfigHash = this.createConfigHash(clickhouseConfig);
919
- if (this._memoizedClient && this._configHash === currentConfigHash) {
920
- return { client: this._memoizedClient, config: clickhouseConfig };
921
- }
922
- if (this._memoizedClient && this._configHash !== currentConfigHash) {
923
- try {
924
- await this._memoizedClient.close();
925
- } catch (error) {
1093
+ /**
1094
+ * Closes the memoized ClickHouse client if it exists.
1095
+ * This is useful for cleaning up connections when the table instance is no longer needed.
1096
+ * The client will be automatically recreated on the next insert call if needed.
1097
+ */
1098
+ async closeClient() {
1099
+ if (this._memoizedClient) {
1100
+ try {
1101
+ await this._memoizedClient.close();
1102
+ } catch (error) {
1103
+ } finally {
1104
+ this._memoizedClient = void 0;
1105
+ this._configHash = void 0;
1106
+ }
1107
+ }
926
1108
  }
927
- }
928
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
929
- const client = getClickhouseClient2({
930
- username: clickhouseConfig.username,
931
- password: clickhouseConfig.password,
932
- database: effectiveDatabase,
933
- useSSL: clickhouseConfig.useSSL ? "true" : "false",
934
- host: clickhouseConfig.host,
935
- port: clickhouseConfig.port
936
- });
937
- this._memoizedClient = client;
938
- this._configHash = currentConfigHash;
939
- return { client, config: clickhouseConfig };
940
- }
941
- /**
942
- * Closes the memoized ClickHouse client if it exists.
943
- * This is useful for cleaning up connections when the table instance is no longer needed.
944
- * The client will be automatically recreated on the next insert call if needed.
945
- */
946
- async closeClient() {
947
- if (this._memoizedClient) {
948
- try {
949
- await this._memoizedClient.close();
950
- } catch (error) {
951
- } finally {
952
- this._memoizedClient = void 0;
953
- this._configHash = void 0;
1109
+ /**
1110
+ * Validates a single record using typia's comprehensive type checking.
1111
+ * This provides the most accurate validation as it uses the exact TypeScript type information.
1112
+ *
1113
+ * @param record The record to validate
1114
+ * @returns Validation result with detailed error information
1115
+ */
1116
+ validateRecord(record) {
1117
+ if (this.validators?.validate) {
1118
+ try {
1119
+ const result = this.validators.validate(record);
1120
+ return {
1121
+ success: result.success,
1122
+ data: result.data,
1123
+ errors: result.errors?.map(
1124
+ (err) => typeof err === "string" ? err : JSON.stringify(err)
1125
+ )
1126
+ };
1127
+ } catch (error) {
1128
+ return {
1129
+ success: false,
1130
+ errors: [error instanceof Error ? error.message : String(error)]
1131
+ };
1132
+ }
1133
+ }
1134
+ throw new Error("No typia validator found");
954
1135
  }
955
- }
956
- }
957
- /**
958
- * Validates a single record using typia's comprehensive type checking.
959
- * This provides the most accurate validation as it uses the exact TypeScript type information.
960
- *
961
- * @param record The record to validate
962
- * @returns Validation result with detailed error information
963
- */
964
- validateRecord(record) {
965
- if (this.validators?.validate) {
966
- try {
967
- const result = this.validators.validate(record);
968
- return {
969
- success: result.success,
970
- data: result.data,
971
- errors: result.errors?.map(
972
- (err) => typeof err === "string" ? err : JSON.stringify(err)
973
- )
974
- };
975
- } catch (error) {
976
- return {
977
- success: false,
978
- errors: [error instanceof Error ? error.message : String(error)]
979
- };
1136
+ /**
1137
+ * Type guard function using typia's is() function.
1138
+ * Provides compile-time type narrowing for TypeScript.
1139
+ *
1140
+ * @param record The record to check
1141
+ * @returns True if record matches type T, with type narrowing
1142
+ */
1143
+ isValidRecord(record) {
1144
+ if (this.validators?.is) {
1145
+ return this.validators.is(record);
1146
+ }
1147
+ throw new Error("No typia validator found");
980
1148
  }
981
- }
982
- throw new Error("No typia validator found");
983
- }
984
- /**
985
- * Type guard function using typia's is() function.
986
- * Provides compile-time type narrowing for TypeScript.
987
- *
988
- * @param record The record to check
989
- * @returns True if record matches type T, with type narrowing
990
- */
991
- isValidRecord(record) {
992
- if (this.validators?.is) {
993
- return this.validators.is(record);
994
- }
995
- throw new Error("No typia validator found");
996
- }
997
- /**
998
- * Assert that a record matches type T, throwing detailed errors if not.
999
- * Uses typia's assert() function for the most detailed error reporting.
1000
- *
1001
- * @param record The record to assert
1002
- * @returns The validated and typed record
1003
- * @throws Detailed validation error if record doesn't match type T
1004
- */
1005
- assertValidRecord(record) {
1006
- if (this.validators?.assert) {
1007
- return this.validators.assert(record);
1008
- }
1009
- throw new Error("No typia validator found");
1010
- }
1011
- /**
1012
- * Validates an array of records with comprehensive error reporting.
1013
- * Uses the most appropriate validation method available (typia or basic).
1014
- *
1015
- * @param data Array of records to validate
1016
- * @returns Detailed validation results
1017
- */
1018
- async validateRecords(data) {
1019
- const valid = [];
1020
- const invalid = [];
1021
- valid.length = 0;
1022
- invalid.length = 0;
1023
- const dataLength = data.length;
1024
- for (let i = 0; i < dataLength; i++) {
1025
- const record = data[i];
1026
- try {
1027
- if (this.isValidRecord(record)) {
1028
- valid.push(this.mapToClickhouseRecord(record));
1029
- } else {
1030
- const result = this.validateRecord(record);
1031
- if (result.success) {
1032
- valid.push(this.mapToClickhouseRecord(record));
1033
- } else {
1149
+ /**
1150
+ * Assert that a record matches type T, throwing detailed errors if not.
1151
+ * Uses typia's assert() function for the most detailed error reporting.
1152
+ *
1153
+ * @param record The record to assert
1154
+ * @returns The validated and typed record
1155
+ * @throws Detailed validation error if record doesn't match type T
1156
+ */
1157
+ assertValidRecord(record) {
1158
+ if (this.validators?.assert) {
1159
+ return this.validators.assert(record);
1160
+ }
1161
+ throw new Error("No typia validator found");
1162
+ }
1163
+ /**
1164
+ * Validates an array of records with comprehensive error reporting.
1165
+ * Uses the most appropriate validation method available (typia or basic).
1166
+ *
1167
+ * @param data Array of records to validate
1168
+ * @returns Detailed validation results
1169
+ */
1170
+ async validateRecords(data) {
1171
+ const valid = [];
1172
+ const invalid = [];
1173
+ valid.length = 0;
1174
+ invalid.length = 0;
1175
+ const dataLength = data.length;
1176
+ for (let i = 0; i < dataLength; i++) {
1177
+ const record = data[i];
1178
+ try {
1179
+ if (this.isValidRecord(record)) {
1180
+ valid.push(this.mapToClickhouseRecord(record));
1181
+ } else {
1182
+ const result = this.validateRecord(record);
1183
+ if (result.success) {
1184
+ valid.push(this.mapToClickhouseRecord(record));
1185
+ } else {
1186
+ invalid.push({
1187
+ record,
1188
+ error: result.errors?.join(", ") || "Validation failed",
1189
+ index: i,
1190
+ path: "root"
1191
+ });
1192
+ }
1193
+ }
1194
+ } catch (error) {
1034
1195
  invalid.push({
1035
1196
  record,
1036
- error: result.errors?.join(", ") || "Validation failed",
1197
+ error: error instanceof Error ? error.message : String(error),
1037
1198
  index: i,
1038
1199
  path: "root"
1039
1200
  });
1040
1201
  }
1041
1202
  }
1042
- } catch (error) {
1043
- invalid.push({
1044
- record,
1045
- error: error instanceof Error ? error.message : String(error),
1046
- index: i,
1047
- path: "root"
1048
- });
1203
+ return {
1204
+ valid,
1205
+ invalid,
1206
+ total: dataLength
1207
+ };
1049
1208
  }
1050
- }
1051
- return {
1052
- valid,
1053
- invalid,
1054
- total: dataLength
1055
- };
1056
- }
1057
- /**
1058
- * Optimized batch retry that minimizes individual insert operations.
1059
- * Groups records into smaller batches to reduce round trips while still isolating failures.
1060
- *
1061
- * @private
1062
- */
1063
- async retryIndividualRecords(client, tableName, records) {
1064
- const successful = [];
1065
- const failed = [];
1066
- const RETRY_BATCH_SIZE = 10;
1067
- const totalRecords = records.length;
1068
- for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
1069
- const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
1070
- const batch = records.slice(i, batchEnd);
1071
- try {
1072
- await client.insert({
1073
- table: quoteIdentifier(tableName),
1074
- values: batch,
1075
- format: "JSONEachRow",
1076
- clickhouse_settings: {
1077
- date_time_input_format: "best_effort",
1078
- // Add performance settings for retries
1079
- max_insert_block_size: RETRY_BATCH_SIZE,
1080
- max_block_size: RETRY_BATCH_SIZE
1081
- }
1082
- });
1083
- successful.push(...batch);
1084
- } catch (batchError) {
1085
- for (let j = 0; j < batch.length; j++) {
1086
- const record = batch[j];
1209
+ /**
1210
+ * Optimized batch retry that minimizes individual insert operations.
1211
+ * Groups records into smaller batches to reduce round trips while still isolating failures.
1212
+ *
1213
+ * @private
1214
+ */
1215
+ async retryIndividualRecords(client, tableName, records) {
1216
+ const successful = [];
1217
+ const failed = [];
1218
+ const RETRY_BATCH_SIZE = 10;
1219
+ const totalRecords = records.length;
1220
+ for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
1221
+ const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
1222
+ const batch = records.slice(i, batchEnd);
1087
1223
  try {
1088
1224
  await client.insert({
1089
1225
  table: quoteIdentifier(tableName),
1090
- values: [record],
1226
+ values: batch,
1091
1227
  format: "JSONEachRow",
1092
1228
  clickhouse_settings: {
1093
- date_time_input_format: "best_effort"
1229
+ date_time_input_format: "best_effort",
1230
+ // Add performance settings for retries
1231
+ max_insert_block_size: RETRY_BATCH_SIZE,
1232
+ max_block_size: RETRY_BATCH_SIZE
1094
1233
  }
1095
1234
  });
1096
- successful.push(record);
1097
- } catch (error) {
1098
- failed.push({
1099
- record,
1100
- error: error instanceof Error ? error.message : String(error),
1101
- index: i + j
1102
- });
1235
+ successful.push(...batch);
1236
+ } catch (batchError) {
1237
+ for (let j = 0; j < batch.length; j++) {
1238
+ const record = batch[j];
1239
+ try {
1240
+ await client.insert({
1241
+ table: quoteIdentifier(tableName),
1242
+ values: [record],
1243
+ format: "JSONEachRow",
1244
+ clickhouse_settings: {
1245
+ date_time_input_format: "best_effort"
1246
+ }
1247
+ });
1248
+ successful.push(record);
1249
+ } catch (error) {
1250
+ failed.push({
1251
+ record,
1252
+ error: error instanceof Error ? error.message : String(error),
1253
+ index: i + j
1254
+ });
1255
+ }
1256
+ }
1103
1257
  }
1104
1258
  }
1259
+ return { successful, failed };
1105
1260
  }
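`retryIndividualRecords` keeps round trips low by retrying in batches of ten (`RETRY_BATCH_SIZE`) and only dropping to per-record inserts for a batch that fails, so a payload with isolated bad rows costs roughly n/10 requests rather than n. The same pattern in isolation; the `insertBatch` callback below is a hypothetical stand-in for `client.insert(...)`:

```typescript
// Generic batch-then-individual retry, mirroring retryIndividualRecords.
// `insertBatch` is a hypothetical stand-in for the ClickHouse client call.
async function retryInBatches<T>(
  records: T[],
  insertBatch: (rows: T[]) => Promise<void>,
  batchSize = 10,
): Promise<{ successful: T[]; failed: { record: T; error: string; index: number }[] }> {
  const successful: T[] = [];
  const failed: { record: T; error: string; index: number }[] = [];
  for (let i = 0; i < records.length; i += batchSize) {
    const batch = records.slice(i, i + batchSize);
    try {
      await insertBatch(batch); // one round trip for the whole batch
      successful.push(...batch);
    } catch {
      // Batch failed: isolate the bad rows with per-record inserts.
      for (let j = 0; j < batch.length; j++) {
        try {
          await insertBatch([batch[j]]);
          successful.push(batch[j]);
        } catch (error) {
          failed.push({
            record: batch[j],
            error: error instanceof Error ? error.message : String(error),
            index: i + j,
          });
        }
      }
    }
  }
  return { successful, failed };
}
```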
1106
- }
1107
- return { successful, failed };
1108
- }
1109
- /**
1110
- * Validates input parameters and strategy compatibility
1111
- * @private
1112
- */
1113
- validateInsertParameters(data, options) {
1114
- const isStream = data instanceof Readable;
1115
- const strategy = options?.strategy || "fail-fast";
1116
- const shouldValidate = options?.validate !== false;
1117
- if (isStream && strategy === "isolate") {
1118
- throw new Error(
1119
- "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
1120
- );
1121
- }
1122
- if (isStream && shouldValidate) {
1123
- console.warn(
1124
- "Validation is not supported with stream input. Validation will be skipped."
1125
- );
1126
- }
1127
- return { isStream, strategy, shouldValidate };
1128
- }
1129
- /**
1130
- * Handles early return cases for empty data
1131
- * @private
1132
- */
1133
- handleEmptyData(data, isStream) {
1134
- if (isStream && !data) {
1135
- return {
1136
- successful: 0,
1137
- failed: 0,
1138
- total: 0
1139
- };
1140
- }
1141
- if (!isStream && (!data || data.length === 0)) {
1142
- return {
1143
- successful: 0,
1144
- failed: 0,
1145
- total: 0
1146
- };
1147
- }
1148
- return null;
1149
- }
1150
- /**
1151
- * Performs pre-insertion validation for array data
1152
- * @private
1153
- */
1154
- async performPreInsertionValidation(data, shouldValidate, strategy, options) {
1155
- if (!shouldValidate) {
1156
- return { validatedData: data, validationErrors: [] };
1157
- }
1158
- try {
1159
- const validationResult = await this.validateRecords(data);
1160
- const validatedData = validationResult.valid;
1161
- const validationErrors = validationResult.invalid;
1162
- if (validationErrors.length > 0) {
1163
- this.handleValidationErrors(validationErrors, strategy, data, options);
1261
+ /**
1262
+ * Validates input parameters and strategy compatibility
1263
+ * @private
1264
+ */
1265
+ validateInsertParameters(data, options) {
1266
+ const isStream = data instanceof Readable;
1267
+ const strategy = options?.strategy || "fail-fast";
1268
+ const shouldValidate = options?.validate !== false;
1269
+ if (isStream && strategy === "isolate") {
1270
+ throw new Error(
1271
+ "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
1272
+ );
1273
+ }
1274
+ if (isStream && shouldValidate) {
1275
+ console.warn(
1276
+ "Validation is not supported with stream input. Validation will be skipped."
1277
+ );
1278
+ }
1279
+ return { isStream, strategy, shouldValidate };
1280
+ }
1281
+ /**
1282
+ * Handles early return cases for empty data
1283
+ * @private
1284
+ */
1285
+ handleEmptyData(data, isStream) {
1286
+ if (isStream && !data) {
1287
+ return {
1288
+ successful: 0,
1289
+ failed: 0,
1290
+ total: 0
1291
+ };
1292
+ }
1293
+ if (!isStream && (!data || data.length === 0)) {
1294
+ return {
1295
+ successful: 0,
1296
+ failed: 0,
1297
+ total: 0
1298
+ };
1299
+ }
1300
+ return null;
1301
+ }
1302
+ /**
1303
+ * Performs pre-insertion validation for array data
1304
+ * @private
1305
+ */
1306
+ async performPreInsertionValidation(data, shouldValidate, strategy, options) {
1307
+ if (!shouldValidate) {
1308
+ return { validatedData: data, validationErrors: [] };
1309
+ }
1310
+ try {
1311
+ const validationResult = await this.validateRecords(data);
1312
+ const validatedData = validationResult.valid;
1313
+ const validationErrors = validationResult.invalid;
1314
+ if (validationErrors.length > 0) {
1315
+ this.handleValidationErrors(validationErrors, strategy, data, options);
1316
+ switch (strategy) {
1317
+ case "discard":
1318
+ return { validatedData, validationErrors };
1319
+ case "isolate":
1320
+ return { validatedData: data, validationErrors };
1321
+ default:
1322
+ return { validatedData, validationErrors };
1323
+ }
1324
+ }
1325
+ return { validatedData, validationErrors };
1326
+ } catch (validationError) {
1327
+ if (strategy === "fail-fast") {
1328
+ throw validationError;
1329
+ }
1330
+ console.warn("Validation error:", validationError);
1331
+ return { validatedData: data, validationErrors: [] };
1332
+ }
1333
+ }
1334
+ /**
1335
+ * Handles validation errors based on the specified strategy
1336
+ * @private
1337
+ */
1338
+ handleValidationErrors(validationErrors, strategy, data, options) {
1164
1339
  switch (strategy) {
1340
+ case "fail-fast":
1341
+ const firstError = validationErrors[0];
1342
+ throw new Error(
1343
+ `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1344
+ );
1165
1345
  case "discard":
1166
- return { validatedData, validationErrors };
1346
+ this.checkValidationThresholds(validationErrors, data.length, options);
1347
+ break;
1167
1348
  case "isolate":
1168
- return { validatedData: data, validationErrors };
1169
- default:
1170
- return { validatedData, validationErrors };
1349
+ break;
1171
1350
  }
1172
1351
  }
1173
- return { validatedData, validationErrors };
1174
- } catch (validationError) {
1175
- if (strategy === "fail-fast") {
1176
- throw validationError;
1352
+ /**
1353
+ * Checks if validation errors exceed configured thresholds
1354
+ * @private
1355
+ */
1356
+ checkValidationThresholds(validationErrors, totalRecords, options) {
1357
+ const validationFailedCount = validationErrors.length;
1358
+ const validationFailedRatio = validationFailedCount / totalRecords;
1359
+ if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1360
+ throw new Error(
1361
+ `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1362
+ );
1363
+ }
1364
+ if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1365
+ throw new Error(
1366
+ `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1367
+ );
1368
+ }
1177
1369
  }
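`checkValidationThresholds` treats `allowErrors` as an absolute cap and `allowErrorsRatio` as a strict fraction of the batch: with 200 records and `allowErrorsRatio: 0.1`, exactly 20 failures pass (0.1 is not greater than 0.1) while 21 throw. The same checks restated in isolation:

```typescript
// Same threshold logic in isolation: absolute cap first, then strict ratio.
function exceedsThresholds(
  failedCount: number,
  totalRecords: number,
  options?: { allowErrors?: number; allowErrorsRatio?: number },
): boolean {
  if (options?.allowErrors !== undefined && failedCount > options.allowErrors) {
    return true; // e.g. 21 failures with allowErrors: 20
  }
  const ratio = failedCount / totalRecords;
  return options?.allowErrorsRatio !== undefined && ratio > options.allowErrorsRatio;
}

console.log(exceedsThresholds(20, 200, { allowErrorsRatio: 0.1 })); // false (0.1 is not > 0.1)
console.log(exceedsThresholds(21, 200, { allowErrorsRatio: 0.1 })); // true
```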
1178
- console.warn("Validation error:", validationError);
1179
- return { validatedData: data, validationErrors: [] };
1180
- }
1181
- }
1182
- /**
1183
- * Handles validation errors based on the specified strategy
1184
- * @private
1185
- */
1186
- handleValidationErrors(validationErrors, strategy, data, options) {
1187
- switch (strategy) {
1188
- case "fail-fast":
1189
- const firstError = validationErrors[0];
1190
- throw new Error(
1191
- `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1192
- );
1193
- case "discard":
1194
- this.checkValidationThresholds(validationErrors, data.length, options);
1195
- break;
1196
- case "isolate":
1197
- break;
1198
- }
1199
- }
1200
- /**
1201
- * Checks if validation errors exceed configured thresholds
1202
- * @private
1203
- */
1204
- checkValidationThresholds(validationErrors, totalRecords, options) {
1205
- const validationFailedCount = validationErrors.length;
1206
- const validationFailedRatio = validationFailedCount / totalRecords;
1207
- if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1208
- throw new Error(
1209
- `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1210
- );
1211
- }
1212
- if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1213
- throw new Error(
1214
- `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1215
- );
1216
- }
1217
- }
1218
- /**
1219
- * Optimized insert options preparation with better memory management
1220
- * @private
1221
- */
1222
- prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1223
- const insertOptions = {
1224
- table: quoteIdentifier(tableName),
1225
- format: "JSONEachRow",
1226
- clickhouse_settings: {
1227
- date_time_input_format: "best_effort",
1228
- wait_end_of_query: 1,
1229
- // Ensure at-least-once delivery for INSERT operations
1230
- // Performance optimizations
1231
- max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1232
- max_block_size: 65536,
1233
- // Use async inserts for better performance with large datasets
1234
- async_insert: validatedData.length > 1e3 ? 1 : 0,
1235
- wait_for_async_insert: 1
1236
- // For at-least-once delivery
1370
+ /**
1371
+ * Optimized insert options preparation with better memory management
1372
+ * @private
1373
+ */
1374
+ prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1375
+ const insertOptions = {
1376
+ table: quoteIdentifier(tableName),
1377
+ format: "JSONEachRow",
1378
+ clickhouse_settings: {
1379
+ date_time_input_format: "best_effort",
1380
+ wait_end_of_query: 1,
1381
+ // Ensure at-least-once delivery for INSERT operations
1382
+ // Performance optimizations
1383
+ max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1384
+ max_block_size: 65536,
1385
+ // Use async inserts for better performance with large datasets
1386
+ async_insert: validatedData.length > 1e3 ? 1 : 0,
1387
+ wait_for_async_insert: 1
1388
+ // For at-least-once delivery
1389
+ }
1390
+ };
1391
+ if (isStream) {
1392
+ insertOptions.values = data;
1393
+ } else {
1394
+ insertOptions.values = validatedData;
1395
+ }
1396
+ if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1397
+ if (options.allowErrors !== void 0) {
1398
+ insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1399
+ }
1400
+ if (options.allowErrorsRatio !== void 0) {
1401
+ insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
1402
+ }
1403
+ }
1404
+ return insertOptions;
1237
1405
  }
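For array input, `prepareInsertOptions` caps `max_insert_block_size` at the payload size (up to 1e5), enables `async_insert` only above 1,000 rows, and keeps `wait_end_of_query` and `wait_for_async_insert` at 1 for at-least-once delivery; under the discard strategy it also forwards the caller's error budget to `input_format_allow_errors_num`/`input_format_allow_errors_ratio`. Roughly what a 5,000-row discard insert with `allowErrors: 10` would produce (illustrative values, not captured output):

```typescript
// Approximate clickhouse_settings for a 5,000-row array insert with
// strategy: "discard" and allowErrors: 10 (illustrative, not captured output).
const settings = {
  date_time_input_format: "best_effort",
  wait_end_of_query: 1,               // at-least-once delivery
  max_insert_block_size: 5000,        // min(validatedData.length, 1e5)
  max_block_size: 65536,
  async_insert: 1,                    // payload > 1,000 rows
  wait_for_async_insert: 1,
  input_format_allow_errors_num: 10,  // only set when strategy is "discard"
};
```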
1238
- };
1239
- if (isStream) {
1240
- insertOptions.values = data;
1241
- } else {
1242
- insertOptions.values = validatedData;
1243
- }
1244
- if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1245
- if (options.allowErrors !== void 0) {
1246
- insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1406
+ /**
1407
+ * Creates success result for completed insertions
1408
+ * @private
1409
+ */
1410
+ createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1411
+ if (isStream) {
1412
+ return {
1413
+ successful: -1,
1414
+ // -1 indicates stream mode where count is unknown
1415
+ failed: 0,
1416
+ total: -1
1417
+ };
1418
+ }
1419
+ const insertedCount = validatedData.length;
1420
+ const totalProcessed = shouldValidate ? data.length : insertedCount;
1421
+ const result = {
1422
+ successful: insertedCount,
1423
+ failed: shouldValidate ? validationErrors.length : 0,
1424
+ total: totalProcessed
1425
+ };
1426
+ if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1427
+ result.failedRecords = validationErrors.map((ve) => ({
1428
+ record: ve.record,
1429
+ error: `Validation error: ${ve.error}`,
1430
+ index: ve.index
1431
+ }));
1432
+ }
1433
+ return result;
1247
1434
  }
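Stream inserts report `successful: -1` and `total: -1` because row counts are unknown in stream mode, so callers should branch on that sentinel before doing arithmetic on the result. A small interpretation sketch over the result shape used here:

```typescript
// Interpreting an insert result, including the -1 stream sentinel.
// `result` is assumed to come from userTable.insert(...).
function summarize(result: {
  successful: number;
  failed: number;
  total: number;
  failedRecords?: { index: number; error: string }[];
}): string {
  if (result.total === -1) {
    return "stream insert completed (row counts unknown in stream mode)";
  }
  const failures = result.failedRecords
    ?.map((f) => `#${f.index}: ${f.error}`)
    .join("; ");
  return `${result.successful}/${result.total} inserted` + (failures ? ` (failed ${failures})` : "");
}
```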
1248
- if (options.allowErrorsRatio !== void 0) {
1249
- insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
1435
+ /**
1436
+ * Handles insertion errors based on the specified strategy
1437
+ * @private
1438
+ */
1439
+ async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1440
+ switch (strategy) {
1441
+ case "fail-fast":
1442
+ throw new Error(
1443
+ `Failed to insert data into table ${tableName}: ${batchError}`
1444
+ );
1445
+ case "discard":
1446
+ throw new Error(
1447
+ `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1448
+ );
1449
+ case "isolate":
1450
+ return await this.handleIsolateStrategy(
1451
+ batchError,
1452
+ tableName,
1453
+ data,
1454
+ validatedData,
1455
+ validationErrors,
1456
+ isStream,
1457
+ shouldValidate,
1458
+ options
1459
+ );
1460
+ default:
1461
+ throw new Error(`Unknown error strategy: ${strategy}`);
1462
+ }
1250
1463
  }
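The three strategies therefore behave quite differently at the call site: 'fail-fast' rethrows the batch error, 'discard' rethrows once the error budget is exceeded, and 'isolate' degrades to per-record retries and returns partial results. A sketch reusing the `userTable`/`mixedData` names from the `insert()` JSDoc examples below:

```typescript
// Choosing an error strategy per call (sketch; mixedData is illustrative).
try {
  const result = await userTable.insert(mixedData, {
    strategy: "isolate",     // keep good rows, collect bad ones
    allowErrorsRatio: 0.1,   // but give up if more than 10% of rows fail
  });
  console.log(`inserted ${result.successful}, quarantined ${result.failed}`);
} catch (err) {
  // Thrown when the ratio threshold is exceeded, or under fail-fast/discard.
  console.error("insert aborted:", err);
}
```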
1251
- }
1252
- return insertOptions;
1253
- }
1254
- /**
1255
- * Creates success result for completed insertions
1256
- * @private
1257
- */
1258
- createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1259
- if (isStream) {
1260
- return {
1261
- successful: -1,
1262
- // -1 indicates stream mode where count is unknown
1263
- failed: 0,
1264
- total: -1
1265
- };
1266
- }
1267
- const insertedCount = validatedData.length;
1268
- const totalProcessed = shouldValidate ? data.length : insertedCount;
1269
- const result = {
1270
- successful: insertedCount,
1271
- failed: shouldValidate ? validationErrors.length : 0,
1272
- total: totalProcessed
1273
- };
1274
- if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1275
- result.failedRecords = validationErrors.map((ve) => ({
1276
- record: ve.record,
1277
- error: `Validation error: ${ve.error}`,
1278
- index: ve.index
1279
- }));
1280
- }
1281
- return result;
1282
- }
1283
- /**
1284
- * Handles insertion errors based on the specified strategy
1285
- * @private
1286
- */
1287
- async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1288
- switch (strategy) {
1289
- case "fail-fast":
1290
- throw new Error(
1291
- `Failed to insert data into table ${tableName}: ${batchError}`
1292
- );
1293
- case "discard":
1294
- throw new Error(
1295
- `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1296
- );
1297
- case "isolate":
1298
- return await this.handleIsolateStrategy(
1299
- batchError,
1300
- tableName,
1301
- data,
1302
- validatedData,
1303
- validationErrors,
1304
- isStream,
1305
- shouldValidate,
1306
- options
1307
- );
1308
- default:
1309
- throw new Error(`Unknown error strategy: ${strategy}`);
1310
- }
1311
- }
1312
- /**
1313
- * Handles the isolate strategy for insertion errors
1314
- * @private
1315
- */
1316
- async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1317
- if (isStream) {
1318
- throw new Error(
1319
- `Isolate strategy is not supported with stream input: ${batchError}`
1320
- );
1321
- }
1322
- try {
1323
- const { client } = await this.getMemoizedClient();
1324
- const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1325
- const retryData = skipValidationOnRetry ? data : validatedData;
1326
- const { successful, failed } = await this.retryIndividualRecords(
1327
- client,
1328
- tableName,
1329
- retryData
1330
- );
1331
- const allFailedRecords = [
1332
- // Validation errors (if any and not skipping validation on retry)
1333
- ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1334
- record: ve.record,
1335
- error: `Validation error: ${ve.error}`,
1336
- index: ve.index
1337
- })) : [],
1338
- // Insertion errors
1339
- ...failed
1340
- ];
1341
- this.checkInsertionThresholds(
1342
- allFailedRecords,
1343
- data.length,
1344
- options
1345
- );
1346
- return {
1347
- successful: successful.length,
1348
- failed: allFailedRecords.length,
1349
- total: data.length,
1350
- failedRecords: allFailedRecords
1351
- };
1352
- } catch (isolationError) {
1353
- throw new Error(
1354
- `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1355
- );
1356
- }
1357
- }
1358
- /**
1359
- * Checks if insertion errors exceed configured thresholds
1360
- * @private
1361
- */
1362
- checkInsertionThresholds(failedRecords, totalRecords, options) {
1363
- const totalFailed = failedRecords.length;
1364
- const failedRatio = totalFailed / totalRecords;
1365
- if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1366
- throw new Error(
1367
- `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1368
- );
1369
- }
1370
- if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1371
- throw new Error(
1372
- `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1373
- );
1374
- }
1375
- }
1376
- /**
1377
- * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1378
- *
1379
- * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1380
- * - For every Nested struct (not array), it recurses into the struct.
1381
- * - This ensures compatibility with kafka_clickhouse_sync.
1382
- *
1383
- * @param record The input record to transform (may be deeply nested)
1384
- * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1385
- * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1386
- */
1387
- mapToClickhouseRecord(record, columns = this.columnArray) {
1388
- const result = { ...record };
1389
- for (const col of columns) {
1390
- const value = record[col.name];
1391
- const dt = col.data_type;
1392
- if (isArrayNestedType(dt)) {
1393
- if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1394
- result[col.name] = value.map((item) => [
1395
- this.mapToClickhouseRecord(item, dt.elementType.columns)
1396
- ]);
1464
+ /**
1465
+ * Handles the isolate strategy for insertion errors
1466
+ * @private
1467
+ */
1468
+ async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1469
+ if (isStream) {
1470
+ throw new Error(
1471
+ `Isolate strategy is not supported with stream input: ${batchError}`
1472
+ );
1397
1473
  }
1398
- } else if (isNestedType(dt)) {
1399
- if (value && typeof value === "object") {
1400
- result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1474
+ try {
1475
+ const { client } = await this.getMemoizedClient();
1476
+ const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1477
+ const retryData = skipValidationOnRetry ? data : validatedData;
1478
+ const { successful, failed } = await this.retryIndividualRecords(
1479
+ client,
1480
+ tableName,
1481
+ retryData
1482
+ );
1483
+ const allFailedRecords = [
1484
+ // Validation errors (if any and not skipping validation on retry)
1485
+ ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1486
+ record: ve.record,
1487
+ error: `Validation error: ${ve.error}`,
1488
+ index: ve.index
1489
+ })) : [],
1490
+ // Insertion errors
1491
+ ...failed
1492
+ ];
1493
+ this.checkInsertionThresholds(
1494
+ allFailedRecords,
1495
+ data.length,
1496
+ options
1497
+ );
1498
+ return {
1499
+ successful: successful.length,
1500
+ failed: allFailedRecords.length,
1501
+ total: data.length,
1502
+ failedRecords: allFailedRecords
1503
+ };
1504
+ } catch (isolationError) {
1505
+ throw new Error(
1506
+ `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1507
+ );
1401
1508
  }
1402
1509
  }
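Note the role of `skipValidationOnRetry` in the isolate path: when set, the per-record retry replays the original `data` rather than the validator-approved subset, and validation failures are excluded from `failedRecords`, leaving only genuine ClickHouse rejections. A sketch (`rows` is illustrative):

```typescript
// Sketch: retrying everything, not just validator-approved rows.
const result = await userTable.insert(rows, {
  strategy: "isolate",
  skipValidationOnRetry: true, // replay the original data on per-record retry
});
// failedRecords then contains only rows ClickHouse itself rejected,
// not rows that merely failed client-side validation.
```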
1403
- }
1404
- return result;
1405
- }
1406
- /**
1407
- * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1408
- * This method establishes a direct connection to ClickHouse using the project configuration
1409
- * and inserts the provided data into the versioned table.
1410
- *
1411
- * PERFORMANCE OPTIMIZATIONS:
1412
- * - Memoized client connections with fast config hashing
1413
- * - Single-pass validation with pre-allocated arrays
1414
- * - Batch-optimized retry strategy (batches of 10, then individual)
1415
- * - Optimized ClickHouse settings for large datasets
1416
- * - Reduced memory allocations and object creation
1417
- *
1418
- * Uses advanced typia validation when available for comprehensive type checking,
1419
- * with fallback to basic validation for compatibility.
1420
- *
1421
- * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1422
- * If the configuration changes, a new client will be automatically created.
1423
- *
1424
- * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1425
- * @param options Optional configuration for error handling, validation, and insertion behavior
1426
- * @returns Promise resolving to detailed insertion results
1427
- * @throws {ConfigError} When configuration cannot be read or parsed
1428
- * @throws {ClickHouseError} When insertion fails based on the error strategy
1429
- * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1430
- *
1431
- * @example
1432
- * ```typescript
1433
- * // Create an OlapTable instance (typia validators auto-injected)
1434
- * const userTable = new OlapTable<User>('users');
1435
- *
1436
- * // Insert with comprehensive typia validation
1437
- * const result1 = await userTable.insert([
1438
- * { id: 1, name: 'John', email: 'john@example.com' },
1439
- * { id: 2, name: 'Jane', email: 'jane@example.com' }
1440
- * ]);
1441
- *
1442
- * // Insert data with stream input (validation not available for streams)
1443
- * const dataStream = new Readable({
1444
- * objectMode: true,
1445
- * read() {} // Stream implementation
1446
- * });
1447
- * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1448
- *
1449
- * // Insert with validation disabled for performance
1450
- * const result3 = await userTable.insert(data, { validate: false });
1451
- *
1452
- * // Insert with error handling strategies
1453
- * const result4 = await userTable.insert(mixedData, {
1454
- * strategy: 'isolate',
1455
- * allowErrorsRatio: 0.1,
1456
- * validate: true // Use typia validation (default)
1457
- * });
1458
- *
1459
- * // Optional: Clean up connection when completely done
1460
- * await userTable.closeClient();
1461
- * ```
1462
- */
1463
- async insert(data, options) {
1464
- const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1465
- const emptyResult = this.handleEmptyData(data, isStream);
1466
- if (emptyResult) {
1467
- return emptyResult;
1468
- }
1469
- let validatedData = [];
1470
- let validationErrors = [];
1471
- if (!isStream && shouldValidate) {
1472
- const validationResult = await this.performPreInsertionValidation(
1473
- data,
1474
- shouldValidate,
1475
- strategy,
1476
- options
1477
- );
1478
- validatedData = validationResult.validatedData;
1479
- validationErrors = validationResult.validationErrors;
1480
- } else {
1481
- validatedData = isStream ? [] : data;
1482
- }
1483
- const { client } = await this.getMemoizedClient();
1484
- const tableName = this.generateTableName();
1485
- try {
1486
- const insertOptions = this.prepareInsertOptions(
1487
- tableName,
1488
- data,
1489
- validatedData,
1490
- isStream,
1491
- strategy,
1492
- options
1493
- );
1494
- await client.insert(insertOptions);
1495
- return this.createSuccessResult(
1496
- data,
1497
- validatedData,
1498
- validationErrors,
1499
- isStream,
1500
- shouldValidate,
1501
- strategy
1502
- );
1503
- } catch (batchError) {
1504
- return await this.handleInsertionError(
1505
- batchError,
1506
- strategy,
1507
- tableName,
1508
- data,
1509
- validatedData,
1510
- validationErrors,
1511
- isStream,
1512
- shouldValidate,
1513
- options
1514
- );
1515
- }
1510
+ /**
1511
+ * Checks if insertion errors exceed configured thresholds
1512
+ * @private
1513
+ */
1514
+ checkInsertionThresholds(failedRecords, totalRecords, options) {
1515
+ const totalFailed = failedRecords.length;
1516
+ const failedRatio = totalFailed / totalRecords;
1517
+ if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1518
+ throw new Error(
1519
+ `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1520
+ );
1521
+ }
1522
+ if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1523
+ throw new Error(
1524
+ `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1525
+ );
1526
+ }
1527
+ }
1528
+ /**
1529
+ * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1530
+ *
1531
+ * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1532
+ * - For every Nested struct (not array), it recurses into the struct.
1533
+ * - This ensures compatibility with kafka_clickhouse_sync.
1534
+ *
1535
+ * @param record The input record to transform (may be deeply nested)
1536
+ * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1537
+ * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1538
+ */
1539
+ mapToClickhouseRecord(record, columns = this.columnArray) {
1540
+ const result = { ...record };
1541
+ for (const col of columns) {
1542
+ const value = record[col.name];
1543
+ const dt = col.data_type;
1544
+ if (isArrayNestedType(dt)) {
1545
+ if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1546
+ result[col.name] = value.map((item) => [
1547
+ this.mapToClickhouseRecord(item, dt.elementType.columns)
1548
+ ]);
1549
+ }
1550
+ } else if (isNestedType(dt)) {
1551
+ if (value && typeof value === "object") {
1552
+ result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1553
+ }
1554
+ }
1555
+ }
1556
+ return result;
1557
+ }
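The `Array(Nested(...))` branch wraps every element in a one-item array, which is the JSONEachRow shape the kafka_clickhouse_sync path expects. Assuming a hypothetical `items` column typed as Array(Nested(sku String, qty Int32)), the transformation looks like this:

```typescript
// Before/after shapes for a hypothetical Array(Nested(...)) column "items".
const input = { id: 1, items: [{ sku: "a", qty: 2 }, { sku: "b", qty: 1 }] };

// mapToClickhouseRecord wraps each nested element in its own single-item array:
const mapped = { id: 1, items: [[{ sku: "a", qty: 2 }], [{ sku: "b", qty: 1 }]] };
```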
1558
+ /**
1559
+ * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1560
+ * This method establishes a direct connection to ClickHouse using the project configuration
1561
+ * and inserts the provided data into the versioned table.
1562
+ *
1563
+ * PERFORMANCE OPTIMIZATIONS:
1564
+ * - Memoized client connections with fast config hashing
1565
+ * - Single-pass validation with pre-allocated arrays
1566
+ * - Batch-optimized retry strategy (batches of 10, then individual)
1567
+ * - Optimized ClickHouse settings for large datasets
1568
+ * - Reduced memory allocations and object creation
1569
+ *
1570
+ * Uses advanced typia validation when available for comprehensive type checking,
1571
+ * with fallback to basic validation for compatibility.
1572
+ *
1573
+ * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1574
+ * If the configuration changes, a new client will be automatically created.
1575
+ *
1576
+ * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1577
+ * @param options Optional configuration for error handling, validation, and insertion behavior
1578
+ * @returns Promise resolving to detailed insertion results
1579
+ * @throws {ConfigError} When configuration cannot be read or parsed
1580
+ * @throws {ClickHouseError} When insertion fails based on the error strategy
1581
+ * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1582
+ *
1583
+ * @example
1584
+ * ```typescript
1585
+ * // Create an OlapTable instance (typia validators auto-injected)
1586
+ * const userTable = new OlapTable<User>('users');
1587
+ *
1588
+ * // Insert with comprehensive typia validation
1589
+ * const result1 = await userTable.insert([
1590
+ * { id: 1, name: 'John', email: 'john@example.com' },
1591
+ * { id: 2, name: 'Jane', email: 'jane@example.com' }
1592
+ * ]);
1593
+ *
1594
+ * // Insert data with stream input (validation not available for streams)
1595
+ * const dataStream = new Readable({
1596
+ * objectMode: true,
1597
+ * read() {} // Stream implementation
1598
+ * });
1599
+ * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1600
+ *
1601
+ * // Insert with validation disabled for performance
1602
+ * const result3 = await userTable.insert(data, { validate: false });
1603
+ *
1604
+ * // Insert with error handling strategies
1605
+ * const result4 = await userTable.insert(mixedData, {
1606
+ * strategy: 'isolate',
1607
+ * allowErrorsRatio: 0.1,
1608
+ * validate: true // Use typia validation (default)
1609
+ * });
1610
+ *
1611
+ * // Optional: Clean up connection when completely done
1612
+ * await userTable.closeClient();
1613
+ * ```
1614
+ */
1615
+ async insert(data, options) {
1616
+ const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1617
+ const emptyResult = this.handleEmptyData(data, isStream);
1618
+ if (emptyResult) {
1619
+ return emptyResult;
1620
+ }
1621
+ let validatedData = [];
1622
+ let validationErrors = [];
1623
+ if (!isStream && shouldValidate) {
1624
+ const validationResult = await this.performPreInsertionValidation(
1625
+ data,
1626
+ shouldValidate,
1627
+ strategy,
1628
+ options
1629
+ );
1630
+ validatedData = validationResult.validatedData;
1631
+ validationErrors = validationResult.validationErrors;
1632
+ } else {
1633
+ validatedData = isStream ? [] : data;
1634
+ }
1635
+ const { client } = await this.getMemoizedClient();
1636
+ const tableName = this.generateTableName();
1637
+ try {
1638
+ const insertOptions = this.prepareInsertOptions(
1639
+ tableName,
1640
+ data,
1641
+ validatedData,
1642
+ isStream,
1643
+ strategy,
1644
+ options
1645
+ );
1646
+ await client.insert(insertOptions);
1647
+ return this.createSuccessResult(
1648
+ data,
1649
+ validatedData,
1650
+ validationErrors,
1651
+ isStream,
1652
+ shouldValidate,
1653
+ strategy
1654
+ );
1655
+ } catch (batchError) {
1656
+ return await this.handleInsertionError(
1657
+ batchError,
1658
+ strategy,
1659
+ tableName,
1660
+ data,
1661
+ validatedData,
1662
+ validationErrors,
1663
+ isStream,
1664
+ shouldValidate,
1665
+ options
1666
+ );
1667
+ }
1668
+ }
1669
+ // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1670
+ // were removed in ENG-856. Use direct configuration instead, e.g.:
1671
+ // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1672
+ };
1516
1673
  }
1517
- // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1518
- // were removed in ENG-856. Use direct configuration instead, e.g.:
1519
- // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1520
- };
1674
+ });
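The note above points at the replacement for the removed factory helpers. Spelled out as a sketch, assuming `ClickHouseEngines` is exported by the package and the row type carries an `updated_at` version column:

```typescript
// Replacement for the removed withReplacingMergeTree factory (per the note above).
// Assumes ClickHouseEngines is exported by @514labs/moose-lib.
import { OlapTable, ClickHouseEngines } from "@514labs/moose-lib";

interface Event {
  id: string;
  payload: string;
  updated_at: Date;
}

const events = new OlapTable<Event>("events", {
  engine: ClickHouseEngines.ReplacingMergeTree,
  orderByFields: ["id"],
  ver: "updated_at", // the latest row per id wins on merge
});
```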
1521
1675
 
1522
1676
  // src/dmv2/sdk/stream.ts
1523
1677
  import { createHash as createHash3 } from "crypto";
1524
- var RoutedMessage = class {
1525
- /** The destination stream for the message */
1526
- destination;
1527
- /** The message value(s) to send */
1528
- values;
1529
- /**
1530
- * Creates a new routed message.
1531
- *
1532
- * @param destination The target stream
1533
- * @param values The message(s) to route
1534
- */
1535
- constructor(destination, values) {
1536
- this.destination = destination;
1537
- this.values = values;
1538
- }
1539
- };
1540
- var Stream = class extends TypedBase {
1541
- defaultDeadLetterQueue;
1542
- /** @internal Memoized KafkaJS producer for reusing connections across sends */
1543
- _memoizedProducer;
1544
- /** @internal Hash of the configuration used to create the memoized Kafka producer */
1545
- _kafkaConfigHash;
1546
- constructor(name, config, schema, columns, validators, allowExtraFields) {
1547
- super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1548
- const streams = getMooseInternal().streams;
1549
- if (streams.has(name)) {
1550
- throw new Error(`Stream with name ${name} already exists`);
1551
- }
1552
- streams.set(name, this);
1553
- this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1554
- }
1555
- /**
1556
- * Internal map storing transformation configurations.
1557
- * Maps destination stream names to arrays of transformation functions and their configs.
1558
- *
1559
- * @internal
1560
- */
1561
- _transformations = /* @__PURE__ */ new Map();
1562
- /**
1563
- * Internal function for multi-stream transformations.
1564
- * Allows a single transformation to route messages to multiple destinations.
1565
- *
1566
- * @internal
1567
- */
1568
- _multipleTransformations;
1569
- /**
1570
- * Internal array storing consumer configurations.
1571
- *
1572
- * @internal
1573
- */
1574
- _consumers = new Array();
1575
- /**
1576
- * Builds the full Kafka topic name including optional namespace and version suffix.
1577
- * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
1578
- */
1579
- buildFullTopicName(namespace) {
1580
- const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1581
- const base = `${this.name}${versionSuffix}`;
1582
- return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1583
- }
1584
- /**
1585
- * Creates a fast hash string from relevant Kafka configuration fields.
1586
- */
1587
- createConfigHash(kafkaConfig) {
1588
- const configString = [
1589
- kafkaConfig.broker,
1590
- kafkaConfig.messageTimeoutMs,
1591
- kafkaConfig.saslUsername,
1592
- kafkaConfig.saslPassword,
1593
- kafkaConfig.saslMechanism,
1594
- kafkaConfig.securityProtocol,
1595
- kafkaConfig.namespace
1596
- ].join(":");
1597
- return createHash3("sha256").update(configString).digest("hex").substring(0, 16);
1598
- }
1599
- /**
1600
- * Gets or creates a memoized KafkaJS producer using runtime configuration.
1601
- */
1602
- async getMemoizedProducer() {
1603
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1604
- const configRegistry = globalThis._mooseConfigRegistry;
1605
- const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1606
- const kafkaConfig = await configRegistry.getKafkaConfig();
1607
- const currentHash = this.createConfigHash(kafkaConfig);
1608
- if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1609
- return { producer: this._memoizedProducer, kafkaConfig };
1610
- }
1611
- if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1612
- try {
1613
- await this._memoizedProducer.disconnect();
1614
- } catch {
1615
- }
1616
- this._memoizedProducer = void 0;
1617
- }
1618
- const clientId = `moose-sdk-stream-${this.name}`;
1619
- const logger = {
1620
- logPrefix: clientId,
1621
- log: (message) => {
1622
- console.log(`${clientId}: ${message}`);
1623
- },
1624
- error: (message) => {
1625
- console.error(`${clientId}: ${message}`);
1626
- },
1627
- warn: (message) => {
1628
- console.warn(`${clientId}: ${message}`);
1678
+ function attachTypeGuard(dl, typeGuard) {
1679
+ dl.asTyped = () => typeGuard(dl.originalRecord);
1680
+ }
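`attachTypeGuard` is what gives a dead letter its `asTyped()` method: calling it re-runs the source type guard over `originalRecord`. A consumer sketch against the `DeadLetterQueue` defined below (`userDlq` is illustrative):

```typescript
// Sketch: recovering the original typed record inside a DLQ consumer.
userDlq.addConsumer(async (deadLetter) => {
  // asTyped() re-applies the type guard injected at DLQ construction.
  const original = deadLetter.asTyped();
  console.log("recovered record:", original);
});
```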
1681
+ var RoutedMessage, Stream, DeadLetterQueue;
1682
+ var init_stream = __esm({
1683
+ "src/dmv2/sdk/stream.ts"() {
1684
+ "use strict";
1685
+ init_typedBase();
1686
+ init_internal();
1687
+ init_stackTrace();
1688
+ RoutedMessage = class {
1689
+ /** The destination stream for the message */
1690
+ destination;
1691
+ /** The message value(s) to send */
1692
+ values;
1693
+ /**
1694
+ * Creates a new routed message.
1695
+ *
1696
+ * @param destination The target stream
1697
+ * @param values The message(s) to route
1698
+ */
1699
+ constructor(destination, values) {
1700
+ this.destination = destination;
1701
+ this.values = values;
1629
1702
  }
1630
1703
  };
1631
- const producer = await getKafkaProducer2(
1632
- {
1633
- clientId,
1634
- broker: kafkaConfig.broker,
1635
- securityProtocol: kafkaConfig.securityProtocol,
1636
- saslUsername: kafkaConfig.saslUsername,
1637
- saslPassword: kafkaConfig.saslPassword,
1638
- saslMechanism: kafkaConfig.saslMechanism
1639
- },
1640
- logger
1641
- );
1642
- this._memoizedProducer = producer;
1643
- this._kafkaConfigHash = currentHash;
1644
- return { producer, kafkaConfig };
1645
- }
1646
- /**
1647
- * Closes the memoized Kafka producer if it exists.
1648
- */
1649
- async closeProducer() {
1650
- if (this._memoizedProducer) {
1651
- try {
1652
- await this._memoizedProducer.disconnect();
1653
- } catch {
1654
- } finally {
1655
- this._memoizedProducer = void 0;
1656
- this._kafkaConfigHash = void 0;
1704
+ Stream = class extends TypedBase {
1705
+ defaultDeadLetterQueue;
1706
+ /** @internal Memoized KafkaJS producer for reusing connections across sends */
1707
+ _memoizedProducer;
1708
+ /** @internal Hash of the configuration used to create the memoized Kafka producer */
1709
+ _kafkaConfigHash;
1710
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
1711
+ super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1712
+ const streams = getMooseInternal().streams;
1713
+ if (streams.has(name)) {
1714
+ throw new Error(`Stream with name ${name} already exists`);
1715
+ }
1716
+ streams.set(name, this);
1717
+ this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1657
1718
  }
1658
- }
1659
- }
1660
- /**
1661
- * Sends one or more records to this stream's Kafka topic.
1662
- * Values are JSON-serialized as message values.
1663
- */
1664
- async send(values) {
1665
- const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1666
- if (flat.length === 0) return;
1667
- const { producer, kafkaConfig } = await this.getMemoizedProducer();
1668
- const topic = this.buildFullTopicName(kafkaConfig.namespace);
1669
- const sr = this.config.schemaConfig;
1670
- if (sr && sr.kind === "JSON") {
1671
- const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1672
- if (!schemaRegistryUrl) {
1673
- throw new Error("Schema Registry URL not configured");
1674
- }
1675
- const {
1676
- default: { SchemaRegistry }
1677
- } = await import("@kafkajs/confluent-schema-registry");
1678
- const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1679
- let schemaId = void 0;
1680
- if ("id" in sr.reference) {
1681
- schemaId = sr.reference.id;
1682
- } else if ("subjectLatest" in sr.reference) {
1683
- schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1684
- } else if ("subject" in sr.reference) {
1685
- schemaId = await registry.getRegistryId(
1686
- sr.reference.subject,
1687
- sr.reference.version
1719
+ /**
1720
+ * Internal map storing transformation configurations.
1721
+ * Maps destination stream names to arrays of transformation functions and their configs.
1722
+ *
1723
+ * @internal
1724
+ */
1725
+ _transformations = /* @__PURE__ */ new Map();
1726
+ /**
1727
+ * Internal function for multi-stream transformations.
1728
+ * Allows a single transformation to route messages to multiple destinations.
1729
+ *
1730
+ * @internal
1731
+ */
1732
+ _multipleTransformations;
1733
+ /**
1734
+ * Internal array storing consumer configurations.
1735
+ *
1736
+ * @internal
1737
+ */
1738
+ _consumers = new Array();
1739
+ /**
1740
+ * Builds the full Kafka topic name including optional namespace and version suffix.
1741
+ * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
1742
+ */
1743
+ buildFullTopicName(namespace) {
1744
+ const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1745
+ const base = `${this.name}${versionSuffix}`;
1746
+ return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1747
+ }
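`buildFullTopicName` turns version dots into underscores and prefixes a non-empty namespace with a dot separator. A standalone restatement of the rule with example outputs:

```typescript
// Restatement of the naming rule (mirrors buildFullTopicName).
function fullTopicName(name: string, version?: string, namespace?: string): string {
  const versionSuffix = version ? `_${version.replace(/\./g, "_")}` : "";
  const base = `${name}${versionSuffix}`;
  return namespace ? `${namespace}.${base}` : base;
}

console.log(fullTopicName("events", "1.2.0", "prod")); // "prod.events_1_2_0"
console.log(fullTopicName("events", "1.2.0"));         // "events_1_2_0"
console.log(fullTopicName("events"));                  // "events"
```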
1748
+ /**
1749
+ * Creates a fast hash string from relevant Kafka configuration fields.
1750
+ */
1751
+ createConfigHash(kafkaConfig) {
1752
+ const configString = [
1753
+ kafkaConfig.broker,
1754
+ kafkaConfig.messageTimeoutMs,
1755
+ kafkaConfig.saslUsername,
1756
+ kafkaConfig.saslPassword,
1757
+ kafkaConfig.saslMechanism,
1758
+ kafkaConfig.securityProtocol,
1759
+ kafkaConfig.namespace
1760
+ ].join(":");
1761
+ return createHash3("sha256").update(configString).digest("hex").substring(0, 16);
1762
+ }
1763
+ /**
1764
+ * Gets or creates a memoized KafkaJS producer using runtime configuration.
1765
+ */
1766
+ async getMemoizedProducer() {
1767
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1768
+ const configRegistry = globalThis._mooseConfigRegistry;
1769
+ const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1770
+ const kafkaConfig = await configRegistry.getKafkaConfig();
1771
+ const currentHash = this.createConfigHash(kafkaConfig);
1772
+ if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1773
+ return { producer: this._memoizedProducer, kafkaConfig };
1774
+ }
1775
+ if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1776
+ try {
1777
+ await this._memoizedProducer.disconnect();
1778
+ } catch {
1779
+ }
1780
+ this._memoizedProducer = void 0;
1781
+ }
1782
+ const clientId = `moose-sdk-stream-${this.name}`;
1783
+ const logger = {
1784
+ logPrefix: clientId,
1785
+ log: (message) => {
1786
+ console.log(`${clientId}: ${message}`);
1787
+ },
1788
+ error: (message) => {
1789
+ console.error(`${clientId}: ${message}`);
1790
+ },
1791
+ warn: (message) => {
1792
+ console.warn(`${clientId}: ${message}`);
1793
+ }
1794
+ };
1795
+ const producer = await getKafkaProducer2(
1796
+ {
1797
+ clientId,
1798
+ broker: kafkaConfig.broker,
1799
+ securityProtocol: kafkaConfig.securityProtocol,
1800
+ saslUsername: kafkaConfig.saslUsername,
1801
+ saslPassword: kafkaConfig.saslPassword,
1802
+ saslMechanism: kafkaConfig.saslMechanism
1803
+ },
1804
+ logger
1688
1805
  );
1806
+ this._memoizedProducer = producer;
1807
+ this._kafkaConfigHash = currentHash;
1808
+ return { producer, kafkaConfig };
1689
1809
  }
1690
- if (schemaId === void 0) {
1691
- throw new Error("Malformed schema reference.");
1810
+ /**
1811
+ * Closes the memoized Kafka producer if it exists.
1812
+ */
1813
+ async closeProducer() {
1814
+ if (this._memoizedProducer) {
1815
+ try {
1816
+ await this._memoizedProducer.disconnect();
1817
+ } catch {
1818
+ } finally {
1819
+ this._memoizedProducer = void 0;
1820
+ this._kafkaConfigHash = void 0;
1821
+ }
1822
+ }
1692
1823
  }
1693
- const encoded = await Promise.all(
1694
- flat.map(
1695
- (v) => registry.encode(schemaId, v)
1696
- )
1697
- );
1698
- await producer.send({
1699
- topic,
1700
- messages: encoded.map((value) => ({ value }))
1701
- });
1702
- return;
1703
- } else if (sr !== void 0) {
1704
- throw new Error("Currently only JSON Schema is supported.");
1705
- }
1706
- await producer.send({
1707
- topic,
1708
- messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1709
- });
1710
- }
1711
- /**
1712
- * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1713
- * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1714
- *
1715
- * @template U The data type of the messages in the destination stream.
1716
- * @param destination The destination stream for the transformed messages.
1717
- * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1718
- * Return `null`, `undefined`, or an empty array `[]` to filter out a message; return an array to emit multiple messages.
1719
- * @param config Optional configuration for this specific transformation step, like a version.
1720
- */
1721
- addTransform(destination, transformation, config) {
1722
- const sourceFile = getSourceFileFromStack(new Error().stack);
1723
- const transformConfig = {
1724
- ...config ?? {},
1725
- sourceFile
1726
- };
1727
- if (transformConfig.deadLetterQueue === void 0) {
1728
- transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1729
- }
1730
- if (this._transformations.has(destination.name)) {
1731
- const existingTransforms = this._transformations.get(destination.name);
1732
- const hasVersion = existingTransforms.some(
1733
- ([_, __, cfg]) => cfg.version === transformConfig.version
1734
- );
1735
- if (!hasVersion) {
1736
- existingTransforms.push([destination, transformation, transformConfig]);
1824
+ /**
1825
+ * Sends one or more records to this stream's Kafka topic.
1826
+ * Values are JSON-serialized as message values.
1827
+ */
1828
+ async send(values) {
1829
+ const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1830
+ if (flat.length === 0) return;
1831
+ const { producer, kafkaConfig } = await this.getMemoizedProducer();
1832
+ const topic = this.buildFullTopicName(kafkaConfig.namespace);
1833
+ const sr = this.config.schemaConfig;
1834
+ if (sr && sr.kind === "JSON") {
1835
+ const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1836
+ if (!schemaRegistryUrl) {
1837
+ throw new Error("Schema Registry URL not configured");
1838
+ }
1839
+ const {
1840
+ default: { SchemaRegistry }
1841
+ } = await import("@kafkajs/confluent-schema-registry");
1842
+ const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1843
+ let schemaId = void 0;
1844
+ if ("id" in sr.reference) {
1845
+ schemaId = sr.reference.id;
1846
+ } else if ("subjectLatest" in sr.reference) {
1847
+ schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1848
+ } else if ("subject" in sr.reference) {
1849
+ schemaId = await registry.getRegistryId(
1850
+ sr.reference.subject,
1851
+ sr.reference.version
1852
+ );
1853
+ }
1854
+ if (schemaId === void 0) {
1855
+ throw new Error("Malformed schema reference.");
1856
+ }
1857
+ const encoded = await Promise.all(
1858
+ flat.map(
1859
+ (v) => registry.encode(schemaId, v)
1860
+ )
1861
+ );
1862
+ await producer.send({
1863
+ topic,
1864
+ messages: encoded.map((value) => ({ value }))
1865
+ });
1866
+ return;
1867
+ } else if (sr !== void 0) {
1868
+ throw new Error("Currently only JSON Schema is supported.");
1869
+ }
1870
+ await producer.send({
1871
+ topic,
1872
+ messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1873
+ });
1737
1874
  }
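`send` accepts one record or an array, short-circuits on empty input, and JSON-serializes each value unless a JSON `schemaConfig` routes encoding through Confluent Schema Registry. A usage sketch, assuming a `Stream<PageView>` whose schema and columns are injected by the compiler plugin as usual (`PageView`/`pageViews` are illustrative):

```typescript
// Sketch: producing to a stream's Kafka topic.
import { Stream } from "@514labs/moose-lib";

interface PageView {
  userId: number;
  path: string;
}

const pageViews = new Stream<PageView>("pageViews");

await pageViews.send({ userId: 1, path: "/home" });   // single record
await pageViews.send([
  { userId: 1, path: "/docs" },
  { userId: 2, path: "/pricing" },
]);                                                    // batch
await pageViews.send([]);                              // returns early, nothing produced
```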
1738
- } else {
1739
- this._transformations.set(destination.name, [
1740
- [destination, transformation, transformConfig]
1741
- ]);
1742
- }
1743
- }
1744
- /**
1745
- * Adds a consumer function that processes messages from this stream.
1746
- * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1747
- *
1748
- * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1749
- * @param config Optional configuration for this specific consumer, like a version.
1750
- */
1751
- addConsumer(consumer, config) {
1752
- const sourceFile = getSourceFileFromStack(new Error().stack);
1753
- const consumerConfig = {
1754
- ...config ?? {},
1755
- sourceFile
1756
- };
1757
- if (consumerConfig.deadLetterQueue === void 0) {
1758
- consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1759
- }
1760
- const hasVersion = this._consumers.some(
1761
- (existing) => existing.config.version === consumerConfig.version
1762
- );
1763
- if (!hasVersion) {
1764
- this._consumers.push({ consumer, config: consumerConfig });
1765
- }
1766
- }
1767
- /**
1768
- * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1769
- * @param values The value or values to send to this stream.
1770
- * @returns A `RoutedMessage` object associating the values with this stream.
1771
- *
1772
- * @example
1773
- * ```typescript
1774
- * sourceStream.addMultiTransform((record) => [
1775
- * destinationStream1.routed(transformedRecord1),
1776
- * destinationStream2.routed([record2a, record2b])
1777
- * ]);
1778
- * ```
1779
- */
1780
- routed = (values) => new RoutedMessage(this, values);
1781
- /**
1782
- * Adds a single transformation function that can route messages to multiple destination streams.
1783
- * This is an alternative to adding multiple individual `addTransform` calls.
1784
- * Only one multi-transform function can be added per stream.
1785
- *
1786
- * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1787
- * each specifying a destination stream and the message(s) to send to it.
1788
- */
1789
- addMultiTransform(transformation) {
1790
- this._multipleTransformations = transformation;
1791
- }
1792
- };
1793
- function attachTypeGuard(dl, typeGuard) {
1794
- dl.asTyped = () => typeGuard(dl.originalRecord);
1795
- }
1796
- var DeadLetterQueue = class extends Stream {
1797
- constructor(name, config, typeGuard) {
1798
- if (typeGuard === void 0) {
1799
- throw new Error(
1800
- "Supply the type param T so that the schema is inserted by the compiler plugin."
1801
- );
1802
- }
1803
- super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1804
- this.typeGuard = typeGuard;
1805
- getMooseInternal().streams.set(name, this);
1806
- }
1807
- /**
1808
- * Internal type guard function for validating and casting original records.
1809
- *
1810
- * @internal
1811
- */
1812
- typeGuard;
1813
- /**
1814
- * Adds a transformation step for dead letter records.
1815
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1816
- *
1817
- * @template U The output type for the transformation
1818
- * @param destination The destination stream for transformed messages
1819
- * @param transformation Function to transform dead letter records
1820
- * @param config Optional transformation configuration
1821
- */
1822
- addTransform(destination, transformation, config) {
1823
- const withValidate = (deadLetter) => {
1824
- attachTypeGuard(deadLetter, this.typeGuard);
1825
- return transformation(deadLetter);
1826
- };
1827
- super.addTransform(destination, withValidate, config);
1828
- }
1829
- /**
1830
- * Adds a consumer for dead letter records.
1831
- * The consumer function receives a DeadLetter<T> with type recovery capabilities.
1832
- *
1833
- * @param consumer Function to process dead letter records
1834
- * @param config Optional consumer configuration
1835
- */
1836
- addConsumer(consumer, config) {
1837
- const withValidate = (deadLetter) => {
1838
- attachTypeGuard(deadLetter, this.typeGuard);
1839
- return consumer(deadLetter);
1840
- };
1841
- super.addConsumer(withValidate, config);
1842
- }
1843
- /**
1844
- * Adds a multi-stream transformation for dead letter records.
1845
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1846
- *
1847
- * @param transformation Function to route dead letter records to multiple destinations
1848
- */
1849
- addMultiTransform(transformation) {
1850
- const withValidate = (deadLetter) => {
1851
- attachTypeGuard(deadLetter, this.typeGuard);
1852
- return transformation(deadLetter);
1853
- };
1854
- super.addMultiTransform(withValidate);
1855
- }
1856
- };
1857
-
1858
- // src/dmv2/sdk/workflow.ts
1859
- var Task = class {
1860
- /**
1861
- * Creates a new Task instance.
1862
- *
1863
- * @param name - Unique identifier for the task
1864
- * @param config - Configuration object defining the task behavior
1865
- *
1866
- * @example
1867
- * ```typescript
1868
- * // No input, no output
1869
- * const task1 = new Task<null, void>("task1", {
1870
- * run: async () => {
1871
- * console.log("No input/output");
1872
- * }
1873
- * });
1874
- *
1875
- * // No input, but has output
1876
- * const task2 = new Task<null, OutputType>("task2", {
1877
- * run: async () => {
1878
- * return someOutput;
1879
- * }
1880
- * });
1881
- *
1882
- * // Has input, no output
1883
- * const task3 = new Task<InputType, void>("task3", {
1884
- * run: async (input: InputType) => {
1885
- * // process input but return nothing
1886
- * }
1887
- * });
1888
- *
1889
- * // Has both input and output
1890
- * const task4 = new Task<InputType, OutputType>("task4", {
1891
- * run: async (input: InputType) => {
1892
- * return process(input);
1893
- * }
1894
- * });
1895
- * ```
1896
- */
1897
- constructor(name, config) {
1898
- this.name = name;
1899
- this.config = config;
1900
- }
1901
- };
1902
- var Workflow = class {
1903
- /**
1904
- * Creates a new Workflow instance and registers it with the Moose system.
1905
- *
1906
- * @param name - Unique identifier for the workflow
1907
- * @param config - Configuration object defining the workflow behavior and task orchestration
1908
- * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
1909
- */
1910
- constructor(name, config) {
1911
- this.name = name;
1912
- this.config = config;
1913
- const workflows = getMooseInternal().workflows;
1914
- if (workflows.has(name)) {
1915
- throw new Error(`Workflow with name ${name} already exists`);
1916
- }
1917
- this.validateTaskGraph(config.startingTask, name);
1918
- workflows.set(name, this);
1919
- }
1920
- /**
1921
- * Validates the task graph to ensure there are no null tasks or infinite loops.
1922
- *
1923
- * @private
1924
- * @param startingTask - The starting task to begin validation from
1925
- * @param workflowName - The name of the workflow being validated (for error messages)
1926
- * @throws {Error} When null/undefined tasks are found or infinite loops are detected
1927
- */
1928
- validateTaskGraph(startingTask, workflowName) {
1929
- if (startingTask === null || startingTask === void 0) {
1930
- throw new Error(
1931
- `Workflow "${workflowName}" has a null or undefined starting task`
1932
- );
1933
- }
1934
- const visited = /* @__PURE__ */ new Set();
1935
- const recursionStack = /* @__PURE__ */ new Set();
1936
- const validateTask = (task, currentPath) => {
1937
- if (task === null || task === void 0) {
1938
- const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
1939
- throw new Error(
1940
- `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
1941
- );
1875
+ /**
1876
+ * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1877
+ * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1878
+ *
1879
+ * @template U The data type of the messages in the destination stream.
1880
+ * @param destination The destination stream for the transformed messages.
1881
+ * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1882
+ * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1883
+ * @param config Optional configuration for this specific transformation step, like a version.
1884
+ */
1885
+ addTransform(destination, transformation, config) {
1886
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1887
+ const transformConfig = {
1888
+ ...config ?? {},
1889
+ sourceFile
1890
+ };
1891
+ if (transformConfig.deadLetterQueue === void 0) {
1892
+ transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1893
+ }
1894
+ if (this._transformations.has(destination.name)) {
1895
+ const existingTransforms = this._transformations.get(destination.name);
1896
+ const hasVersion = existingTransforms.some(
1897
+ ([_, __, cfg]) => cfg.version === transformConfig.version
1898
+ );
1899
+ if (!hasVersion) {
1900
+ existingTransforms.push([destination, transformation, transformConfig]);
1901
+ }
1902
+ } else {
1903
+ this._transformations.set(destination.name, [
1904
+ [destination, transformation, transformConfig]
1905
+ ]);
1906
+ }
1942
1907
  }
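
A minimal sketch of the transform registry above, assuming two hypothetical user-defined streams (in normal usage the compiler plugin injects the schema and columns for the type parameters). Distinct `version` identifiers let two transforms to the same destination coexist; re-adding an existing version is a no-op per the `hasVersion` check:

```typescript
import { Stream } from "@514labs/moose-lib";

interface RawEvent { payload: string; }
interface ParsedEvent { field: string; }

// Hypothetical streams; schema/columns come from the compiler plugin.
const rawEvents = new Stream<RawEvent>("rawEvents");
const parsedEvents = new Stream<ParsedEvent>("parsedEvents");

rawEvents.addTransform(
  parsedEvents,
  (m) => ({ field: m.payload }), // one-to-one transform
  { version: "1.0" },
);
rawEvents.addTransform(
  parsedEvents,
  (m) => (m.payload ? [{ field: m.payload }] : []), // [] filters the message out
  { version: "2.0" }, // distinct version, so both transforms are kept
);
```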
1943
- const taskName = task.name;
1944
- if (recursionStack.has(taskName)) {
1945
- const cycleStartIndex = currentPath.indexOf(taskName);
1946
- const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
1947
- throw new Error(
1948
- `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
1908
+ /**
1909
+ * Adds a consumer function that processes messages from this stream.
1910
+ * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1911
+ *
1912
+ * @param consumer A function that takes a message of type T and performs an action (e.g., a side effect or logging); it should return `void` or `Promise<void>`.
1913
+ * @param config Optional configuration for this specific consumer, like a version.
1914
+ */
1915
+ addConsumer(consumer, config) {
1916
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1917
+ const consumerConfig = {
1918
+ ...config ?? {},
1919
+ sourceFile
1920
+ };
1921
+ if (consumerConfig.deadLetterQueue === void 0) {
1922
+ consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1923
+ }
1924
+ const hasVersion = this._consumers.some(
1925
+ (existing) => existing.config.version === consumerConfig.version
1949
1926
  );
1927
+ if (!hasVersion) {
1928
+ this._consumers.push({ consumer, config: consumerConfig });
1929
+ }
1950
1930
  }
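
And the consumer counterpart, continuing the stream sketch above: the `version` de-duplication mirrors `addTransform`, and the stream's default dead letter queue is applied when the config does not set one.

```typescript
rawEvents.addConsumer(
  async (m) => { console.log("observed payload:", m.payload); },
  { version: "1.0" }, // a second consumer with the same version would be ignored
);
```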
1951
- if (visited.has(taskName)) {
1952
- return;
1931
+ /**
1932
+ * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1933
+ * @param values The value or values to send to this stream.
1934
+ * @returns A `RoutedMessage` object associating the values with this stream.
1935
+ *
1936
+ * @example
1937
+ * ```typescript
1938
+ * sourceStream.addMultiTransform((record) => [
1939
+ * destinationStream1.routed(transformedRecord1),
1940
+ * destinationStream2.routed([record2a, record2b])
1941
+ * ]);
1942
+ * ```
1943
+ */
1944
+ routed = (values) => new RoutedMessage(this, values);
1945
+ /**
1946
+ * Adds a single transformation function that can route messages to multiple destination streams.
1947
+ * This is an alternative to registering several individual `addTransform` calls.
1948
+ * Only one multi-transform function can be added per stream.
1949
+ *
1950
+ * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1951
+ * each specifying a destination stream and the message(s) to send to it.
1952
+ */
1953
+ addMultiTransform(transformation) {
1954
+ this._multipleTransformations = transformation;
1953
1955
  }
1954
- visited.add(taskName);
1955
- recursionStack.add(taskName);
1956
- if (task.config.onComplete) {
1957
- for (const nextTask of task.config.onComplete) {
1958
- validateTask(nextTask, [...currentPath, taskName]);
1956
+ };
1957
+ DeadLetterQueue = class extends Stream {
1958
+ constructor(name, config, typeGuard) {
1959
+ if (typeGuard === void 0) {
1960
+ throw new Error(
1961
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
1962
+ );
1959
1963
  }
1964
+ super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1965
+ this.typeGuard = typeGuard;
1966
+ getMooseInternal().streams.set(name, this);
1967
+ }
1968
+ /**
1969
+ * Internal type guard function for validating and casting original records.
1970
+ *
1971
+ * @internal
1972
+ */
1973
+ typeGuard;
1974
+ /**
1975
+ * Adds a transformation step for dead letter records.
1976
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1977
+ *
1978
+ * @template U The output type for the transformation
1979
+ * @param destination The destination stream for transformed messages
1980
+ * @param transformation Function to transform dead letter records
1981
+ * @param config Optional transformation configuration
1982
+ */
1983
+ addTransform(destination, transformation, config) {
1984
+ const withValidate = (deadLetter) => {
1985
+ attachTypeGuard(deadLetter, this.typeGuard);
1986
+ return transformation(deadLetter);
1987
+ };
1988
+ super.addTransform(destination, withValidate, config);
1989
+ }
1990
+ /**
1991
+ * Adds a consumer for dead letter records.
1992
+ * The consumer function receives a DeadLetter<T> with type recovery capabilities.
1993
+ *
1994
+ * @param consumer Function to process dead letter records
1995
+ * @param config Optional consumer configuration
1996
+ */
1997
+ addConsumer(consumer, config) {
1998
+ const withValidate = (deadLetter) => {
1999
+ attachTypeGuard(deadLetter, this.typeGuard);
2000
+ return consumer(deadLetter);
2001
+ };
2002
+ super.addConsumer(withValidate, config);
2003
+ }
2004
+ /**
2005
+ * Adds a multi-stream transformation for dead letter records.
2006
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
2007
+ *
2008
+ * @param transformation Function to route dead letter records to multiple destinations
2009
+ */
2010
+ addMultiTransform(transformation) {
2011
+ const withValidate = (deadLetter) => {
2012
+ attachTypeGuard(deadLetter, this.typeGuard);
2013
+ return transformation(deadLetter);
2014
+ };
2015
+ super.addMultiTransform(withValidate);
1960
2016
  }
1961
- recursionStack.delete(taskName);
1962
2017
  };
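
A hedged sketch of consuming a dead letter queue. The instance and record shape here are hypothetical; as with the other classes, the type guard for `T` is expected to be injected by the compiler plugin (the constructor throws without it), and `attachTypeGuard` above is what makes the original record's type recoverable from the dead letter — the `asTyped()` helper name is an assumption, not confirmed by this diff:

```typescript
import { DeadLetterQueue } from "@514labs/moose-lib";

interface UserEvent { id: string; }

// Hypothetical DLQ; the compiler plugin supplies the UserEvent type guard.
const dlq = new DeadLetterQueue<UserEvent>("userEventsDLQ");

dlq.addConsumer(async (deadLetter) => {
  console.error("dead letter received:", deadLetter);
  // const original = deadLetter.asTyped(); // assumed recovery helper backed by the attached guard
});
```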
1963
- validateTask(startingTask, []);
1964
- }
1965
- };
1966
-
1967
- // src/dmv2/sdk/ingestApi.ts
1968
- var IngestApi = class extends TypedBase {
1969
- constructor(name, config, schema, columns, validators, allowExtraFields) {
1970
- super(name, config, schema, columns, void 0, allowExtraFields);
1971
- const ingestApis = getMooseInternal().ingestApis;
1972
- if (ingestApis.has(name)) {
1973
- throw new Error(`Ingest API with name ${name} already exists`);
1974
- }
1975
- ingestApis.set(name, this);
1976
2018
  }
1977
- };
2019
+ });
1978
2020
 
1979
- // src/dmv2/sdk/consumptionApi.ts
1980
- var Api = class extends TypedBase {
1981
- /** @internal The handler function that processes requests and generates responses. */
1982
- _handler;
1983
- /** @internal The JSON schema definition for the response type R. */
1984
- responseSchema;
1985
- constructor(name, handler, config, schema, columns, responseSchema) {
1986
- super(name, config ?? {}, schema, columns);
1987
- this._handler = handler;
1988
- this.responseSchema = responseSchema ?? {
1989
- version: "3.1",
1990
- schemas: [{ type: "array", items: { type: "object" } }],
1991
- components: { schemas: {} }
2021
+ // src/dmv2/sdk/workflow.ts
2022
+ var Task, Workflow;
2023
+ var init_workflow = __esm({
2024
+ "src/dmv2/sdk/workflow.ts"() {
2025
+ "use strict";
2026
+ init_internal();
2027
+ Task = class {
2028
+ /**
2029
+ * Creates a new Task instance.
2030
+ *
2031
+ * @param name - Unique identifier for the task
2032
+ * @param config - Configuration object defining the task behavior
2033
+ *
2034
+ * @example
2035
+ * ```typescript
2036
+ * // No input, no output
2037
+ * const task1 = new Task<null, void>("task1", {
2038
+ * run: async () => {
2039
+ * console.log("No input/output");
2040
+ * }
2041
+ * });
2042
+ *
2043
+ * // No input, but has output
2044
+ * const task2 = new Task<null, OutputType>("task2", {
2045
+ * run: async () => {
2046
+ * return someOutput;
2047
+ * }
2048
+ * });
2049
+ *
2050
+ * // Has input, no output
2051
+ * const task3 = new Task<InputType, void>("task3", {
2052
+ * run: async (input: InputType) => {
2053
+ * // process input but return nothing
2054
+ * }
2055
+ * });
2056
+ *
2057
+ * // Has both input and output
2058
+ * const task4 = new Task<InputType, OutputType>("task4", {
2059
+ * run: async (input: InputType) => {
2060
+ * return process(input);
2061
+ * }
2062
+ * });
2063
+ * ```
2064
+ */
2065
+ constructor(name, config) {
2066
+ this.name = name;
2067
+ this.config = config;
2068
+ }
1992
2069
  };
1993
- const apis = getMooseInternal().apis;
1994
- const key = `${name}${config?.version ? `:${config.version}` : ""}`;
1995
- if (apis.has(key)) {
1996
- throw new Error(
1997
- `Consumption API with name ${name} and version ${config?.version} already exists`
1998
- );
1999
- }
2000
- apis.set(key, this);
2001
- if (config?.path) {
2002
- if (config.version) {
2003
- const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
2004
- if (pathEndsWithVersion) {
2005
- if (apis.has(config.path)) {
2006
- const existing = apis.get(config.path);
2070
+ Workflow = class {
2071
+ /**
2072
+ * Creates a new Workflow instance and registers it with the Moose system.
2073
+ *
2074
+ * @param name - Unique identifier for the workflow
2075
+ * @param config - Configuration object defining the workflow behavior and task orchestration
2076
+ * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
2077
+ */
2078
+ constructor(name, config) {
2079
+ this.name = name;
2080
+ this.config = config;
2081
+ const workflows = getMooseInternal().workflows;
2082
+ if (workflows.has(name)) {
2083
+ throw new Error(`Workflow with name ${name} already exists`);
2084
+ }
2085
+ this.validateTaskGraph(config.startingTask, name);
2086
+ workflows.set(name, this);
2087
+ }
2088
+ /**
2089
+ * Validates the task graph to ensure there are no null tasks or infinite loops.
2090
+ *
2091
+ * @private
2092
+ * @param startingTask - The starting task to begin validation from
2093
+ * @param workflowName - The name of the workflow being validated (for error messages)
2094
+ * @throws {Error} When null/undefined tasks are found or infinite loops are detected
2095
+ */
2096
+ validateTaskGraph(startingTask, workflowName) {
2097
+ if (startingTask === null || startingTask === void 0) {
2098
+ throw new Error(
2099
+ `Workflow "${workflowName}" has a null or undefined starting task`
2100
+ );
2101
+ }
2102
+ const visited = /* @__PURE__ */ new Set();
2103
+ const recursionStack = /* @__PURE__ */ new Set();
2104
+ const validateTask = (task, currentPath) => {
2105
+ if (task === null || task === void 0) {
2106
+ const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
2007
2107
  throw new Error(
2008
- `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2108
+ `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
2009
2109
  );
2010
2110
  }
2011
- apis.set(config.path, this);
2012
- } else {
2013
- const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
2014
- if (apis.has(versionedPath)) {
2015
- const existing = apis.get(versionedPath);
2111
+ const taskName = task.name;
2112
+ if (recursionStack.has(taskName)) {
2113
+ const cycleStartIndex = currentPath.indexOf(taskName);
2114
+ const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
2016
2115
  throw new Error(
2017
- `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
2116
+ `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
2018
2117
  );
2019
2118
  }
2020
- apis.set(versionedPath, this);
2021
- if (!apis.has(config.path)) {
2022
- apis.set(config.path, this);
2119
+ if (visited.has(taskName)) {
2120
+ return;
2023
2121
  }
2122
+ visited.add(taskName);
2123
+ recursionStack.add(taskName);
2124
+ if (task.config.onComplete) {
2125
+ for (const nextTask of task.config.onComplete) {
2126
+ validateTask(nextTask, [...currentPath, taskName]);
2127
+ }
2128
+ }
2129
+ recursionStack.delete(taskName);
2130
+ };
2131
+ validateTask(startingTask, []);
2132
+ }
2133
+ };
2134
+ }
2135
+ });
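
To make the task-graph validation concrete, a minimal sketch following the `Task` constructor's JSDoc examples above (payload types are hypothetical). `validateTaskGraph` walks `onComplete`; pointing `load` back at `extract` would trip the recursion-stack check and raise the infinite-loop error:

```typescript
import { Task, Workflow } from "@514labs/moose-lib";

const load = new Task<string[], void>("load", {
  run: async (rows: string[]) => { console.log("loading", rows.length, "rows"); },
});
const extract = new Task<null, string[]>("extract", {
  run: async () => ["a", "b"],
  onComplete: [load], // the edge validateTaskGraph traverses
});

// Registers under getMooseInternal().workflows; duplicate names throw.
new Workflow("nightly", { startingTask: extract, retries: 1, timeout: "30m" });
```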
2136
+
2137
+ // src/dmv2/sdk/ingestApi.ts
2138
+ var IngestApi;
2139
+ var init_ingestApi = __esm({
2140
+ "src/dmv2/sdk/ingestApi.ts"() {
2141
+ "use strict";
2142
+ init_typedBase();
2143
+ init_internal();
2144
+ IngestApi = class extends TypedBase {
2145
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2146
+ super(name, config, schema, columns, void 0, allowExtraFields);
2147
+ const ingestApis = getMooseInternal().ingestApis;
2148
+ if (ingestApis.has(name)) {
2149
+ throw new Error(`Ingest API with name ${name} already exists`);
2024
2150
  }
2025
- } else {
2026
- if (apis.has(config.path)) {
2027
- const existing = apis.get(config.path);
2151
+ ingestApis.set(name, this);
2152
+ }
2153
+ };
2154
+ }
2155
+ });
2156
+
2157
+ // src/dmv2/sdk/consumptionApi.ts
2158
+ var Api, ConsumptionApi;
2159
+ var init_consumptionApi = __esm({
2160
+ "src/dmv2/sdk/consumptionApi.ts"() {
2161
+ "use strict";
2162
+ init_typedBase();
2163
+ init_internal();
2164
+ Api = class extends TypedBase {
2165
+ /** @internal The handler function that processes requests and generates responses. */
2166
+ _handler;
2167
+ /** @internal The JSON schema definition for the response type R. */
2168
+ responseSchema;
2169
+ constructor(name, handler, config, schema, columns, responseSchema) {
2170
+ super(name, config ?? {}, schema, columns);
2171
+ this._handler = handler;
2172
+ this.responseSchema = responseSchema ?? {
2173
+ version: "3.1",
2174
+ schemas: [{ type: "array", items: { type: "object" } }],
2175
+ components: { schemas: {} }
2176
+ };
2177
+ const apis = getMooseInternal().apis;
2178
+ const key = `${name}${config?.version ? `:${config.version}` : ""}`;
2179
+ if (apis.has(key)) {
2028
2180
  throw new Error(
2029
- `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2181
+ `Consumption API with name ${name} and version ${config?.version} already exists`
2030
2182
  );
2031
2183
  }
2032
- apis.set(config.path, this);
2184
+ apis.set(key, this);
2185
+ if (config?.path) {
2186
+ if (config.version) {
2187
+ const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
2188
+ if (pathEndsWithVersion) {
2189
+ if (apis.has(config.path)) {
2190
+ const existing = apis.get(config.path);
2191
+ throw new Error(
2192
+ `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2193
+ );
2194
+ }
2195
+ apis.set(config.path, this);
2196
+ } else {
2197
+ const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
2198
+ if (apis.has(versionedPath)) {
2199
+ const existing = apis.get(versionedPath);
2200
+ throw new Error(
2201
+ `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
2202
+ );
2203
+ }
2204
+ apis.set(versionedPath, this);
2205
+ if (!apis.has(config.path)) {
2206
+ apis.set(config.path, this);
2207
+ }
2208
+ }
2209
+ } else {
2210
+ if (apis.has(config.path)) {
2211
+ const existing = apis.get(config.path);
2212
+ throw new Error(
2213
+ `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2214
+ );
2215
+ }
2216
+ apis.set(config.path, this);
2217
+ }
2218
+ }
2033
2219
  }
2034
- }
2035
- }
2036
- /**
2037
- * Retrieves the handler function associated with this Consumption API.
2038
- * @returns The handler function.
2039
- */
2040
- getHandler = () => {
2041
- return this._handler;
2042
- };
2043
- async call(baseUrl, queryParams) {
2044
- let path2;
2045
- if (this.config?.path) {
2046
- if (this.config.version) {
2047
- const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2048
- if (pathEndsWithVersion) {
2049
- path2 = this.config.path;
2220
+ /**
2221
+ * Retrieves the handler function associated with this Consumption API.
2222
+ * @returns The handler function.
2223
+ */
2224
+ getHandler = () => {
2225
+ return this._handler;
2226
+ };
2227
+ async call(baseUrl, queryParams) {
2228
+ let path2;
2229
+ if (this.config?.path) {
2230
+ if (this.config.version) {
2231
+ const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2232
+ if (pathEndsWithVersion) {
2233
+ path2 = this.config.path;
2234
+ } else {
2235
+ path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2236
+ }
2237
+ } else {
2238
+ path2 = this.config.path;
2239
+ }
2050
2240
  } else {
2051
- path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2241
+ path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2052
2242
  }
2053
- } else {
2054
- path2 = this.config.path;
2055
- }
2056
- } else {
2057
- path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2058
- }
2059
- const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2060
- const searchParams = url.searchParams;
2061
- for (const [key, value] of Object.entries(queryParams)) {
2062
- if (Array.isArray(value)) {
2063
- for (const item of value) {
2064
- if (item !== null && item !== void 0) {
2065
- searchParams.append(key, String(item));
2243
+ const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2244
+ const searchParams = url.searchParams;
2245
+ for (const [key, value] of Object.entries(queryParams)) {
2246
+ if (Array.isArray(value)) {
2247
+ for (const item of value) {
2248
+ if (item !== null && item !== void 0) {
2249
+ searchParams.append(key, String(item));
2250
+ }
2251
+ }
2252
+ } else if (value !== null && value !== void 0) {
2253
+ searchParams.append(key, String(value));
2254
+ }
2255
+ }
2256
+ const response = await fetch(url, {
2257
+ method: "GET",
2258
+ headers: {
2259
+ Accept: "application/json"
2066
2260
  }
2261
+ });
2262
+ if (!response.ok) {
2263
+ throw new Error(`HTTP error! status: ${response.status}`);
2067
2264
  }
2068
- } else if (value !== null && value !== void 0) {
2069
- searchParams.append(key, String(value));
2265
+ const data = await response.json();
2266
+ return data;
2070
2267
  }
2071
- }
2072
- const response = await fetch(url, {
2073
- method: "GET",
2074
- headers: {
2075
- Accept: "application/json"
2076
- }
2077
- });
2078
- if (!response.ok) {
2079
- throw new Error(`HTTP error! status: ${response.status}`);
2080
- }
2081
- const data = await response.json();
2082
- return data;
2268
+ };
2269
+ ConsumptionApi = Api;
2083
2270
  }
2084
- };
2085
- var ConsumptionApi = Api;
2271
+ });
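
A hedged trace of the path/version bookkeeping above (handler and types are hypothetical; schemas are normally injected by the compiler plugin). An `Api` named `bar` with `version: "1.1"` and `path: "reports/bar"` is keyed as `bar:1.1`, plus `reports/bar/1.1` and the unversioned alias `reports/bar`; `call()` resolves the same versioned URL:

```typescript
import { Api } from "@514labs/moose-lib";

interface Params { day: string; }
type Row = { total: number };

const bar = new Api<Params, Row[]>(
  "bar",
  async ({ day }) => [{ total: 0 }], // sketch handler
  { version: "1.1", path: "reports/bar" },
);

// call() appends the version since the path does not already end with it:
const rows = await bar.call("http://localhost:4000", { day: "2024-01-01" });
// -> GET http://localhost:4000/api/reports/bar/1.1?day=2024-01-01
```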
2086
2272
 
2087
2273
  // src/dmv2/sdk/ingestPipeline.ts
2088
- var IngestPipeline = class extends TypedBase {
2089
- /**
2090
- * The OLAP table component of the pipeline, if configured.
2091
- * Provides analytical query capabilities for the ingested data.
2092
- * Only present when `config.table` is not `false`.
2093
- */
2094
- table;
2095
- /**
2096
- * The stream component of the pipeline, if configured.
2097
- * Handles real-time data flow and processing between components.
2098
- * Only present when `config.stream` is not `false`.
2099
- */
2100
- stream;
2101
- /**
2102
- * The ingest API component of the pipeline, if configured.
2103
- * Provides HTTP endpoints for data ingestion.
2104
- * Only present when `config.ingestApi` is not `false`.
2105
- */
2106
- ingestApi;
2107
- /** The dead letter queue of the pipeline, if configured. */
2108
- deadLetterQueue;
2109
- constructor(name, config, schema, columns, validators, allowExtraFields) {
2110
- super(name, config, schema, columns, validators, allowExtraFields);
2111
- if (config.ingest !== void 0) {
2112
- console.warn(
2113
- "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2114
- );
2115
- if (config.ingestApi === void 0) {
2116
- config.ingestApi = config.ingest;
2274
+ var IngestPipeline;
2275
+ var init_ingestPipeline = __esm({
2276
+ "src/dmv2/sdk/ingestPipeline.ts"() {
2277
+ "use strict";
2278
+ init_typedBase();
2279
+ init_stream();
2280
+ init_olapTable();
2281
+ init_ingestApi();
2282
+ init_helpers();
2283
+ IngestPipeline = class extends TypedBase {
2284
+ /**
2285
+ * The OLAP table component of the pipeline, if configured.
2286
+ * Provides analytical query capabilities for the ingested data.
2287
+ * Only present when `config.table` is not `false`.
2288
+ */
2289
+ table;
2290
+ /**
2291
+ * The stream component of the pipeline, if configured.
2292
+ * Handles real-time data flow and processing between components.
2293
+ * Only present when `config.stream` is not `false`.
2294
+ */
2295
+ stream;
2296
+ /**
2297
+ * The ingest API component of the pipeline, if configured.
2298
+ * Provides HTTP endpoints for data ingestion.
2299
+ * Only present when `config.ingestApi` is not `false`.
2300
+ */
2301
+ ingestApi;
2302
+ /** The dead letter queue of the pipeline, if configured. */
2303
+ deadLetterQueue;
2304
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2305
+ super(name, config, schema, columns, validators, allowExtraFields);
2306
+ if (config.ingest !== void 0) {
2307
+ console.warn(
2308
+ "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2309
+ );
2310
+ if (config.ingestApi === void 0) {
2311
+ config.ingestApi = config.ingest;
2312
+ }
2313
+ }
2314
+ if (config.table) {
2315
+ const tableConfig = typeof config.table === "object" ? {
2316
+ ...config.table,
2317
+ lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2318
+ ...config.version && { version: config.version }
2319
+ } : {
2320
+ lifeCycle: config.lifeCycle,
2321
+ engine: "MergeTree" /* MergeTree */,
2322
+ ...config.version && { version: config.version }
2323
+ };
2324
+ this.table = new OlapTable(
2325
+ name,
2326
+ tableConfig,
2327
+ this.schema,
2328
+ this.columnArray,
2329
+ this.validators
2330
+ );
2331
+ }
2332
+ if (config.deadLetterQueue) {
2333
+ const streamConfig = {
2334
+ destination: void 0,
2335
+ ...typeof config.deadLetterQueue === "object" ? {
2336
+ ...config.deadLetterQueue,
2337
+ lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2338
+ } : { lifeCycle: config.lifeCycle },
2339
+ ...config.version && { version: config.version }
2340
+ };
2341
+ this.deadLetterQueue = new DeadLetterQueue(
2342
+ `${name}DeadLetterQueue`,
2343
+ streamConfig,
2344
+ validators.assert
2345
+ );
2346
+ }
2347
+ if (config.stream) {
2348
+ const streamConfig = {
2349
+ destination: this.table,
2350
+ defaultDeadLetterQueue: this.deadLetterQueue,
2351
+ ...typeof config.stream === "object" ? {
2352
+ ...config.stream,
2353
+ lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2354
+ } : { lifeCycle: config.lifeCycle },
2355
+ ...config.version && { version: config.version }
2356
+ };
2357
+ this.stream = new Stream(
2358
+ name,
2359
+ streamConfig,
2360
+ this.schema,
2361
+ this.columnArray,
2362
+ void 0,
2363
+ this.allowExtraFields
2364
+ );
2365
+ this.stream.pipelineParent = this;
2366
+ }
2367
+ const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2368
+ if (effectiveIngestAPI) {
2369
+ if (!this.stream) {
2370
+ throw new Error("Ingest API needs a stream to write to.");
2371
+ }
2372
+ const ingestConfig = {
2373
+ destination: this.stream,
2374
+ deadLetterQueue: this.deadLetterQueue,
2375
+ ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2376
+ ...config.version && { version: config.version },
2377
+ ...config.path && { path: config.path }
2378
+ };
2379
+ this.ingestApi = new IngestApi(
2380
+ name,
2381
+ ingestConfig,
2382
+ this.schema,
2383
+ this.columnArray,
2384
+ void 0,
2385
+ this.allowExtraFields
2386
+ );
2387
+ this.ingestApi.pipelineParent = this;
2388
+ }
2117
2389
  }
2118
- }
2119
- if (config.table) {
2120
- const tableConfig = typeof config.table === "object" ? {
2121
- ...config.table,
2122
- lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2123
- ...config.version && { version: config.version }
2124
- } : {
2125
- lifeCycle: config.lifeCycle,
2126
- engine: "MergeTree" /* MergeTree */,
2127
- ...config.version && { version: config.version }
2128
- };
2129
- this.table = new OlapTable(
2130
- name,
2131
- tableConfig,
2132
- this.schema,
2133
- this.columnArray,
2134
- this.validators
2135
- );
2136
- }
2137
- if (config.deadLetterQueue) {
2138
- const streamConfig = {
2139
- destination: void 0,
2140
- ...typeof config.deadLetterQueue === "object" ? {
2141
- ...config.deadLetterQueue,
2142
- lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2143
- } : { lifeCycle: config.lifeCycle },
2144
- ...config.version && { version: config.version }
2145
- };
2146
- this.deadLetterQueue = new DeadLetterQueue(
2147
- `${name}DeadLetterQueue`,
2148
- streamConfig,
2149
- validators.assert
2150
- );
2151
- }
2152
- if (config.stream) {
2153
- const streamConfig = {
2154
- destination: this.table,
2155
- defaultDeadLetterQueue: this.deadLetterQueue,
2156
- ...typeof config.stream === "object" ? {
2157
- ...config.stream,
2158
- lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2159
- } : { lifeCycle: config.lifeCycle },
2160
- ...config.version && { version: config.version }
2161
- };
2162
- this.stream = new Stream(
2163
- name,
2164
- streamConfig,
2165
- this.schema,
2166
- this.columnArray,
2167
- void 0,
2168
- this.allowExtraFields
2169
- );
2170
- this.stream.pipelineParent = this;
2171
- }
2172
- const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2173
- if (effectiveIngestAPI) {
2174
- if (!this.stream) {
2175
- throw new Error("Ingest API needs a stream to write to.");
2176
- }
2177
- const ingestConfig = {
2178
- destination: this.stream,
2179
- deadLetterQueue: this.deadLetterQueue,
2180
- ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2181
- ...config.version && { version: config.version },
2182
- ...config.path && { path: config.path }
2183
- };
2184
- this.ingestApi = new IngestApi(
2185
- name,
2186
- ingestConfig,
2187
- this.schema,
2188
- this.columnArray,
2189
- void 0,
2190
- this.allowExtraFields
2191
- );
2192
- this.ingestApi.pipelineParent = this;
2193
- }
2390
+ };
2194
2391
  }
2195
- };
2392
+ });
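
A sketch of the fan-out the `IngestPipeline` constructor performs, assuming a hypothetical event type (schema injected by the compiler plugin). One name yields a table, a stream whose destination is that table, an HTTP ingest API whose destination is the stream, and a `${name}DeadLetterQueue`; note that the legacy `ingest` flag is deprecated in favor of `ingestApi`:

```typescript
import { IngestPipeline } from "@514labs/moose-lib";

interface UserEvent { id: string; ts: Date; }

const events = new IngestPipeline<UserEvent>("events", {
  table: true,           // OlapTable "events" (MergeTree by default)
  stream: true,          // Stream "events", destination = the table
  ingestApi: true,       // IngestApi "events", destination = the stream
  deadLetterQueue: true, // DeadLetterQueue "eventsDeadLetterQueue"
});
```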
2196
2393
 
2197
2394
  // src/dmv2/sdk/etlPipeline.ts
2198
- var InternalBatcher = class {
2199
- iterator;
2200
- batchSize;
2201
- constructor(asyncIterable, batchSize = 20) {
2202
- this.iterator = asyncIterable[Symbol.asyncIterator]();
2203
- this.batchSize = batchSize;
2204
- }
2205
- async getNextBatch() {
2206
- const items = [];
2207
- for (let i = 0; i < this.batchSize; i++) {
2208
- const { value, done } = await this.iterator.next();
2209
- if (done) {
2210
- return { items, hasMore: false };
2211
- }
2212
- items.push(value);
2213
- }
2214
- return { items, hasMore: true };
2215
- }
2216
- };
2217
- var ETLPipeline = class {
2218
- constructor(name, config) {
2219
- this.name = name;
2220
- this.config = config;
2221
- this.setupPipeline();
2222
- }
2223
- batcher;
2224
- setupPipeline() {
2225
- this.batcher = this.createBatcher();
2226
- const tasks = this.createAllTasks();
2227
- tasks.extract.config.onComplete = [tasks.transform];
2228
- tasks.transform.config.onComplete = [tasks.load];
2229
- new Workflow(this.name, {
2230
- startingTask: tasks.extract,
2231
- retries: 1,
2232
- timeout: "30m"
2233
- });
2234
- }
2235
- createBatcher() {
2236
- const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2237
- return new InternalBatcher(iterable);
2238
- }
2239
- getDefaultTaskConfig() {
2240
- return {
2241
- retries: 1,
2242
- timeout: "30m"
2243
- };
2244
- }
2245
- createAllTasks() {
2246
- const taskConfig = this.getDefaultTaskConfig();
2247
- return {
2248
- extract: this.createExtractTask(taskConfig),
2249
- transform: this.createTransformTask(taskConfig),
2250
- load: this.createLoadTask(taskConfig)
2395
+ var InternalBatcher, ETLPipeline;
2396
+ var init_etlPipeline = __esm({
2397
+ "src/dmv2/sdk/etlPipeline.ts"() {
2398
+ "use strict";
2399
+ init_workflow();
2400
+ InternalBatcher = class {
2401
+ iterator;
2402
+ batchSize;
2403
+ constructor(asyncIterable, batchSize = 20) {
2404
+ this.iterator = asyncIterable[Symbol.asyncIterator]();
2405
+ this.batchSize = batchSize;
2406
+ }
2407
+ async getNextBatch() {
2408
+ const items = [];
2409
+ for (let i = 0; i < this.batchSize; i++) {
2410
+ const { value, done } = await this.iterator.next();
2411
+ if (done) {
2412
+ return { items, hasMore: false };
2413
+ }
2414
+ items.push(value);
2415
+ }
2416
+ return { items, hasMore: true };
2417
+ }
2251
2418
  };
2252
- }
2253
- createExtractTask(taskConfig) {
2254
- return new Task(`${this.name}_extract`, {
2255
- run: async ({}) => {
2256
- console.log(`Running extract task for ${this.name}...`);
2257
- const batch = await this.batcher.getNextBatch();
2258
- console.log(`Extract task completed with ${batch.items.length} items`);
2259
- return batch;
2260
- },
2261
- retries: taskConfig.retries,
2262
- timeout: taskConfig.timeout
2263
- });
2264
- }
2265
- createTransformTask(taskConfig) {
2266
- return new Task(
2267
- `${this.name}_transform`,
2268
- {
2269
- // Use new single-parameter context API for handlers
2270
- run: async ({ input }) => {
2271
- const batch = input;
2272
- console.log(
2273
- `Running transform task for ${this.name} with ${batch.items.length} items...`
2274
- );
2419
+ ETLPipeline = class {
2420
+ constructor(name, config) {
2421
+ this.name = name;
2422
+ this.config = config;
2423
+ this.setupPipeline();
2424
+ }
2425
+ batcher;
2426
+ setupPipeline() {
2427
+ this.batcher = this.createBatcher();
2428
+ const tasks = this.createAllTasks();
2429
+ tasks.extract.config.onComplete = [tasks.transform];
2430
+ tasks.transform.config.onComplete = [tasks.load];
2431
+ new Workflow(this.name, {
2432
+ startingTask: tasks.extract,
2433
+ retries: 1,
2434
+ timeout: "30m"
2435
+ });
2436
+ }
2437
+ createBatcher() {
2438
+ const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2439
+ return new InternalBatcher(iterable);
2440
+ }
2441
+ getDefaultTaskConfig() {
2442
+ return {
2443
+ retries: 1,
2444
+ timeout: "30m"
2445
+ };
2446
+ }
2447
+ createAllTasks() {
2448
+ const taskConfig = this.getDefaultTaskConfig();
2449
+ return {
2450
+ extract: this.createExtractTask(taskConfig),
2451
+ transform: this.createTransformTask(taskConfig),
2452
+ load: this.createLoadTask(taskConfig)
2453
+ };
2454
+ }
2455
+ createExtractTask(taskConfig) {
2456
+ return new Task(`${this.name}_extract`, {
2457
+ run: async ({}) => {
2458
+ console.log(`Running extract task for ${this.name}...`);
2459
+ const batch = await this.batcher.getNextBatch();
2460
+ console.log(`Extract task completed with ${batch.items.length} items`);
2461
+ return batch;
2462
+ },
2463
+ retries: taskConfig.retries,
2464
+ timeout: taskConfig.timeout
2465
+ });
2466
+ }
2467
+ createTransformTask(taskConfig) {
2468
+ return new Task(
2469
+ `${this.name}_transform`,
2470
+ {
2471
+ // Use new single-parameter context API for handlers
2472
+ run: async ({ input }) => {
2473
+ const batch = input;
2474
+ console.log(
2475
+ `Running transform task for ${this.name} with ${batch.items.length} items...`
2476
+ );
2477
+ const transformedItems = [];
2478
+ for (const item of batch.items) {
2479
+ const transformed = await this.config.transform(item);
2480
+ transformedItems.push(transformed);
2481
+ }
2482
+ console.log(
2483
+ `Transform task completed with ${transformedItems.length} items`
2484
+ );
2485
+ return { items: transformedItems };
2486
+ },
2487
+ retries: taskConfig.retries,
2488
+ timeout: taskConfig.timeout
2489
+ }
2490
+ );
2491
+ }
2492
+ createLoadTask(taskConfig) {
2493
+ return new Task(`${this.name}_load`, {
2494
+ run: async ({ input: transformedItems }) => {
2495
+ console.log(
2496
+ `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2497
+ );
2498
+ if ("insert" in this.config.load) {
2499
+ await this.config.load.insert(transformedItems.items);
2500
+ } else {
2501
+ await this.config.load(transformedItems.items);
2502
+ }
2503
+ console.log(`Load task completed`);
2504
+ },
2505
+ retries: taskConfig.retries,
2506
+ timeout: taskConfig.timeout
2507
+ });
2508
+ }
2509
+ // Execute the entire ETL pipeline
2510
+ async run() {
2511
+ console.log(`Starting ETL Pipeline: ${this.name}`);
2512
+ let batchNumber = 1;
2513
+ do {
2514
+ console.log(`Processing batch ${batchNumber}...`);
2515
+ const batch = await this.batcher.getNextBatch();
2516
+ if (batch.items.length === 0) {
2517
+ break;
2518
+ }
2275
2519
  const transformedItems = [];
2276
- for (const item of batch.items) {
2277
- const transformed = await this.config.transform(item);
2278
- transformedItems.push(transformed);
2520
+ for (const extractedData of batch.items) {
2521
+ const transformedData = await this.config.transform(extractedData);
2522
+ transformedItems.push(transformedData);
2523
+ }
2524
+ if ("insert" in this.config.load) {
2525
+ await this.config.load.insert(transformedItems);
2526
+ } else {
2527
+ await this.config.load(transformedItems);
2279
2528
  }
2280
2529
  console.log(
2281
- `Transform task completed with ${transformedItems.length} items`
2530
+ `Completed batch ${batchNumber} with ${batch.items.length} items`
2282
2531
  );
2283
- return { items: transformedItems };
2284
- },
2285
- retries: taskConfig.retries,
2286
- timeout: taskConfig.timeout
2532
+ batchNumber++;
2533
+ if (!batch.hasMore) {
2534
+ break;
2535
+ }
2536
+ } while (true);
2537
+ console.log(`Completed ETL Pipeline: ${this.name}`);
2287
2538
  }
2288
- );
2539
+ };
2289
2540
  }
2290
- createLoadTask(taskConfig) {
2291
- return new Task(`${this.name}_load`, {
2292
- run: async ({ input: transformedItems }) => {
2293
- console.log(
2294
- `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2541
+ });
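
A hedged usage sketch for `ETLPipeline` (the type parameters and `load` callback form are assumptions consistent with the code above). `run()` pulls batches of 20 — the `InternalBatcher` default — transforms item by item, and loads each batch, preferring a `load.insert(...)` target such as an `OlapTable` when one is supplied; construction also registers a `Workflow` of the same name with `extract -> transform -> load` tasks:

```typescript
import { ETLPipeline } from "@514labs/moose-lib";

const etl = new ETLPipeline<number, string>("doubler", {
  extract: async function* () { yield 1; yield 2; yield 3; }, // AsyncIterable source
  transform: async (n) => `value-${n * 2}`,                   // per-item transform
  load: async (rows) => { console.log("loaded", rows); },     // or a table with .insert()
});

await etl.run(); // processes batches until the iterator is exhausted
```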
2542
+
2543
+ // src/dmv2/sdk/sqlResource.ts
2544
+ var SqlResource;
2545
+ var init_sqlResource = __esm({
2546
+ "src/dmv2/sdk/sqlResource.ts"() {
2547
+ "use strict";
2548
+ init_internal();
2549
+ init_sqlHelpers();
2550
+ init_stackTrace();
2551
+ SqlResource = class {
2552
+ /** @internal */
2553
+ kind = "SqlResource";
2554
+ /** Array of SQL statements to execute for setting up the resource. */
2555
+ setup;
2556
+ /** Array of SQL statements to execute for tearing down the resource. */
2557
+ teardown;
2558
+ /** The name of the SQL resource (e.g., view name, materialized view name). */
2559
+ name;
2560
+ /** List of OlapTables or Views that this resource reads data from. */
2561
+ pullsDataFrom;
2562
+ /** List of OlapTables or Views that this resource writes data to. */
2563
+ pushesDataTo;
2564
+ /** @internal Source file path where this resource was defined */
2565
+ sourceFile;
2566
+ /** @internal Source line number where this resource was defined */
2567
+ sourceLine;
2568
+ /** @internal Source column number where this resource was defined */
2569
+ sourceColumn;
2570
+ /**
2571
+ * Creates a new SqlResource instance.
2572
+ * @param name The name of the resource.
2573
+ * @param setup An array of SQL DDL statements to create the resource.
2574
+ * @param teardown An array of SQL DDL statements to drop the resource.
2575
+ * @param options Optional configuration for specifying data dependencies.
2576
+ * @param options.pullsDataFrom Tables/Views this resource reads from.
2577
+ * @param options.pushesDataTo Tables/Views this resource writes to.
2578
+ */
2579
+ constructor(name, setup, teardown, options) {
2580
+ const sqlResources = getMooseInternal().sqlResources;
2581
+ if (!isClientOnlyMode() && sqlResources.has(name)) {
2582
+ throw new Error(`SqlResource with name ${name} already exists`);
2583
+ }
2584
+ sqlResources.set(name, this);
2585
+ this.name = name;
2586
+ this.setup = setup.map(
2587
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2295
2588
  );
2296
- if ("insert" in this.config.load) {
2297
- await this.config.load.insert(transformedItems.items);
2298
- } else {
2299
- await this.config.load(transformedItems.items);
2589
+ this.teardown = teardown.map(
2590
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2591
+ );
2592
+ this.pullsDataFrom = options?.pullsDataFrom ?? [];
2593
+ this.pushesDataTo = options?.pushesDataTo ?? [];
2594
+ const stack = new Error().stack;
2595
+ const location = getSourceLocationFromStack(stack);
2596
+ if (location) {
2597
+ this.sourceFile = location.file;
2598
+ this.sourceLine = location.line;
2599
+ this.sourceColumn = location.column;
2300
2600
  }
2301
- console.log(`Load task completed`);
2302
- },
2303
- retries: taskConfig.retries,
2304
- timeout: taskConfig.timeout
2305
- });
2306
- }
2307
- // Execute the entire ETL pipeline
2308
- async run() {
2309
- console.log(`Starting ETL Pipeline: ${this.name}`);
2310
- let batchNumber = 1;
2311
- do {
2312
- console.log(`Processing batch ${batchNumber}...`);
2313
- const batch = await this.batcher.getNextBatch();
2314
- if (batch.items.length === 0) {
2315
- break;
2316
- }
2317
- const transformedItems = [];
2318
- for (const extractedData of batch.items) {
2319
- const transformedData = await this.config.transform(extractedData);
2320
- transformedItems.push(transformedData);
2321
- }
2322
- if ("insert" in this.config.load) {
2323
- await this.config.load.insert(transformedItems);
2324
- } else {
2325
- await this.config.load(transformedItems);
2326
- }
2327
- console.log(
2328
- `Completed batch ${batchNumber} with ${batch.items.length} items`
2329
- );
2330
- batchNumber++;
2331
- if (!batch.hasMore) {
2332
- break;
2333
2601
  }
2334
- } while (true);
2335
- console.log(`Completed ETL Pipeline: ${this.name}`);
2336
- }
2337
- };
2338
-
2339
- // src/dmv2/sdk/sqlResource.ts
2340
- var SqlResource = class {
2341
- /** @internal */
2342
- kind = "SqlResource";
2343
- /** Array of SQL statements to execute for setting up the resource. */
2344
- setup;
2345
- /** Array of SQL statements to execute for tearing down the resource. */
2346
- teardown;
2347
- /** The name of the SQL resource (e.g., view name, materialized view name). */
2348
- name;
2349
- /** List of OlapTables or Views that this resource reads data from. */
2350
- pullsDataFrom;
2351
- /** List of OlapTables or Views that this resource writes data to. */
2352
- pushesDataTo;
2353
- /** @internal Source file path where this resource was defined */
2354
- sourceFile;
2355
- /** @internal Source line number where this resource was defined */
2356
- sourceLine;
2357
- /** @internal Source column number where this resource was defined */
2358
- sourceColumn;
2359
- /**
2360
- * Creates a new SqlResource instance.
2361
- * @param name The name of the resource.
2362
- * @param setup An array of SQL DDL statements to create the resource.
2363
- * @param teardown An array of SQL DDL statements to drop the resource.
2364
- * @param options Optional configuration for specifying data dependencies.
2365
- * @param options.pullsDataFrom Tables/Views this resource reads from.
2366
- * @param options.pushesDataTo Tables/Views this resource writes to.
2367
- */
2368
- constructor(name, setup, teardown, options) {
2369
- const sqlResources = getMooseInternal().sqlResources;
2370
- if (!isClientOnlyMode() && sqlResources.has(name)) {
2371
- throw new Error(`SqlResource with name ${name} already exists`);
2372
- }
2373
- sqlResources.set(name, this);
2374
- this.name = name;
2375
- this.setup = setup.map(
2376
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2377
- );
2378
- this.teardown = teardown.map(
2379
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2380
- );
2381
- this.pullsDataFrom = options?.pullsDataFrom ?? [];
2382
- this.pushesDataTo = options?.pushesDataTo ?? [];
2383
- const stack = new Error().stack;
2384
- const location = getSourceLocationFromStack(stack);
2385
- if (location) {
2386
- this.sourceFile = location.file;
2387
- this.sourceLine = location.line;
2388
- this.sourceColumn = location.column;
2389
- }
2602
+ };
2390
2603
  }
2391
- };
2604
+ });
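
A minimal sketch of a hand-rolled `SqlResource` with explicit setup/teardown DDL, matching the constructor above (the SQL and resource name are illustrative; dependency hints are optional):

```typescript
import { SqlResource } from "@514labs/moose-lib";

const topUsers = new SqlResource(
  "top_users",
  ["CREATE VIEW IF NOT EXISTS top_users AS SELECT id FROM users LIMIT 10"], // setup
  ["DROP VIEW IF EXISTS top_users"],                                        // teardown
  // { pullsDataFrom: [usersTable] } // hypothetical OlapTable dependency hint
);
```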
2392
2605
 
2393
2606
  // src/dmv2/sdk/materializedView.ts
2394
- var requireTargetTableName = (tableName) => {
2395
- if (typeof tableName === "string") {
2396
- return tableName;
2397
- } else {
2398
- throw new Error("Name of targetTable is not specified.");
2399
- }
2400
- };
2401
- var MaterializedView = class extends SqlResource {
2402
- /** The target OlapTable instance where the materialized data is stored. */
2403
- targetTable;
2404
- constructor(options, targetSchema, targetColumns) {
2405
- let selectStatement = options.selectStatement;
2406
- if (typeof selectStatement !== "string") {
2407
- selectStatement = toStaticQuery(selectStatement);
2408
- }
2409
- if (targetSchema === void 0 || targetColumns === void 0) {
2410
- throw new Error(
2411
- "Supply the type param T so that the schema is inserted by the compiler plugin."
2412
- );
2413
- }
2414
- const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2415
- requireTargetTableName(
2416
- options.targetTable?.name ?? options.tableName
2417
- ),
2418
- {
2419
- orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2420
- engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2421
- },
2422
- targetSchema,
2423
- targetColumns
2424
- );
2425
- if (targetTable.name === options.materializedViewName) {
2426
- throw new Error(
2427
- "Materialized view name cannot be the same as the target table name."
2428
- );
2429
- }
2430
- super(
2431
- options.materializedViewName,
2432
- [
2433
- createMaterializedView({
2434
- name: options.materializedViewName,
2435
- destinationTable: targetTable.name,
2436
- select: selectStatement
2437
- })
2438
- // Population is now handled automatically by Rust infrastructure
2439
- // based on table engine type and whether this is a new or updated view
2440
- ],
2441
- [dropView(options.materializedViewName)],
2442
- {
2443
- pullsDataFrom: options.selectTables,
2444
- pushesDataTo: [targetTable]
2607
+ var requireTargetTableName, MaterializedView;
2608
+ var init_materializedView = __esm({
2609
+ "src/dmv2/sdk/materializedView.ts"() {
2610
+ "use strict";
2611
+ init_helpers();
2612
+ init_sqlHelpers();
2613
+ init_olapTable();
2614
+ init_sqlResource();
2615
+ requireTargetTableName = (tableName) => {
2616
+ if (typeof tableName === "string") {
2617
+ return tableName;
2618
+ } else {
2619
+ throw new Error("Name of targetTable is not specified.");
2445
2620
  }
2446
- );
2447
- this.targetTable = targetTable;
2621
+ };
2622
+ MaterializedView = class extends SqlResource {
2623
+ /** The target OlapTable instance where the materialized data is stored. */
2624
+ targetTable;
2625
+ constructor(options, targetSchema, targetColumns) {
2626
+ let selectStatement = options.selectStatement;
2627
+ if (typeof selectStatement !== "string") {
2628
+ selectStatement = toStaticQuery(selectStatement);
2629
+ }
2630
+ if (targetSchema === void 0 || targetColumns === void 0) {
2631
+ throw new Error(
2632
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
2633
+ );
2634
+ }
2635
+ const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2636
+ requireTargetTableName(
2637
+ options.targetTable?.name ?? options.tableName
2638
+ ),
2639
+ {
2640
+ orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2641
+ engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2642
+ },
2643
+ targetSchema,
2644
+ targetColumns
2645
+ );
2646
+ if (targetTable.name === options.materializedViewName) {
2647
+ throw new Error(
2648
+ "Materialized view name cannot be the same as the target table name."
2649
+ );
2650
+ }
2651
+ super(
2652
+ options.materializedViewName,
2653
+ [
2654
+ createMaterializedView({
2655
+ name: options.materializedViewName,
2656
+ destinationTable: targetTable.name,
2657
+ select: selectStatement
2658
+ })
2659
+ // Population is now handled automatically by Rust infrastructure
2660
+ // based on table engine type and whether this is a new or updated view
2661
+ ],
2662
+ [dropView(options.materializedViewName)],
2663
+ {
2664
+ pullsDataFrom: options.selectTables,
2665
+ pushesDataTo: [targetTable]
2666
+ }
2667
+ );
2668
+ this.targetTable = targetTable;
2669
+ }
2670
+ };
2448
2671
  }
2449
- };
2672
+ });
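
A hedged `MaterializedView` sketch (names and the SELECT are illustrative; `targetSchema`/`targetColumns` are injected by the compiler plugin for the type parameter). Because `targetTable` is not an `OlapTable` instance, the target table is created implicitly via `requireTargetTableName`, and the view name must differ from the table name per the guard above:

```typescript
import { MaterializedView } from "@514labs/moose-lib";

interface DailyCount { day: string; n: number; }

const mv = new MaterializedView<DailyCount>({
  materializedViewName: "daily_counts_mv",
  tableName: "daily_counts",   // implicit MergeTree target table
  orderByFields: ["day"],
  selectStatement: "SELECT toDate(ts) AS day, count() AS n FROM events GROUP BY day",
  selectTables: [],            // normally the source OlapTable/View instances
});
```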
2450
2673
 
2451
2674
  // src/dmv2/sdk/view.ts
2452
- var View = class extends SqlResource {
2453
- /**
2454
- * Creates a new View instance.
2455
- * @param name The name of the view to be created.
2456
- * @param selectStatement The SQL SELECT statement that defines the view's logic.
2457
- * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2458
- */
2459
- constructor(name, selectStatement, baseTables) {
2460
- if (typeof selectStatement !== "string") {
2461
- selectStatement = toStaticQuery(selectStatement);
2462
- }
2463
- super(
2464
- name,
2465
- [
2466
- `CREATE VIEW IF NOT EXISTS ${name}
2675
+ var View;
2676
+ var init_view = __esm({
2677
+ "src/dmv2/sdk/view.ts"() {
2678
+ "use strict";
2679
+ init_helpers();
2680
+ init_sqlHelpers();
2681
+ init_sqlResource();
2682
+ View = class extends SqlResource {
2683
+ /**
2684
+ * Creates a new View instance.
2685
+ * @param name The name of the view to be created.
2686
+ * @param selectStatement The SQL SELECT statement that defines the view's logic.
2687
+ * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2688
+ */
2689
+ constructor(name, selectStatement, baseTables) {
2690
+ if (typeof selectStatement !== "string") {
2691
+ selectStatement = toStaticQuery(selectStatement);
2692
+ }
2693
+ super(
2694
+ name,
2695
+ [
2696
+ `CREATE VIEW IF NOT EXISTS ${name}
2467
2697
  AS ${selectStatement}`.trim()
2468
- ],
2469
- [dropView(name)],
2470
- {
2471
- pullsDataFrom: baseTables
2698
+ ],
2699
+ [dropView(name)],
2700
+ {
2701
+ pullsDataFrom: baseTables
2702
+ }
2703
+ );
2472
2704
  }
2473
- );
2705
+ };
2474
2706
  }
2475
- };
2707
+ });
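
And the plain `View` counterpart, whose constructor is fully visible above — it wraps the statement in `CREATE VIEW IF NOT EXISTS <name> AS <select>` and registers `dropView(name)` for teardown (the SELECT here is illustrative):

```typescript
import { View } from "@514labs/moose-lib";

const recent = new View(
  "recent_events",
  "SELECT * FROM events ORDER BY ts DESC LIMIT 100",
  [], // base tables/views for dependency tracking (hypothetical sources omitted)
);
```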
2476
2708
 
2477
2709
  // src/dmv2/sdk/lifeCycle.ts
2478
- var LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2479
- LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2480
- LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2481
- LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2482
- return LifeCycle2;
2483
- })(LifeCycle || {});
2710
+ var LifeCycle;
2711
+ var init_lifeCycle = __esm({
2712
+ "src/dmv2/sdk/lifeCycle.ts"() {
2713
+ "use strict";
2714
+ LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2715
+ LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2716
+ LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2717
+ LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2718
+ return LifeCycle2;
2719
+ })(LifeCycle || {});
2720
+ }
2721
+ });
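
The `LifeCycle` values plug into resource configs; a small sketch assuming an `OlapTable` config that accepts `lifeCycle`, as the `IngestPipeline` table wiring above suggests (type and table name are hypothetical):

```typescript
import { LifeCycle, OlapTable } from "@514labs/moose-lib";

interface ArchivedEvent { id: string; }

// Deletion-protected table: Moose manages it but will not drop it.
const archive = new OlapTable<ArchivedEvent>("archive", {
  lifeCycle: LifeCycle.DELETION_PROTECTED,
});
```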
2484
2722
 
2485
2723
  // src/dmv2/sdk/webApp.ts
2486
- var RESERVED_MOUNT_PATHS = [
2487
- "/admin",
2488
- "/api",
2489
- "/consumption",
2490
- "/health",
2491
- "/ingest",
2492
- "/moose",
2493
- // reserved for future use
2494
- "/ready",
2495
- "/workflows"
2496
- ];
2497
- var WebApp = class {
2498
- name;
2499
- handler;
2500
- config;
2501
- _rawApp;
2502
- constructor(name, appOrHandler, config) {
2503
- this.name = name;
2504
- this.config = config;
2505
- if (!this.config.mountPath) {
2506
- throw new Error(
2507
- `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2508
- );
2509
- }
2510
- const mountPath = this.config.mountPath;
2511
- if (mountPath === "/") {
2512
- throw new Error(
2513
- `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2514
- );
2515
- }
2516
- if (mountPath.endsWith("/")) {
2517
- throw new Error(
2518
- `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2519
- );
2520
- }
2521
- for (const reserved of RESERVED_MOUNT_PATHS) {
2522
- if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2523
- throw new Error(
2524
- `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2525
- );
2526
- }
2527
- }
2528
- this.handler = this.toHandler(appOrHandler);
2529
- this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2530
- const webApps = getMooseInternal().webApps;
2531
- if (webApps.has(name)) {
2532
- throw new Error(`WebApp with name ${name} already exists`);
2533
- }
2534
- if (this.config.mountPath) {
2535
- for (const [existingName, existingApp] of webApps) {
2536
- if (existingApp.config.mountPath === this.config.mountPath) {
2724
+ var RESERVED_MOUNT_PATHS, WebApp;
2725
+ var init_webApp = __esm({
2726
+ "src/dmv2/sdk/webApp.ts"() {
2727
+ "use strict";
2728
+ init_internal();
2729
+ RESERVED_MOUNT_PATHS = [
2730
+ "/admin",
2731
+ "/api",
2732
+ "/consumption",
2733
+ "/health",
2734
+ "/ingest",
2735
+ "/moose",
2736
+ // reserved for future use
2737
+ "/ready",
2738
+ "/workflows"
2739
+ ];
2740
+ WebApp = class {
2741
+ name;
2742
+ handler;
2743
+ config;
2744
+ _rawApp;
2745
+ constructor(name, appOrHandler, config) {
2746
+ this.name = name;
2747
+ this.config = config;
2748
+ if (!this.config.mountPath) {
2537
2749
  throw new Error(
2538
- `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2750
+ `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2539
2751
  );
2540
2752
  }
2541
- }
2542
- }
2543
- webApps.set(name, this);
2544
- }
2545
- toHandler(appOrHandler) {
2546
- if (typeof appOrHandler === "function") {
2547
- return appOrHandler;
2548
- }
2549
- const app = appOrHandler;
2550
- if (typeof app.handle === "function") {
2551
- return (req, res) => {
2552
- app.handle(req, res, (err) => {
2553
- if (err) {
2554
- console.error("WebApp handler error:", err);
2555
- if (!res.headersSent) {
2556
- res.writeHead(500, { "Content-Type": "application/json" });
2557
- res.end(JSON.stringify({ error: "Internal Server Error" }));
2753
+ const mountPath = this.config.mountPath;
2754
+ if (mountPath === "/") {
2755
+ throw new Error(
2756
+ `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2757
+ );
2758
+ }
2759
+ if (mountPath.endsWith("/")) {
2760
+ throw new Error(
2761
+ `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2762
+ );
2763
+ }
2764
+ for (const reserved of RESERVED_MOUNT_PATHS) {
2765
+ if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2766
+ throw new Error(
2767
+ `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2768
+ );
2769
+ }
2770
+ }
2771
+ this.handler = this.toHandler(appOrHandler);
2772
+ this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2773
+ const webApps = getMooseInternal().webApps;
2774
+ if (webApps.has(name)) {
2775
+ throw new Error(`WebApp with name ${name} already exists`);
2776
+ }
2777
+ if (this.config.mountPath) {
2778
+ for (const [existingName, existingApp] of webApps) {
2779
+ if (existingApp.config.mountPath === this.config.mountPath) {
2780
+ throw new Error(
2781
+ `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2782
+ );
2558
2783
  }
2559
2784
  }
2560
- });
2561
- };
2562
- }
2563
- if (typeof app.callback === "function") {
2564
- return app.callback();
2565
- }
2566
- if (typeof app.routing === "function") {
2567
- const routing = app.routing;
2568
- const appWithReady = app;
2569
- let readyPromise = null;
2570
- return async (req, res) => {
2571
- if (readyPromise === null) {
2572
- readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2573
- }
2574
- await readyPromise;
2575
- routing(req, res);
2576
- };
2577
- }
2578
- throw new Error(
2579
- `Unable to convert app to handler. The provided object must be:
2785
+ }
2786
+ webApps.set(name, this);
2787
+ }
2788
+ toHandler(appOrHandler) {
2789
+ if (typeof appOrHandler === "function") {
2790
+ return appOrHandler;
2791
+ }
2792
+ const app = appOrHandler;
2793
+ if (typeof app.handle === "function") {
2794
+ return (req, res) => {
2795
+ app.handle(req, res, (err) => {
2796
+ if (err) {
2797
+ console.error("WebApp handler error:", err);
2798
+ if (!res.headersSent) {
2799
+ res.writeHead(500, { "Content-Type": "application/json" });
2800
+ res.end(JSON.stringify({ error: "Internal Server Error" }));
2801
+ }
2802
+ }
2803
+ });
2804
+ };
2805
+ }
2806
+ if (typeof app.callback === "function") {
2807
+ return app.callback();
2808
+ }
2809
+ if (typeof app.routing === "function") {
2810
+ const routing = app.routing;
2811
+ const appWithReady = app;
2812
+ let readyPromise = null;
2813
+ return async (req, res) => {
2814
+ if (readyPromise === null) {
2815
+ readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2816
+ }
2817
+ await readyPromise;
2818
+ routing(req, res);
2819
+ };
2820
+ }
2821
+ throw new Error(
2822
+ `Unable to convert app to handler. The provided object must be:
2580
2823
  - A function (raw Node.js handler)
2581
2824
  - An object with .handle() method (Express, Connect)
2582
2825
  - An object with .callback() method (Koa)
@@ -2588,12 +2831,14 @@ Examples:
2588
2831
  Fastify: new WebApp("name", fastifyApp)
2589
2832
  Raw: new WebApp("name", (req, res) => { ... })
2590
2833
  `
2591
- );
2592
- }
2593
- getRawApp() {
2594
- return this._rawApp;
2834
+ );
2835
+ }
2836
+ getRawApp() {
2837
+ return this._rawApp;
2838
+ }
2839
+ };
2595
2840
  }
2596
- };
2841
+ });
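
A sketch of mounting an Express app through `WebApp` (Express is an assumption here; any object with `.handle()` takes the same route through `toHandler`). The `mountPath` rules above apply: it is required, cannot be `/`, cannot end with a trailing slash, and cannot shadow a reserved prefix:

```typescript
import express from "express";
import { WebApp } from "@514labs/moose-lib";

const app = express();
app.get("/hello", (_req, res) => { res.json({ ok: true }); });

// Detected via .handle(); errors fall through to the 500 JSON response above.
new WebApp("helloApp", app, { mountPath: "/hello-app" });
```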
2597
2842
 
2598
2843
  // src/dmv2/registry.ts
2599
2844
  function getTables() {
@@ -2659,6 +2904,42 @@ function getWebApps2() {
2659
2904
  function getWebApp(name) {
2660
2905
  return getMooseInternal().webApps.get(name);
2661
2906
  }
2907
+ var init_registry = __esm({
2908
+ "src/dmv2/registry.ts"() {
2909
+ "use strict";
2910
+ init_internal();
2911
+ }
2912
+ });
2913
+
2914
+ // src/dmv2/index.ts
2915
+ var init_dmv2 = __esm({
2916
+ "src/dmv2/index.ts"() {
2917
+ "use strict";
2918
+ init_olapTable();
2919
+ init_stream();
2920
+ init_workflow();
2921
+ init_ingestApi();
2922
+ init_consumptionApi();
2923
+ init_ingestPipeline();
2924
+ init_etlPipeline();
2925
+ init_materializedView();
2926
+ init_sqlResource();
2927
+ init_view();
2928
+ init_lifeCycle();
2929
+ init_webApp();
2930
+ init_registry();
2931
+ }
2932
+ });
2933
+
2934
+ // src/browserCompatible.ts
2935
+ var init_browserCompatible = __esm({
2936
+ "src/browserCompatible.ts"() {
2937
+ init_dmv2();
2938
+ init_types();
2939
+ init_sqlHelpers();
2940
+ }
2941
+ });
2942
+ init_browserCompatible();
2662
2943
  export {
2663
2944
  Api,
2664
2945
  ConsumptionApi,