@514labs/moose-lib 0.6.296 → 0.6.297-ci-28-g84f3192e

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -30,6 +30,325 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

+ // src/dmv2/utils/stackTrace.ts
+ function shouldSkipStackLine(line) {
+ return line.includes("node_modules") || // Skip npm installed packages (prod)
+ line.includes("node:internal") || // Skip Node.js internals (modern format)
+ line.includes("internal/modules") || // Skip Node.js internals (older format)
+ line.includes("ts-node") || // Skip TypeScript execution
+ line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
+ line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
+ line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
+ line.includes("\\ts-moose-lib\\dist\\");
+ }
+ function parseStackLine(line) {
+ const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
+ if (match && match[1]) {
+ return {
+ file: match[1],
+ line: match[2]
+ };
+ }
+ return void 0;
+ }
+ function getSourceFileInfo(stack) {
+ if (!stack) return {};
+ const lines = stack.split("\n");
+ for (const line of lines) {
+ if (shouldSkipStackLine(line)) continue;
+ const info = parseStackLine(line);
+ if (info) return info;
+ }
+ return {};
+ }
+ function getSourceLocationFromStack(stack) {
+ if (!stack) return void 0;
+ const lines = stack.split("\n");
+ for (const line of lines.slice(1)) {
+ if (shouldSkipStackLine(line)) {
+ continue;
+ }
+ const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
+ if (v8Match) {
+ return {
+ file: v8Match[1],
+ line: parseInt(v8Match[2], 10),
+ column: parseInt(v8Match[3], 10)
+ };
+ }
+ const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
+ if (smMatch) {
+ return {
+ file: smMatch[1],
+ line: parseInt(smMatch[2], 10),
+ column: parseInt(smMatch[3], 10)
+ };
+ }
+ }
+ return void 0;
+ }
+ function getSourceFileFromStack(stack) {
+ const location = getSourceLocationFromStack(stack);
+ return location?.file;
+ }
+ var init_stackTrace = __esm({
+ "src/dmv2/utils/stackTrace.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/dmv2/typedBase.ts
+ var TypedBase;
+ var init_typedBase = __esm({
+ "src/dmv2/typedBase.ts"() {
+ "use strict";
+ init_stackTrace();
+ TypedBase = class {
+ /** The JSON schema representation of type T. Injected by the compiler plugin. */
+ schema;
+ /** The name assigned to this resource instance. */
+ name;
+ /** A dictionary mapping column names (keys of T) to their Column definitions. */
+ columns;
+ /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
+ columnArray;
+ /** The configuration object specific to this resource type. */
+ config;
+ /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
+ validators;
+ /** Optional metadata for the resource, always present as an object. */
+ metadata;
+ /**
+ * Whether this resource allows extra fields beyond the defined columns.
+ * When true, extra fields in payloads are passed through to streaming functions.
+ * Injected by the compiler plugin when the type has an index signature.
+ */
+ allowExtraFields;
+ /**
+ * @internal Constructor intended for internal use by subclasses and the compiler plugin.
+ * It expects the schema and columns to be provided, typically injected by the compiler.
+ *
+ * @param name The name for the resource instance.
+ * @param config The configuration object for the resource.
+ * @param schema The JSON schema for the resource's data type T (injected).
+ * @param columns The array of Column definitions for T (injected).
+ * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
+ */
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
+ if (schema === void 0 || columns === void 0) {
+ throw new Error(
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
+ );
+ }
+ this.schema = schema;
+ this.columnArray = columns;
+ const columnsObj = {};
+ columns.forEach((column) => {
+ columnsObj[column.name] = column;
+ });
+ this.columns = columnsObj;
+ this.name = name;
+ this.config = config;
+ this.validators = validators;
+ this.allowExtraFields = allowExtraFields ?? false;
+ this.metadata = config?.metadata ? { ...config.metadata } : {};
+ if (!this.metadata.source) {
+ const stack = new Error().stack;
+ if (stack) {
+ const info = getSourceFileInfo(stack);
+ this.metadata.source = { file: info.file, line: info.line };
+ }
+ }
+ }
+ };
+ }
+ });
+
+ // src/dataModels/dataModelTypes.ts
+ function isArrayNestedType(dt) {
+ return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
+ }
+ function isNestedType(dt) {
+ return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
+ }
+ var init_dataModelTypes = __esm({
+ "src/dataModels/dataModelTypes.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/sqlHelpers.ts
+ function sql(strings, ...values) {
+ return new Sql(strings, values);
+ }
+ function createClickhouseParameter(parameterIndex, value) {
+ return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
+ }
+ function emptyIfUndefined(value) {
+ return value === void 0 ? "" : value;
+ }
+ var quoteIdentifier, isTable, isColumn, instanceofSql, Sql, toStaticQuery, toQuery, toQueryPreview, getValueFromParameter, mapToClickHouseType;
+ var init_sqlHelpers = __esm({
+ "src/sqlHelpers.ts"() {
+ "use strict";
+ quoteIdentifier = (name) => {
+ return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
+ };
+ isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
+ isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
+ instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
+ Sql = class {
+ values;
+ strings;
+ constructor(rawStrings, rawValues) {
+ if (rawStrings.length - 1 !== rawValues.length) {
+ if (rawStrings.length === 0) {
+ throw new TypeError("Expected at least 1 string");
+ }
+ throw new TypeError(
+ `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
+ );
+ }
+ const valuesLength = rawValues.reduce(
+ (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
+ 0
+ );
+ this.values = new Array(valuesLength);
+ this.strings = new Array(valuesLength + 1);
+ this.strings[0] = rawStrings[0];
+ let i = 0, pos = 0;
+ while (i < rawValues.length) {
+ const child = rawValues[i++];
+ const rawString = rawStrings[i];
+ if (instanceofSql(child)) {
+ this.strings[pos] += child.strings[0];
+ let childIndex = 0;
+ while (childIndex < child.values.length) {
+ this.values[pos++] = child.values[childIndex++];
+ this.strings[pos] = child.strings[childIndex];
+ }
+ this.strings[pos] += rawString;
+ } else if (isColumn(child)) {
+ const aggregationFunction = child.annotations.find(
+ ([k, _]) => k === "aggregationFunction"
+ );
+ if (aggregationFunction !== void 0) {
+ this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
+ } else {
+ this.strings[pos] += `\`${child.name}\``;
+ }
+ this.strings[pos] += rawString;
+ } else if (isTable(child)) {
+ if (child.config.database) {
+ this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
+ } else {
+ this.strings[pos] += `\`${child.name}\``;
+ }
+ this.strings[pos] += rawString;
+ } else {
+ this.values[pos++] = child;
+ this.strings[pos] = rawString;
+ }
+ }
+ }
+ };
+ toStaticQuery = (sql3) => {
+ const [query, params] = toQuery(sql3);
+ if (Object.keys(params).length !== 0) {
+ throw new Error(
+ "Dynamic SQL is not allowed in the select statement in view creation."
+ );
+ }
+ return query;
+ };
+ toQuery = (sql3) => {
+ const parameterizedStubs = sql3.values.map(
+ (v, i) => createClickhouseParameter(i, v)
+ );
+ const query = sql3.strings.map(
+ (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
+ ).join("");
+ const query_params = sql3.values.reduce(
+ (acc, v, i) => ({
+ ...acc,
+ [`p${i}`]: getValueFromParameter(v)
+ }),
+ {}
+ );
+ return [query, query_params];
+ };
+ toQueryPreview = (sql3) => {
+ try {
+ const formatValue = (v) => {
+ if (Array.isArray(v)) {
+ const [type, val] = v;
+ if (type === "Identifier") {
+ return `\`${String(val)}\``;
+ }
+ return `[${v.map((x) => formatValue(x)).join(", ")}]`;
+ }
+ if (v === null || v === void 0) return "NULL";
+ if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
+ if (typeof v === "number") return String(v);
+ if (typeof v === "boolean") return v ? "true" : "false";
+ if (v instanceof Date)
+ return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
+ try {
+ return JSON.stringify(v);
+ } catch {
+ return String(v);
+ }
+ };
+ let out = sql3.strings[0] ?? "";
+ for (let i = 0; i < sql3.values.length; i++) {
+ const val = getValueFromParameter(sql3.values[i]);
+ out += formatValue(val);
+ out += sql3.strings[i + 1] ?? "";
+ }
+ return out.replace(/\s+/g, " ").trim();
+ } catch (error) {
+ console.log(`toQueryPreview error: ${error}`);
+ return "/* query preview unavailable */";
+ }
+ };
+ getValueFromParameter = (value) => {
+ if (Array.isArray(value)) {
+ const [type, val] = value;
+ if (type === "Identifier") return val;
+ }
+ return value;
+ };
+ mapToClickHouseType = (value) => {
+ if (typeof value === "number") {
+ return Number.isInteger(value) ? "Int" : "Float";
+ }
+ if (typeof value === "boolean") return "Bool";
+ if (value instanceof Date) return "DateTime";
+ if (Array.isArray(value)) {
+ const [type, _] = value;
+ return type;
+ }
+ return "String";
+ };
+ }
+ });
+
+ // src/blocks/helpers.ts
+ function dropView(name) {
+ return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
+ }
+ function createMaterializedView(options) {
+ return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
+ TO ${quoteIdentifier(options.destinationTable)}
+ AS ${options.select}`.trim();
+ }
+ var init_helpers = __esm({
+ "src/blocks/helpers.ts"() {
+ "use strict";
+ init_sqlHelpers();
+ }
+ });
+
  // src/commons.ts
  var commons_exports = {};
  __export(commons_exports, {
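
For orientation while reading this hunk: the new sqlHelpers module implements a `sql` tagged template whose interpolated values become ClickHouse bound parameters ({p0:Int}-style placeholders built by createClickhouseParameter), while column and table objects are spliced in as backtick-quoted identifiers. A minimal usage sketch, assuming the helpers are imported from the package entry point (the import path and the table/column names are illustrative, not taken from this diff):

import { sql, toQuery, toQueryPreview } from "@514labs/moose-lib";

const minAge = 21;
// Plain values are captured as parameters, never spliced into the query text.
const q = sql`SELECT name FROM users WHERE age >= ${minAge}`;

// toQuery yields the parameterized text plus a params map, roughly:
//   ["SELECT name FROM users WHERE age >= {p0:Int}", { p0: 21 }]
const [text, params] = toQuery(q);

// toQueryPreview inlines the values for logging only (not for execution):
//   SELECT name FROM users WHERE age >= 21
console.log(toQueryPreview(q));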
@@ -197,6 +516,306 @@ var init_commons = __esm({
  }
  });

+ // src/secrets.ts
+ var init_secrets = __esm({
+ "src/secrets.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/consumption-apis/helpers.ts
+ var import_client2, import_node_crypto;
+ var init_helpers2 = __esm({
+ "src/consumption-apis/helpers.ts"() {
+ "use strict";
+ import_client2 = require("@temporalio/client");
+ import_node_crypto = require("crypto");
+ init_internal();
+ init_sqlHelpers();
+ }
+ });
+
+ // src/consumption-apis/webAppHelpers.ts
+ var init_webAppHelpers = __esm({
+ "src/consumption-apis/webAppHelpers.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/scripts/task.ts
+ var init_task = __esm({
+ "src/scripts/task.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/cluster-utils.ts
+ var import_node_cluster, import_node_os, import_node_process;
+ var init_cluster_utils = __esm({
+ "src/cluster-utils.ts"() {
+ "use strict";
+ import_node_cluster = __toESM(require("cluster"));
+ import_node_os = require("os");
+ import_node_process = require("process");
+ }
+ });
+
+ // src/consumption-apis/runner.ts
+ var jose;
+ var init_runner = __esm({
+ "src/consumption-apis/runner.ts"() {
+ "use strict";
+ init_commons();
+ init_helpers2();
+ jose = __toESM(require("jose"));
+ init_cluster_utils();
+ init_sqlHelpers();
+ init_internal();
+ }
+ });
+
+ // src/clients/redisClient.ts
+ var import_redis;
+ var init_redisClient = __esm({
+ "src/clients/redisClient.ts"() {
+ "use strict";
+ import_redis = require("redis");
+ }
+ });
+
+ // src/consumption-apis/standalone.ts
+ var init_standalone = __esm({
+ "src/consumption-apis/standalone.ts"() {
+ "use strict";
+ init_helpers2();
+ init_commons();
+ init_sqlHelpers();
+ }
+ });
+
+ // src/utilities/json.ts
+ var init_json = __esm({
+ "src/utilities/json.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/utilities/dataParser.ts
+ var import_csv_parse, CSV_DELIMITERS, DEFAULT_CSV_CONFIG;
+ var init_dataParser = __esm({
+ "src/utilities/dataParser.ts"() {
+ "use strict";
+ import_csv_parse = require("csv-parse");
+ init_json();
+ CSV_DELIMITERS = {
+ COMMA: ",",
+ TAB: "\t",
+ SEMICOLON: ";",
+ PIPE: "|"
+ };
+ DEFAULT_CSV_CONFIG = {
+ delimiter: CSV_DELIMITERS.COMMA,
+ columns: true,
+ skipEmptyLines: true,
+ trim: true
+ };
+ }
+ });
+
+ // src/utilities/index.ts
+ var init_utilities = __esm({
+ "src/utilities/index.ts"() {
+ "use strict";
+ init_dataParser();
+ }
+ });
+
+ // src/connectors/dataSource.ts
+ var init_dataSource = __esm({
+ "src/connectors/dataSource.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/dataModels/types.ts
+ var init_types = __esm({
+ "src/dataModels/types.ts"() {
+ "use strict";
+ }
+ });
+
+ // src/index.ts
+ var init_index = __esm({
+ "src/index.ts"() {
+ "use strict";
+ init_browserCompatible();
+ init_helpers();
+ init_commons();
+ init_secrets();
+ init_helpers2();
+ init_webAppHelpers();
+ init_task();
+ init_runner();
+ init_redisClient();
+ init_helpers2();
+ init_standalone();
+ init_sqlHelpers();
+ init_utilities();
+ init_dataSource();
+ init_types();
+ }
+ });
+
+ // src/dmv2/internal.ts
+ var import_process, isClientOnlyMode, moose_internal, defaultRetentionPeriod, getMooseInternal, dlqSchema, dlqColumns;
+ var init_internal = __esm({
+ "src/dmv2/internal.ts"() {
+ "use strict";
+ import_process = __toESM(require("process"));
+ init_index();
+ init_commons();
+ isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
+ moose_internal = {
+ tables: /* @__PURE__ */ new Map(),
+ streams: /* @__PURE__ */ new Map(),
+ ingestApis: /* @__PURE__ */ new Map(),
+ apis: /* @__PURE__ */ new Map(),
+ sqlResources: /* @__PURE__ */ new Map(),
+ workflows: /* @__PURE__ */ new Map(),
+ webApps: /* @__PURE__ */ new Map()
+ };
+ defaultRetentionPeriod = 60 * 60 * 24 * 7;
+ getMooseInternal = () => globalThis.moose_internal;
+ if (getMooseInternal() === void 0) {
+ globalThis.moose_internal = moose_internal;
+ }
+ dlqSchema = {
+ version: "3.1",
+ components: {
+ schemas: {
+ DeadLetterModel: {
+ type: "object",
+ properties: {
+ originalRecord: {
+ $ref: "#/components/schemas/Recordstringany"
+ },
+ errorMessage: {
+ type: "string"
+ },
+ errorType: {
+ type: "string"
+ },
+ failedAt: {
+ type: "string",
+ format: "date-time"
+ },
+ source: {
+ oneOf: [
+ {
+ const: "api"
+ },
+ {
+ const: "transform"
+ },
+ {
+ const: "table"
+ }
+ ]
+ }
+ },
+ required: [
+ "originalRecord",
+ "errorMessage",
+ "errorType",
+ "failedAt",
+ "source"
+ ]
+ },
+ Recordstringany: {
+ type: "object",
+ properties: {},
+ required: [],
+ description: "Construct a type with a set of properties K of type T",
+ additionalProperties: {}
+ }
+ }
+ },
+ schemas: [
+ {
+ $ref: "#/components/schemas/DeadLetterModel"
+ }
+ ]
+ };
+ dlqColumns = [
+ {
+ name: "originalRecord",
+ data_type: "Json",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "errorMessage",
+ data_type: "String",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "errorType",
+ data_type: "String",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "failedAt",
+ data_type: "DateTime",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "source",
+ data_type: "String",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ }
+ ];
+ }
+ });
+
  // src/config/configFile.ts
  async function findConfigFile(startDir = process.cwd()) {
  const fs = await import("fs");
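
The dlqSchema and dlqColumns registered above define the dead-letter record layout. As a reading aid, the equivalent TypeScript shape is roughly the following (transcribed from the schema in this hunk; the interface declaration itself is only illustrative):

// Transcribed from dlqSchema/dlqColumns above; illustrative only.
interface DeadLetterModel {
  originalRecord: Record<string, any>; // the payload that failed
  errorMessage: string;
  errorType: string;
  failedAt: Date; // DateTime column; "date-time" string in the JSON schema
  source: "api" | "transform" | "table";
}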
@@ -381,2265 +1000,1849 @@ var init_runtime = __esm({
381
1000
  }
382
1001
  });
383
1002
 
384
- // src/browserCompatible.ts
385
- var browserCompatible_exports = {};
386
- __export(browserCompatible_exports, {
387
- Api: () => Api,
388
- ConsumptionApi: () => ConsumptionApi,
389
- DeadLetterQueue: () => DeadLetterQueue,
390
- ETLPipeline: () => ETLPipeline,
391
- IngestApi: () => IngestApi,
392
- IngestPipeline: () => IngestPipeline,
393
- LifeCycle: () => LifeCycle,
394
- MaterializedView: () => MaterializedView,
395
- OlapTable: () => OlapTable,
396
- Sql: () => Sql,
397
- SqlResource: () => SqlResource,
398
- Stream: () => Stream,
399
- Task: () => Task,
400
- View: () => View,
401
- WebApp: () => WebApp,
402
- Workflow: () => Workflow,
403
- createClickhouseParameter: () => createClickhouseParameter,
404
- getApi: () => getApi,
405
- getApis: () => getApis2,
406
- getIngestApi: () => getIngestApi,
407
- getIngestApis: () => getIngestApis,
408
- getSqlResource: () => getSqlResource,
409
- getSqlResources: () => getSqlResources,
410
- getStream: () => getStream,
411
- getStreams: () => getStreams,
412
- getTable: () => getTable,
413
- getTables: () => getTables,
414
- getValueFromParameter: () => getValueFromParameter,
415
- getWebApp: () => getWebApp,
416
- getWebApps: () => getWebApps2,
417
- getWorkflow: () => getWorkflow,
418
- getWorkflows: () => getWorkflows2,
419
- mapToClickHouseType: () => mapToClickHouseType,
420
- quoteIdentifier: () => quoteIdentifier,
421
- sql: () => sql,
422
- toQuery: () => toQuery,
423
- toQueryPreview: () => toQueryPreview,
424
- toStaticQuery: () => toStaticQuery
425
- });
426
- module.exports = __toCommonJS(browserCompatible_exports);
427
-
428
- // src/dmv2/utils/stackTrace.ts
429
- function shouldSkipStackLine(line) {
430
- return line.includes("node_modules") || // Skip npm installed packages (prod)
431
- line.includes("node:internal") || // Skip Node.js internals (modern format)
432
- line.includes("internal/modules") || // Skip Node.js internals (older format)
433
- line.includes("ts-node") || // Skip TypeScript execution
434
- line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
435
- line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
436
- line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
437
- line.includes("\\ts-moose-lib\\dist\\");
438
- }
439
- function parseStackLine(line) {
440
- const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
441
- if (match && match[1]) {
442
- return {
443
- file: match[1],
444
- line: match[2]
445
- };
446
- }
447
- return void 0;
448
- }
449
- function getSourceFileInfo(stack) {
450
- if (!stack) return {};
451
- const lines = stack.split("\n");
452
- for (const line of lines) {
453
- if (shouldSkipStackLine(line)) continue;
454
- const info = parseStackLine(line);
455
- if (info) return info;
456
- }
457
- return {};
458
- }
459
- function getSourceLocationFromStack(stack) {
460
- if (!stack) return void 0;
461
- const lines = stack.split("\n");
462
- for (const line of lines.slice(1)) {
463
- if (shouldSkipStackLine(line)) {
464
- continue;
465
- }
466
- const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
467
- if (v8Match) {
468
- return {
469
- file: v8Match[1],
470
- line: parseInt(v8Match[2], 10),
471
- column: parseInt(v8Match[3], 10)
472
- };
473
- }
474
- const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
475
- if (smMatch) {
476
- return {
477
- file: smMatch[1],
478
- line: parseInt(smMatch[2], 10),
479
- column: parseInt(smMatch[3], 10)
480
- };
481
- }
482
- }
483
- return void 0;
484
- }
485
- function getSourceFileFromStack(stack) {
486
- const location = getSourceLocationFromStack(stack);
487
- return location?.file;
488
- }
489
-
490
- // src/dmv2/typedBase.ts
491
- var TypedBase = class {
492
- /** The JSON schema representation of type T. Injected by the compiler plugin. */
493
- schema;
494
- /** The name assigned to this resource instance. */
495
- name;
496
- /** A dictionary mapping column names (keys of T) to their Column definitions. */
497
- columns;
498
- /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
499
- columnArray;
500
- /** The configuration object specific to this resource type. */
501
- config;
502
- /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
503
- validators;
504
- /** Optional metadata for the resource, always present as an object. */
505
- metadata;
506
- /**
507
- * Whether this resource allows extra fields beyond the defined columns.
508
- * When true, extra fields in payloads are passed through to streaming functions.
509
- * Injected by the compiler plugin when the type has an index signature.
510
- */
511
- allowExtraFields;
512
- /**
513
- * @internal Constructor intended for internal use by subclasses and the compiler plugin.
514
- * It expects the schema and columns to be provided, typically injected by the compiler.
515
- *
516
- * @param name The name for the resource instance.
517
- * @param config The configuration object for the resource.
518
- * @param schema The JSON schema for the resource's data type T (injected).
519
- * @param columns The array of Column definitions for T (injected).
520
- * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
521
- */
522
- constructor(name, config, schema, columns, validators, allowExtraFields) {
523
- if (schema === void 0 || columns === void 0) {
524
- throw new Error(
525
- "Supply the type param T so that the schema is inserted by the compiler plugin."
526
- );
527
- }
528
- this.schema = schema;
529
- this.columnArray = columns;
530
- const columnsObj = {};
531
- columns.forEach((column) => {
532
- columnsObj[column.name] = column;
533
- });
534
- this.columns = columnsObj;
535
- this.name = name;
536
- this.config = config;
537
- this.validators = validators;
538
- this.allowExtraFields = allowExtraFields ?? false;
539
- this.metadata = config?.metadata ? { ...config.metadata } : {};
540
- if (!this.metadata.source) {
541
- const stack = new Error().stack;
542
- if (stack) {
543
- const info = getSourceFileInfo(stack);
544
- this.metadata.source = { file: info.file, line: info.line };
1003
+ // src/dmv2/sdk/olapTable.ts
1004
+ var import_node_stream, import_node_crypto2, OlapTable;
1005
+ var init_olapTable = __esm({
1006
+ "src/dmv2/sdk/olapTable.ts"() {
1007
+ "use strict";
1008
+ init_typedBase();
1009
+ init_dataModelTypes();
1010
+ init_helpers();
1011
+ init_internal();
1012
+ import_node_stream = require("stream");
1013
+ import_node_crypto2 = require("crypto");
1014
+ init_sqlHelpers();
1015
+ OlapTable = class extends TypedBase {
1016
+ name;
1017
+ /** @internal */
1018
+ kind = "OlapTable";
1019
+ /** @internal Memoized ClickHouse client for reusing connections across insert calls */
1020
+ _memoizedClient;
1021
+ /** @internal Hash of the configuration used to create the memoized client */
1022
+ _configHash;
1023
+ /** @internal Cached table name to avoid repeated generation */
1024
+ _cachedTableName;
1025
+ constructor(name, config, schema, columns, validators) {
1026
+ const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
1027
+ const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
1028
+ const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
1029
+ if (hasFields && hasExpr) {
1030
+ throw new Error(
1031
+ `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
1032
+ );
1033
+ }
1034
+ const hasCluster = typeof resolvedConfig.cluster === "string";
1035
+ const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
1036
+ const hasReplicaName = typeof resolvedConfig.replicaName === "string";
1037
+ if (hasCluster && (hasKeeperPath || hasReplicaName)) {
1038
+ throw new Error(
1039
+ `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
1040
+ );
1041
+ }
1042
+ super(name, resolvedConfig, schema, columns, validators);
1043
+ this.name = name;
1044
+ const tables = getMooseInternal().tables;
1045
+ const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
1046
+ if (!isClientOnlyMode() && tables.has(registryKey)) {
1047
+ throw new Error(
1048
+ `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
1049
+ );
1050
+ }
1051
+ tables.set(registryKey, this);
545
1052
  }
546
- }
547
- }
548
- };
549
-
550
- // src/dataModels/dataModelTypes.ts
551
- function isArrayNestedType(dt) {
552
- return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
553
- }
554
- function isNestedType(dt) {
555
- return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
556
- }
557
-
558
- // src/sqlHelpers.ts
559
- var quoteIdentifier = (name) => {
560
- return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
561
- };
562
- var isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
563
- var isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
564
- function sql(strings, ...values) {
565
- return new Sql(strings, values);
566
- }
567
- var instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
568
- var Sql = class {
569
- values;
570
- strings;
571
- constructor(rawStrings, rawValues) {
572
- if (rawStrings.length - 1 !== rawValues.length) {
573
- if (rawStrings.length === 0) {
574
- throw new TypeError("Expected at least 1 string");
575
- }
576
- throw new TypeError(
577
- `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
578
- );
579
- }
580
- const valuesLength = rawValues.reduce(
581
- (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
582
- 0
583
- );
584
- this.values = new Array(valuesLength);
585
- this.strings = new Array(valuesLength + 1);
586
- this.strings[0] = rawStrings[0];
587
- let i = 0, pos = 0;
588
- while (i < rawValues.length) {
589
- const child = rawValues[i++];
590
- const rawString = rawStrings[i];
591
- if (instanceofSql(child)) {
592
- this.strings[pos] += child.strings[0];
593
- let childIndex = 0;
594
- while (childIndex < child.values.length) {
595
- this.values[pos++] = child.values[childIndex++];
596
- this.strings[pos] = child.strings[childIndex];
597
- }
598
- this.strings[pos] += rawString;
599
- } else if (isColumn(child)) {
600
- const aggregationFunction = child.annotations.find(
601
- ([k, _]) => k === "aggregationFunction"
602
- );
603
- if (aggregationFunction !== void 0) {
604
- this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
605
- } else {
606
- this.strings[pos] += `\`${child.name}\``;
1053
+ /**
1054
+ * Generates the versioned table name following Moose's naming convention
1055
+ * Format: {tableName}_{version_with_dots_replaced_by_underscores}
1056
+ */
1057
+ generateTableName() {
1058
+ if (this._cachedTableName) {
1059
+ return this._cachedTableName;
607
1060
  }
608
- this.strings[pos] += rawString;
609
- } else if (isTable(child)) {
610
- if (child.config.database) {
611
- this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
1061
+ const tableVersion = this.config.version;
1062
+ if (!tableVersion) {
1063
+ this._cachedTableName = this.name;
612
1064
  } else {
613
- this.strings[pos] += `\`${child.name}\``;
1065
+ const versionSuffix = tableVersion.replace(/\./g, "_");
1066
+ this._cachedTableName = `${this.name}_${versionSuffix}`;
614
1067
  }
615
- this.strings[pos] += rawString;
616
- } else {
617
- this.values[pos++] = child;
618
- this.strings[pos] = rawString;
1068
+ return this._cachedTableName;
619
1069
  }
620
- }
621
- }
622
- };
623
- var toStaticQuery = (sql3) => {
624
- const [query, params] = toQuery(sql3);
625
- if (Object.keys(params).length !== 0) {
626
- throw new Error(
627
- "Dynamic SQL is not allowed in the select statement in view creation."
628
- );
629
- }
630
- return query;
631
- };
632
- var toQuery = (sql3) => {
633
- const parameterizedStubs = sql3.values.map(
634
- (v, i) => createClickhouseParameter(i, v)
635
- );
636
- const query = sql3.strings.map(
637
- (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
638
- ).join("");
639
- const query_params = sql3.values.reduce(
640
- (acc, v, i) => ({
641
- ...acc,
642
- [`p${i}`]: getValueFromParameter(v)
643
- }),
644
- {}
645
- );
646
- return [query, query_params];
647
- };
648
- var toQueryPreview = (sql3) => {
649
- try {
650
- const formatValue = (v) => {
651
- if (Array.isArray(v)) {
652
- const [type, val] = v;
653
- if (type === "Identifier") {
654
- return `\`${String(val)}\``;
655
- }
656
- return `[${v.map((x) => formatValue(x)).join(", ")}]`;
657
- }
658
- if (v === null || v === void 0) return "NULL";
659
- if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
660
- if (typeof v === "number") return String(v);
661
- if (typeof v === "boolean") return v ? "true" : "false";
662
- if (v instanceof Date)
663
- return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
664
- try {
665
- return JSON.stringify(v);
666
- } catch {
667
- return String(v);
1070
+ /**
1071
+ * Creates a fast hash of the ClickHouse configuration.
1072
+ * Uses crypto.createHash for better performance than JSON.stringify.
1073
+ *
1074
+ * @private
1075
+ */
1076
+ createConfigHash(clickhouseConfig) {
1077
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1078
+ const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
1079
+ return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
668
1080
  }
669
- };
670
- let out = sql3.strings[0] ?? "";
671
- for (let i = 0; i < sql3.values.length; i++) {
672
- const val = getValueFromParameter(sql3.values[i]);
673
- out += formatValue(val);
674
- out += sql3.strings[i + 1] ?? "";
675
- }
676
- return out.replace(/\s+/g, " ").trim();
677
- } catch (error) {
678
- console.log(`toQueryPreview error: ${error}`);
679
- return "/* query preview unavailable */";
680
- }
681
- };
682
- var getValueFromParameter = (value) => {
683
- if (Array.isArray(value)) {
684
- const [type, val] = value;
685
- if (type === "Identifier") return val;
686
- }
687
- return value;
688
- };
689
- function createClickhouseParameter(parameterIndex, value) {
690
- return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
691
- }
692
- var mapToClickHouseType = (value) => {
693
- if (typeof value === "number") {
694
- return Number.isInteger(value) ? "Int" : "Float";
695
- }
696
- if (typeof value === "boolean") return "Bool";
697
- if (value instanceof Date) return "DateTime";
698
- if (Array.isArray(value)) {
699
- const [type, _] = value;
700
- return type;
701
- }
702
- return "String";
703
- };
704
- function emptyIfUndefined(value) {
705
- return value === void 0 ? "" : value;
706
- }
707
-
708
- // src/blocks/helpers.ts
709
- function dropView(name) {
710
- return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
711
- }
712
- function createMaterializedView(options) {
713
- return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
714
- TO ${quoteIdentifier(options.destinationTable)}
715
- AS ${options.select}`.trim();
716
- }
717
-
718
- // src/dmv2/internal.ts
719
- var import_process = __toESM(require("process"));
720
-
721
- // src/index.ts
722
- init_commons();
723
-
724
- // src/consumption-apis/helpers.ts
725
- var import_client2 = require("@temporalio/client");
726
- var import_node_crypto = require("crypto");
727
-
728
- // src/consumption-apis/runner.ts
729
- init_commons();
730
- var jose = __toESM(require("jose"));
731
-
732
- // src/cluster-utils.ts
733
- var import_node_cluster = __toESM(require("cluster"));
734
- var import_node_os = require("os");
735
- var import_node_process = require("process");
736
-
737
- // src/clients/redisClient.ts
738
- var import_redis = require("redis");
739
-
740
- // src/consumption-apis/standalone.ts
741
- init_commons();
742
-
743
- // src/utilities/dataParser.ts
744
- var import_csv_parse = require("csv-parse");
745
- var CSV_DELIMITERS = {
746
- COMMA: ",",
747
- TAB: " ",
748
- SEMICOLON: ";",
749
- PIPE: "|"
750
- };
751
- var DEFAULT_CSV_CONFIG = {
752
- delimiter: CSV_DELIMITERS.COMMA,
753
- columns: true,
754
- skipEmptyLines: true,
755
- trim: true
756
- };
757
-
758
- // src/dmv2/internal.ts
759
- init_commons();
760
- var isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
761
- var moose_internal = {
762
- tables: /* @__PURE__ */ new Map(),
763
- streams: /* @__PURE__ */ new Map(),
764
- ingestApis: /* @__PURE__ */ new Map(),
765
- apis: /* @__PURE__ */ new Map(),
766
- sqlResources: /* @__PURE__ */ new Map(),
767
- workflows: /* @__PURE__ */ new Map(),
768
- webApps: /* @__PURE__ */ new Map()
769
- };
770
- var defaultRetentionPeriod = 60 * 60 * 24 * 7;
771
- var getMooseInternal = () => globalThis.moose_internal;
772
- if (getMooseInternal() === void 0) {
773
- globalThis.moose_internal = moose_internal;
774
- }
775
- var dlqSchema = {
776
- version: "3.1",
777
- components: {
778
- schemas: {
779
- DeadLetterModel: {
780
- type: "object",
781
- properties: {
782
- originalRecord: {
783
- $ref: "#/components/schemas/Recordstringany"
784
- },
785
- errorMessage: {
786
- type: "string"
787
- },
788
- errorType: {
789
- type: "string"
790
- },
791
- failedAt: {
792
- type: "string",
793
- format: "date-time"
794
- },
795
- source: {
796
- oneOf: [
797
- {
798
- const: "api"
799
- },
800
- {
801
- const: "transform"
802
- },
803
- {
804
- const: "table"
805
- }
806
- ]
1081
+ /**
1082
+ * Gets or creates a memoized ClickHouse client.
1083
+ * The client is cached and reused across multiple insert calls for better performance.
1084
+ * If the configuration changes, a new client will be created.
1085
+ *
1086
+ * @private
1087
+ */
1088
+ async getMemoizedClient() {
1089
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1090
+ const configRegistry = globalThis._mooseConfigRegistry;
1091
+ const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1092
+ const clickhouseConfig = await configRegistry.getClickHouseConfig();
1093
+ const currentConfigHash = this.createConfigHash(clickhouseConfig);
1094
+ if (this._memoizedClient && this._configHash === currentConfigHash) {
1095
+ return { client: this._memoizedClient, config: clickhouseConfig };
1096
+ }
1097
+ if (this._memoizedClient && this._configHash !== currentConfigHash) {
1098
+ try {
1099
+ await this._memoizedClient.close();
1100
+ } catch (error) {
807
1101
  }
808
- },
809
- required: [
810
- "originalRecord",
811
- "errorMessage",
812
- "errorType",
813
- "failedAt",
814
- "source"
815
- ]
816
- },
817
- Recordstringany: {
818
- type: "object",
819
- properties: {},
820
- required: [],
821
- description: "Construct a type with a set of properties K of type T",
822
- additionalProperties: {}
1102
+ }
1103
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1104
+ const client = getClickhouseClient2({
1105
+ username: clickhouseConfig.username,
1106
+ password: clickhouseConfig.password,
1107
+ database: effectiveDatabase,
1108
+ useSSL: clickhouseConfig.useSSL ? "true" : "false",
1109
+ host: clickhouseConfig.host,
1110
+ port: clickhouseConfig.port
1111
+ });
1112
+ this._memoizedClient = client;
1113
+ this._configHash = currentConfigHash;
1114
+ return { client, config: clickhouseConfig };
823
1115
  }
824
- }
825
- },
826
- schemas: [
827
- {
828
- $ref: "#/components/schemas/DeadLetterModel"
829
- }
830
- ]
831
- };
832
- var dlqColumns = [
833
- {
834
- name: "originalRecord",
835
- data_type: "Json",
836
- primary_key: false,
837
- required: true,
838
- unique: false,
839
- default: null,
840
- annotations: [],
841
- ttl: null,
842
- codec: null,
843
- materialized: null,
844
- comment: null
845
- },
846
- {
847
- name: "errorMessage",
848
- data_type: "String",
849
- primary_key: false,
850
- required: true,
851
- unique: false,
852
- default: null,
853
- annotations: [],
854
- ttl: null,
855
- codec: null,
856
- materialized: null,
857
- comment: null
858
- },
859
- {
860
- name: "errorType",
861
- data_type: "String",
862
- primary_key: false,
863
- required: true,
864
- unique: false,
865
- default: null,
866
- annotations: [],
867
- ttl: null,
868
- codec: null,
869
- materialized: null,
870
- comment: null
871
- },
872
- {
873
- name: "failedAt",
874
- data_type: "DateTime",
875
- primary_key: false,
876
- required: true,
877
- unique: false,
878
- default: null,
879
- annotations: [],
880
- ttl: null,
881
- codec: null,
882
- materialized: null,
883
- comment: null
884
- },
885
- {
886
- name: "source",
887
- data_type: "String",
888
- primary_key: false,
889
- required: true,
890
- unique: false,
891
- default: null,
892
- annotations: [],
893
- ttl: null,
894
- codec: null,
895
- materialized: null,
896
- comment: null
897
- }
898
- ];
899
-
900
- // src/dmv2/sdk/olapTable.ts
901
- var import_node_stream = require("stream");
902
- var import_node_crypto2 = require("crypto");
903
- var OlapTable = class extends TypedBase {
904
- name;
905
- /** @internal */
906
- kind = "OlapTable";
907
- /** @internal Memoized ClickHouse client for reusing connections across insert calls */
908
- _memoizedClient;
909
- /** @internal Hash of the configuration used to create the memoized client */
910
- _configHash;
911
- /** @internal Cached table name to avoid repeated generation */
912
- _cachedTableName;
913
- constructor(name, config, schema, columns, validators) {
914
- const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
915
- const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
916
- const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
917
- if (hasFields && hasExpr) {
918
- throw new Error(
919
- `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
920
- );
921
- }
922
- const hasCluster = typeof resolvedConfig.cluster === "string";
923
- const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
924
- const hasReplicaName = typeof resolvedConfig.replicaName === "string";
925
- if (hasCluster && (hasKeeperPath || hasReplicaName)) {
926
- throw new Error(
927
- `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
928
- );
929
- }
930
- super(name, resolvedConfig, schema, columns, validators);
931
- this.name = name;
932
- const tables = getMooseInternal().tables;
933
- const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
934
- if (!isClientOnlyMode() && tables.has(registryKey)) {
935
- throw new Error(
936
- `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
937
- );
938
- }
939
- tables.set(registryKey, this);
940
- }
941
- /**
942
- * Generates the versioned table name following Moose's naming convention
943
- * Format: {tableName}_{version_with_dots_replaced_by_underscores}
944
- */
945
- generateTableName() {
946
- if (this._cachedTableName) {
947
- return this._cachedTableName;
948
- }
949
- const tableVersion = this.config.version;
950
- if (!tableVersion) {
951
- this._cachedTableName = this.name;
952
- } else {
953
- const versionSuffix = tableVersion.replace(/\./g, "_");
954
- this._cachedTableName = `${this.name}_${versionSuffix}`;
955
- }
956
- return this._cachedTableName;
957
- }
958
- /**
959
- * Creates a fast hash of the ClickHouse configuration.
960
- * Uses crypto.createHash for better performance than JSON.stringify.
961
- *
962
- * @private
963
- */
964
- createConfigHash(clickhouseConfig) {
965
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
966
- const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
967
- return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
968
- }
969
- /**
970
- * Gets or creates a memoized ClickHouse client.
971
- * The client is cached and reused across multiple insert calls for better performance.
972
- * If the configuration changes, a new client will be created.
973
- *
974
- * @private
975
- */
976
- async getMemoizedClient() {
977
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
978
- const configRegistry = globalThis._mooseConfigRegistry;
979
- const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
980
- const clickhouseConfig = await configRegistry.getClickHouseConfig();
981
- const currentConfigHash = this.createConfigHash(clickhouseConfig);
982
- if (this._memoizedClient && this._configHash === currentConfigHash) {
983
- return { client: this._memoizedClient, config: clickhouseConfig };
984
- }
985
- if (this._memoizedClient && this._configHash !== currentConfigHash) {
986
- try {
987
- await this._memoizedClient.close();
988
- } catch (error) {
1116
+ /**
1117
+ * Closes the memoized ClickHouse client if it exists.
1118
+ * This is useful for cleaning up connections when the table instance is no longer needed.
1119
+ * The client will be automatically recreated on the next insert call if needed.
1120
+ */
1121
+ async closeClient() {
1122
+ if (this._memoizedClient) {
1123
+ try {
1124
+ await this._memoizedClient.close();
1125
+ } catch (error) {
1126
+ } finally {
1127
+ this._memoizedClient = void 0;
1128
+ this._configHash = void 0;
1129
+ }
1130
+ }
989
1131
  }
990
- }
991
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
992
- const client = getClickhouseClient2({
993
- username: clickhouseConfig.username,
994
- password: clickhouseConfig.password,
995
- database: effectiveDatabase,
996
- useSSL: clickhouseConfig.useSSL ? "true" : "false",
997
- host: clickhouseConfig.host,
998
- port: clickhouseConfig.port
999
- });
1000
- this._memoizedClient = client;
1001
- this._configHash = currentConfigHash;
1002
- return { client, config: clickhouseConfig };
1003
- }
1004
- /**
1005
- * Closes the memoized ClickHouse client if it exists.
1006
- * This is useful for cleaning up connections when the table instance is no longer needed.
1007
- * The client will be automatically recreated on the next insert call if needed.
1008
- */
1009
- async closeClient() {
1010
- if (this._memoizedClient) {
1011
- try {
1012
- await this._memoizedClient.close();
1013
- } catch (error) {
1014
- } finally {
1015
- this._memoizedClient = void 0;
1016
- this._configHash = void 0;
1132
+ /**
1133
+ * Validates a single record using typia's comprehensive type checking.
1134
+ * This provides the most accurate validation as it uses the exact TypeScript type information.
1135
+ *
1136
+ * @param record The record to validate
1137
+ * @returns Validation result with detailed error information
1138
+ */
1139
+ validateRecord(record) {
1140
+ if (this.validators?.validate) {
1141
+ try {
1142
+ const result = this.validators.validate(record);
1143
+ return {
1144
+ success: result.success,
1145
+ data: result.data,
1146
+ errors: result.errors?.map(
1147
+ (err) => typeof err === "string" ? err : JSON.stringify(err)
1148
+ )
1149
+ };
1150
+ } catch (error) {
1151
+ return {
1152
+ success: false,
1153
+ errors: [error instanceof Error ? error.message : String(error)]
1154
+ };
1155
+ }
1156
+ }
1157
+ throw new Error("No typia validator found");
1017
1158
  }
1018
- }
1019
- }
1020
- /**
1021
- * Validates a single record using typia's comprehensive type checking.
1022
- * This provides the most accurate validation as it uses the exact TypeScript type information.
1023
- *
1024
- * @param record The record to validate
1025
- * @returns Validation result with detailed error information
1026
- */
1027
- validateRecord(record) {
1028
- if (this.validators?.validate) {
1029
- try {
1030
- const result = this.validators.validate(record);
1031
- return {
1032
- success: result.success,
1033
- data: result.data,
1034
- errors: result.errors?.map(
1035
- (err) => typeof err === "string" ? err : JSON.stringify(err)
1036
- )
1037
- };
1038
- } catch (error) {
1039
- return {
1040
- success: false,
1041
- errors: [error instanceof Error ? error.message : String(error)]
1042
- };
1159
+ /**
1160
+ * Type guard function using typia's is() function.
1161
+ * Provides compile-time type narrowing for TypeScript.
1162
+ *
1163
+ * @param record The record to check
1164
+ * @returns True if record matches type T, with type narrowing
1165
+ */
1166
+ isValidRecord(record) {
1167
+ if (this.validators?.is) {
1168
+ return this.validators.is(record);
1169
+ }
1170
+ throw new Error("No typia validator found");
1043
1171
  }
1044
- }
1045
- throw new Error("No typia validator found");
1046
- }
1047
- /**
1048
- * Type guard function using typia's is() function.
1049
- * Provides compile-time type narrowing for TypeScript.
1050
- *
1051
- * @param record The record to check
1052
- * @returns True if record matches type T, with type narrowing
1053
- */
1054
- isValidRecord(record) {
1055
- if (this.validators?.is) {
1056
- return this.validators.is(record);
1057
- }
1058
- throw new Error("No typia validator found");
1059
- }
1060
- /**
1061
- * Assert that a record matches type T, throwing detailed errors if not.
1062
- * Uses typia's assert() function for the most detailed error reporting.
1063
- *
1064
- * @param record The record to assert
1065
- * @returns The validated and typed record
1066
- * @throws Detailed validation error if record doesn't match type T
1067
- */
1068
- assertValidRecord(record) {
1069
- if (this.validators?.assert) {
1070
- return this.validators.assert(record);
1071
- }
1072
- throw new Error("No typia validator found");
1073
- }
1074
- /**
1075
- * Validates an array of records with comprehensive error reporting.
1076
- * Uses the most appropriate validation method available (typia or basic).
1077
- *
1078
- * @param data Array of records to validate
1079
- * @returns Detailed validation results
1080
- */
1081
- async validateRecords(data) {
1082
- const valid = [];
1083
- const invalid = [];
1084
- valid.length = 0;
1085
- invalid.length = 0;
1086
- const dataLength = data.length;
1087
- for (let i = 0; i < dataLength; i++) {
1088
- const record = data[i];
1089
- try {
1090
- if (this.isValidRecord(record)) {
1091
- valid.push(this.mapToClickhouseRecord(record));
1092
- } else {
1093
- const result = this.validateRecord(record);
1094
- if (result.success) {
1095
- valid.push(this.mapToClickhouseRecord(record));
1096
- } else {
1172
+ /**
1173
+ * Assert that a record matches type T, throwing detailed errors if not.
1174
+ * Uses typia's assert() function for the most detailed error reporting.
1175
+ *
1176
+ * @param record The record to assert
1177
+ * @returns The validated and typed record
1178
+ * @throws Detailed validation error if record doesn't match type T
1179
+ */
1180
+ assertValidRecord(record) {
1181
+ if (this.validators?.assert) {
1182
+ return this.validators.assert(record);
1183
+ }
1184
+ throw new Error("No typia validator found");
1185
+ }
1186
+ /**
1187
+ * Validates an array of records with comprehensive error reporting.
1188
+ * Uses the most appropriate validation method available (typia or basic).
1189
+ *
1190
+ * @param data Array of records to validate
1191
+ * @returns Detailed validation results
1192
+ */
1193
+ async validateRecords(data) {
1194
+ const valid = [];
1195
+ const invalid = [];
1196
+ valid.length = 0;
1197
+ invalid.length = 0;
1198
+ const dataLength = data.length;
1199
+ for (let i = 0; i < dataLength; i++) {
1200
+ const record = data[i];
1201
+ try {
1202
+ if (this.isValidRecord(record)) {
1203
+ valid.push(this.mapToClickhouseRecord(record));
1204
+ } else {
1205
+ const result = this.validateRecord(record);
1206
+ if (result.success) {
1207
+ valid.push(this.mapToClickhouseRecord(record));
1208
+ } else {
1209
+ invalid.push({
1210
+ record,
1211
+ error: result.errors?.join(", ") || "Validation failed",
1212
+ index: i,
1213
+ path: "root"
1214
+ });
1215
+ }
1216
+ }
1217
+ } catch (error) {
1097
1218
  invalid.push({
1098
1219
  record,
1099
- error: result.errors?.join(", ") || "Validation failed",
1220
+ error: error instanceof Error ? error.message : String(error),
1100
1221
  index: i,
1101
1222
  path: "root"
1102
1223
  });
1103
1224
  }
1104
1225
  }
1105
- } catch (error) {
1106
- invalid.push({
1107
- record,
1108
- error: error instanceof Error ? error.message : String(error),
1109
- index: i,
1110
- path: "root"
1111
- });
1226
+ return {
1227
+ valid,
1228
+ invalid,
1229
+ total: dataLength
1230
+ };
1112
1231
  }
1113
- }
1114
- return {
1115
- valid,
1116
- invalid,
1117
- total: dataLength
1118
- };
1119
- }
1120
- /**
1121
- * Optimized batch retry that minimizes individual insert operations.
1122
- * Groups records into smaller batches to reduce round trips while still isolating failures.
1123
- *
1124
- * @private
1125
- */
1126
- async retryIndividualRecords(client, tableName, records) {
1127
- const successful = [];
1128
- const failed = [];
1129
- const RETRY_BATCH_SIZE = 10;
1130
- const totalRecords = records.length;
1131
- for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
1132
- const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
1133
- const batch = records.slice(i, batchEnd);
1134
- try {
1135
- await client.insert({
1136
- table: quoteIdentifier(tableName),
1137
- values: batch,
1138
- format: "JSONEachRow",
1139
- clickhouse_settings: {
1140
- date_time_input_format: "best_effort",
1141
- // Add performance settings for retries
1142
- max_insert_block_size: RETRY_BATCH_SIZE,
1143
- max_block_size: RETRY_BATCH_SIZE
1144
- }
1145
- });
1146
- successful.push(...batch);
1147
- } catch (batchError) {
1148
- for (let j = 0; j < batch.length; j++) {
1149
- const record = batch[j];
1232
+ /**
1233
+ * Optimized batch retry that minimizes individual insert operations.
1234
+ * Groups records into smaller batches to reduce round trips while still isolating failures.
1235
+ *
1236
+ * @private
1237
+ */
1238
+ async retryIndividualRecords(client, tableName, records) {
1239
+ const successful = [];
1240
+ const failed = [];
1241
+ const RETRY_BATCH_SIZE = 10;
1242
+ const totalRecords = records.length;
1243
+ for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
1244
+ const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
1245
+ const batch = records.slice(i, batchEnd);
1150
1246
  try {
1151
1247
  await client.insert({
1152
1248
  table: quoteIdentifier(tableName),
1153
- values: [record],
1249
+ values: batch,
1154
1250
  format: "JSONEachRow",
1155
1251
  clickhouse_settings: {
1156
- date_time_input_format: "best_effort"
1252
+ date_time_input_format: "best_effort",
1253
+ // Add performance settings for retries
1254
+ max_insert_block_size: RETRY_BATCH_SIZE,
1255
+ max_block_size: RETRY_BATCH_SIZE
1157
1256
  }
1158
1257
  });
1159
- successful.push(record);
1160
- } catch (error) {
1161
- failed.push({
1162
- record,
1163
- error: error instanceof Error ? error.message : String(error),
1164
- index: i + j
1165
- });
1258
+ successful.push(...batch);
1259
+ } catch (batchError) {
1260
+ for (let j = 0; j < batch.length; j++) {
1261
+ const record = batch[j];
1262
+ try {
1263
+ await client.insert({
1264
+ table: quoteIdentifier(tableName),
1265
+ values: [record],
1266
+ format: "JSONEachRow",
1267
+ clickhouse_settings: {
1268
+ date_time_input_format: "best_effort"
1269
+ }
1270
+ });
1271
+ successful.push(record);
1272
+ } catch (error) {
1273
+ failed.push({
1274
+ record,
1275
+ error: error instanceof Error ? error.message : String(error),
1276
+ index: i + j
1277
+ });
1278
+ }
1279
+ }
1280
+ }
1281
+ }
1282
+ return { successful, failed };
1283
+ }
1284
+ /**
1285
+ * Validates input parameters and strategy compatibility
1286
+ * @private
1287
+ */
1288
+ validateInsertParameters(data, options) {
1289
+ const isStream = data instanceof import_node_stream.Readable;
1290
+ const strategy = options?.strategy || "fail-fast";
1291
+ const shouldValidate = options?.validate !== false;
1292
+ if (isStream && strategy === "isolate") {
1293
+ throw new Error(
1294
+ "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
1295
+ );
1296
+ }
1297
+ if (isStream && shouldValidate) {
1298
+ console.warn(
1299
+ "Validation is not supported with stream input. Validation will be skipped."
1300
+ );
1301
+ }
1302
+ return { isStream, strategy, shouldValidate };
1303
+ }
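// ---- editor's sketch (illustrative; not part of the package) ----
// How the resolved options behave at the call site; `userTable`, `rows`, and
// `dataStream` are assumed from the insert() JSDoc example further below.
await userTable.insert(rows);                               // defaults: 'fail-fast' + validation
await userTable.insert(rows, { strategy: "discard" });      // drop bad rows, subject to thresholds
await userTable.insert(rows, { strategy: "isolate" });      // retry to pinpoint bad rows
await userTable.insert(dataStream, { strategy: "isolate" }); // throws: isolate + stream unsupported
// ------------------------------------------------------------------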
1304
+ /**
1305
+ * Handles early return cases for empty data
1306
+ * @private
1307
+ */
1308
+ handleEmptyData(data, isStream) {
1309
+ if (isStream && !data) {
1310
+ return {
1311
+ successful: 0,
1312
+ failed: 0,
1313
+ total: 0
1314
+ };
1315
+ }
1316
+ if (!isStream && (!data || data.length === 0)) {
1317
+ return {
1318
+ successful: 0,
1319
+ failed: 0,
1320
+ total: 0
1321
+ };
1322
+ }
1323
+ return null;
1324
+ }
1325
+ /**
1326
+ * Performs pre-insertion validation for array data
1327
+ * @private
1328
+ */
1329
+ async performPreInsertionValidation(data, shouldValidate, strategy, options) {
1330
+ if (!shouldValidate) {
1331
+ return { validatedData: data, validationErrors: [] };
1332
+ }
1333
+ try {
1334
+ const validationResult = await this.validateRecords(data);
1335
+ const validatedData = validationResult.valid;
1336
+ const validationErrors = validationResult.invalid;
1337
+ if (validationErrors.length > 0) {
1338
+ this.handleValidationErrors(validationErrors, strategy, data, options);
1339
+ switch (strategy) {
1340
+ case "discard":
1341
+ return { validatedData, validationErrors };
1342
+ case "isolate":
1343
+ return { validatedData: data, validationErrors };
1344
+ default:
1345
+ return { validatedData, validationErrors };
1346
+ }
1347
+ }
1348
+ return { validatedData, validationErrors };
1349
+ } catch (validationError) {
1350
+ if (strategy === "fail-fast") {
1351
+ throw validationError;
1352
+ }
1353
+ console.warn("Validation error:", validationError);
1354
+ return { validatedData: data, validationErrors: [] };
1355
+ }
1356
+ }
1357
+ /**
1358
+ * Handles validation errors based on the specified strategy
1359
+ * @private
1360
+ */
1361
+ handleValidationErrors(validationErrors, strategy, data, options) {
1362
+ switch (strategy) {
1363
+ case "fail-fast":
1364
+ const firstError = validationErrors[0];
1365
+ throw new Error(
1366
+ `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1367
+ );
1368
+ case "discard":
1369
+ this.checkValidationThresholds(validationErrors, data.length, options);
1370
+ break;
1371
+ case "isolate":
1372
+ break;
1373
+ }
1374
+ }
1375
+ /**
1376
+ * Checks if validation errors exceed configured thresholds
1377
+ * @private
1378
+ */
1379
+ checkValidationThresholds(validationErrors, totalRecords, options) {
1380
+ const validationFailedCount = validationErrors.length;
1381
+ const validationFailedRatio = validationFailedCount / totalRecords;
1382
+ if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1383
+ throw new Error(
1384
+ `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1385
+ );
1386
+ }
1387
+ if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1388
+ throw new Error(
1389
+ `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1390
+ );
1391
+ }
1392
+ }
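// ---- editor's note (illustrative; not part of the package) ----
// Worked example: with 1000 records and 25 validation failures, the failure
// ratio is 25 / 1000 = 0.025. `allowErrors: 30` passes (25 <= 30), but
// `allowErrorsRatio: 0.02` throws (0.025 > 0.02); either threshold alone is
// enough to abort a 'discard' insert.
// ----------------------------------------------------------------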
1393
+ /**
1394
+ * Optimized insert options preparation with better memory management
1395
+ * @private
1396
+ */
1397
+ prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1398
+ const insertOptions = {
1399
+ table: quoteIdentifier(tableName),
1400
+ format: "JSONEachRow",
1401
+ clickhouse_settings: {
1402
+ date_time_input_format: "best_effort",
1403
+ wait_end_of_query: 1,
1404
+ // Ensure at-least-once delivery for INSERT operations
1405
+ // Performance optimizations
1406
+ max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1407
+ max_block_size: 65536,
1408
+ // Use async inserts for better performance with large datasets
1409
+ async_insert: validatedData.length > 1e3 ? 1 : 0,
1410
+ wait_for_async_insert: 1
1411
+ // For at-least-once delivery
1166
1412
  }
1413
+ };
1414
+ if (isStream) {
1415
+ insertOptions.values = data;
1416
+ } else {
1417
+ insertOptions.values = validatedData;
1418
+ }
1419
+ if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1420
+ if (options.allowErrors !== void 0) {
1421
+ insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1422
+ }
1423
+ if (options.allowErrorsRatio !== void 0) {
1424
+ insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
1425
+ }
1426
+ }
1427
+ return insertOptions;
1428
+ }
1429
+ /**
1430
+ * Creates success result for completed insertions
1431
+ * @private
1432
+ */
1433
+ createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1434
+ if (isStream) {
1435
+ return {
1436
+ successful: -1,
1437
+ // -1 indicates stream mode where count is unknown
1438
+ failed: 0,
1439
+ total: -1
1440
+ };
1441
+ }
1442
+ const insertedCount = validatedData.length;
1443
+ const totalProcessed = shouldValidate ? data.length : insertedCount;
1444
+ const result = {
1445
+ successful: insertedCount,
1446
+ failed: shouldValidate ? validationErrors.length : 0,
1447
+ total: totalProcessed
1448
+ };
1449
+ if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1450
+ result.failedRecords = validationErrors.map((ve) => ({
1451
+ record: ve.record,
1452
+ error: `Validation error: ${ve.error}`,
1453
+ index: ve.index
1454
+ }));
1455
+ }
1456
+ return result;
1457
+ }
1458
+ /**
1459
+ * Handles insertion errors based on the specified strategy
1460
+ * @private
1461
+ */
1462
+ async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1463
+ switch (strategy) {
1464
+ case "fail-fast":
1465
+ throw new Error(
1466
+ `Failed to insert data into table ${tableName}: ${batchError}`
1467
+ );
1468
+ case "discard":
1469
+ throw new Error(
1470
+ `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1471
+ );
1472
+ case "isolate":
1473
+ return await this.handleIsolateStrategy(
1474
+ batchError,
1475
+ tableName,
1476
+ data,
1477
+ validatedData,
1478
+ validationErrors,
1479
+ isStream,
1480
+ shouldValidate,
1481
+ options
1482
+ );
1483
+ default:
1484
+ throw new Error(`Unknown error strategy: ${strategy}`);
1485
+ }
1486
+ }
1487
+ /**
1488
+ * Handles the isolate strategy for insertion errors
1489
+ * @private
1490
+ */
1491
+ async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1492
+ if (isStream) {
1493
+ throw new Error(
1494
+ `Isolate strategy is not supported with stream input: ${batchError}`
1495
+ );
1496
+ }
1497
+ try {
1498
+ const { client } = await this.getMemoizedClient();
1499
+ const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1500
+ const retryData = skipValidationOnRetry ? data : validatedData;
1501
+ const { successful, failed } = await this.retryIndividualRecords(
1502
+ client,
1503
+ tableName,
1504
+ retryData
1505
+ );
1506
+ const allFailedRecords = [
1507
+ // Validation errors (if any and not skipping validation on retry)
1508
+ ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1509
+ record: ve.record,
1510
+ error: `Validation error: ${ve.error}`,
1511
+ index: ve.index
1512
+ })) : [],
1513
+ // Insertion errors
1514
+ ...failed
1515
+ ];
1516
+ this.checkInsertionThresholds(
1517
+ allFailedRecords,
1518
+ data.length,
1519
+ options
1520
+ );
1521
+ return {
1522
+ successful: successful.length,
1523
+ failed: allFailedRecords.length,
1524
+ total: data.length,
1525
+ failedRecords: allFailedRecords
1526
+ };
1527
+ } catch (isolationError) {
1528
+ throw new Error(
1529
+ `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1530
+ );
1531
+ }
1532
+ }
1533
+ /**
1534
+ * Checks if insertion errors exceed configured thresholds
1535
+ * @private
1536
+ */
1537
+ checkInsertionThresholds(failedRecords, totalRecords, options) {
1538
+ const totalFailed = failedRecords.length;
1539
+ const failedRatio = totalFailed / totalRecords;
1540
+ if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1541
+ throw new Error(
1542
+ `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1543
+ );
1544
+ }
1545
+ if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1546
+ throw new Error(
1547
+ `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1548
+ );
1549
+ }
1550
+ }
1551
+ /**
1552
+ * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1553
+ *
1554
+ * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1555
+ * - For every Nested struct (not array), it recurses into the struct.
1556
+ * - This ensures compatibility with kafka_clickhouse_sync
1557
+ *
1558
+ * @param record The input record to transform (may be deeply nested)
1559
+ * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1560
+ * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1561
+ */
1562
+ mapToClickhouseRecord(record, columns = this.columnArray) {
1563
+ const result = { ...record };
1564
+ for (const col of columns) {
1565
+ const value = record[col.name];
1566
+ const dt = col.data_type;
1567
+ if (isArrayNestedType(dt)) {
1568
+ if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1569
+ result[col.name] = value.map((item) => [
1570
+ this.mapToClickhouseRecord(item, dt.elementType.columns)
1571
+ ]);
1572
+ }
1573
+ } else if (isNestedType(dt)) {
1574
+ if (value && typeof value === "object") {
1575
+ result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1576
+ }
1577
+ }
1578
+ }
1579
+ return result;
1580
+ }
1581
+ /**
1582
+ * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1583
+ * This method establishes a direct connection to ClickHouse using the project configuration
1584
+ * and inserts the provided data into the versioned table.
1585
+ *
1586
+ * PERFORMANCE OPTIMIZATIONS:
1587
+ * - Memoized client connections with fast config hashing
1588
+ * - Single-pass validation with pre-allocated arrays
1589
+ * - Batch-optimized retry strategy (batches of 10, then individual)
1590
+ * - Optimized ClickHouse settings for large datasets
1591
+ * - Reduced memory allocations and object creation
1592
+ *
1593
+ * Uses advanced typia validation when available for comprehensive type checking,
1594
+ * with fallback to basic validation for compatibility.
1595
+ *
1596
+ * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1597
+ * If the configuration changes, a new client will be automatically created.
1598
+ *
1599
+ * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1600
+ * @param options Optional configuration for error handling, validation, and insertion behavior
1601
+ * @returns Promise resolving to detailed insertion results
1602
+ * @throws {ConfigError} When configuration cannot be read or parsed
1603
+ * @throws {ClickHouseError} When insertion fails based on the error strategy
1604
+ * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1605
+ *
1606
+ * @example
1607
+ * ```typescript
1608
+ * // Create an OlapTable instance (typia validators auto-injected)
1609
+ * const userTable = new OlapTable<User>('users');
1610
+ *
1611
+ * // Insert with comprehensive typia validation
1612
+ * const result1 = await userTable.insert([
1613
+ * { id: 1, name: 'John', email: 'john@example.com' },
1614
+ * { id: 2, name: 'Jane', email: 'jane@example.com' }
1615
+ * ]);
1616
+ *
1617
+ * // Insert data with stream input (validation not available for streams)
1618
+ * const dataStream = new Readable({
1619
+ * objectMode: true,
1620
+ * read() { // Stream implementation }
1621
+ * });
1622
+ * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1623
+ *
1624
+ * // Insert with validation disabled for performance
1625
+ * const result3 = await userTable.insert(data, { validate: false });
1626
+ *
1627
+ * // Insert with error handling strategies
1628
+ * const result4 = await userTable.insert(mixedData, {
1629
+ * strategy: 'isolate',
1630
+ * allowErrorsRatio: 0.1,
1631
+ * validate: true // Use typia validation (default)
1632
+ * });
1633
+ *
1634
+ * // Optional: Clean up connection when completely done
1635
+ * await userTable.closeClient();
1636
+ * ```
1637
+ */
1638
+ async insert(data, options) {
1639
+ const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1640
+ const emptyResult = this.handleEmptyData(data, isStream);
1641
+ if (emptyResult) {
1642
+ return emptyResult;
1643
+ }
1644
+ let validatedData = [];
1645
+ let validationErrors = [];
1646
+ if (!isStream && shouldValidate) {
1647
+ const validationResult = await this.performPreInsertionValidation(
1648
+ data,
1649
+ shouldValidate,
1650
+ strategy,
1651
+ options
1652
+ );
1653
+ validatedData = validationResult.validatedData;
1654
+ validationErrors = validationResult.validationErrors;
1655
+ } else {
1656
+ validatedData = isStream ? [] : data;
1657
+ }
1658
+ const { client } = await this.getMemoizedClient();
1659
+ const tableName = this.generateTableName();
1660
+ try {
1661
+ const insertOptions = this.prepareInsertOptions(
1662
+ tableName,
1663
+ data,
1664
+ validatedData,
1665
+ isStream,
1666
+ strategy,
1667
+ options
1668
+ );
1669
+ await client.insert(insertOptions);
1670
+ return this.createSuccessResult(
1671
+ data,
1672
+ validatedData,
1673
+ validationErrors,
1674
+ isStream,
1675
+ shouldValidate,
1676
+ strategy
1677
+ );
1678
+ } catch (batchError) {
1679
+ return await this.handleInsertionError(
1680
+ batchError,
1681
+ strategy,
1682
+ tableName,
1683
+ data,
1684
+ validatedData,
1685
+ validationErrors,
1686
+ isStream,
1687
+ shouldValidate,
1688
+ options
1689
+ );
1167
1690
  }
1168
1691
  }
1169
- }
1170
- return { successful, failed };
1171
- }
1172
- /**
1173
- * Validates input parameters and strategy compatibility
1174
- * @private
1175
- */
1176
- validateInsertParameters(data, options) {
1177
- const isStream = data instanceof import_node_stream.Readable;
1178
- const strategy = options?.strategy || "fail-fast";
1179
- const shouldValidate = options?.validate !== false;
1180
- if (isStream && strategy === "isolate") {
1181
- throw new Error(
1182
- "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
1183
- );
1184
- }
1185
- if (isStream && shouldValidate) {
1186
- console.warn(
1187
- "Validation is not supported with stream input. Validation will be skipped."
1188
- );
1189
- }
1190
- return { isStream, strategy, shouldValidate };
1191
- }
1192
- /**
1193
- * Handles early return cases for empty data
1194
- * @private
1195
- */
1196
- handleEmptyData(data, isStream) {
1197
- if (isStream && !data) {
1198
- return {
1199
- successful: 0,
1200
- failed: 0,
1201
- total: 0
1202
- };
1203
- }
1204
- if (!isStream && (!data || data.length === 0)) {
1205
- return {
1206
- successful: 0,
1207
- failed: 0,
1208
- total: 0
1209
- };
1210
- }
1211
- return null;
1692
+ // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1693
+ // were removed in ENG-856. Use direct configuration instead, e.g.:
1694
+ // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1695
+ };
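// ---- editor's sketch (illustrative; not part of the package) ----
// Migrating off a removed factory, per the note above. The old call shape is
// assumed for illustration; the replacement uses the fields named in the note.
//
// before (removed in ENG-856), signature assumed:
//   OlapTable.withReplacingMergeTree<User>(/* ... */);
// after:
interface User { id: string; updated_at: string; }
const users = new OlapTable<User>("users", {
  engine: ClickHouseEngines.ReplacingMergeTree,
  orderByFields: ["id"],
  ver: "updated_at",
});
// ------------------------------------------------------------------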
1212
1696
  }
1213
- /**
1214
- * Performs pre-insertion validation for array data
1215
- * @private
1216
- */
1217
- async performPreInsertionValidation(data, shouldValidate, strategy, options) {
1218
- if (!shouldValidate) {
1219
- return { validatedData: data, validationErrors: [] };
1220
- }
1221
- try {
1222
- const validationResult = await this.validateRecords(data);
1223
- const validatedData = validationResult.valid;
1224
- const validationErrors = validationResult.invalid;
1225
- if (validationErrors.length > 0) {
1226
- this.handleValidationErrors(validationErrors, strategy, data, options);
1227
- switch (strategy) {
1228
- case "discard":
1229
- return { validatedData, validationErrors };
1230
- case "isolate":
1231
- return { validatedData: data, validationErrors };
1232
- default:
1233
- return { validatedData, validationErrors };
1697
+ });
1698
+
1699
+ // src/dmv2/sdk/stream.ts
1700
+ function attachTypeGuard(dl, typeGuard) {
1701
+ dl.asTyped = () => typeGuard(dl.originalRecord);
1702
+ }
1703
+ var import_node_crypto3, RoutedMessage, Stream, DeadLetterQueue;
1704
+ var init_stream = __esm({
1705
+ "src/dmv2/sdk/stream.ts"() {
1706
+ "use strict";
1707
+ init_typedBase();
1708
+ init_internal();
1709
+ import_node_crypto3 = require("crypto");
1710
+ init_stackTrace();
1711
+ RoutedMessage = class {
1712
+ /** The destination stream for the message */
1713
+ destination;
1714
+ /** The message value(s) to send */
1715
+ values;
1716
+ /**
1717
+ * Creates a new routed message.
1718
+ *
1719
+ * @param destination The target stream
1720
+ * @param values The message(s) to route
1721
+ */
1722
+ constructor(destination, values) {
1723
+ this.destination = destination;
1724
+ this.values = values;
1725
+ }
1726
+ };
1727
+ Stream = class extends TypedBase {
1728
+ defaultDeadLetterQueue;
1729
+ /** @internal Memoized KafkaJS producer for reusing connections across sends */
1730
+ _memoizedProducer;
1731
+ /** @internal Hash of the configuration used to create the memoized Kafka producer */
1732
+ _kafkaConfigHash;
1733
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
1734
+ super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1735
+ const streams = getMooseInternal().streams;
1736
+ if (streams.has(name)) {
1737
+ throw new Error(`Stream with name ${name} already exists`);
1234
1738
  }
1739
+ streams.set(name, this);
1740
+ this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1235
1741
  }
1236
- return { validatedData, validationErrors };
1237
- } catch (validationError) {
1238
- if (strategy === "fail-fast") {
1239
- throw validationError;
1742
+ /**
1743
+ * Internal map storing transformation configurations.
1744
+ * Maps destination stream names to arrays of transformation functions and their configs.
1745
+ *
1746
+ * @internal
1747
+ */
1748
+ _transformations = /* @__PURE__ */ new Map();
1749
+ /**
1750
+ * Internal function for multi-stream transformations.
1751
+ * Allows a single transformation to route messages to multiple destinations.
1752
+ *
1753
+ * @internal
1754
+ */
1755
+ _multipleTransformations;
1756
+ /**
1757
+ * Internal array storing consumer configurations.
1758
+ *
1759
+ * @internal
1760
+ */
1761
+ _consumers = new Array();
1762
+ /**
1763
+ * Builds the full Kafka topic name including optional namespace and version suffix.
1764
+ * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
1765
+ */
1766
+ buildFullTopicName(namespace) {
1767
+ const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1768
+ const base = `${this.name}${versionSuffix}`;
1769
+ return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1240
1770
  }
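// ---- editor's note (illustrative; not part of the package) ----
// Resulting topic names for a stream named "events":
//   version "1.2.0", no namespace     -> "events_1_2_0"
//   no version, namespace "prod"      -> "prod.events"
//   version "1.2.0", namespace "prod" -> "prod.events_1_2_0"
// ----------------------------------------------------------------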
1241
- console.warn("Validation error:", validationError);
1242
- return { validatedData: data, validationErrors: [] };
1243
- }
1244
- }
1245
- /**
1246
- * Handles validation errors based on the specified strategy
1247
- * @private
1248
- */
1249
- handleValidationErrors(validationErrors, strategy, data, options) {
1250
- switch (strategy) {
1251
- case "fail-fast":
1252
- const firstError = validationErrors[0];
1253
- throw new Error(
1254
- `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1771
+ /**
1772
+ * Creates a fast hash string from relevant Kafka configuration fields.
1773
+ */
1774
+ createConfigHash(kafkaConfig) {
1775
+ const configString = [
1776
+ kafkaConfig.broker,
1777
+ kafkaConfig.messageTimeoutMs,
1778
+ kafkaConfig.saslUsername,
1779
+ kafkaConfig.saslPassword,
1780
+ kafkaConfig.saslMechanism,
1781
+ kafkaConfig.securityProtocol,
1782
+ kafkaConfig.namespace
1783
+ ].join(":");
1784
+ return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1785
+ }
1786
+ /**
1787
+ * Gets or creates a memoized KafkaJS producer using runtime configuration.
1788
+ */
1789
+ async getMemoizedProducer() {
1790
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1791
+ const configRegistry = globalThis._mooseConfigRegistry;
1792
+ const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1793
+ const kafkaConfig = await configRegistry.getKafkaConfig();
1794
+ const currentHash = this.createConfigHash(kafkaConfig);
1795
+ if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1796
+ return { producer: this._memoizedProducer, kafkaConfig };
1797
+ }
1798
+ if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1799
+ try {
1800
+ await this._memoizedProducer.disconnect();
1801
+ } catch {
1802
+ }
1803
+ this._memoizedProducer = void 0;
1804
+ }
1805
+ const clientId = `moose-sdk-stream-${this.name}`;
1806
+ const logger = {
1807
+ logPrefix: clientId,
1808
+ log: (message) => {
1809
+ console.log(`${clientId}: ${message}`);
1810
+ },
1811
+ error: (message) => {
1812
+ console.error(`${clientId}: ${message}`);
1813
+ },
1814
+ warn: (message) => {
1815
+ console.warn(`${clientId}: ${message}`);
1816
+ }
1817
+ };
1818
+ const producer = await getKafkaProducer2(
1819
+ {
1820
+ clientId,
1821
+ broker: kafkaConfig.broker,
1822
+ securityProtocol: kafkaConfig.securityProtocol,
1823
+ saslUsername: kafkaConfig.saslUsername,
1824
+ saslPassword: kafkaConfig.saslPassword,
1825
+ saslMechanism: kafkaConfig.saslMechanism
1826
+ },
1827
+ logger
1255
1828
  );
1256
- case "discard":
1257
- this.checkValidationThresholds(validationErrors, data.length, options);
1258
- break;
1259
- case "isolate":
1260
- break;
1261
- }
1262
- }
1263
- /**
1264
- * Checks if validation errors exceed configured thresholds
1265
- * @private
1266
- */
1267
- checkValidationThresholds(validationErrors, totalRecords, options) {
1268
- const validationFailedCount = validationErrors.length;
1269
- const validationFailedRatio = validationFailedCount / totalRecords;
1270
- if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1271
- throw new Error(
1272
- `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1273
- );
1274
- }
1275
- if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1276
- throw new Error(
1277
- `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1278
- );
1279
- }
1280
- }
1281
- /**
1282
- * Optimized insert options preparation with better memory management
1283
- * @private
1284
- */
1285
- prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1286
- const insertOptions = {
1287
- table: quoteIdentifier(tableName),
1288
- format: "JSONEachRow",
1289
- clickhouse_settings: {
1290
- date_time_input_format: "best_effort",
1291
- wait_end_of_query: 1,
1292
- // Ensure at least once delivery for INSERT operations
1293
- // Performance optimizations
1294
- max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1295
- max_block_size: 65536,
1296
- // Use async inserts for better performance with large datasets
1297
- async_insert: validatedData.length > 1e3 ? 1 : 0,
1298
- wait_for_async_insert: 1
1299
- // For at least once delivery
1829
+ this._memoizedProducer = producer;
1830
+ this._kafkaConfigHash = currentHash;
1831
+ return { producer, kafkaConfig };
1300
1832
  }
1301
- };
1302
- if (isStream) {
1303
- insertOptions.values = data;
1304
- } else {
1305
- insertOptions.values = validatedData;
1306
- }
1307
- if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1308
- if (options.allowErrors !== void 0) {
1309
- insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1833
+ /**
1834
+ * Closes the memoized Kafka producer if it exists.
1835
+ */
1836
+ async closeProducer() {
1837
+ if (this._memoizedProducer) {
1838
+ try {
1839
+ await this._memoizedProducer.disconnect();
1840
+ } catch {
1841
+ } finally {
1842
+ this._memoizedProducer = void 0;
1843
+ this._kafkaConfigHash = void 0;
1844
+ }
1845
+ }
1310
1846
  }
1311
- if (options.allowErrorsRatio !== void 0) {
1312
- insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
1847
+ /**
1848
+ * Sends one or more records to this stream's Kafka topic.
1849
+ * Values are JSON-serialized as message values.
1850
+ */
1851
+ async send(values) {
1852
+ const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1853
+ if (flat.length === 0) return;
1854
+ const { producer, kafkaConfig } = await this.getMemoizedProducer();
1855
+ const topic = this.buildFullTopicName(kafkaConfig.namespace);
1856
+ const sr = this.config.schemaConfig;
1857
+ if (sr && sr.kind === "JSON") {
1858
+ const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1859
+ if (!schemaRegistryUrl) {
1860
+ throw new Error("Schema Registry URL not configured");
1861
+ }
1862
+ const {
1863
+ default: { SchemaRegistry }
1864
+ } = await import("@kafkajs/confluent-schema-registry");
1865
+ const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1866
+ let schemaId = void 0;
1867
+ if ("id" in sr.reference) {
1868
+ schemaId = sr.reference.id;
1869
+ } else if ("subjectLatest" in sr.reference) {
1870
+ schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1871
+ } else if ("subject" in sr.reference) {
1872
+ schemaId = await registry.getRegistryId(
1873
+ sr.reference.subject,
1874
+ sr.reference.version
1875
+ );
1876
+ }
1877
+ if (schemaId === void 0) {
1878
+ throw new Error("Malformed schema reference.");
1879
+ }
1880
+ const encoded = await Promise.all(
1881
+ flat.map(
1882
+ (v) => registry.encode(schemaId, v)
1883
+ )
1884
+ );
1885
+ await producer.send({
1886
+ topic,
1887
+ messages: encoded.map((value) => ({ value }))
1888
+ });
1889
+ return;
1890
+ } else if (sr !== void 0) {
1891
+ throw new Error("Currently only JSON Schema is supported.");
1892
+ }
1893
+ await producer.send({
1894
+ topic,
1895
+ messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1896
+ });
1313
1897
  }
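// ---- editor's sketch (illustrative; not part of the package) ----
// Sending with and without Schema Registry. The bare-name constructor mirrors
// the OlapTable example elsewhere in this file; `Event` is a hypothetical
// message type.
interface Event { id: number; at?: string; }
const events = new Stream<Event>("events");
await events.send({ id: 1, at: new Date().toISOString() }); // plain JSON value
await events.send([{ id: 2 }, { id: 3 }]);                  // batched into one produce call
// With config.schemaConfig = { kind: "JSON", reference: { subjectLatest: "events-value" } },
// each value is instead encoded via @kafkajs/confluent-schema-registry before producing.
// ------------------------------------------------------------------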
1314
- }
1315
- return insertOptions;
1316
- }
1317
- /**
1318
- * Creates success result for completed insertions
1319
- * @private
1320
- */
1321
- createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1322
- if (isStream) {
1323
- return {
1324
- successful: -1,
1325
- // -1 indicates stream mode where count is unknown
1326
- failed: 0,
1327
- total: -1
1328
- };
1329
- }
1330
- const insertedCount = validatedData.length;
1331
- const totalProcessed = shouldValidate ? data.length : insertedCount;
1332
- const result = {
1333
- successful: insertedCount,
1334
- failed: shouldValidate ? validationErrors.length : 0,
1335
- total: totalProcessed
1336
- };
1337
- if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1338
- result.failedRecords = validationErrors.map((ve) => ({
1339
- record: ve.record,
1340
- error: `Validation error: ${ve.error}`,
1341
- index: ve.index
1342
- }));
1343
- }
1344
- return result;
1345
- }
1346
- /**
1347
- * Handles insertion errors based on the specified strategy
1348
- * @private
1349
- */
1350
- async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1351
- switch (strategy) {
1352
- case "fail-fast":
1353
- throw new Error(
1354
- `Failed to insert data into table ${tableName}: ${batchError}`
1355
- );
1356
- case "discard":
1357
- throw new Error(
1358
- `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1359
- );
1360
- case "isolate":
1361
- return await this.handleIsolateStrategy(
1362
- batchError,
1363
- tableName,
1364
- data,
1365
- validatedData,
1366
- validationErrors,
1367
- isStream,
1368
- shouldValidate,
1369
- options
1370
- );
1371
- default:
1372
- throw new Error(`Unknown error strategy: ${strategy}`);
1373
- }
1374
- }
1375
- /**
1376
- * Handles the isolate strategy for insertion errors
1377
- * @private
1378
- */
1379
- async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1380
- if (isStream) {
1381
- throw new Error(
1382
- `Isolate strategy is not supported with stream input: ${batchError}`
1383
- );
1384
- }
1385
- try {
1386
- const { client } = await this.getMemoizedClient();
1387
- const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1388
- const retryData = skipValidationOnRetry ? data : validatedData;
1389
- const { successful, failed } = await this.retryIndividualRecords(
1390
- client,
1391
- tableName,
1392
- retryData
1393
- );
1394
- const allFailedRecords = [
1395
- // Validation errors (if any and not skipping validation on retry)
1396
- ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1397
- record: ve.record,
1398
- error: `Validation error: ${ve.error}`,
1399
- index: ve.index
1400
- })) : [],
1401
- // Insertion errors
1402
- ...failed
1403
- ];
1404
- this.checkInsertionThresholds(
1405
- allFailedRecords,
1406
- data.length,
1407
- options
1408
- );
1409
- return {
1410
- successful: successful.length,
1411
- failed: allFailedRecords.length,
1412
- total: data.length,
1413
- failedRecords: allFailedRecords
1414
- };
1415
- } catch (isolationError) {
1416
- throw new Error(
1417
- `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1418
- );
1419
- }
1420
- }
1421
- /**
1422
- * Checks if insertion errors exceed configured thresholds
1423
- * @private
1424
- */
1425
- checkInsertionThresholds(failedRecords, totalRecords, options) {
1426
- const totalFailed = failedRecords.length;
1427
- const failedRatio = totalFailed / totalRecords;
1428
- if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1429
- throw new Error(
1430
- `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1431
- );
1432
- }
1433
- if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1434
- throw new Error(
1435
- `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1436
- );
1437
- }
1438
- }
1439
- /**
1440
- * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1441
- *
1442
- * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1443
- * - For every Nested struct (not array), it recurses into the struct.
1444
- * - This ensures compatibility with kafka_clickhouse_sync
1445
- *
1446
- * @param record The input record to transform (may be deeply nested)
1447
- * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1448
- * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1449
- */
1450
- mapToClickhouseRecord(record, columns = this.columnArray) {
1451
- const result = { ...record };
1452
- for (const col of columns) {
1453
- const value = record[col.name];
1454
- const dt = col.data_type;
1455
- if (isArrayNestedType(dt)) {
1456
- if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1457
- result[col.name] = value.map((item) => [
1458
- this.mapToClickhouseRecord(item, dt.elementType.columns)
1459
- ]);
1898
+ /**
1899
+ * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1900
+ * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1901
+ *
1902
+ * @template U The data type of the messages in the destination stream.
1903
+ * @param destination The destination stream for the transformed messages.
1904
+ * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1905
+ * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1906
+ * @param config Optional configuration for this specific transformation step, like a version.
1907
+ */
1908
+ addTransform(destination, transformation, config) {
1909
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1910
+ const transformConfig = {
1911
+ ...config ?? {},
1912
+ sourceFile
1913
+ };
1914
+ if (transformConfig.deadLetterQueue === void 0) {
1915
+ transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1460
1916
  }
1461
- } else if (isNestedType(dt)) {
1462
- if (value && typeof value === "object") {
1463
- result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1917
+ if (this._transformations.has(destination.name)) {
1918
+ const existingTransforms = this._transformations.get(destination.name);
1919
+ const hasVersion = existingTransforms.some(
1920
+ ([_, __, cfg]) => cfg.version === transformConfig.version
1921
+ );
1922
+ if (!hasVersion) {
1923
+ existingTransforms.push([destination, transformation, transformConfig]);
1924
+ }
1925
+ } else {
1926
+ this._transformations.set(destination.name, [
1927
+ [destination, transformation, transformConfig]
1928
+ ]);
1464
1929
  }
1465
1930
  }
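// ---- editor's sketch (illustrative; not part of the package) ----
// Transforms to the same destination must carry distinct `version`s; a
// duplicate version is silently dropped. The streams and the `normalize`
// helpers are hypothetical.
raw.addTransform(clean, (r) => normalize(r), { version: "1.0.0" });
raw.addTransform(clean, (r) => normalizeV2(r), { version: "2.0.0" }); // kept: new version
raw.addTransform(clean, (r) => normalizeV2(r), { version: "2.0.0" }); // ignored: duplicate
// ------------------------------------------------------------------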
1466
- }
1467
- return result;
1468
- }
1469
- /**
1470
- * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1471
- * This method establishes a direct connection to ClickHouse using the project configuration
1472
- * and inserts the provided data into the versioned table.
1473
- *
1474
- * PERFORMANCE OPTIMIZATIONS:
1475
- * - Memoized client connections with fast config hashing
1476
- * - Single-pass validation with pre-allocated arrays
1477
- * - Batch-optimized retry strategy (batches of 10, then individual)
1478
- * - Optimized ClickHouse settings for large datasets
1479
- * - Reduced memory allocations and object creation
1480
- *
1481
- * Uses advanced typia validation when available for comprehensive type checking,
1482
- * with fallback to basic validation for compatibility.
1483
- *
1484
- * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1485
- * If the configuration changes, a new client will be automatically created.
1486
- *
1487
- * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1488
- * @param options Optional configuration for error handling, validation, and insertion behavior
1489
- * @returns Promise resolving to detailed insertion results
1490
- * @throws {ConfigError} When configuration cannot be read or parsed
1491
- * @throws {ClickHouseError} When insertion fails based on the error strategy
1492
- * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1493
- *
1494
- * @example
1495
- * ```typescript
1496
- * // Create an OlapTable instance (typia validators auto-injected)
1497
- * const userTable = new OlapTable<User>('users');
1498
- *
1499
- * // Insert with comprehensive typia validation
1500
- * const result1 = await userTable.insert([
1501
- * { id: 1, name: 'John', email: 'john@example.com' },
1502
- * { id: 2, name: 'Jane', email: 'jane@example.com' }
1503
- * ]);
1504
- *
1505
- * // Insert data with stream input (validation not available for streams)
1506
- * const dataStream = new Readable({
1507
- * objectMode: true,
1508
- * read() { // Stream implementation }
1509
- * });
1510
- * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1511
- *
1512
- * // Insert with validation disabled for performance
1513
- * const result3 = await userTable.insert(data, { validate: false });
1514
- *
1515
- * // Insert with error handling strategies
1516
- * const result4 = await userTable.insert(mixedData, {
1517
- * strategy: 'isolate',
1518
- * allowErrorsRatio: 0.1,
1519
- * validate: true // Use typia validation (default)
1520
- * });
1521
- *
1522
- * // Optional: Clean up connection when completely done
1523
- * await userTable.closeClient();
1524
- * ```
1525
- */
1526
- async insert(data, options) {
1527
- const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1528
- const emptyResult = this.handleEmptyData(data, isStream);
1529
- if (emptyResult) {
1530
- return emptyResult;
1531
- }
1532
- let validatedData = [];
1533
- let validationErrors = [];
1534
- if (!isStream && shouldValidate) {
1535
- const validationResult = await this.performPreInsertionValidation(
1536
- data,
1537
- shouldValidate,
1538
- strategy,
1539
- options
1540
- );
1541
- validatedData = validationResult.validatedData;
1542
- validationErrors = validationResult.validationErrors;
1543
- } else {
1544
- validatedData = isStream ? [] : data;
1545
- }
1546
- const { client } = await this.getMemoizedClient();
1547
- const tableName = this.generateTableName();
1548
- try {
1549
- const insertOptions = this.prepareInsertOptions(
1550
- tableName,
1551
- data,
1552
- validatedData,
1553
- isStream,
1554
- strategy,
1555
- options
1556
- );
1557
- await client.insert(insertOptions);
1558
- return this.createSuccessResult(
1559
- data,
1560
- validatedData,
1561
- validationErrors,
1562
- isStream,
1563
- shouldValidate,
1564
- strategy
1565
- );
1566
- } catch (batchError) {
1567
- return await this.handleInsertionError(
1568
- batchError,
1569
- strategy,
1570
- tableName,
1571
- data,
1572
- validatedData,
1573
- validationErrors,
1574
- isStream,
1575
- shouldValidate,
1576
- options
1577
- );
1578
- }
1579
- }
1580
- // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1581
- // were removed in ENG-856. Use direct configuration instead, e.g.:
1582
- // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1583
- };
1584
-
1585
- // src/dmv2/sdk/stream.ts
1586
- var import_node_crypto3 = require("crypto");
1587
- var RoutedMessage = class {
1588
- /** The destination stream for the message */
1589
- destination;
1590
- /** The message value(s) to send */
1591
- values;
1592
- /**
1593
- * Creates a new routed message.
1594
- *
1595
- * @param destination The target stream
1596
- * @param values The message(s) to route
1597
- */
1598
- constructor(destination, values) {
1599
- this.destination = destination;
1600
- this.values = values;
1601
- }
1602
- };
1603
- var Stream = class extends TypedBase {
1604
- defaultDeadLetterQueue;
1605
- /** @internal Memoized KafkaJS producer for reusing connections across sends */
1606
- _memoizedProducer;
1607
- /** @internal Hash of the configuration used to create the memoized Kafka producer */
1608
- _kafkaConfigHash;
1609
- constructor(name, config, schema, columns, validators, allowExtraFields) {
1610
- super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1611
- const streams = getMooseInternal().streams;
1612
- if (streams.has(name)) {
1613
- throw new Error(`Stream with name ${name} already exists`);
1614
- }
1615
- streams.set(name, this);
1616
- this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1617
- }
1618
- /**
1619
- * Internal map storing transformation configurations.
1620
- * Maps destination stream names to arrays of transformation functions and their configs.
1621
- *
1622
- * @internal
1623
- */
1624
- _transformations = /* @__PURE__ */ new Map();
1625
- /**
1626
- * Internal function for multi-stream transformations.
1627
- * Allows a single transformation to route messages to multiple destinations.
1628
- *
1629
- * @internal
1630
- */
1631
- _multipleTransformations;
1632
- /**
1633
- * Internal array storing consumer configurations.
1634
- *
1635
- * @internal
1636
- */
1637
- _consumers = new Array();
1638
- /**
1639
- * Builds the full Kafka topic name including optional namespace and version suffix.
1640
- * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
1641
- */
1642
- buildFullTopicName(namespace) {
1643
- const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1644
- const base = `${this.name}${versionSuffix}`;
1645
- return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1646
- }
1647
- /**
1648
- * Creates a fast hash string from relevant Kafka configuration fields.
1649
- */
1650
- createConfigHash(kafkaConfig) {
1651
- const configString = [
1652
- kafkaConfig.broker,
1653
- kafkaConfig.messageTimeoutMs,
1654
- kafkaConfig.saslUsername,
1655
- kafkaConfig.saslPassword,
1656
- kafkaConfig.saslMechanism,
1657
- kafkaConfig.securityProtocol,
1658
- kafkaConfig.namespace
1659
- ].join(":");
1660
- return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1661
- }
1662
- /**
1663
- * Gets or creates a memoized KafkaJS producer using runtime configuration.
1664
- */
1665
- async getMemoizedProducer() {
1666
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1667
- const configRegistry = globalThis._mooseConfigRegistry;
1668
- const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1669
- const kafkaConfig = await configRegistry.getKafkaConfig();
1670
- const currentHash = this.createConfigHash(kafkaConfig);
1671
- if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1672
- return { producer: this._memoizedProducer, kafkaConfig };
1673
- }
1674
- if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1675
- try {
1676
- await this._memoizedProducer.disconnect();
1677
- } catch {
1931
+ /**
1932
+ * Adds a consumer function that processes messages from this stream.
1933
+ * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1934
+ *
1935
+ * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1936
+ * @param config Optional configuration for this specific consumer, like a version.
1937
+ */
1938
+ addConsumer(consumer, config) {
1939
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1940
+ const consumerConfig = {
1941
+ ...config ?? {},
1942
+ sourceFile
1943
+ };
1944
+ if (consumerConfig.deadLetterQueue === void 0) {
1945
+ consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1946
+ }
1947
+ const hasVersion = this._consumers.some(
1948
+ (existing) => existing.config.version === consumerConfig.version
1949
+ );
1950
+ if (!hasVersion) {
1951
+ this._consumers.push({ consumer, config: consumerConfig });
1952
+ }
1678
1953
  }
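// ---- editor's sketch (illustrative; not part of the package) ----
// Consumers follow the same versioning rule (and may be async); `raw` and
// `audit` are hypothetical. Two unversioned consumers collide on version
// undefined, so the second one is ignored.
raw.addConsumer(async (r) => { await audit(r); }, { version: "1.0.0" });
raw.addConsumer((r) => console.log(r));  // unversioned
raw.addConsumer((r) => console.warn(r)); // ignored: version also undefined
// ------------------------------------------------------------------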
1679
- this._memoizedProducer = void 0;
1680
- }
1681
- const clientId = `moose-sdk-stream-${this.name}`;
1682
- const logger = {
1683
- logPrefix: clientId,
1684
- log: (message) => {
1685
- console.log(`${clientId}: ${message}`);
1686
- },
1687
- error: (message) => {
1688
- console.error(`${clientId}: ${message}`);
1689
- },
1690
- warn: (message) => {
1691
- console.warn(`${clientId}: ${message}`);
1954
+ /**
1955
+ * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1956
+ * @param values The value or values to send to this stream.
1957
+ * @returns A `RoutedMessage` object associating the values with this stream.
1958
+ *
1959
+ * @example
1960
+ * ```typescript
1961
+ * sourceStream.addMultiTransform((record) => [
1962
+ * destinationStream1.routed(transformedRecord1),
1963
+ * destinationStream2.routed([record2a, record2b])
1964
+ * ]);
1965
+ * ```
1966
+ */
1967
+ routed = (values) => new RoutedMessage(this, values);
1968
+ /**
1969
+ * Adds a single transformation function that can route messages to multiple destination streams.
1970
+ * This is an alternative to adding multiple individual `addTransform` calls.
1971
+ * Only one multi-transform function can be added per stream.
1972
+ *
1973
+ * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1974
+ * each specifying a destination stream and the message(s) to send to it.
1975
+ */
1976
+ addMultiTransform(transformation) {
1977
+ this._multipleTransformations = transformation;
1692
1978
  }
1693
1979
  };
1694
- const producer = await getKafkaProducer2(
1695
- {
1696
- clientId,
1697
- broker: kafkaConfig.broker,
1698
- securityProtocol: kafkaConfig.securityProtocol,
1699
- saslUsername: kafkaConfig.saslUsername,
1700
- saslPassword: kafkaConfig.saslPassword,
1701
- saslMechanism: kafkaConfig.saslMechanism
1702
- },
1703
- logger
1704
- );
1705
- this._memoizedProducer = producer;
1706
- this._kafkaConfigHash = currentHash;
1707
- return { producer, kafkaConfig };
1708
- }
1709
- /**
1710
- * Closes the memoized Kafka producer if it exists.
1711
- */
1712
- async closeProducer() {
1713
- if (this._memoizedProducer) {
1714
- try {
1715
- await this._memoizedProducer.disconnect();
1716
- } catch {
1717
- } finally {
1718
- this._memoizedProducer = void 0;
1719
- this._kafkaConfigHash = void 0;
1980
+ DeadLetterQueue = class extends Stream {
1981
+ constructor(name, config, typeGuard) {
1982
+ if (typeGuard === void 0) {
1983
+ throw new Error(
1984
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
1985
+ );
1986
+ }
1987
+ super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1988
+ this.typeGuard = typeGuard;
1989
+ getMooseInternal().streams.set(name, this);
1720
1990
  }
1721
- }
1722
- }
1723
- /**
1724
- * Sends one or more records to this stream's Kafka topic.
1725
- * Values are JSON-serialized as message values.
1726
- */
1727
- async send(values) {
1728
- const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1729
- if (flat.length === 0) return;
1730
- const { producer, kafkaConfig } = await this.getMemoizedProducer();
1731
- const topic = this.buildFullTopicName(kafkaConfig.namespace);
1732
- const sr = this.config.schemaConfig;
1733
- if (sr && sr.kind === "JSON") {
1734
- const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1735
- if (!schemaRegistryUrl) {
1736
- throw new Error("Schema Registry URL not configured");
1737
- }
1738
- const {
1739
- default: { SchemaRegistry }
1740
- } = await import("@kafkajs/confluent-schema-registry");
1741
- const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1742
- let schemaId = void 0;
1743
- if ("id" in sr.reference) {
1744
- schemaId = sr.reference.id;
1745
- } else if ("subjectLatest" in sr.reference) {
1746
- schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1747
- } else if ("subject" in sr.reference) {
1748
- schemaId = await registry.getRegistryId(
1749
- sr.reference.subject,
1750
- sr.reference.version
1751
- );
1991
+ /**
1992
+ * Internal type guard function for validating and casting original records.
1993
+ *
1994
+ * @internal
1995
+ */
1996
+ typeGuard;
1997
+ /**
1998
+ * Adds a transformation step for dead letter records.
1999
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
2000
+ *
2001
+ * @template U The output type for the transformation
2002
+ * @param destination The destination stream for transformed messages
2003
+ * @param transformation Function to transform dead letter records
2004
+ * @param config Optional transformation configuration
2005
+ */
2006
+ addTransform(destination, transformation, config) {
2007
+ const withValidate = (deadLetter) => {
2008
+ attachTypeGuard(deadLetter, this.typeGuard);
2009
+ return transformation(deadLetter);
2010
+ };
2011
+ super.addTransform(destination, withValidate, config);
1752
2012
  }
1753
- if (schemaId === void 0) {
1754
- throw new Error("Malformed schema reference.");
2013
+ /**
2014
+ * Adds a consumer for dead letter records.
2015
+ * The consumer function receives a DeadLetter<T> with type recovery capabilities.
2016
+ *
2017
+ * @param consumer Function to process dead letter records
2018
+ * @param config Optional consumer configuration
2019
+ */
2020
+ addConsumer(consumer, config) {
2021
+ const withValidate = (deadLetter) => {
2022
+ attachTypeGuard(deadLetter, this.typeGuard);
2023
+ return consumer(deadLetter);
2024
+ };
2025
+ super.addConsumer(withValidate, config);
1755
2026
  }
1756
- const encoded = await Promise.all(
1757
- flat.map(
1758
- (v) => registry.encode(schemaId, v)
1759
- )
1760
- );
1761
- await producer.send({
1762
- topic,
1763
- messages: encoded.map((value) => ({ value }))
1764
- });
1765
- return;
1766
- } else if (sr !== void 0) {
1767
- throw new Error("Currently only JSON Schema is supported.");
1768
- }
1769
- await producer.send({
1770
- topic,
1771
- messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1772
- });
1773
- }
1774
- /**
1775
- * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1776
- * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1777
- *
1778
- * @template U The data type of the messages in the destination stream.
1779
- * @param destination The destination stream for the transformed messages.
1780
- * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1781
- * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1782
- * @param config Optional configuration for this specific transformation step, like a version.
1783
- */
1784
- addTransform(destination, transformation, config) {
1785
- const sourceFile = getSourceFileFromStack(new Error().stack);
1786
- const transformConfig = {
1787
- ...config ?? {},
1788
- sourceFile
1789
- };
1790
- if (transformConfig.deadLetterQueue === void 0) {
1791
- transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1792
- }
1793
- if (this._transformations.has(destination.name)) {
1794
- const existingTransforms = this._transformations.get(destination.name);
1795
- const hasVersion = existingTransforms.some(
1796
- ([_, __, cfg]) => cfg.version === transformConfig.version
1797
- );
1798
- if (!hasVersion) {
1799
- existingTransforms.push([destination, transformation, transformConfig]);
2027
+ /**
2028
+ * Adds a multi-stream transformation for dead letter records.
2029
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
2030
+ *
2031
+ * @param transformation Function to route dead letter records to multiple destinations
2032
+ */
2033
+ addMultiTransform(transformation) {
2034
+ const withValidate = (deadLetter) => {
2035
+ attachTypeGuard(deadLetter, this.typeGuard);
2036
+ return transformation(deadLetter);
2037
+ };
2038
+ super.addMultiTransform(withValidate);
1800
2039
  }
1801
- } else {
1802
- this._transformations.set(destination.name, [
1803
- [destination, transformation, transformConfig]
1804
- ]);
1805
- }
1806
- }
1807
- /**
1808
- * Adds a consumer function that processes messages from this stream.
1809
- * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1810
- *
1811
- * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1812
- * @param config Optional configuration for this specific consumer, like a version.
1813
- */
1814
- addConsumer(consumer, config) {
1815
- const sourceFile = getSourceFileFromStack(new Error().stack);
1816
- const consumerConfig = {
1817
- ...config ?? {},
1818
- sourceFile
1819
- };
1820
- if (consumerConfig.deadLetterQueue === void 0) {
1821
- consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1822
- }
1823
- const hasVersion = this._consumers.some(
1824
- (existing) => existing.config.version === consumerConfig.version
1825
- );
1826
- if (!hasVersion) {
1827
- this._consumers.push({ consumer, config: consumerConfig });
1828
- }
1829
- }
1830
- /**
1831
- * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1832
- * @param values The value or values to send to this stream.
1833
- * @returns A `RoutedMessage` object associating the values with this stream.
1834
- *
1835
- * @example
1836
- * ```typescript
1837
- * sourceStream.addMultiTransform((record) => [
1838
- * destinationStream1.routed(transformedRecord1),
1839
- * destinationStream2.routed([record2a, record2b])
1840
- * ]);
1841
- * ```
1842
- */
1843
- routed = (values) => new RoutedMessage(this, values);
1844
- /**
1845
- * Adds a single transformation function that can route messages to multiple destination streams.
1846
- * This is an alternative to adding multiple individual `addTransform` calls.
1847
- * Only one multi-transform function can be added per stream.
1848
- *
1849
- * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1850
- * each specifying a destination stream and the message(s) to send to it.
1851
- */
1852
- addMultiTransform(transformation) {
1853
- this._multipleTransformations = transformation;
1854
- }
1855
- };
1856
- function attachTypeGuard(dl, typeGuard) {
1857
- dl.asTyped = () => typeGuard(dl.originalRecord);
1858
- }
1859
- var DeadLetterQueue = class extends Stream {
1860
- constructor(name, config, typeGuard) {
1861
- if (typeGuard === void 0) {
1862
- throw new Error(
1863
- "Supply the type param T so that the schema is inserted by the compiler plugin."
1864
- );
1865
- }
1866
- super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1867
- this.typeGuard = typeGuard;
1868
- getMooseInternal().streams.set(name, this);
1869
- }
1870
- /**
1871
- * Internal type guard function for validating and casting original records.
1872
- *
1873
- * @internal
1874
- */
1875
- typeGuard;
1876
- /**
1877
- * Adds a transformation step for dead letter records.
1878
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1879
- *
1880
- * @template U The output type for the transformation
1881
- * @param destination The destination stream for transformed messages
1882
- * @param transformation Function to transform dead letter records
1883
- * @param config Optional transformation configuration
1884
- */
1885
- addTransform(destination, transformation, config) {
1886
- const withValidate = (deadLetter) => {
1887
- attachTypeGuard(deadLetter, this.typeGuard);
1888
- return transformation(deadLetter);
1889
- };
1890
- super.addTransform(destination, withValidate, config);
1891
- }
1892
- /**
1893
- * Adds a consumer for dead letter records.
1894
- * The consumer function receives a DeadLetter<T> with type recovery capabilities.
1895
- *
1896
- * @param consumer Function to process dead letter records
1897
- * @param config Optional consumer configuration
1898
- */
1899
- addConsumer(consumer, config) {
1900
- const withValidate = (deadLetter) => {
1901
- attachTypeGuard(deadLetter, this.typeGuard);
1902
- return consumer(deadLetter);
1903
2040
  };
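// ---- editor's sketch (illustrative; not part of the package) ----
// Recovering typed records from a dead letter queue. `UserEvent` and
// `retryStream` are hypothetical; `asTyped()` is attached by attachTypeGuard
// above and re-runs the compiler-injected type guard, which is assumed here
// to throw on a mismatch.
const dlq = new DeadLetterQueue<UserEvent>("user-events-dlq");
dlq.addConsumer((dead) => {
  console.error("dead letter:", dead.originalRecord);
});
dlq.addTransform(retryStream, (dead) => {
  try {
    return dead.asTyped(); // a UserEvent again, if it still validates
  } catch {
    return undefined; // filter out unrecoverable records
  }
});
// ------------------------------------------------------------------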
1904
- super.addConsumer(withValidate, config);
1905
2041
  }
1906
- /**
1907
- * Adds a multi-stream transformation for dead letter records.
1908
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1909
- *
1910
- * @param transformation Function to route dead letter records to multiple destinations
1911
- */
1912
- addMultiTransform(transformation) {
1913
- const withValidate = (deadLetter) => {
1914
- attachTypeGuard(deadLetter, this.typeGuard);
1915
- return transformation(deadLetter);
1916
- };
1917
- super.addMultiTransform(withValidate);
1918
- }
1919
- };
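For orientation, here is roughly what the `attachTypeGuard` / `asTyped` machinery above looks like from user code. This is a minimal sketch: `UserEvent` is a hypothetical payload type, the type guard is injected by the compiler plugin (per the constructor's error message), and throw-on-invalid semantics for `asTyped()` are assumed from the typia-style validators mentioned earlier in the bundle.

```typescript
import { DeadLetterQueue } from "@514labs/moose-lib";

interface UserEvent { id: string; ts: number; } // hypothetical payload type

// The compiler plugin supplies the type-guard argument for T.
const dlq = new DeadLetterQueue<UserEvent>("user_events_dlq");

dlq.addConsumer((dead) => {
  // addConsumer wraps the callback so attachTypeGuard has already run,
  // wiring dead.asTyped() to validate dead.originalRecord as UserEvent.
  try {
    const recovered = dead.asTyped(); // assumed to throw if validation fails
    console.warn("recovered dead letter:", recovered.id);
  } catch {
    console.error("unrecoverable record:", dead.originalRecord);
  }
});
```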
2042
+ });
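The structural change in this hunk, repeated in every hunk below, is the same: top-level `var X = class { ... }` definitions move inside `init_*` factories produced by the bundler's `__esm` helper, so each module body runs once, on first demand, rather than eagerly at load time. A simplified sketch of that helper's behavior (the real esbuild implementation passes a keyed object rather than a bare function, but the run-once semantics are the point):

```typescript
// Simplified model of the bundler's `__esm` lazy-init wrapper.
const esmSketch = (body: () => void): (() => void) => {
  let ran = false;
  return () => {
    if (!ran) {
      ran = true;
      body(); // module body executes exactly once, on first call
    }
  };
};

// Mirrors the diff: the binding is hoisted, assignment is deferred.
let Stream: unknown;
const init_stream = esmSketch(() => {
  Stream = class {
    /* class body as in the bundle */
  };
});

init_stream(); // defines Stream on first call; later calls are no-ops
```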
1920
2043
 
1921
2044
  // src/dmv2/sdk/workflow.ts
1922
- var Task = class {
1923
- /**
1924
- * Creates a new Task instance.
1925
- *
1926
- * @param name - Unique identifier for the task
1927
- * @param config - Configuration object defining the task behavior
1928
- *
1929
- * @example
1930
- * ```typescript
1931
- * // No input, no output
1932
- * const task1 = new Task<null, void>("task1", {
1933
- * run: async () => {
1934
- * console.log("No input/output");
1935
- * }
1936
- * });
1937
- *
1938
- * // No input, but has output
1939
- * const task2 = new Task<null, OutputType>("task2", {
1940
- * run: async () => {
1941
- * return someOutput;
1942
- * }
1943
- * });
1944
- *
1945
- * // Has input, no output
1946
- * const task3 = new Task<InputType, void>("task3", {
1947
- * run: async (input: InputType) => {
1948
- * // process input but return nothing
1949
- * }
1950
- * });
1951
- *
1952
- * // Has both input and output
1953
- * const task4 = new Task<InputType, OutputType>("task4", {
1954
- * run: async (input: InputType) => {
1955
- * return process(input);
1956
- * }
1957
- * });
1958
- * ```
1959
- */
1960
- constructor(name, config) {
1961
- this.name = name;
1962
- this.config = config;
1963
- }
1964
- };
1965
- var Workflow = class {
1966
- /**
1967
- * Creates a new Workflow instance and registers it with the Moose system.
1968
- *
1969
- * @param name - Unique identifier for the workflow
1970
- * @param config - Configuration object defining the workflow behavior and task orchestration
1971
- * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
1972
- */
1973
- constructor(name, config) {
1974
- this.name = name;
1975
- this.config = config;
1976
- const workflows = getMooseInternal().workflows;
1977
- if (workflows.has(name)) {
1978
- throw new Error(`Workflow with name ${name} already exists`);
1979
- }
1980
- this.validateTaskGraph(config.startingTask, name);
1981
- workflows.set(name, this);
1982
- }
1983
- /**
1984
- * Validates the task graph to ensure there are no null tasks or infinite loops.
1985
- *
1986
- * @private
1987
- * @param startingTask - The starting task to begin validation from
1988
- * @param workflowName - The name of the workflow being validated (for error messages)
1989
- * @throws {Error} When null/undefined tasks are found or infinite loops are detected
1990
- */
1991
- validateTaskGraph(startingTask, workflowName) {
1992
- if (startingTask === null || startingTask === void 0) {
1993
- throw new Error(
1994
- `Workflow "${workflowName}" has a null or undefined starting task`
1995
- );
1996
- }
1997
- const visited = /* @__PURE__ */ new Set();
1998
- const recursionStack = /* @__PURE__ */ new Set();
1999
- const validateTask = (task, currentPath) => {
2000
- if (task === null || task === void 0) {
2001
- const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
2002
- throw new Error(
2003
- `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
2004
- );
2005
- }
2006
- const taskName = task.name;
2007
- if (recursionStack.has(taskName)) {
2008
- const cycleStartIndex = currentPath.indexOf(taskName);
2009
- const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
2010
- throw new Error(
2011
- `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
2012
- );
2045
+ var Task, Workflow;
2046
+ var init_workflow = __esm({
2047
+ "src/dmv2/sdk/workflow.ts"() {
2048
+ "use strict";
2049
+ init_internal();
2050
+ Task = class {
2051
+ /**
2052
+ * Creates a new Task instance.
2053
+ *
2054
+ * @param name - Unique identifier for the task
2055
+ * @param config - Configuration object defining the task behavior
2056
+ *
2057
+ * @example
2058
+ * ```typescript
2059
+ * // No input, no output
2060
+ * const task1 = new Task<null, void>("task1", {
2061
+ * run: async () => {
2062
+ * console.log("No input/output");
2063
+ * }
2064
+ * });
2065
+ *
2066
+ * // No input, but has output
2067
+ * const task2 = new Task<null, OutputType>("task2", {
2068
+ * run: async () => {
2069
+ * return someOutput;
2070
+ * }
2071
+ * });
2072
+ *
2073
+ * // Has input, no output
2074
+ * const task3 = new Task<InputType, void>("task3", {
2075
+ * run: async (input: InputType) => {
2076
+ * // process input but return nothing
2077
+ * }
2078
+ * });
2079
+ *
2080
+ * // Has both input and output
2081
+ * const task4 = new Task<InputType, OutputType>("task4", {
2082
+ * run: async (input: InputType) => {
2083
+ * return process(input);
2084
+ * }
2085
+ * });
2086
+ * ```
2087
+ */
2088
+ constructor(name, config) {
2089
+ this.name = name;
2090
+ this.config = config;
2013
2091
  }
2014
- if (visited.has(taskName)) {
2015
- return;
2092
+ };
2093
+ Workflow = class {
2094
+ /**
2095
+ * Creates a new Workflow instance and registers it with the Moose system.
2096
+ *
2097
+ * @param name - Unique identifier for the workflow
2098
+ * @param config - Configuration object defining the workflow behavior and task orchestration
2099
+ * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
2100
+ */
2101
+ constructor(name, config) {
2102
+ this.name = name;
2103
+ this.config = config;
2104
+ const workflows = getMooseInternal().workflows;
2105
+ if (workflows.has(name)) {
2106
+ throw new Error(`Workflow with name ${name} already exists`);
2107
+ }
2108
+ this.validateTaskGraph(config.startingTask, name);
2109
+ workflows.set(name, this);
2016
2110
  }
2017
- visited.add(taskName);
2018
- recursionStack.add(taskName);
2019
- if (task.config.onComplete) {
2020
- for (const nextTask of task.config.onComplete) {
2021
- validateTask(nextTask, [...currentPath, taskName]);
2111
+ /**
2112
+ * Validates the task graph to ensure there are no null tasks or infinite loops.
2113
+ *
2114
+ * @private
2115
+ * @param startingTask - The starting task to begin validation from
2116
+ * @param workflowName - The name of the workflow being validated (for error messages)
2117
+ * @throws {Error} When null/undefined tasks are found or infinite loops are detected
2118
+ */
2119
+ validateTaskGraph(startingTask, workflowName) {
2120
+ if (startingTask === null || startingTask === void 0) {
2121
+ throw new Error(
2122
+ `Workflow "${workflowName}" has a null or undefined starting task`
2123
+ );
2022
2124
  }
2125
+ const visited = /* @__PURE__ */ new Set();
2126
+ const recursionStack = /* @__PURE__ */ new Set();
2127
+ const validateTask = (task, currentPath) => {
2128
+ if (task === null || task === void 0) {
2129
+ const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
2130
+ throw new Error(
2131
+ `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
2132
+ );
2133
+ }
2134
+ const taskName = task.name;
2135
+ if (recursionStack.has(taskName)) {
2136
+ const cycleStartIndex = currentPath.indexOf(taskName);
2137
+ const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
2138
+ throw new Error(
2139
+ `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
2140
+ );
2141
+ }
2142
+ if (visited.has(taskName)) {
2143
+ return;
2144
+ }
2145
+ visited.add(taskName);
2146
+ recursionStack.add(taskName);
2147
+ if (task.config.onComplete) {
2148
+ for (const nextTask of task.config.onComplete) {
2149
+ validateTask(nextTask, [...currentPath, taskName]);
2150
+ }
2151
+ }
2152
+ recursionStack.delete(taskName);
2153
+ };
2154
+ validateTask(startingTask, []);
2023
2155
  }
2024
- recursionStack.delete(taskName);
2025
2156
  };
2026
- validateTask(startingTask, []);
2027
2157
  }
2028
- };
2158
+ });
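`validateTaskGraph` above is a depth-first walk with a recursion stack, so cycles and null links are rejected at construction time rather than at run time. A usage sketch, assuming the package entry point re-exports `Task` and `Workflow` as listed in the export map at the end of this diff; task names and bodies are illustrative only:

```typescript
import { Task, Workflow } from "@514labs/moose-lib";

// Two chained tasks; `onComplete` links form the graph that gets validated.
const notify = new Task<null, void>("notify", {
  run: async () => {
    console.log("pipeline finished");
  },
});

const extract = new Task<null, void>("extract", {
  run: async () => {
    console.log("extracting...");
  },
  onComplete: [notify],
});

// Registers in the internal workflow map and runs the DFS validation.
new Workflow("nightly", { startingTask: extract, retries: 1, timeout: "30m" });

// Wiring a cycle first (notify.config.onComplete = [extract]) would throw:
//   Workflow "nightly" contains an infinite loop in task chain:
//   extract -> notify -> extract
```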
2029
2159
 
2030
2160
  // src/dmv2/sdk/ingestApi.ts
2031
- var IngestApi = class extends TypedBase {
2032
- constructor(name, config, schema, columns, validators, allowExtraFields) {
2033
- super(name, config, schema, columns, void 0, allowExtraFields);
2034
- const ingestApis = getMooseInternal().ingestApis;
2035
- if (ingestApis.has(name)) {
2036
- throw new Error(`Ingest API with name ${name} already exists`);
2037
- }
2038
- ingestApis.set(name, this);
2161
+ var IngestApi;
2162
+ var init_ingestApi = __esm({
2163
+ "src/dmv2/sdk/ingestApi.ts"() {
2164
+ "use strict";
2165
+ init_typedBase();
2166
+ init_internal();
2167
+ IngestApi = class extends TypedBase {
2168
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2169
+ super(name, config, schema, columns, void 0, allowExtraFields);
2170
+ const ingestApis = getMooseInternal().ingestApis;
2171
+ if (ingestApis.has(name)) {
2172
+ throw new Error(`Ingest API with name ${name} already exists`);
2173
+ }
2174
+ ingestApis.set(name, this);
2175
+ }
2176
+ };
2039
2177
  }
2040
- };
2178
+ });
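From user code, the trailing schema/column/validator parameters of `IngestApi` are injected by the compiler plugin, so a declaration reduces to a name plus config. A sketch with a hypothetical record type, assuming `Stream` accepts the same plugin-injected construction:

```typescript
import { IngestApi, Stream } from "@514labs/moose-lib";

interface PageView { path: string; ts: number; } // hypothetical record type

// Schema and columns for PageView are supplied by the compiler plugin.
const views = new Stream<PageView>("page_views");
const ingest = new IngestApi<PageView>("page_views", { destination: views });

// Names are globally unique; repeating one hits the registry guard above:
// new IngestApi<PageView>("page_views", { destination: views });
// -> Error: Ingest API with name page_views already exists
```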
2041
2179
 
2042
2180
  // src/dmv2/sdk/consumptionApi.ts
2043
- var Api = class extends TypedBase {
2044
- /** @internal The handler function that processes requests and generates responses. */
2045
- _handler;
2046
- /** @internal The JSON schema definition for the response type R. */
2047
- responseSchema;
2048
- constructor(name, handler, config, schema, columns, responseSchema) {
2049
- super(name, config ?? {}, schema, columns);
2050
- this._handler = handler;
2051
- this.responseSchema = responseSchema ?? {
2052
- version: "3.1",
2053
- schemas: [{ type: "array", items: { type: "object" } }],
2054
- components: { schemas: {} }
2055
- };
2056
- const apis = getMooseInternal().apis;
2057
- const key = `${name}${config?.version ? `:${config.version}` : ""}`;
2058
- if (apis.has(key)) {
2059
- throw new Error(
2060
- `Consumption API with name ${name} and version ${config?.version} already exists`
2061
- );
2062
- }
2063
- apis.set(key, this);
2064
- if (config?.path) {
2065
- if (config.version) {
2066
- const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
2067
- if (pathEndsWithVersion) {
2068
- if (apis.has(config.path)) {
2069
- const existing = apis.get(config.path);
2070
- throw new Error(
2071
- `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2072
- );
2181
+ var Api, ConsumptionApi;
2182
+ var init_consumptionApi = __esm({
2183
+ "src/dmv2/sdk/consumptionApi.ts"() {
2184
+ "use strict";
2185
+ init_typedBase();
2186
+ init_internal();
2187
+ Api = class extends TypedBase {
2188
+ /** @internal The handler function that processes requests and generates responses. */
2189
+ _handler;
2190
+ /** @internal The JSON schema definition for the response type R. */
2191
+ responseSchema;
2192
+ constructor(name, handler, config, schema, columns, responseSchema) {
2193
+ super(name, config ?? {}, schema, columns);
2194
+ this._handler = handler;
2195
+ this.responseSchema = responseSchema ?? {
2196
+ version: "3.1",
2197
+ schemas: [{ type: "array", items: { type: "object" } }],
2198
+ components: { schemas: {} }
2199
+ };
2200
+ const apis = getMooseInternal().apis;
2201
+ const key = `${name}${config?.version ? `:${config.version}` : ""}`;
2202
+ if (apis.has(key)) {
2203
+ throw new Error(
2204
+ `Consumption API with name ${name} and version ${config?.version} already exists`
2205
+ );
2206
+ }
2207
+ apis.set(key, this);
2208
+ if (config?.path) {
2209
+ if (config.version) {
2210
+ const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
2211
+ if (pathEndsWithVersion) {
2212
+ if (apis.has(config.path)) {
2213
+ const existing = apis.get(config.path);
2214
+ throw new Error(
2215
+ `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2216
+ );
2217
+ }
2218
+ apis.set(config.path, this);
2219
+ } else {
2220
+ const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
2221
+ if (apis.has(versionedPath)) {
2222
+ const existing = apis.get(versionedPath);
2223
+ throw new Error(
2224
+ `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
2225
+ );
2226
+ }
2227
+ apis.set(versionedPath, this);
2228
+ if (!apis.has(config.path)) {
2229
+ apis.set(config.path, this);
2230
+ }
2231
+ }
2232
+ } else {
2233
+ if (apis.has(config.path)) {
2234
+ const existing = apis.get(config.path);
2235
+ throw new Error(
2236
+ `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2237
+ );
2238
+ }
2239
+ apis.set(config.path, this);
2240
+ }
2241
+ }
2242
+ }
2243
+ /**
2244
+ * Retrieves the handler function associated with this Consumption API.
2245
+ * @returns The handler function.
2246
+ */
2247
+ getHandler = () => {
2248
+ return this._handler;
2249
+ };
2250
+ async call(baseUrl, queryParams) {
2251
+ let path2;
2252
+ if (this.config?.path) {
2253
+ if (this.config.version) {
2254
+ const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2255
+ if (pathEndsWithVersion) {
2256
+ path2 = this.config.path;
2257
+ } else {
2258
+ path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2259
+ }
2260
+ } else {
2261
+ path2 = this.config.path;
2073
2262
  }
2074
- apis.set(config.path, this);
2075
2263
  } else {
2076
- const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
2077
- if (apis.has(versionedPath)) {
2078
- const existing = apis.get(versionedPath);
2079
- throw new Error(
2080
- `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
2081
- );
2264
+ path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2265
+ }
2266
+ const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2267
+ const searchParams = url.searchParams;
2268
+ for (const [key, value] of Object.entries(queryParams)) {
2269
+ if (Array.isArray(value)) {
2270
+ for (const item of value) {
2271
+ if (item !== null && item !== void 0) {
2272
+ searchParams.append(key, String(item));
2273
+ }
2274
+ }
2275
+ } else if (value !== null && value !== void 0) {
2276
+ searchParams.append(key, String(value));
2082
2277
  }
2083
- apis.set(versionedPath, this);
2084
- if (!apis.has(config.path)) {
2085
- apis.set(config.path, this);
2278
+ }
2279
+ const response = await fetch(url, {
2280
+ method: "GET",
2281
+ headers: {
2282
+ Accept: "application/json"
2086
2283
  }
2284
+ });
2285
+ if (!response.ok) {
2286
+ throw new Error(`HTTP error! status: ${response.status}`);
2087
2287
  }
2088
- } else {
2089
- if (apis.has(config.path)) {
2090
- const existing = apis.get(config.path);
2091
- throw new Error(
2092
- `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2288
+ const data = await response.json();
2289
+ return data;
2290
+ }
2291
+ };
2292
+ ConsumptionApi = Api;
2293
+ }
2294
+ });
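Registration and `call()` share the same path rules: an explicit `path` wins, a `version` is appended unless the path already ends with it, and otherwise the route is `name` or `name/version`. The same logic restated as a standalone function for clarity (this is not a library export, just a transcription of the branches above):

```typescript
// Standalone restatement of the path resolution in Api's constructor
// and call() above.
function resolveApiPath(
  name: string,
  config?: { path?: string; version?: string },
): string {
  if (config?.path) {
    const p = config.path;
    const v = config.version;
    if (v) {
      const endsWithVersion =
        p.endsWith(`/${v}`) ||
        p === v ||
        (p.endsWith(v) && p.length > v.length && p[p.length - v.length - 1] === "/");
      return endsWithVersion ? p : `${p.replace(/\/$/, "")}/${v}`;
    }
    return p;
  }
  return config?.version ? `${name}/${config.version}` : name;
}

// call() then issues a GET against `${baseUrl}/api/${resolveApiPath(...)}`:
// resolveApiPath("daily")                                    -> "daily"
// resolveApiPath("daily", { version: "2" })                  -> "daily/2"
// resolveApiPath("daily", { path: "stats", version: "2" })   -> "stats/2"
// resolveApiPath("daily", { path: "stats/2", version: "2" }) -> "stats/2"
```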
2295
+
2296
+ // src/dmv2/sdk/ingestPipeline.ts
2297
+ var IngestPipeline;
2298
+ var init_ingestPipeline = __esm({
2299
+ "src/dmv2/sdk/ingestPipeline.ts"() {
2300
+ "use strict";
2301
+ init_typedBase();
2302
+ init_stream();
2303
+ init_olapTable();
2304
+ init_ingestApi();
2305
+ init_helpers();
2306
+ IngestPipeline = class extends TypedBase {
2307
+ /**
2308
+ * The OLAP table component of the pipeline, if configured.
2309
+ * Provides analytical query capabilities for the ingested data.
2310
+ * Only present when `config.table` is not `false`.
2311
+ */
2312
+ table;
2313
+ /**
2314
+ * The stream component of the pipeline, if configured.
2315
+ * Handles real-time data flow and processing between components.
2316
+ * Only present when `config.stream` is not `false`.
2317
+ */
2318
+ stream;
2319
+ /**
2320
+ * The ingest API component of the pipeline, if configured.
2321
+ * Provides HTTP endpoints for data ingestion.
2322
+ * Only present when `config.ingestApi` is not `false`.
2323
+ */
2324
+ ingestApi;
2325
+ /** The dead letter queue of the pipeline, if configured. */
2326
+ deadLetterQueue;
2327
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2328
+ super(name, config, schema, columns, validators, allowExtraFields);
2329
+ if (config.ingest !== void 0) {
2330
+ console.warn(
2331
+ "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2332
+ );
2333
+ if (config.ingestApi === void 0) {
2334
+ config.ingestApi = config.ingest;
2335
+ }
2336
+ }
2337
+ if (config.table) {
2338
+ const tableConfig = typeof config.table === "object" ? {
2339
+ ...config.table,
2340
+ lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2341
+ ...config.version && { version: config.version }
2342
+ } : {
2343
+ lifeCycle: config.lifeCycle,
2344
+ engine: "MergeTree" /* MergeTree */,
2345
+ ...config.version && { version: config.version }
2346
+ };
2347
+ this.table = new OlapTable(
2348
+ name,
2349
+ tableConfig,
2350
+ this.schema,
2351
+ this.columnArray,
2352
+ this.validators
2353
+ );
2354
+ }
2355
+ if (config.deadLetterQueue) {
2356
+ const streamConfig = {
2357
+ destination: void 0,
2358
+ ...typeof config.deadLetterQueue === "object" ? {
2359
+ ...config.deadLetterQueue,
2360
+ lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2361
+ } : { lifeCycle: config.lifeCycle },
2362
+ ...config.version && { version: config.version }
2363
+ };
2364
+ this.deadLetterQueue = new DeadLetterQueue(
2365
+ `${name}DeadLetterQueue`,
2366
+ streamConfig,
2367
+ validators.assert
2368
+ );
2369
+ }
2370
+ if (config.stream) {
2371
+ const streamConfig = {
2372
+ destination: this.table,
2373
+ defaultDeadLetterQueue: this.deadLetterQueue,
2374
+ ...typeof config.stream === "object" ? {
2375
+ ...config.stream,
2376
+ lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2377
+ } : { lifeCycle: config.lifeCycle },
2378
+ ...config.version && { version: config.version }
2379
+ };
2380
+ this.stream = new Stream(
2381
+ name,
2382
+ streamConfig,
2383
+ this.schema,
2384
+ this.columnArray,
2385
+ void 0,
2386
+ this.allowExtraFields
2387
+ );
2388
+ this.stream.pipelineParent = this;
2389
+ }
2390
+ const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2391
+ if (effectiveIngestAPI) {
2392
+ if (!this.stream) {
2393
+ throw new Error("Ingest API needs a stream to write to.");
2394
+ }
2395
+ const ingestConfig = {
2396
+ destination: this.stream,
2397
+ deadLetterQueue: this.deadLetterQueue,
2398
+ ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2399
+ ...config.version && { version: config.version },
2400
+ ...config.path && { path: config.path }
2401
+ };
2402
+ this.ingestApi = new IngestApi(
2403
+ name,
2404
+ ingestConfig,
2405
+ this.schema,
2406
+ this.columnArray,
2407
+ void 0,
2408
+ this.allowExtraFields
2093
2409
  );
2410
+ this.ingestApi.pipelineParent = this;
2094
2411
  }
2095
- apis.set(config.path, this);
2096
2412
  }
2097
- }
2413
+ };
2098
2414
  }
2099
- /**
2100
- * Retrieves the handler function associated with this Consumption API.
2101
- * @returns The handler function.
2102
- */
2103
- getHandler = () => {
2104
- return this._handler;
2105
- };
2106
- async call(baseUrl, queryParams) {
2107
- let path2;
2108
- if (this.config?.path) {
2109
- if (this.config.version) {
2110
- const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2111
- if (pathEndsWithVersion) {
2112
- path2 = this.config.path;
2113
- } else {
2114
- path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2115
- }
2116
- } else {
2117
- path2 = this.config.path;
2415
+ });
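The `IngestPipeline` constructor above fans a single declaration out into up to four resources, wiring the stream's destination to the table, the ingest API's destination to the stream, and sharing the DLQ as the stream's default. A usage sketch; the event type is hypothetical and schema injection again comes from the compiler plugin:

```typescript
import { IngestPipeline, LifeCycle } from "@514labs/moose-lib";

interface ClickEvent { id: string; url: string; ts: number; } // hypothetical

const clicks = new IngestPipeline<ClickEvent>("clicks", {
  table: true,           // OlapTable "clicks" (MergeTree engine by default)
  stream: true,          // Stream "clicks" with destination = the table
  ingestApi: true,       // HTTP ingest endpoint writing to the stream
  deadLetterQueue: true, // DLQ named "clicksDeadLetterQueue"
  lifeCycle: LifeCycle.FULLY_MANAGED,
});

// Components stay accessible for further wiring:
// clicks.table, clicks.stream, clicks.ingestApi, clicks.deadLetterQueue
```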
2416
+
2417
+ // src/dmv2/sdk/etlPipeline.ts
2418
+ var InternalBatcher, ETLPipeline;
2419
+ var init_etlPipeline = __esm({
2420
+ "src/dmv2/sdk/etlPipeline.ts"() {
2421
+ "use strict";
2422
+ init_workflow();
2423
+ InternalBatcher = class {
2424
+ iterator;
2425
+ batchSize;
2426
+ constructor(asyncIterable, batchSize = 20) {
2427
+ this.iterator = asyncIterable[Symbol.asyncIterator]();
2428
+ this.batchSize = batchSize;
2118
2429
  }
2119
- } else {
2120
- path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2121
- }
2122
- const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2123
- const searchParams = url.searchParams;
2124
- for (const [key, value] of Object.entries(queryParams)) {
2125
- if (Array.isArray(value)) {
2126
- for (const item of value) {
2127
- if (item !== null && item !== void 0) {
2128
- searchParams.append(key, String(item));
2430
+ async getNextBatch() {
2431
+ const items = [];
2432
+ for (let i = 0; i < this.batchSize; i++) {
2433
+ const { value, done } = await this.iterator.next();
2434
+ if (done) {
2435
+ return { items, hasMore: false };
2129
2436
  }
2437
+ items.push(value);
2130
2438
  }
2131
- } else if (value !== null && value !== void 0) {
2132
- searchParams.append(key, String(value));
2133
- }
2134
- }
2135
- const response = await fetch(url, {
2136
- method: "GET",
2137
- headers: {
2138
- Accept: "application/json"
2139
- }
2140
- });
2141
- if (!response.ok) {
2142
- throw new Error(`HTTP error! status: ${response.status}`);
2143
- }
2144
- const data = await response.json();
2145
- return data;
2146
- }
2147
- };
2148
- var ConsumptionApi = Api;
2149
-
2150
- // src/dmv2/sdk/ingestPipeline.ts
2151
- var IngestPipeline = class extends TypedBase {
2152
- /**
2153
- * The OLAP table component of the pipeline, if configured.
2154
- * Provides analytical query capabilities for the ingested data.
2155
- * Only present when `config.table` is not `false`.
2156
- */
2157
- table;
2158
- /**
2159
- * The stream component of the pipeline, if configured.
2160
- * Handles real-time data flow and processing between components.
2161
- * Only present when `config.stream` is not `false`.
2162
- */
2163
- stream;
2164
- /**
2165
- * The ingest API component of the pipeline, if configured.
2166
- * Provides HTTP endpoints for data ingestion.
2167
- * Only present when `config.ingestApi` is not `false`.
2168
- */
2169
- ingestApi;
2170
- /** The dead letter queue of the pipeline, if configured. */
2171
- deadLetterQueue;
2172
- constructor(name, config, schema, columns, validators, allowExtraFields) {
2173
- super(name, config, schema, columns, validators, allowExtraFields);
2174
- if (config.ingest !== void 0) {
2175
- console.warn(
2176
- "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2177
- );
2178
- if (config.ingestApi === void 0) {
2179
- config.ingestApi = config.ingest;
2439
+ return { items, hasMore: true };
2180
2440
  }
2181
- }
2182
- if (config.table) {
2183
- const tableConfig = typeof config.table === "object" ? {
2184
- ...config.table,
2185
- lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2186
- ...config.version && { version: config.version }
2187
- } : {
2188
- lifeCycle: config.lifeCycle,
2189
- engine: "MergeTree" /* MergeTree */,
2190
- ...config.version && { version: config.version }
2191
- };
2192
- this.table = new OlapTable(
2193
- name,
2194
- tableConfig,
2195
- this.schema,
2196
- this.columnArray,
2197
- this.validators
2198
- );
2199
- }
2200
- if (config.deadLetterQueue) {
2201
- const streamConfig = {
2202
- destination: void 0,
2203
- ...typeof config.deadLetterQueue === "object" ? {
2204
- ...config.deadLetterQueue,
2205
- lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2206
- } : { lifeCycle: config.lifeCycle },
2207
- ...config.version && { version: config.version }
2208
- };
2209
- this.deadLetterQueue = new DeadLetterQueue(
2210
- `${name}DeadLetterQueue`,
2211
- streamConfig,
2212
- validators.assert
2213
- );
2214
- }
2215
- if (config.stream) {
2216
- const streamConfig = {
2217
- destination: this.table,
2218
- defaultDeadLetterQueue: this.deadLetterQueue,
2219
- ...typeof config.stream === "object" ? {
2220
- ...config.stream,
2221
- lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2222
- } : { lifeCycle: config.lifeCycle },
2223
- ...config.version && { version: config.version }
2224
- };
2225
- this.stream = new Stream(
2226
- name,
2227
- streamConfig,
2228
- this.schema,
2229
- this.columnArray,
2230
- void 0,
2231
- this.allowExtraFields
2232
- );
2233
- this.stream.pipelineParent = this;
2234
- }
2235
- const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2236
- if (effectiveIngestAPI) {
2237
- if (!this.stream) {
2238
- throw new Error("Ingest API needs a stream to write to.");
2239
- }
2240
- const ingestConfig = {
2241
- destination: this.stream,
2242
- deadLetterQueue: this.deadLetterQueue,
2243
- ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2244
- ...config.version && { version: config.version },
2245
- ...config.path && { path: config.path }
2246
- };
2247
- this.ingestApi = new IngestApi(
2248
- name,
2249
- ingestConfig,
2250
- this.schema,
2251
- this.columnArray,
2252
- void 0,
2253
- this.allowExtraFields
2254
- );
2255
- this.ingestApi.pipelineParent = this;
2256
- }
2257
- }
2258
- };
2259
-
2260
- // src/dmv2/sdk/etlPipeline.ts
2261
- var InternalBatcher = class {
2262
- iterator;
2263
- batchSize;
2264
- constructor(asyncIterable, batchSize = 20) {
2265
- this.iterator = asyncIterable[Symbol.asyncIterator]();
2266
- this.batchSize = batchSize;
2267
- }
2268
- async getNextBatch() {
2269
- const items = [];
2270
- for (let i = 0; i < this.batchSize; i++) {
2271
- const { value, done } = await this.iterator.next();
2272
- if (done) {
2273
- return { items, hasMore: false };
2274
- }
2275
- items.push(value);
2276
- }
2277
- return { items, hasMore: true };
2278
- }
2279
- };
2280
- var ETLPipeline = class {
2281
- constructor(name, config) {
2282
- this.name = name;
2283
- this.config = config;
2284
- this.setupPipeline();
2285
- }
2286
- batcher;
2287
- setupPipeline() {
2288
- this.batcher = this.createBatcher();
2289
- const tasks = this.createAllTasks();
2290
- tasks.extract.config.onComplete = [tasks.transform];
2291
- tasks.transform.config.onComplete = [tasks.load];
2292
- new Workflow(this.name, {
2293
- startingTask: tasks.extract,
2294
- retries: 1,
2295
- timeout: "30m"
2296
- });
2297
- }
2298
- createBatcher() {
2299
- const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2300
- return new InternalBatcher(iterable);
2301
- }
2302
- getDefaultTaskConfig() {
2303
- return {
2304
- retries: 1,
2305
- timeout: "30m"
2306
- };
2307
- }
2308
- createAllTasks() {
2309
- const taskConfig = this.getDefaultTaskConfig();
2310
- return {
2311
- extract: this.createExtractTask(taskConfig),
2312
- transform: this.createTransformTask(taskConfig),
2313
- load: this.createLoadTask(taskConfig)
2314
2441
  };
2315
- }
2316
- createExtractTask(taskConfig) {
2317
- return new Task(`${this.name}_extract`, {
2318
- run: async ({}) => {
2319
- console.log(`Running extract task for ${this.name}...`);
2320
- const batch = await this.batcher.getNextBatch();
2321
- console.log(`Extract task completed with ${batch.items.length} items`);
2322
- return batch;
2323
- },
2324
- retries: taskConfig.retries,
2325
- timeout: taskConfig.timeout
2326
- });
2327
- }
2328
- createTransformTask(taskConfig) {
2329
- return new Task(
2330
- `${this.name}_transform`,
2331
- {
2332
- // Use new single-parameter context API for handlers
2333
- run: async ({ input }) => {
2334
- const batch = input;
2335
- console.log(
2336
- `Running transform task for ${this.name} with ${batch.items.length} items...`
2337
- );
2442
+ ETLPipeline = class {
2443
+ constructor(name, config) {
2444
+ this.name = name;
2445
+ this.config = config;
2446
+ this.setupPipeline();
2447
+ }
2448
+ batcher;
2449
+ setupPipeline() {
2450
+ this.batcher = this.createBatcher();
2451
+ const tasks = this.createAllTasks();
2452
+ tasks.extract.config.onComplete = [tasks.transform];
2453
+ tasks.transform.config.onComplete = [tasks.load];
2454
+ new Workflow(this.name, {
2455
+ startingTask: tasks.extract,
2456
+ retries: 1,
2457
+ timeout: "30m"
2458
+ });
2459
+ }
2460
+ createBatcher() {
2461
+ const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2462
+ return new InternalBatcher(iterable);
2463
+ }
2464
+ getDefaultTaskConfig() {
2465
+ return {
2466
+ retries: 1,
2467
+ timeout: "30m"
2468
+ };
2469
+ }
2470
+ createAllTasks() {
2471
+ const taskConfig = this.getDefaultTaskConfig();
2472
+ return {
2473
+ extract: this.createExtractTask(taskConfig),
2474
+ transform: this.createTransformTask(taskConfig),
2475
+ load: this.createLoadTask(taskConfig)
2476
+ };
2477
+ }
2478
+ createExtractTask(taskConfig) {
2479
+ return new Task(`${this.name}_extract`, {
2480
+ run: async ({}) => {
2481
+ console.log(`Running extract task for ${this.name}...`);
2482
+ const batch = await this.batcher.getNextBatch();
2483
+ console.log(`Extract task completed with ${batch.items.length} items`);
2484
+ return batch;
2485
+ },
2486
+ retries: taskConfig.retries,
2487
+ timeout: taskConfig.timeout
2488
+ });
2489
+ }
2490
+ createTransformTask(taskConfig) {
2491
+ return new Task(
2492
+ `${this.name}_transform`,
2493
+ {
2494
+ // Use new single-parameter context API for handlers
2495
+ run: async ({ input }) => {
2496
+ const batch = input;
2497
+ console.log(
2498
+ `Running transform task for ${this.name} with ${batch.items.length} items...`
2499
+ );
2500
+ const transformedItems = [];
2501
+ for (const item of batch.items) {
2502
+ const transformed = await this.config.transform(item);
2503
+ transformedItems.push(transformed);
2504
+ }
2505
+ console.log(
2506
+ `Transform task completed with ${transformedItems.length} items`
2507
+ );
2508
+ return { items: transformedItems };
2509
+ },
2510
+ retries: taskConfig.retries,
2511
+ timeout: taskConfig.timeout
2512
+ }
2513
+ );
2514
+ }
2515
+ createLoadTask(taskConfig) {
2516
+ return new Task(`${this.name}_load`, {
2517
+ run: async ({ input: transformedItems }) => {
2518
+ console.log(
2519
+ `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2520
+ );
2521
+ if ("insert" in this.config.load) {
2522
+ await this.config.load.insert(transformedItems.items);
2523
+ } else {
2524
+ await this.config.load(transformedItems.items);
2525
+ }
2526
+ console.log(`Load task completed`);
2527
+ },
2528
+ retries: taskConfig.retries,
2529
+ timeout: taskConfig.timeout
2530
+ });
2531
+ }
2532
+ // Execute the entire ETL pipeline
2533
+ async run() {
2534
+ console.log(`Starting ETL Pipeline: ${this.name}`);
2535
+ let batchNumber = 1;
2536
+ do {
2537
+ console.log(`Processing batch ${batchNumber}...`);
2538
+ const batch = await this.batcher.getNextBatch();
2539
+ if (batch.items.length === 0) {
2540
+ break;
2541
+ }
2338
2542
  const transformedItems = [];
2339
- for (const item of batch.items) {
2340
- const transformed = await this.config.transform(item);
2341
- transformedItems.push(transformed);
2543
+ for (const extractedData of batch.items) {
2544
+ const transformedData = await this.config.transform(extractedData);
2545
+ transformedItems.push(transformedData);
2546
+ }
2547
+ if ("insert" in this.config.load) {
2548
+ await this.config.load.insert(transformedItems);
2549
+ } else {
2550
+ await this.config.load(transformedItems);
2342
2551
  }
2343
2552
  console.log(
2344
- `Transform task completed with ${transformedItems.length} items`
2553
+ `Completed batch ${batchNumber} with ${batch.items.length} items`
2345
2554
  );
2346
- return { items: transformedItems };
2347
- },
2348
- retries: taskConfig.retries,
2349
- timeout: taskConfig.timeout
2555
+ batchNumber++;
2556
+ if (!batch.hasMore) {
2557
+ break;
2558
+ }
2559
+ } while (true);
2560
+ console.log(`Completed ETL Pipeline: ${this.name}`);
2350
2561
  }
2351
- );
2562
+ };
2352
2563
  }
2353
- createLoadTask(taskConfig) {
2354
- return new Task(`${this.name}_load`, {
2355
- run: async ({ input: transformedItems }) => {
2356
- console.log(
2357
- `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2564
+ });
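`InternalBatcher` pulls items 20 at a time from whatever async iterable `extract` yields, and `run()` loops transform/load per batch; `load` may be a plain function or anything with an `insert` method (such as an OlapTable). A self-contained sketch with an in-memory source and sink; the generic signature of `ETLPipeline` is assumed, since only the JS is visible here:

```typescript
import { ETLPipeline } from "@514labs/moose-lib";

// Hypothetical in-memory source; `extract` may also be the iterable itself.
async function* readUsers(): AsyncGenerator<{ id: number }> {
  yield { id: 1 };
  yield { id: 2 };
}

const etl = new ETLPipeline("users_etl", {
  extract: readUsers,
  transform: async (u: { id: number }) => ({ userId: String(u.id) }),
  // Anything exposing .insert() (e.g. an OlapTable) also works here.
  load: async (rows: { userId: string }[]) => {
    console.log(`loading ${rows.length} rows`);
  },
});

// Processes batches of 20 (InternalBatcher's default) until exhausted.
etl.run().catch(console.error);
```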
2565
+
2566
+ // src/dmv2/sdk/sqlResource.ts
2567
+ var SqlResource;
2568
+ var init_sqlResource = __esm({
2569
+ "src/dmv2/sdk/sqlResource.ts"() {
2570
+ "use strict";
2571
+ init_internal();
2572
+ init_sqlHelpers();
2573
+ init_stackTrace();
2574
+ SqlResource = class {
2575
+ /** @internal */
2576
+ kind = "SqlResource";
2577
+ /** Array of SQL statements to execute for setting up the resource. */
2578
+ setup;
2579
+ /** Array of SQL statements to execute for tearing down the resource. */
2580
+ teardown;
2581
+ /** The name of the SQL resource (e.g., view name, materialized view name). */
2582
+ name;
2583
+ /** List of OlapTables or Views that this resource reads data from. */
2584
+ pullsDataFrom;
2585
+ /** List of OlapTables or Views that this resource writes data to. */
2586
+ pushesDataTo;
2587
+ /** @internal Source file path where this resource was defined */
2588
+ sourceFile;
2589
+ /** @internal Source line number where this resource was defined */
2590
+ sourceLine;
2591
+ /** @internal Source column number where this resource was defined */
2592
+ sourceColumn;
2593
+ /**
2594
+ * Creates a new SqlResource instance.
2595
+ * @param name The name of the resource.
2596
+ * @param setup An array of SQL DDL statements to create the resource.
2597
+ * @param teardown An array of SQL DDL statements to drop the resource.
2598
+ * @param options Optional configuration for specifying data dependencies.
2599
+ * @param options.pullsDataFrom Tables/Views this resource reads from.
2600
+ * @param options.pushesDataTo Tables/Views this resource writes to.
2601
+ */
2602
+ constructor(name, setup, teardown, options) {
2603
+ const sqlResources = getMooseInternal().sqlResources;
2604
+ if (!isClientOnlyMode() && sqlResources.has(name)) {
2605
+ throw new Error(`SqlResource with name ${name} already exists`);
2606
+ }
2607
+ sqlResources.set(name, this);
2608
+ this.name = name;
2609
+ this.setup = setup.map(
2610
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2358
2611
  );
2359
- if ("insert" in this.config.load) {
2360
- await this.config.load.insert(transformedItems.items);
2361
- } else {
2362
- await this.config.load(transformedItems.items);
2612
+ this.teardown = teardown.map(
2613
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2614
+ );
2615
+ this.pullsDataFrom = options?.pullsDataFrom ?? [];
2616
+ this.pushesDataTo = options?.pushesDataTo ?? [];
2617
+ const stack = new Error().stack;
2618
+ const location = getSourceLocationFromStack(stack);
2619
+ if (location) {
2620
+ this.sourceFile = location.file;
2621
+ this.sourceLine = location.line;
2622
+ this.sourceColumn = location.column;
2363
2623
  }
2364
- console.log(`Load task completed`);
2365
- },
2366
- retries: taskConfig.retries,
2367
- timeout: taskConfig.timeout
2368
- });
2369
- }
2370
- // Execute the entire ETL pipeline
2371
- async run() {
2372
- console.log(`Starting ETL Pipeline: ${this.name}`);
2373
- let batchNumber = 1;
2374
- do {
2375
- console.log(`Processing batch ${batchNumber}...`);
2376
- const batch = await this.batcher.getNextBatch();
2377
- if (batch.items.length === 0) {
2378
- break;
2379
- }
2380
- const transformedItems = [];
2381
- for (const extractedData of batch.items) {
2382
- const transformedData = await this.config.transform(extractedData);
2383
- transformedItems.push(transformedData);
2384
- }
2385
- if ("insert" in this.config.load) {
2386
- await this.config.load.insert(transformedItems);
2387
- } else {
2388
- await this.config.load(transformedItems);
2389
- }
2390
- console.log(
2391
- `Completed batch ${batchNumber} with ${batch.items.length} items`
2392
- );
2393
- batchNumber++;
2394
- if (!batch.hasMore) {
2395
- break;
2396
2624
  }
2397
- } while (true);
2398
- console.log(`Completed ETL Pipeline: ${this.name}`);
2399
- }
2400
- };
2401
-
2402
- // src/dmv2/sdk/sqlResource.ts
2403
- var SqlResource = class {
2404
- /** @internal */
2405
- kind = "SqlResource";
2406
- /** Array of SQL statements to execute for setting up the resource. */
2407
- setup;
2408
- /** Array of SQL statements to execute for tearing down the resource. */
2409
- teardown;
2410
- /** The name of the SQL resource (e.g., view name, materialized view name). */
2411
- name;
2412
- /** List of OlapTables or Views that this resource reads data from. */
2413
- pullsDataFrom;
2414
- /** List of OlapTables or Views that this resource writes data to. */
2415
- pushesDataTo;
2416
- /** @internal Source file path where this resource was defined */
2417
- sourceFile;
2418
- /** @internal Source line number where this resource was defined */
2419
- sourceLine;
2420
- /** @internal Source column number where this resource was defined */
2421
- sourceColumn;
2422
- /**
2423
- * Creates a new SqlResource instance.
2424
- * @param name The name of the resource.
2425
- * @param setup An array of SQL DDL statements to create the resource.
2426
- * @param teardown An array of SQL DDL statements to drop the resource.
2427
- * @param options Optional configuration for specifying data dependencies.
2428
- * @param options.pullsDataFrom Tables/Views this resource reads from.
2429
- * @param options.pushesDataTo Tables/Views this resource writes to.
2430
- */
2431
- constructor(name, setup, teardown, options) {
2432
- const sqlResources = getMooseInternal().sqlResources;
2433
- if (!isClientOnlyMode() && sqlResources.has(name)) {
2434
- throw new Error(`SqlResource with name ${name} already exists`);
2435
- }
2436
- sqlResources.set(name, this);
2437
- this.name = name;
2438
- this.setup = setup.map(
2439
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2440
- );
2441
- this.teardown = teardown.map(
2442
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2443
- );
2444
- this.pullsDataFrom = options?.pullsDataFrom ?? [];
2445
- this.pushesDataTo = options?.pushesDataTo ?? [];
2446
- const stack = new Error().stack;
2447
- const location = getSourceLocationFromStack(stack);
2448
- if (location) {
2449
- this.sourceFile = location.file;
2450
- this.sourceLine = location.line;
2451
- this.sourceColumn = location.column;
2452
- }
2625
+ };
2453
2626
  }
2454
- };
2627
+ });
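`SqlResource` is the generic base for DDL-managed objects: it registers globally (skipping the duplicate check in client-only mode), normalizes `sql` template literals to static strings, and records the defining source location via the stack-trace helpers from the top of this bundle. A direct usage sketch; the view and the `events` table it reads are hypothetical:

```typescript
import { SqlResource } from "@514labs/moose-lib";

// setup/teardown accept raw strings or `sql` literals (normalized above).
const audit = new SqlResource(
  "audit_row_count",
  ["CREATE VIEW IF NOT EXISTS audit_row_count AS SELECT count() AS n FROM events"],
  ["DROP VIEW IF EXISTS audit_row_count"],
  // Optional dependency hints: { pullsDataFrom: [...], pushesDataTo: [...] }
);
```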
2455
2628
 
2456
2629
  // src/dmv2/sdk/materializedView.ts
2457
- var requireTargetTableName = (tableName) => {
2458
- if (typeof tableName === "string") {
2459
- return tableName;
2460
- } else {
2461
- throw new Error("Name of targetTable is not specified.");
2462
- }
2463
- };
2464
- var MaterializedView = class extends SqlResource {
2465
- /** The target OlapTable instance where the materialized data is stored. */
2466
- targetTable;
2467
- constructor(options, targetSchema, targetColumns) {
2468
- let selectStatement = options.selectStatement;
2469
- if (typeof selectStatement !== "string") {
2470
- selectStatement = toStaticQuery(selectStatement);
2471
- }
2472
- if (targetSchema === void 0 || targetColumns === void 0) {
2473
- throw new Error(
2474
- "Supply the type param T so that the schema is inserted by the compiler plugin."
2475
- );
2476
- }
2477
- const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2478
- requireTargetTableName(
2479
- options.targetTable?.name ?? options.tableName
2480
- ),
2481
- {
2482
- orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2483
- engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2484
- },
2485
- targetSchema,
2486
- targetColumns
2487
- );
2488
- if (targetTable.name === options.materializedViewName) {
2489
- throw new Error(
2490
- "Materialized view name cannot be the same as the target table name."
2491
- );
2492
- }
2493
- super(
2494
- options.materializedViewName,
2495
- [
2496
- createMaterializedView({
2497
- name: options.materializedViewName,
2498
- destinationTable: targetTable.name,
2499
- select: selectStatement
2500
- })
2501
- // Population is now handled automatically by Rust infrastructure
2502
- // based on table engine type and whether this is a new or updated view
2503
- ],
2504
- [dropView(options.materializedViewName)],
2505
- {
2506
- pullsDataFrom: options.selectTables,
2507
- pushesDataTo: [targetTable]
2630
+ var requireTargetTableName, MaterializedView;
2631
+ var init_materializedView = __esm({
2632
+ "src/dmv2/sdk/materializedView.ts"() {
2633
+ "use strict";
2634
+ init_helpers();
2635
+ init_sqlHelpers();
2636
+ init_olapTable();
2637
+ init_sqlResource();
2638
+ requireTargetTableName = (tableName) => {
2639
+ if (typeof tableName === "string") {
2640
+ return tableName;
2641
+ } else {
2642
+ throw new Error("Name of targetTable is not specified.");
2508
2643
  }
2509
- );
2510
- this.targetTable = targetTable;
2644
+ };
2645
+ MaterializedView = class extends SqlResource {
2646
+ /** The target OlapTable instance where the materialized data is stored. */
2647
+ targetTable;
2648
+ constructor(options, targetSchema, targetColumns) {
2649
+ let selectStatement = options.selectStatement;
2650
+ if (typeof selectStatement !== "string") {
2651
+ selectStatement = toStaticQuery(selectStatement);
2652
+ }
2653
+ if (targetSchema === void 0 || targetColumns === void 0) {
2654
+ throw new Error(
2655
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
2656
+ );
2657
+ }
2658
+ const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2659
+ requireTargetTableName(
2660
+ options.targetTable?.name ?? options.tableName
2661
+ ),
2662
+ {
2663
+ orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2664
+ engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2665
+ },
2666
+ targetSchema,
2667
+ targetColumns
2668
+ );
2669
+ if (targetTable.name === options.materializedViewName) {
2670
+ throw new Error(
2671
+ "Materialized view name cannot be the same as the target table name."
2672
+ );
2673
+ }
2674
+ super(
2675
+ options.materializedViewName,
2676
+ [
2677
+ createMaterializedView({
2678
+ name: options.materializedViewName,
2679
+ destinationTable: targetTable.name,
2680
+ select: selectStatement
2681
+ })
2682
+ // Population is now handled automatically by Rust infrastructure
2683
+ // based on table engine type and whether this is a new or updated view
2684
+ ],
2685
+ [dropView(options.materializedViewName)],
2686
+ {
2687
+ pullsDataFrom: options.selectTables,
2688
+ pushesDataTo: [targetTable]
2689
+ }
2690
+ );
2691
+ this.targetTable = targetTable;
2692
+ }
2693
+ };
2511
2694
  }
2512
- };
2695
+ });
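A usage sketch for the options handled above. `DailyCount` and the `events` table are hypothetical; the target schema and columns for the generic parameter are injected by the compiler plugin (hence the guard that throws when they are missing), and the target table is created on the fly whenever `targetTable` is not an existing `OlapTable`:

```typescript
import { MaterializedView, OlapTable, sql } from "@514labs/moose-lib";

interface DailyCount { day: string; n: number; } // hypothetical target row

const events = new OlapTable<{ ts: string }>("events"); // assumed source table

const mv = new MaterializedView<DailyCount>({
  materializedViewName: "events_daily_mv", // must differ from the table name
  tableName: "events_daily",               // target table, created on demand
  orderByFields: ["day"],
  selectStatement: sql`SELECT toDate(ts) AS day, count() AS n FROM events GROUP BY day`,
  selectTables: [events],
});

// mv.targetTable is the OlapTable that holds the materialized rows.
```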
2513
2696
 
2514
2697
  // src/dmv2/sdk/view.ts
2515
- var View = class extends SqlResource {
2516
- /**
2517
- * Creates a new View instance.
2518
- * @param name The name of the view to be created.
2519
- * @param selectStatement The SQL SELECT statement that defines the view's logic.
2520
- * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2521
- */
2522
- constructor(name, selectStatement, baseTables) {
2523
- if (typeof selectStatement !== "string") {
2524
- selectStatement = toStaticQuery(selectStatement);
2525
- }
2526
- super(
2527
- name,
2528
- [
2529
- `CREATE VIEW IF NOT EXISTS ${name}
2698
+ var View;
2699
+ var init_view = __esm({
2700
+ "src/dmv2/sdk/view.ts"() {
2701
+ "use strict";
2702
+ init_helpers();
2703
+ init_sqlHelpers();
2704
+ init_sqlResource();
2705
+ View = class extends SqlResource {
2706
+ /**
2707
+ * Creates a new View instance.
2708
+ * @param name The name of the view to be created.
2709
+ * @param selectStatement The SQL SELECT statement that defines the view's logic.
2710
+ * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2711
+ */
2712
+ constructor(name, selectStatement, baseTables) {
2713
+ if (typeof selectStatement !== "string") {
2714
+ selectStatement = toStaticQuery(selectStatement);
2715
+ }
2716
+ super(
2717
+ name,
2718
+ [
2719
+ `CREATE VIEW IF NOT EXISTS ${name}
2530
2720
  AS ${selectStatement}`.trim()
2531
- ],
2532
- [dropView(name)],
2533
- {
2534
- pullsDataFrom: baseTables
2721
+ ],
2722
+ [dropView(name)],
2723
+ {
2724
+ pullsDataFrom: baseTables
2725
+ }
2726
+ );
2535
2727
  }
2536
- );
2728
+ };
2537
2729
  }
2538
- };
2730
+ });
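`View` reduces to a `CREATE VIEW IF NOT EXISTS ... AS <select>` setup statement plus a `dropView` teardown. A short sketch; the `events` table referenced in the SQL is hypothetical, and passing an empty `baseTables` array is assumed valid since the base class only uses it for dependency tracking:

```typescript
import { View, sql } from "@514labs/moose-lib";

const recent = new View(
  "recent_events",
  sql`SELECT * FROM events WHERE ts > now() - INTERVAL 1 DAY`,
  [], // baseTables: dependency metadata only, omitted in this sketch
);
```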
2539
2731
 
2540
2732
  // src/dmv2/sdk/lifeCycle.ts
2541
- var LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2542
- LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2543
- LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2544
- LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2545
- return LifeCycle2;
2546
- })(LifeCycle || {});
2733
+ var LifeCycle;
2734
+ var init_lifeCycle = __esm({
2735
+ "src/dmv2/sdk/lifeCycle.ts"() {
2736
+ "use strict";
2737
+ LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2738
+ LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2739
+ LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2740
+ LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2741
+ return LifeCycle2;
2742
+ })(LifeCycle || {});
2743
+ }
2744
+ });
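The IIFE above is TypeScript's standard lowering of a string enum, so `LifeCycle.FULLY_MANAGED === "FULLY_MANAGED"` at run time. The original source is recoverable as:

```typescript
// Reconstructed source of src/dmv2/sdk/lifeCycle.ts (its lowering is shown above).
export enum LifeCycle {
  FULLY_MANAGED = "FULLY_MANAGED",
  DELETION_PROTECTED = "DELETION_PROTECTED",
  EXTERNALLY_MANAGED = "EXTERNALLY_MANAGED",
}
```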
2547
2745
 
2548
2746
  // src/dmv2/sdk/webApp.ts
2549
- var RESERVED_MOUNT_PATHS = [
2550
- "/admin",
2551
- "/api",
2552
- "/consumption",
2553
- "/health",
2554
- "/ingest",
2555
- "/moose",
2556
- // reserved for future use
2557
- "/ready",
2558
- "/workflows"
2559
- ];
2560
- var WebApp = class {
2561
- name;
2562
- handler;
2563
- config;
2564
- _rawApp;
2565
- constructor(name, appOrHandler, config) {
2566
- this.name = name;
2567
- this.config = config;
2568
- if (!this.config.mountPath) {
2569
- throw new Error(
2570
- `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2571
- );
2572
- }
2573
- const mountPath = this.config.mountPath;
2574
- if (mountPath === "/") {
2575
- throw new Error(
2576
- `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2577
- );
2578
- }
2579
- if (mountPath.endsWith("/")) {
2580
- throw new Error(
2581
- `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2582
- );
2583
- }
2584
- for (const reserved of RESERVED_MOUNT_PATHS) {
2585
- if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2586
- throw new Error(
2587
- `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2588
- );
2589
- }
2590
- }
2591
- this.handler = this.toHandler(appOrHandler);
2592
- this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2593
- const webApps = getMooseInternal().webApps;
2594
- if (webApps.has(name)) {
2595
- throw new Error(`WebApp with name ${name} already exists`);
2596
- }
2597
- if (this.config.mountPath) {
2598
- for (const [existingName, existingApp] of webApps) {
2599
- if (existingApp.config.mountPath === this.config.mountPath) {
2747
+ var RESERVED_MOUNT_PATHS, WebApp;
2748
+ var init_webApp = __esm({
2749
+ "src/dmv2/sdk/webApp.ts"() {
2750
+ "use strict";
2751
+ init_internal();
2752
+ RESERVED_MOUNT_PATHS = [
2753
+ "/admin",
2754
+ "/api",
2755
+ "/consumption",
2756
+ "/health",
2757
+ "/ingest",
2758
+ "/moose",
2759
+ // reserved for future use
2760
+ "/ready",
2761
+ "/workflows"
2762
+ ];
2763
+ WebApp = class {
2764
+ name;
2765
+ handler;
2766
+ config;
2767
+ _rawApp;
2768
+ constructor(name, appOrHandler, config) {
2769
+ this.name = name;
2770
+ this.config = config;
2771
+ if (!this.config.mountPath) {
2600
2772
  throw new Error(
2601
- `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2773
+ `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2602
2774
  );
2603
2775
  }
2604
- }
2605
- }
2606
- webApps.set(name, this);
2607
- }
2608
- toHandler(appOrHandler) {
2609
- if (typeof appOrHandler === "function") {
2610
- return appOrHandler;
2611
- }
2612
- const app = appOrHandler;
2613
- if (typeof app.handle === "function") {
2614
- return (req, res) => {
2615
- app.handle(req, res, (err) => {
2616
- if (err) {
2617
- console.error("WebApp handler error:", err);
2618
- if (!res.headersSent) {
2619
- res.writeHead(500, { "Content-Type": "application/json" });
2620
- res.end(JSON.stringify({ error: "Internal Server Error" }));
2776
+ const mountPath = this.config.mountPath;
2777
+ if (mountPath === "/") {
2778
+ throw new Error(
2779
+ `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2780
+ );
2781
+ }
2782
+ if (mountPath.endsWith("/")) {
2783
+ throw new Error(
2784
+ `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2785
+ );
2786
+ }
2787
+ for (const reserved of RESERVED_MOUNT_PATHS) {
2788
+ if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2789
+ throw new Error(
2790
+ `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2791
+ );
2792
+ }
2793
+ }
2794
+ this.handler = this.toHandler(appOrHandler);
2795
+ this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2796
+ const webApps = getMooseInternal().webApps;
2797
+ if (webApps.has(name)) {
2798
+ throw new Error(`WebApp with name ${name} already exists`);
2799
+ }
2800
+ if (this.config.mountPath) {
2801
+ for (const [existingName, existingApp] of webApps) {
2802
+ if (existingApp.config.mountPath === this.config.mountPath) {
2803
+ throw new Error(
2804
+ `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2805
+ );
2621
2806
  }
2622
2807
  }
2623
- });
2624
- };
2625
- }
2626
- if (typeof app.callback === "function") {
2627
- return app.callback();
2628
- }
2629
- if (typeof app.routing === "function") {
2630
- const routing = app.routing;
2631
- const appWithReady = app;
2632
- let readyPromise = null;
2633
- return async (req, res) => {
2634
- if (readyPromise === null) {
2635
- readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2636
- }
2637
- await readyPromise;
2638
- routing(req, res);
2639
- };
2640
- }
2641
- throw new Error(
2642
- `Unable to convert app to handler. The provided object must be:
2808
+ }
2809
+ webApps.set(name, this);
2810
+ }
2811
+ toHandler(appOrHandler) {
2812
+ if (typeof appOrHandler === "function") {
2813
+ return appOrHandler;
2814
+ }
2815
+ const app = appOrHandler;
2816
+ if (typeof app.handle === "function") {
2817
+ return (req, res) => {
2818
+ app.handle(req, res, (err) => {
2819
+ if (err) {
2820
+ console.error("WebApp handler error:", err);
2821
+ if (!res.headersSent) {
2822
+ res.writeHead(500, { "Content-Type": "application/json" });
2823
+ res.end(JSON.stringify({ error: "Internal Server Error" }));
2824
+ }
2825
+ }
2826
+ });
2827
+ };
2828
+ }
2829
+ if (typeof app.callback === "function") {
2830
+ return app.callback();
2831
+ }
2832
+ if (typeof app.routing === "function") {
2833
+ const routing = app.routing;
2834
+ const appWithReady = app;
2835
+ let readyPromise = null;
2836
+ return async (req, res) => {
2837
+ if (readyPromise === null) {
2838
+ readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2839
+ }
2840
+ await readyPromise;
2841
+ routing(req, res);
2842
+ };
2843
+ }
2844
+ throw new Error(
2845
+ `Unable to convert app to handler. The provided object must be:
2643
2846
  - A function (raw Node.js handler)
2644
2847
  - An object with .handle() method (Express, Connect)
2645
2848
  - An object with .callback() method (Koa)
@@ -2651,12 +2854,14 @@ Examples:
2651
2854
  Fastify: new WebApp("name", fastifyApp)
2652
2855
  Raw: new WebApp("name", (req, res) => { ... })
2653
2856
  `
2654
- );
2655
- }
2656
- getRawApp() {
2657
- return this._rawApp;
2857
+ );
2858
+ }
2859
+ getRawApp() {
2860
+ return this._rawApp;
2861
+ }
2862
+ };
2658
2863
  }
2659
- };
2864
+ });
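`toHandler` adapts three framework shapes, `.handle()` (Express/Connect), `.callback()` (Koa), and `.routing` plus optional `.ready()` (Fastify), or accepts a raw Node handler, while the mount-path checks reject `/`, trailing slashes, and the reserved prefixes. A sketch with Express, assumed installed in the consuming app:

```typescript
import express from "express"; // assumed dependency of the host app
import { WebApp } from "@514labs/moose-lib";

const app = express();
app.get("/hello", (_req, res) => {
  res.json({ ok: true });
});

// "/tools" is fine; "/", "/api", "/ingest/x", or "/tools/" would all throw.
new WebApp("tools_app", app, { mountPath: "/tools" });
```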
2660
2865
 
2661
2866
  // src/dmv2/registry.ts
2662
2867
  function getTables() {
@@ -2722,6 +2927,84 @@ function getWebApps2() {
2722
2927
  function getWebApp(name) {
2723
2928
  return getMooseInternal().webApps.get(name);
2724
2929
  }
2930
+ var init_registry = __esm({
2931
+ "src/dmv2/registry.ts"() {
2932
+ "use strict";
2933
+ init_internal();
2934
+ }
2935
+ });
2936
+
2937
+ // src/dmv2/index.ts
2938
+ var init_dmv2 = __esm({
2939
+ "src/dmv2/index.ts"() {
2940
+ "use strict";
2941
+ init_olapTable();
2942
+ init_stream();
2943
+ init_workflow();
2944
+ init_ingestApi();
2945
+ init_consumptionApi();
2946
+ init_ingestPipeline();
2947
+ init_etlPipeline();
2948
+ init_materializedView();
2949
+ init_sqlResource();
2950
+ init_view();
2951
+ init_lifeCycle();
2952
+ init_webApp();
2953
+ init_registry();
2954
+ }
2955
+ });
2956
+
2957
+ // src/browserCompatible.ts
2958
+ var browserCompatible_exports = {};
2959
+ __export(browserCompatible_exports, {
2960
+ Api: () => Api,
2961
+ ConsumptionApi: () => ConsumptionApi,
2962
+ DeadLetterQueue: () => DeadLetterQueue,
2963
+ ETLPipeline: () => ETLPipeline,
2964
+ IngestApi: () => IngestApi,
2965
+ IngestPipeline: () => IngestPipeline,
2966
+ LifeCycle: () => LifeCycle,
2967
+ MaterializedView: () => MaterializedView,
2968
+ OlapTable: () => OlapTable,
2969
+ Sql: () => Sql,
2970
+ SqlResource: () => SqlResource,
2971
+ Stream: () => Stream,
2972
+ Task: () => Task,
2973
+ View: () => View,
2974
+ WebApp: () => WebApp,
2975
+ Workflow: () => Workflow,
2976
+ createClickhouseParameter: () => createClickhouseParameter,
2977
+ getApi: () => getApi,
2978
+ getApis: () => getApis2,
2979
+ getIngestApi: () => getIngestApi,
2980
+ getIngestApis: () => getIngestApis,
2981
+ getSqlResource: () => getSqlResource,
2982
+ getSqlResources: () => getSqlResources,
2983
+ getStream: () => getStream,
2984
+ getStreams: () => getStreams,
2985
+ getTable: () => getTable,
2986
+ getTables: () => getTables,
2987
+ getValueFromParameter: () => getValueFromParameter,
2988
+ getWebApp: () => getWebApp,
2989
+ getWebApps: () => getWebApps2,
2990
+ getWorkflow: () => getWorkflow,
2991
+ getWorkflows: () => getWorkflows2,
2992
+ mapToClickHouseType: () => mapToClickHouseType,
2993
+ quoteIdentifier: () => quoteIdentifier,
2994
+ sql: () => sql,
2995
+ toQuery: () => toQuery,
2996
+ toQueryPreview: () => toQueryPreview,
2997
+ toStaticQuery: () => toStaticQuery
2998
+ });
2999
+ module.exports = __toCommonJS(browserCompatible_exports);
3000
+ var init_browserCompatible = __esm({
3001
+ "src/browserCompatible.ts"() {
3002
+ init_dmv2();
3003
+ init_types();
3004
+ init_sqlHelpers();
3005
+ }
3006
+ });
3007
+ init_browserCompatible();
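Note that `init_browserCompatible()` runs immediately after the export object is wired, so the lazy `init_*` wrappers above control intra-bundle ordering rather than deferring work for consumers. The dead `0 && (module.exports = { ... })` block that begins just below never executes; it exists so Node's cjs-module-lexer can statically discover the export names, which is what lets ESM code import them by name (entry-point name assumed):

```typescript
// ESM consumers of this CommonJS bundle get real named imports because
// cjs-module-lexer reads the annotation block below:
import { Task, Workflow, sql } from "@514labs/moose-lib";
```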
2725
3008
  // Annotate the CommonJS export names for ESM import in node:
2726
3009
  0 && (module.exports = {
2727
3010
  Api,