@514labs/moose-lib 0.6.348 → 0.6.349

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
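Most of the change below is structural: module bodies that 0.6.348 wrapped in lazy __esm initializers (init_sqlHelpers, init_internal, init_olapTable, and so on) are hoisted in 0.6.349 into an eagerly executed src/browserCompatible.ts entry point that re-exports the public API. For orientation, here is a minimal sketch of the sql/toQuery helpers whose bundled implementation appears in both versions below; the usage is inferred from that bundled code rather than from package documentation, and the table and column names are made up:

    import { sql, toQuery } from "@514labs/moose-lib";

    // Interpolated values become typed ClickHouse query parameters,
    // while nested sql fragments compose into the outer statement.
    const minAge = 21;
    const filter = sql`age >= ${minAge}`;
    const q = sql`SELECT name FROM users WHERE ${filter}`;

    const [query, params] = toQuery(q);
    // query  -> "SELECT name FROM users WHERE age >= {p0:Int}"
    // params -> { p0: 21 }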
@@ -30,183 +30,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);

- // src/dmv2/utils/stackTrace.ts
- function shouldSkipStackLine(line) {
- return line.includes("node_modules") || // Skip npm installed packages (prod)
- line.includes("node:internal") || // Skip Node.js internals (modern format)
- line.includes("internal/modules") || // Skip Node.js internals (older format)
- line.includes("ts-node") || // Skip TypeScript execution
- line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
- line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
- line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
- line.includes("\\ts-moose-lib\\dist\\");
- }
- function parseStackLine(line) {
- const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
- if (match && match[1]) {
- return {
- file: match[1],
- line: match[2]
- };
- }
- return void 0;
- }
- function getSourceFileInfo(stack) {
- if (!stack) return {};
- const lines = stack.split("\n");
- for (const line of lines) {
- if (shouldSkipStackLine(line)) continue;
- const info = parseStackLine(line);
- if (info) return info;
- }
- return {};
- }
- function getSourceLocationFromStack(stack) {
- if (!stack) return void 0;
- const lines = stack.split("\n");
- for (const line of lines.slice(1)) {
- if (shouldSkipStackLine(line)) {
- continue;
- }
- const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
- if (v8Match) {
- return {
- file: v8Match[1],
- line: parseInt(v8Match[2], 10),
- column: parseInt(v8Match[3], 10)
- };
- }
- const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
- if (smMatch) {
- return {
- file: smMatch[1],
- line: parseInt(smMatch[2], 10),
- column: parseInt(smMatch[3], 10)
- };
- }
- }
- return void 0;
- }
- function getSourceFileFromStack(stack) {
- const location = getSourceLocationFromStack(stack);
- return location?.file;
- }
- var init_stackTrace = __esm({
- "src/dmv2/utils/stackTrace.ts"() {
- "use strict";
- }
- });
-
- // src/dmv2/typedBase.ts
- var TypedBase;
- var init_typedBase = __esm({
- "src/dmv2/typedBase.ts"() {
- "use strict";
- init_stackTrace();
- TypedBase = class {
- /** The JSON schema representation of type T. Injected by the compiler plugin. */
- schema;
- /** The name assigned to this resource instance. */
- name;
- /** A dictionary mapping column names (keys of T) to their Column definitions. */
- columns;
- /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
- columnArray;
- /** The configuration object specific to this resource type. */
- config;
- /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
- validators;
- /** Optional metadata for the resource, always present as an object. */
- metadata;
- /**
- * Whether this resource allows extra fields beyond the defined columns.
- * When true, extra fields in payloads are passed through to streaming functions.
- * Injected by the compiler plugin when the type has an index signature.
- */
- allowExtraFields;
- /**
- * @internal Constructor intended for internal use by subclasses and the compiler plugin.
- * It expects the schema and columns to be provided, typically injected by the compiler.
- *
- * @param name The name for the resource instance.
- * @param config The configuration object for the resource.
- * @param schema The JSON schema for the resource's data type T (injected).
- * @param columns The array of Column definitions for T (injected).
- * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
- */
- constructor(name, config, schema, columns, validators, allowExtraFields) {
- if (schema === void 0 || columns === void 0) {
- throw new Error(
- "Supply the type param T so that the schema is inserted by the compiler plugin."
- );
- }
- this.schema = schema;
- this.columnArray = columns;
- const columnsObj = {};
- columns.forEach((column) => {
- columnsObj[column.name] = column;
- });
- this.columns = columnsObj;
- this.name = name;
- this.config = config;
- this.validators = validators;
- this.allowExtraFields = allowExtraFields ?? false;
- this.metadata = config?.metadata ? { ...config.metadata } : {};
- if (!this.metadata.source) {
- const stack = new Error().stack;
- if (stack) {
- const info = getSourceFileInfo(stack);
- this.metadata.source = { file: info.file, line: info.line };
- }
- }
- }
- };
- }
- });
-
- // src/dataModels/dataModelTypes.ts
- function isArrayNestedType(dt) {
- return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
- }
- function isNestedType(dt) {
- return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
- }
- var init_dataModelTypes = __esm({
- "src/dataModels/dataModelTypes.ts"() {
- "use strict";
- }
- });
-
- // src/dataModels/types.ts
- var ClickHouseEngines;
- var init_types = __esm({
- "src/dataModels/types.ts"() {
- "use strict";
- ClickHouseEngines = /* @__PURE__ */ ((ClickHouseEngines2) => {
- ClickHouseEngines2["MergeTree"] = "MergeTree";
- ClickHouseEngines2["ReplacingMergeTree"] = "ReplacingMergeTree";
- ClickHouseEngines2["SummingMergeTree"] = "SummingMergeTree";
- ClickHouseEngines2["AggregatingMergeTree"] = "AggregatingMergeTree";
- ClickHouseEngines2["CollapsingMergeTree"] = "CollapsingMergeTree";
- ClickHouseEngines2["VersionedCollapsingMergeTree"] = "VersionedCollapsingMergeTree";
- ClickHouseEngines2["GraphiteMergeTree"] = "GraphiteMergeTree";
- ClickHouseEngines2["S3Queue"] = "S3Queue";
- ClickHouseEngines2["S3"] = "S3";
- ClickHouseEngines2["Buffer"] = "Buffer";
- ClickHouseEngines2["Distributed"] = "Distributed";
- ClickHouseEngines2["IcebergS3"] = "IcebergS3";
- ClickHouseEngines2["Kafka"] = "Kafka";
- ClickHouseEngines2["ReplicatedMergeTree"] = "ReplicatedMergeTree";
- ClickHouseEngines2["ReplicatedReplacingMergeTree"] = "ReplicatedReplacingMergeTree";
- ClickHouseEngines2["ReplicatedAggregatingMergeTree"] = "ReplicatedAggregatingMergeTree";
- ClickHouseEngines2["ReplicatedSummingMergeTree"] = "ReplicatedSummingMergeTree";
- ClickHouseEngines2["ReplicatedCollapsingMergeTree"] = "ReplicatedCollapsingMergeTree";
- ClickHouseEngines2["ReplicatedVersionedCollapsingMergeTree"] = "ReplicatedVersionedCollapsingMergeTree";
- return ClickHouseEngines2;
- })(ClickHouseEngines || {});
- }
- });
-
  // src/commons.ts
  var commons_exports = {};
  __export(commons_exports, {
@@ -465,487 +288,6 @@ var init_commons = __esm({
  }
  });

- // src/secrets.ts
- var init_secrets = __esm({
- "src/secrets.ts"() {
- "use strict";
- }
- });
-
- // src/sqlHelpers.ts
- function sqlImpl(strings, ...values) {
- return new Sql(strings, values);
- }
- function createClickhouseParameter(parameterIndex, value) {
- return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
- }
- function emptyIfUndefined(value) {
- return value === void 0 ? "" : value;
- }
- var quoteIdentifier, isTable, isView, isColumn, sql, instanceofSql, Sql, toStaticQuery, toQuery, toQueryPreview, getValueFromParameter, mapToClickHouseType;
- var init_sqlHelpers = __esm({
- "src/sqlHelpers.ts"() {
- "use strict";
- quoteIdentifier = (name) => {
- return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
- };
- isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
- isView = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "View";
- isColumn = (value) => typeof value === "object" && value !== null && !("kind" in value) && "name" in value && "annotations" in value;
- sql = sqlImpl;
- instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
- Sql = class _Sql {
- values;
- strings;
- constructor(rawStrings, rawValues) {
- if (rawStrings.length - 1 !== rawValues.length) {
- if (rawStrings.length === 0) {
- throw new TypeError("Expected at least 1 string");
- }
- throw new TypeError(
- `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
- );
- }
- const valuesLength = rawValues.reduce(
- (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) || isView(value) ? 0 : 1),
- 0
- );
- this.values = new Array(valuesLength);
- this.strings = new Array(valuesLength + 1);
- this.strings[0] = rawStrings[0];
- let i = 0, pos = 0;
- while (i < rawValues.length) {
- const child = rawValues[i++];
- const rawString = rawStrings[i];
- if (instanceofSql(child)) {
- this.strings[pos] += child.strings[0];
- let childIndex = 0;
- while (childIndex < child.values.length) {
- this.values[pos++] = child.values[childIndex++];
- this.strings[pos] = child.strings[childIndex];
- }
- this.strings[pos] += rawString;
- } else if (isColumn(child)) {
- const aggregationFunction = child.annotations.find(
- ([k, _]) => k === "aggregationFunction"
- );
- if (aggregationFunction !== void 0) {
- this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
- } else {
- this.strings[pos] += `\`${child.name}\``;
- }
- this.strings[pos] += rawString;
- } else if (isTable(child)) {
- if (child.config.database) {
- this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
- } else {
- this.strings[pos] += `\`${child.name}\``;
- }
- this.strings[pos] += rawString;
- } else if (isView(child)) {
- this.strings[pos] += `\`${child.name}\``;
- this.strings[pos] += rawString;
- } else {
- this.values[pos++] = child;
- this.strings[pos] = rawString;
- }
- }
- }
- /**
- * Append another Sql fragment, returning a new Sql instance.
- */
- append(other) {
- return new _Sql([...this.strings, ""], [...this.values, other]);
- }
- };
- sql.join = function(fragments, separator) {
- if (fragments.length === 0) return new Sql([""], []);
- if (fragments.length === 1) return fragments[0];
- const sep = separator ?? ", ";
- const normalized = sep.includes(" ") ? sep : ` ${sep} `;
- const strings = ["", ...Array(fragments.length - 1).fill(normalized), ""];
- return new Sql(strings, fragments);
- };
- sql.raw = function(text) {
- return new Sql([text], []);
- };
- toStaticQuery = (sql3) => {
- const [query, params] = toQuery(sql3);
- if (Object.keys(params).length !== 0) {
- throw new Error(
- "Dynamic SQL is not allowed in the select statement in view creation."
- );
- }
- return query;
- };
- toQuery = (sql3) => {
- const parameterizedStubs = sql3.values.map(
- (v, i) => createClickhouseParameter(i, v)
- );
- const query = sql3.strings.map(
- (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
- ).join("");
- const query_params = sql3.values.reduce(
- (acc, v, i) => ({
- ...acc,
- [`p${i}`]: getValueFromParameter(v)
- }),
- {}
- );
- return [query, query_params];
- };
- toQueryPreview = (sql3) => {
- try {
- const formatValue = (v) => {
- if (Array.isArray(v)) {
- const [type, val] = v;
- if (type === "Identifier") {
- return `\`${String(val)}\``;
- }
- return `[${v.map((x) => formatValue(x)).join(", ")}]`;
- }
- if (v === null || v === void 0) return "NULL";
- if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
- if (typeof v === "number") return String(v);
- if (typeof v === "boolean") return v ? "true" : "false";
- if (v instanceof Date)
- return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
- try {
- return JSON.stringify(v);
- } catch {
- return String(v);
- }
- };
- let out = sql3.strings[0] ?? "";
- for (let i = 0; i < sql3.values.length; i++) {
- const val = getValueFromParameter(sql3.values[i]);
- out += formatValue(val);
- out += sql3.strings[i + 1] ?? "";
- }
- return out.replace(/\s+/g, " ").trim();
- } catch (error) {
- console.log(`toQueryPreview error: ${error}`);
- return "/* query preview unavailable */";
- }
- };
- getValueFromParameter = (value) => {
- if (Array.isArray(value)) {
- const [type, val] = value;
- if (type === "Identifier") return val;
- }
- return value;
- };
- mapToClickHouseType = (value) => {
- if (typeof value === "number") {
- return Number.isInteger(value) ? "Int" : "Float";
- }
- if (typeof value === "boolean") return "Bool";
- if (value instanceof Date) return "DateTime";
- if (Array.isArray(value)) {
- const [type, _] = value;
- return type;
- }
- return "String";
- };
- }
- });
-
- // src/consumption-apis/helpers.ts
- var import_client2, import_node_crypto;
- var init_helpers = __esm({
- "src/consumption-apis/helpers.ts"() {
- "use strict";
- import_client2 = require("@temporalio/client");
- import_node_crypto = require("crypto");
- init_internal();
- init_sqlHelpers();
- }
- });
-
- // src/consumption-apis/webAppHelpers.ts
- var init_webAppHelpers = __esm({
- "src/consumption-apis/webAppHelpers.ts"() {
- "use strict";
- }
- });
-
- // src/scripts/task.ts
- var init_task = __esm({
- "src/scripts/task.ts"() {
- "use strict";
- }
- });
-
- // src/cluster-utils.ts
- var import_node_cluster, import_node_os, import_node_process;
- var init_cluster_utils = __esm({
- "src/cluster-utils.ts"() {
- "use strict";
- import_node_cluster = __toESM(require("cluster"));
- import_node_os = require("os");
- import_node_process = require("process");
- }
- });
-
- // src/compiler-config.ts
- var init_compiler_config = __esm({
- "src/compiler-config.ts"() {
- "use strict";
- }
- });
-
- // src/consumption-apis/runner.ts
- var jose;
- var init_runner = __esm({
- "src/consumption-apis/runner.ts"() {
- "use strict";
- init_commons();
- init_helpers();
- jose = __toESM(require("jose"));
- init_cluster_utils();
- init_sqlHelpers();
- init_internal();
- init_compiler_config();
- }
- });
-
- // src/clients/redisClient.ts
- var import_redis;
- var init_redisClient = __esm({
- "src/clients/redisClient.ts"() {
- "use strict";
- import_redis = require("redis");
- }
- });
-
- // src/consumption-apis/standalone.ts
- var init_standalone = __esm({
- "src/consumption-apis/standalone.ts"() {
- "use strict";
- init_helpers();
- init_commons();
- init_sqlHelpers();
- }
- });
-
- // src/utilities/json.ts
- var init_json = __esm({
- "src/utilities/json.ts"() {
- "use strict";
- }
- });
-
- // src/utilities/dataParser.ts
- var import_csv_parse, CSV_DELIMITERS, DEFAULT_CSV_CONFIG;
- var init_dataParser = __esm({
- "src/utilities/dataParser.ts"() {
- "use strict";
- import_csv_parse = require("csv-parse");
- init_json();
- CSV_DELIMITERS = {
- COMMA: ",",
- TAB: " ",
- SEMICOLON: ";",
- PIPE: "|"
- };
- DEFAULT_CSV_CONFIG = {
- delimiter: CSV_DELIMITERS.COMMA,
- columns: true,
- skipEmptyLines: true,
- trim: true
- };
- }
- });
-
- // src/utilities/index.ts
- var init_utilities = __esm({
- "src/utilities/index.ts"() {
- "use strict";
- init_dataParser();
- }
- });
-
- // src/connectors/dataSource.ts
- var init_dataSource = __esm({
- "src/connectors/dataSource.ts"() {
- "use strict";
- }
- });
-
- // src/index.ts
- var init_index = __esm({
- "src/index.ts"() {
- "use strict";
- init_browserCompatible();
- init_commons();
- init_secrets();
- init_helpers();
- init_webAppHelpers();
- init_task();
- init_runner();
- init_redisClient();
- init_helpers();
- init_standalone();
- init_sqlHelpers();
- init_utilities();
- init_dataSource();
- init_types();
- }
- });
-
- // src/dmv2/internal.ts
- var import_process, isClientOnlyMode, moose_internal, defaultRetentionPeriod, getMooseInternal, dlqSchema, dlqColumns;
- var init_internal = __esm({
- "src/dmv2/internal.ts"() {
- "use strict";
- import_process = __toESM(require("process"));
- init_index();
- init_commons();
- init_compiler_config();
- isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
- moose_internal = {
- tables: /* @__PURE__ */ new Map(),
- streams: /* @__PURE__ */ new Map(),
- ingestApis: /* @__PURE__ */ new Map(),
- apis: /* @__PURE__ */ new Map(),
- sqlResources: /* @__PURE__ */ new Map(),
- workflows: /* @__PURE__ */ new Map(),
- webApps: /* @__PURE__ */ new Map(),
- materializedViews: /* @__PURE__ */ new Map(),
- views: /* @__PURE__ */ new Map()
- };
- defaultRetentionPeriod = 60 * 60 * 24 * 7;
- getMooseInternal = () => globalThis.moose_internal;
- if (getMooseInternal() === void 0) {
- globalThis.moose_internal = moose_internal;
- }
- dlqSchema = {
- version: "3.1",
- components: {
- schemas: {
- DeadLetterModel: {
- type: "object",
- properties: {
- originalRecord: {
- $ref: "#/components/schemas/Recordstringany"
- },
- errorMessage: {
- type: "string"
- },
- errorType: {
- type: "string"
- },
- failedAt: {
- type: "string",
- format: "date-time"
- },
- source: {
- oneOf: [
- {
- const: "api"
- },
- {
- const: "transform"
- },
- {
- const: "table"
- }
- ]
- }
- },
- required: [
- "originalRecord",
- "errorMessage",
- "errorType",
- "failedAt",
- "source"
- ]
- },
- Recordstringany: {
- type: "object",
- properties: {},
- required: [],
- description: "Construct a type with a set of properties K of type T",
- additionalProperties: {}
- }
- }
- },
- schemas: [
- {
- $ref: "#/components/schemas/DeadLetterModel"
- }
- ]
- };
- dlqColumns = [
- {
- name: "originalRecord",
- data_type: "Json",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "errorMessage",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "errorType",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "failedAt",
- data_type: "DateTime",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "source",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- }
- ];
- }
- });
-
  // src/config/configFile.ts
  async function findConfigFile(startDir = process.cwd()) {
  const fs = await import("fs");
@@ -1130,1568 +472,2057 @@ var init_runtime = __esm({
1130
472
  }
1131
473
  });
1132
474
 
1133
- // src/dmv2/sdk/olapTable.ts
1134
- var import_node_stream, import_node_crypto2, OlapTable;
1135
- var init_olapTable = __esm({
1136
- "src/dmv2/sdk/olapTable.ts"() {
1137
- "use strict";
1138
- init_typedBase();
1139
- init_dataModelTypes();
1140
- init_types();
1141
- init_internal();
1142
- import_node_stream = require("stream");
1143
- import_node_crypto2 = require("crypto");
1144
- init_sqlHelpers();
1145
- OlapTable = class extends TypedBase {
1146
- name;
1147
- /** @internal */
1148
- kind = "OlapTable";
1149
- /** @internal Memoized ClickHouse client for reusing connections across insert calls */
1150
- _memoizedClient;
1151
- /** @internal Hash of the configuration used to create the memoized client */
1152
- _configHash;
1153
- /** @internal Cached table name to avoid repeated generation */
1154
- _cachedTableName;
1155
- constructor(name, config, schema, columns, validators) {
1156
- const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
1157
- const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
1158
- const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
1159
- if (hasFields && hasExpr) {
1160
- throw new Error(
1161
- `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
1162
- );
1163
- }
1164
- const hasCluster = typeof resolvedConfig.cluster === "string";
1165
- const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
1166
- const hasReplicaName = typeof resolvedConfig.replicaName === "string";
1167
- if (hasCluster && (hasKeeperPath || hasReplicaName)) {
1168
- throw new Error(
1169
- `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
1170
- );
1171
- }
1172
- super(name, resolvedConfig, schema, columns, validators);
1173
- this.name = name;
1174
- const tables = getMooseInternal().tables;
1175
- const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
1176
- if (!isClientOnlyMode() && tables.has(registryKey)) {
1177
- throw new Error(
1178
- `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
1179
- );
1180
- }
1181
- tables.set(registryKey, this);
1182
- }
1183
- /**
1184
- * Generates the versioned table name following Moose's naming convention
1185
- * Format: {tableName}_{version_with_dots_replaced_by_underscores}
1186
- */
1187
- generateTableName() {
1188
- if (this._cachedTableName) {
1189
- return this._cachedTableName;
1190
- }
1191
- const tableVersion = this.config.version;
1192
- if (!tableVersion) {
1193
- this._cachedTableName = this.name;
1194
- } else {
1195
- const versionSuffix = tableVersion.replace(/\./g, "_");
1196
- this._cachedTableName = `${this.name}_${versionSuffix}`;
1197
- }
1198
- return this._cachedTableName;
1199
- }
1200
- /**
1201
- * Creates a fast hash of the ClickHouse configuration.
1202
- * Uses crypto.createHash for better performance than JSON.stringify.
1203
- *
1204
- * @private
1205
- */
1206
- createConfigHash(clickhouseConfig) {
1207
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1208
- const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
1209
- return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
475
+ // src/browserCompatible.ts
476
+ var browserCompatible_exports = {};
477
+ __export(browserCompatible_exports, {
478
+ Api: () => Api,
479
+ ClickHouseEngines: () => ClickHouseEngines,
480
+ ConsumptionApi: () => ConsumptionApi,
481
+ DeadLetterQueue: () => DeadLetterQueue,
482
+ ETLPipeline: () => ETLPipeline,
483
+ IngestApi: () => IngestApi,
484
+ IngestPipeline: () => IngestPipeline,
485
+ LifeCycle: () => LifeCycle,
486
+ MaterializedView: () => MaterializedView,
487
+ OlapTable: () => OlapTable,
488
+ Sql: () => Sql,
489
+ SqlResource: () => SqlResource,
490
+ Stream: () => Stream,
491
+ Task: () => Task,
492
+ View: () => View,
493
+ WebApp: () => WebApp,
494
+ Workflow: () => Workflow,
495
+ createClickhouseParameter: () => createClickhouseParameter,
496
+ getApi: () => getApi,
497
+ getApis: () => getApis,
498
+ getIngestApi: () => getIngestApi,
499
+ getIngestApis: () => getIngestApis,
500
+ getMaterializedView: () => getMaterializedView,
501
+ getMaterializedViews: () => getMaterializedViews,
502
+ getSqlResource: () => getSqlResource,
503
+ getSqlResources: () => getSqlResources,
504
+ getStream: () => getStream,
505
+ getStreams: () => getStreams,
506
+ getTable: () => getTable,
507
+ getTables: () => getTables,
508
+ getValueFromParameter: () => getValueFromParameter,
509
+ getView: () => getView,
510
+ getViews: () => getViews,
511
+ getWebApp: () => getWebApp,
512
+ getWebApps: () => getWebApps,
513
+ getWorkflow: () => getWorkflow,
514
+ getWorkflows: () => getWorkflows2,
515
+ mapToClickHouseType: () => mapToClickHouseType,
516
+ quoteIdentifier: () => quoteIdentifier,
517
+ sql: () => sql,
518
+ toQuery: () => toQuery,
519
+ toQueryPreview: () => toQueryPreview,
520
+ toStaticQuery: () => toStaticQuery
521
+ });
522
+ module.exports = __toCommonJS(browserCompatible_exports);
523
+
524
+ // src/dmv2/utils/stackTrace.ts
525
+ function shouldSkipStackLine(line) {
526
+ return line.includes("node_modules") || // Skip npm installed packages (prod)
527
+ line.includes("node:internal") || // Skip Node.js internals (modern format)
528
+ line.includes("internal/modules") || // Skip Node.js internals (older format)
529
+ line.includes("ts-node") || // Skip TypeScript execution
530
+ line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
531
+ line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
532
+ line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
533
+ line.includes("\\ts-moose-lib\\dist\\");
534
+ }
535
+ function parseStackLine(line) {
536
+ const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
537
+ if (match && match[1]) {
538
+ return {
539
+ file: match[1],
540
+ line: match[2]
541
+ };
542
+ }
543
+ return void 0;
544
+ }
545
+ function getSourceFileInfo(stack) {
546
+ if (!stack) return {};
547
+ const lines = stack.split("\n");
548
+ for (const line of lines) {
549
+ if (shouldSkipStackLine(line)) continue;
550
+ const info = parseStackLine(line);
551
+ if (info) return info;
552
+ }
553
+ return {};
554
+ }
555
+ function getSourceLocationFromStack(stack) {
556
+ if (!stack) return void 0;
557
+ const lines = stack.split("\n");
558
+ for (const line of lines.slice(1)) {
559
+ if (shouldSkipStackLine(line)) {
560
+ continue;
561
+ }
562
+ const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
563
+ if (v8Match) {
564
+ return {
565
+ file: v8Match[1],
566
+ line: parseInt(v8Match[2], 10),
567
+ column: parseInt(v8Match[3], 10)
568
+ };
569
+ }
570
+ const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
571
+ if (smMatch) {
572
+ return {
573
+ file: smMatch[1],
574
+ line: parseInt(smMatch[2], 10),
575
+ column: parseInt(smMatch[3], 10)
576
+ };
577
+ }
578
+ }
579
+ return void 0;
580
+ }
581
+ function getSourceFileFromStack(stack) {
582
+ const location = getSourceLocationFromStack(stack);
583
+ return location?.file;
584
+ }
585
+
586
+ // src/dmv2/typedBase.ts
587
+ var TypedBase = class {
588
+ /** The JSON schema representation of type T. Injected by the compiler plugin. */
589
+ schema;
590
+ /** The name assigned to this resource instance. */
591
+ name;
592
+ /** A dictionary mapping column names (keys of T) to their Column definitions. */
593
+ columns;
594
+ /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
595
+ columnArray;
596
+ /** The configuration object specific to this resource type. */
597
+ config;
598
+ /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
599
+ validators;
600
+ /** Optional metadata for the resource, always present as an object. */
601
+ metadata;
602
+ /**
603
+ * Whether this resource allows extra fields beyond the defined columns.
604
+ * When true, extra fields in payloads are passed through to streaming functions.
605
+ * Injected by the compiler plugin when the type has an index signature.
606
+ */
607
+ allowExtraFields;
608
+ /**
609
+ * @internal Constructor intended for internal use by subclasses and the compiler plugin.
610
+ * It expects the schema and columns to be provided, typically injected by the compiler.
611
+ *
612
+ * @param name The name for the resource instance.
613
+ * @param config The configuration object for the resource.
614
+ * @param schema The JSON schema for the resource's data type T (injected).
615
+ * @param columns The array of Column definitions for T (injected).
616
+ * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
617
+ */
618
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
619
+ if (schema === void 0 || columns === void 0) {
620
+ throw new Error(
621
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
622
+ );
623
+ }
624
+ this.schema = schema;
625
+ this.columnArray = columns;
626
+ const columnsObj = {};
627
+ columns.forEach((column) => {
628
+ columnsObj[column.name] = column;
629
+ });
630
+ this.columns = columnsObj;
631
+ this.name = name;
632
+ this.config = config;
633
+ this.validators = validators;
634
+ this.allowExtraFields = allowExtraFields ?? false;
635
+ this.metadata = config?.metadata ? { ...config.metadata } : {};
636
+ if (!this.metadata.source) {
637
+ const stack = new Error().stack;
638
+ if (stack) {
639
+ const info = getSourceFileInfo(stack);
640
+ this.metadata.source = { file: info.file, line: info.line };
1210
641
  }
1211
- /**
1212
- * Gets or creates a memoized ClickHouse client.
1213
- * The client is cached and reused across multiple insert calls for better performance.
1214
- * If the configuration changes, a new client will be created.
1215
- *
1216
- * @private
1217
- */
1218
- async getMemoizedClient() {
1219
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1220
- const configRegistry = globalThis._mooseConfigRegistry;
1221
- const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1222
- const clickhouseConfig = await configRegistry.getClickHouseConfig();
1223
- const currentConfigHash = this.createConfigHash(clickhouseConfig);
1224
- if (this._memoizedClient && this._configHash === currentConfigHash) {
1225
- return { client: this._memoizedClient, config: clickhouseConfig };
642
+ }
643
+ }
644
+ };
645
+
646
+ // src/dataModels/dataModelTypes.ts
647
+ function isArrayNestedType(dt) {
648
+ return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
649
+ }
650
+ function isNestedType(dt) {
651
+ return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
652
+ }
653
+
654
+ // src/dataModels/types.ts
655
+ var ClickHouseEngines = /* @__PURE__ */ ((ClickHouseEngines2) => {
656
+ ClickHouseEngines2["MergeTree"] = "MergeTree";
657
+ ClickHouseEngines2["ReplacingMergeTree"] = "ReplacingMergeTree";
658
+ ClickHouseEngines2["SummingMergeTree"] = "SummingMergeTree";
659
+ ClickHouseEngines2["AggregatingMergeTree"] = "AggregatingMergeTree";
660
+ ClickHouseEngines2["CollapsingMergeTree"] = "CollapsingMergeTree";
661
+ ClickHouseEngines2["VersionedCollapsingMergeTree"] = "VersionedCollapsingMergeTree";
662
+ ClickHouseEngines2["GraphiteMergeTree"] = "GraphiteMergeTree";
663
+ ClickHouseEngines2["S3Queue"] = "S3Queue";
664
+ ClickHouseEngines2["S3"] = "S3";
665
+ ClickHouseEngines2["Buffer"] = "Buffer";
666
+ ClickHouseEngines2["Distributed"] = "Distributed";
667
+ ClickHouseEngines2["IcebergS3"] = "IcebergS3";
668
+ ClickHouseEngines2["Kafka"] = "Kafka";
669
+ ClickHouseEngines2["ReplicatedMergeTree"] = "ReplicatedMergeTree";
670
+ ClickHouseEngines2["ReplicatedReplacingMergeTree"] = "ReplicatedReplacingMergeTree";
671
+ ClickHouseEngines2["ReplicatedAggregatingMergeTree"] = "ReplicatedAggregatingMergeTree";
672
+ ClickHouseEngines2["ReplicatedSummingMergeTree"] = "ReplicatedSummingMergeTree";
673
+ ClickHouseEngines2["ReplicatedCollapsingMergeTree"] = "ReplicatedCollapsingMergeTree";
674
+ ClickHouseEngines2["ReplicatedVersionedCollapsingMergeTree"] = "ReplicatedVersionedCollapsingMergeTree";
675
+ return ClickHouseEngines2;
676
+ })(ClickHouseEngines || {});
677
+
678
+ // src/dmv2/internal.ts
679
+ var import_process = __toESM(require("process"));
680
+
681
+ // src/index.ts
682
+ init_commons();
683
+
684
+ // src/consumption-apis/helpers.ts
685
+ var import_client2 = require("@temporalio/client");
686
+ var import_node_crypto = require("crypto");
687
+
688
+ // src/sqlHelpers.ts
689
+ var quoteIdentifier = (name) => {
690
+ return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
691
+ };
692
+ var isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
693
+ var isView = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "View";
694
+ var isColumn = (value) => typeof value === "object" && value !== null && !("kind" in value) && "name" in value && "annotations" in value;
695
+ function sqlImpl(strings, ...values) {
696
+ return new Sql(strings, values);
697
+ }
698
+ var sql = sqlImpl;
699
+ var instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
700
+ var Sql = class _Sql {
701
+ values;
702
+ strings;
703
+ constructor(rawStrings, rawValues) {
704
+ if (rawStrings.length - 1 !== rawValues.length) {
705
+ if (rawStrings.length === 0) {
706
+ throw new TypeError("Expected at least 1 string");
707
+ }
708
+ throw new TypeError(
709
+ `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
710
+ );
711
+ }
712
+ const valuesLength = rawValues.reduce(
713
+ (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) || isView(value) ? 0 : 1),
714
+ 0
715
+ );
716
+ this.values = new Array(valuesLength);
717
+ this.strings = new Array(valuesLength + 1);
718
+ this.strings[0] = rawStrings[0];
719
+ let i = 0, pos = 0;
720
+ while (i < rawValues.length) {
721
+ const child = rawValues[i++];
722
+ const rawString = rawStrings[i];
723
+ if (instanceofSql(child)) {
724
+ this.strings[pos] += child.strings[0];
725
+ let childIndex = 0;
726
+ while (childIndex < child.values.length) {
727
+ this.values[pos++] = child.values[childIndex++];
728
+ this.strings[pos] = child.strings[childIndex];
729
+ }
730
+ this.strings[pos] += rawString;
731
+ } else if (isColumn(child)) {
732
+ const aggregationFunction = child.annotations.find(
733
+ ([k, _]) => k === "aggregationFunction"
734
+ );
735
+ if (aggregationFunction !== void 0) {
736
+ this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
737
+ } else {
738
+ this.strings[pos] += `\`${child.name}\``;
1226
739
  }
1227
- if (this._memoizedClient && this._configHash !== currentConfigHash) {
1228
- try {
1229
- await this._memoizedClient.close();
1230
- } catch (error) {
1231
- }
740
+ this.strings[pos] += rawString;
741
+ } else if (isTable(child)) {
742
+ if (child.config.database) {
743
+ this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
744
+ } else {
745
+ this.strings[pos] += `\`${child.name}\``;
1232
746
  }
1233
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1234
- const client = getClickhouseClient2({
1235
- username: clickhouseConfig.username,
1236
- password: clickhouseConfig.password,
1237
- database: effectiveDatabase,
1238
- useSSL: clickhouseConfig.useSSL ? "true" : "false",
1239
- host: clickhouseConfig.host,
1240
- port: clickhouseConfig.port
1241
- });
1242
- this._memoizedClient = client;
1243
- this._configHash = currentConfigHash;
1244
- return { client, config: clickhouseConfig };
747
+ this.strings[pos] += rawString;
748
+ } else if (isView(child)) {
749
+ this.strings[pos] += `\`${child.name}\``;
750
+ this.strings[pos] += rawString;
751
+ } else {
752
+ this.values[pos++] = child;
753
+ this.strings[pos] = rawString;
1245
754
  }
1246
- /**
1247
- * Closes the memoized ClickHouse client if it exists.
1248
- * This is useful for cleaning up connections when the table instance is no longer needed.
1249
- * The client will be automatically recreated on the next insert call if needed.
1250
- */
1251
- async closeClient() {
1252
- if (this._memoizedClient) {
1253
- try {
1254
- await this._memoizedClient.close();
1255
- } catch (error) {
1256
- } finally {
1257
- this._memoizedClient = void 0;
1258
- this._configHash = void 0;
1259
- }
1260
- }
755
+ }
756
+ }
757
+ /**
758
+ * Append another Sql fragment, returning a new Sql instance.
759
+ */
760
+ append(other) {
761
+ return new _Sql([...this.strings, ""], [...this.values, other]);
762
+ }
763
+ };
764
+ sql.join = function(fragments, separator) {
765
+ if (fragments.length === 0) return new Sql([""], []);
766
+ if (fragments.length === 1) return fragments[0];
767
+ const sep = separator ?? ", ";
768
+ const normalized = sep.includes(" ") ? sep : ` ${sep} `;
769
+ const strings = ["", ...Array(fragments.length - 1).fill(normalized), ""];
770
+ return new Sql(strings, fragments);
771
+ };
772
+ sql.raw = function(text) {
773
+ return new Sql([text], []);
774
+ };
775
+ var toStaticQuery = (sql3) => {
776
+ const [query, params] = toQuery(sql3);
777
+ if (Object.keys(params).length !== 0) {
778
+ throw new Error(
779
+ "Dynamic SQL is not allowed in the select statement in view creation."
780
+ );
781
+ }
782
+ return query;
783
+ };
784
+ var toQuery = (sql3) => {
785
+ const parameterizedStubs = sql3.values.map(
786
+ (v, i) => createClickhouseParameter(i, v)
787
+ );
788
+ const query = sql3.strings.map(
789
+ (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
790
+ ).join("");
791
+ const query_params = sql3.values.reduce(
792
+ (acc, v, i) => ({
793
+ ...acc,
794
+ [`p${i}`]: getValueFromParameter(v)
795
+ }),
796
+ {}
797
+ );
798
+ return [query, query_params];
799
+ };
800
+ var toQueryPreview = (sql3) => {
801
+ try {
802
+ const formatValue = (v) => {
803
+ if (Array.isArray(v)) {
804
+ const [type, val] = v;
805
+ if (type === "Identifier") {
806
+ return `\`${String(val)}\``;
807
+ }
808
+ return `[${v.map((x) => formatValue(x)).join(", ")}]`;
809
+ }
810
+ if (v === null || v === void 0) return "NULL";
811
+ if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
812
+ if (typeof v === "number") return String(v);
813
+ if (typeof v === "boolean") return v ? "true" : "false";
814
+ if (v instanceof Date)
815
+ return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
816
+ try {
817
+ return JSON.stringify(v);
818
+ } catch {
819
+ return String(v);
1261
820
  }
1262
- /**
1263
- * Validates a single record using typia's comprehensive type checking.
1264
- * This provides the most accurate validation as it uses the exact TypeScript type information.
1265
- *
1266
- * @param record The record to validate
1267
- * @returns Validation result with detailed error information
1268
- */
1269
- validateRecord(record) {
1270
- if (this.validators?.validate) {
1271
- try {
1272
- const result = this.validators.validate(record);
1273
- return {
1274
- success: result.success,
1275
- data: result.data,
1276
- errors: result.errors?.map(
1277
- (err) => typeof err === "string" ? err : JSON.stringify(err)
1278
- )
1279
- };
1280
- } catch (error) {
1281
- return {
1282
- success: false,
1283
- errors: [error instanceof Error ? error.message : String(error)]
1284
- };
821
+ };
822
+ let out = sql3.strings[0] ?? "";
823
+ for (let i = 0; i < sql3.values.length; i++) {
824
+ const val = getValueFromParameter(sql3.values[i]);
825
+ out += formatValue(val);
826
+ out += sql3.strings[i + 1] ?? "";
827
+ }
828
+ return out.replace(/\s+/g, " ").trim();
829
+ } catch (error) {
830
+ console.log(`toQueryPreview error: ${error}`);
831
+ return "/* query preview unavailable */";
832
+ }
833
+ };
834
+ var getValueFromParameter = (value) => {
835
+ if (Array.isArray(value)) {
836
+ const [type, val] = value;
837
+ if (type === "Identifier") return val;
838
+ }
839
+ return value;
840
+ };
841
+ function createClickhouseParameter(parameterIndex, value) {
842
+ return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
843
+ }
844
+ var mapToClickHouseType = (value) => {
845
+ if (typeof value === "number") {
846
+ return Number.isInteger(value) ? "Int" : "Float";
847
+ }
848
+ if (typeof value === "boolean") return "Bool";
849
+ if (value instanceof Date) return "DateTime";
850
+ if (Array.isArray(value)) {
851
+ const [type, _] = value;
852
+ return type;
853
+ }
854
+ return "String";
855
+ };
856
+ function emptyIfUndefined(value) {
857
+ return value === void 0 ? "" : value;
858
+ }
859
+
860
+ // src/clients/redisClient.ts
861
+ var import_redis = require("redis");
862
+
863
+ // src/consumption-apis/standalone.ts
864
+ init_commons();
865
+
866
+ // src/utilities/dataParser.ts
867
+ var import_csv_parse = require("csv-parse");
868
+ var CSV_DELIMITERS = {
869
+ COMMA: ",",
870
+ TAB: " ",
871
+ SEMICOLON: ";",
872
+ PIPE: "|"
873
+ };
874
+ var DEFAULT_CSV_CONFIG = {
875
+ delimiter: CSV_DELIMITERS.COMMA,
876
+ columns: true,
877
+ skipEmptyLines: true,
878
+ trim: true
879
+ };
880
+
881
+ // src/dmv2/internal.ts
882
+ init_commons();
883
+ var isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
884
+ var moose_internal = {
885
+ tables: /* @__PURE__ */ new Map(),
886
+ streams: /* @__PURE__ */ new Map(),
887
+ ingestApis: /* @__PURE__ */ new Map(),
888
+ apis: /* @__PURE__ */ new Map(),
889
+ sqlResources: /* @__PURE__ */ new Map(),
890
+ workflows: /* @__PURE__ */ new Map(),
891
+ webApps: /* @__PURE__ */ new Map(),
892
+ materializedViews: /* @__PURE__ */ new Map(),
893
+ views: /* @__PURE__ */ new Map()
894
+ };
895
+ var defaultRetentionPeriod = 60 * 60 * 24 * 7;
896
+ var getMooseInternal = () => globalThis.moose_internal;
897
+ if (getMooseInternal() === void 0) {
898
+ globalThis.moose_internal = moose_internal;
899
+ }
900
+ var dlqSchema = {
901
+ version: "3.1",
902
+ components: {
903
+ schemas: {
904
+ DeadLetterModel: {
905
+ type: "object",
906
+ properties: {
907
+ originalRecord: {
908
+ $ref: "#/components/schemas/Recordstringany"
909
+ },
910
+ errorMessage: {
911
+ type: "string"
912
+ },
913
+ errorType: {
914
+ type: "string"
915
+ },
916
+ failedAt: {
917
+ type: "string",
918
+ format: "date-time"
919
+ },
920
+ source: {
921
+ oneOf: [
922
+ {
923
+ const: "api"
924
+ },
925
+ {
926
+ const: "transform"
927
+ },
928
+ {
929
+ const: "table"
930
+ }
931
+ ]
1285
932
  }
1286
- }
1287
- throw new Error("No typia validator found");
933
+ },
934
+ required: [
935
+ "originalRecord",
936
+ "errorMessage",
937
+ "errorType",
938
+ "failedAt",
939
+ "source"
940
+ ]
941
+ },
942
+ Recordstringany: {
943
+ type: "object",
944
+ properties: {},
945
+ required: [],
946
+ description: "Construct a type with a set of properties K of type T",
947
+ additionalProperties: {}
1288
948
  }
1289
- /**
1290
- * Type guard function using typia's is() function.
1291
- * Provides compile-time type narrowing for TypeScript.
1292
- *
1293
- * @param record The record to check
1294
- * @returns True if record matches type T, with type narrowing
1295
- */
1296
- isValidRecord(record) {
1297
- if (this.validators?.is) {
1298
- return this.validators.is(record);
1299
- }
1300
- throw new Error("No typia validator found");
949
+ }
950
+ },
951
+ schemas: [
952
+ {
953
+ $ref: "#/components/schemas/DeadLetterModel"
954
+ }
955
+ ]
956
+ };
957
+ var dlqColumns = [
958
+ {
959
+ name: "originalRecord",
960
+ data_type: "Json",
961
+ primary_key: false,
962
+ required: true,
963
+ unique: false,
964
+ default: null,
965
+ annotations: [],
966
+ ttl: null,
967
+ codec: null,
968
+ materialized: null,
969
+ comment: null
970
+ },
971
+ {
972
+ name: "errorMessage",
973
+ data_type: "String",
974
+ primary_key: false,
975
+ required: true,
976
+ unique: false,
977
+ default: null,
978
+ annotations: [],
979
+ ttl: null,
980
+ codec: null,
981
+ materialized: null,
982
+ comment: null
983
+ },
984
+ {
985
+ name: "errorType",
986
+ data_type: "String",
987
+ primary_key: false,
988
+ required: true,
989
+ unique: false,
990
+ default: null,
991
+ annotations: [],
992
+ ttl: null,
993
+ codec: null,
994
+ materialized: null,
995
+ comment: null
996
+ },
997
+ {
998
+ name: "failedAt",
999
+ data_type: "DateTime",
1000
+ primary_key: false,
1001
+ required: true,
1002
+ unique: false,
1003
+ default: null,
1004
+ annotations: [],
1005
+ ttl: null,
1006
+ codec: null,
1007
+ materialized: null,
1008
+ comment: null
1009
+ },
1010
+ {
1011
+ name: "source",
1012
+ data_type: "String",
1013
+ primary_key: false,
1014
+ required: true,
1015
+ unique: false,
1016
+ default: null,
1017
+ annotations: [],
1018
+ ttl: null,
1019
+ codec: null,
1020
+ materialized: null,
1021
+ comment: null
1022
+ }
1023
+ ];
1024
+
1025
+ // src/dmv2/sdk/olapTable.ts
1026
+ var import_node_stream = require("stream");
1027
+ var import_node_crypto2 = require("crypto");
1028
+ var OlapTable = class extends TypedBase {
1029
+ name;
1030
+ /** @internal */
1031
+ kind = "OlapTable";
1032
+ /** @internal Memoized ClickHouse client for reusing connections across insert calls */
1033
+ _memoizedClient;
1034
+ /** @internal Hash of the configuration used to create the memoized client */
1035
+ _configHash;
1036
+ /** @internal Cached table name to avoid repeated generation */
1037
+ _cachedTableName;
1038
+ constructor(name, config, schema, columns, validators) {
1039
+ const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
1040
+ const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
1041
+ const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
1042
+ if (hasFields && hasExpr) {
1043
+ throw new Error(
1044
+ `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
1045
+ );
1046
+ }
1047
+ const hasCluster = typeof resolvedConfig.cluster === "string";
1048
+ const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
1049
+ const hasReplicaName = typeof resolvedConfig.replicaName === "string";
1050
+ if (hasCluster && (hasKeeperPath || hasReplicaName)) {
1051
+ throw new Error(
1052
+ `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
1053
+ );
1054
+ }
1055
+ super(name, resolvedConfig, schema, columns, validators);
1056
+ this.name = name;
1057
+ const tables = getMooseInternal().tables;
1058
+ const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
1059
+ if (!isClientOnlyMode() && tables.has(registryKey)) {
1060
+ throw new Error(
1061
+ `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
1062
+ );
1063
+ }
1064
+ tables.set(registryKey, this);
1065
+ }
1066
+ /**
1067
+ * Generates the versioned table name following Moose's naming convention
1068
+ * Format: {tableName}_{version_with_dots_replaced_by_underscores}
1069
+ */
1070
+ generateTableName() {
1071
+ if (this._cachedTableName) {
1072
+ return this._cachedTableName;
1073
+ }
1074
+ const tableVersion = this.config.version;
1075
+ if (!tableVersion) {
1076
+ this._cachedTableName = this.name;
1077
+ } else {
1078
+ const versionSuffix = tableVersion.replace(/\./g, "_");
1079
+ this._cachedTableName = `${this.name}_${versionSuffix}`;
1080
+ }
1081
+ return this._cachedTableName;
1082
+ }
1083
+ /**
1084
+ * Creates a fast hash of the ClickHouse configuration.
1085
+ * Uses crypto.createHash for better performance than JSON.stringify.
1086
+ *
1087
+ * @private
1088
+ */
1089
+ createConfigHash(clickhouseConfig) {
1090
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1091
+ const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
1092
+ return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1093
+ }
1094
+ /**
1095
+ * Gets or creates a memoized ClickHouse client.
1096
+ * The client is cached and reused across multiple insert calls for better performance.
1097
+ * If the configuration changes, a new client will be created.
1098
+ *
1099
+ * @private
1100
+ */
1101
+ async getMemoizedClient() {
1102
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1103
+ const configRegistry = globalThis._mooseConfigRegistry;
1104
+ const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1105
+ const clickhouseConfig = await configRegistry.getClickHouseConfig();
1106
+ const currentConfigHash = this.createConfigHash(clickhouseConfig);
1107
+ if (this._memoizedClient && this._configHash === currentConfigHash) {
1108
+ return { client: this._memoizedClient, config: clickhouseConfig };
1109
+ }
1110
+ if (this._memoizedClient && this._configHash !== currentConfigHash) {
1111
+ try {
1112
+ await this._memoizedClient.close();
1113
+ } catch (error) {
1301
1114
  }
1302
- /**
1303
- * Assert that a record matches type T, throwing detailed errors if not.
1304
- * Uses typia's assert() function for the most detailed error reporting.
1305
- *
1306
- * @param record The record to assert
1307
- * @returns The validated and typed record
1308
- * @throws Detailed validation error if record doesn't match type T
1309
- */
1310
- assertValidRecord(record) {
1311
- if (this.validators?.assert) {
1312
- return this.validators.assert(record);
1313
- }
1314
- throw new Error("No typia validator found");
1115
+ }
1116
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
1117
+ const client = getClickhouseClient2({
1118
+ username: clickhouseConfig.username,
1119
+ password: clickhouseConfig.password,
1120
+ database: effectiveDatabase,
1121
+ useSSL: clickhouseConfig.useSSL ? "true" : "false",
1122
+ host: clickhouseConfig.host,
1123
+ port: clickhouseConfig.port
1124
+ });
1125
+ this._memoizedClient = client;
1126
+ this._configHash = currentConfigHash;
1127
+ return { client, config: clickhouseConfig };
1128
+ }
1129
+ /**
1130
+ * Closes the memoized ClickHouse client if it exists.
1131
+ * This is useful for cleaning up connections when the table instance is no longer needed.
1132
+ * The client will be automatically recreated on the next insert call if needed.
1133
+ */
1134
+ async closeClient() {
1135
+ if (this._memoizedClient) {
1136
+ try {
1137
+ await this._memoizedClient.close();
1138
+ } catch (error) {
1139
+ } finally {
1140
+ this._memoizedClient = void 0;
1141
+ this._configHash = void 0;
1315
1142
  }
1316
- /**
1317
- * Validates an array of records with comprehensive error reporting.
1318
- * Uses the most appropriate validation method available (typia or basic).
1319
- *
1320
- * @param data Array of records to validate
1321
- * @returns Detailed validation results
1322
- */
1323
- async validateRecords(data) {
1324
- const valid = [];
1325
- const invalid = [];
1326
- valid.length = 0;
1327
- invalid.length = 0;
1328
- const dataLength = data.length;
1329
- for (let i = 0; i < dataLength; i++) {
1330
- const record = data[i];
1331
- try {
1332
- if (this.isValidRecord(record)) {
1333
- valid.push(this.mapToClickhouseRecord(record));
1334
- } else {
1335
- const result = this.validateRecord(record);
1336
- if (result.success) {
1337
- valid.push(this.mapToClickhouseRecord(record));
1338
- } else {
1339
- invalid.push({
1340
- record,
1341
- error: result.errors?.join(", ") || "Validation failed",
1342
- index: i,
1343
- path: "root"
1344
- });
1345
- }
1346
- }
1347
- } catch (error) {
1143
+ }
1144
+ }
1145
+ /**
1146
+ * Validates a single record using typia's comprehensive type checking.
1147
+ * This provides the most accurate validation as it uses the exact TypeScript type information.
1148
+ *
1149
+ * @param record The record to validate
1150
+ * @returns Validation result with detailed error information
1151
+ */
1152
+ validateRecord(record) {
1153
+ if (this.validators?.validate) {
1154
+ try {
1155
+ const result = this.validators.validate(record);
1156
+ return {
1157
+ success: result.success,
1158
+ data: result.data,
1159
+ errors: result.errors?.map(
1160
+ (err) => typeof err === "string" ? err : JSON.stringify(err)
1161
+ )
1162
+ };
1163
+ } catch (error) {
1164
+ return {
1165
+ success: false,
1166
+ errors: [error instanceof Error ? error.message : String(error)]
1167
+ };
1168
+ }
1169
+ }
1170
+ throw new Error("No typia validator found");
1171
+ }
1172
+ /**
1173
+ * Type guard function using typia's is() function.
1174
+ * Provides compile-time type narrowing for TypeScript.
1175
+ *
1176
+ * @param record The record to check
1177
+ * @returns True if record matches type T, with type narrowing
1178
+ */
1179
+ isValidRecord(record) {
1180
+ if (this.validators?.is) {
1181
+ return this.validators.is(record);
1182
+ }
1183
+ throw new Error("No typia validator found");
1184
+ }
1185
+ /**
1186
+ * Assert that a record matches type T, throwing detailed errors if not.
1187
+ * Uses typia's assert() function for the most detailed error reporting.
1188
+ *
1189
+ * @param record The record to assert
1190
+ * @returns The validated and typed record
1191
+ * @throws Detailed validation error if record doesn't match type T
1192
+ */
1193
+ assertValidRecord(record) {
1194
+ if (this.validators?.assert) {
1195
+ return this.validators.assert(record);
1196
+ }
1197
+ throw new Error("No typia validator found");
1198
+ }
1199
+ /**
+ * Validates an array of records with comprehensive error reporting.
+ * Uses the most appropriate validation method available (typia or basic).
+ *
+ * @param data Array of records to validate
+ * @returns Detailed validation results
+ */
+ async validateRecords(data) {
+ const valid = [];
+ const invalid = [];
+ valid.length = 0;
+ invalid.length = 0;
+ const dataLength = data.length;
+ for (let i = 0; i < dataLength; i++) {
+ const record = data[i];
+ try {
+ if (this.isValidRecord(record)) {
+ valid.push(this.mapToClickhouseRecord(record));
+ } else {
+ const result = this.validateRecord(record);
+ if (result.success) {
+ valid.push(this.mapToClickhouseRecord(record));
+ } else {
  invalid.push({
  record,
- error: error instanceof Error ? error.message : String(error),
+ error: result.errors?.join(", ") || "Validation failed",
  index: i,
  path: "root"
  });
  }
  }
- return {
- valid,
- invalid,
- total: dataLength
- };
+ } catch (error) {
+ invalid.push({
+ record,
+ error: error instanceof Error ? error.message : String(error),
+ index: i,
+ path: "root"
+ });
  }
- /**
- * Optimized batch retry that minimizes individual insert operations.
- * Groups records into smaller batches to reduce round trips while still isolating failures.
- *
- * @private
- */
- async retryIndividualRecords(client, tableName, records) {
- const successful = [];
- const failed = [];
- const RETRY_BATCH_SIZE = 10;
- const totalRecords = records.length;
- for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
- const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
- const batch = records.slice(i, batchEnd);
+ }
+ return {
+ valid,
+ invalid,
+ total: dataLength
+ };
+ }
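Editor's note: `validateRecords` partitions a batch into ClickHouse-ready records and per-record failures instead of failing on the first bad row. A sketch of consuming its result, under the same hypothetical `User` table as above:

```typescript
const report = await users.validateRecords([
  { id: 1, name: "ok" },
  { id: "bad", name: 2 }, // fails typia validation
]);
// report.valid   -> records already mapped via mapToClickhouseRecord
// report.invalid -> [{ record, error, index, path }]
// report.total   -> 2
for (const failure of report.invalid) {
  console.warn(`row ${failure.index} rejected: ${failure.error}`);
}
```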
+ /**
+ * Optimized batch retry that minimizes individual insert operations.
+ * Groups records into smaller batches to reduce round trips while still isolating failures.
+ *
+ * @private
+ */
+ async retryIndividualRecords(client, tableName, records) {
+ const successful = [];
+ const failed = [];
+ const RETRY_BATCH_SIZE = 10;
+ const totalRecords = records.length;
+ for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
+ const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
+ const batch = records.slice(i, batchEnd);
+ try {
+ await client.insert({
+ table: quoteIdentifier(tableName),
+ values: batch,
+ format: "JSONEachRow",
+ clickhouse_settings: {
+ date_time_input_format: "best_effort",
+ // Add performance settings for retries
+ max_insert_block_size: RETRY_BATCH_SIZE,
+ max_block_size: RETRY_BATCH_SIZE
+ }
+ });
+ successful.push(...batch);
+ } catch (batchError) {
+ for (let j = 0; j < batch.length; j++) {
+ const record = batch[j];
  try {
  await client.insert({
  table: quoteIdentifier(tableName),
- values: batch,
+ values: [record],
  format: "JSONEachRow",
  clickhouse_settings: {
- date_time_input_format: "best_effort",
- // Add performance settings for retries
- max_insert_block_size: RETRY_BATCH_SIZE,
- max_block_size: RETRY_BATCH_SIZE
+ date_time_input_format: "best_effort"
  }
  });
- successful.push(...batch);
- } catch (batchError) {
- for (let j = 0; j < batch.length; j++) {
- const record = batch[j];
- try {
- await client.insert({
- table: quoteIdentifier(tableName),
- values: [record],
- format: "JSONEachRow",
- clickhouse_settings: {
- date_time_input_format: "best_effort"
- }
- });
- successful.push(record);
- } catch (error) {
- failed.push({
- record,
- error: error instanceof Error ? error.message : String(error),
- index: i + j
- });
- }
- }
- }
- }
- return { successful, failed };
- }
- /**
- * Validates input parameters and strategy compatibility
- * @private
- */
- validateInsertParameters(data, options) {
- const isStream = data instanceof import_node_stream.Readable;
- const strategy = options?.strategy || "fail-fast";
- const shouldValidate = options?.validate !== false;
- if (isStream && strategy === "isolate") {
- throw new Error(
- "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
- );
- }
- if (isStream && shouldValidate) {
- console.warn(
- "Validation is not supported with stream input. Validation will be skipped."
- );
- }
- return { isStream, strategy, shouldValidate };
- }
- /**
- * Handles early return cases for empty data
- * @private
- */
- handleEmptyData(data, isStream) {
- if (isStream && !data) {
- return {
- successful: 0,
- failed: 0,
- total: 0
- };
- }
- if (!isStream && (!data || data.length === 0)) {
- return {
- successful: 0,
- failed: 0,
- total: 0
- };
- }
- return null;
- }
- /**
- * Performs pre-insertion validation for array data
- * @private
- */
- async performPreInsertionValidation(data, shouldValidate, strategy, options) {
- if (!shouldValidate) {
- return { validatedData: data, validationErrors: [] };
- }
- try {
- const validationResult = await this.validateRecords(data);
- const validatedData = validationResult.valid;
- const validationErrors = validationResult.invalid;
- if (validationErrors.length > 0) {
- this.handleValidationErrors(validationErrors, strategy, data, options);
- switch (strategy) {
- case "discard":
- return { validatedData, validationErrors };
- case "isolate":
- return { validatedData: data, validationErrors };
- default:
- return { validatedData, validationErrors };
- }
- }
- return { validatedData, validationErrors };
- } catch (validationError) {
- if (strategy === "fail-fast") {
- throw validationError;
- }
- console.warn("Validation error:", validationError);
- return { validatedData: data, validationErrors: [] };
- }
- }
- /**
- * Handles validation errors based on the specified strategy
- * @private
- */
- handleValidationErrors(validationErrors, strategy, data, options) {
- switch (strategy) {
- case "fail-fast":
- const firstError = validationErrors[0];
- throw new Error(
- `Validation failed for record at index ${firstError.index}: ${firstError.error}`
- );
- case "discard":
- this.checkValidationThresholds(validationErrors, data.length, options);
- break;
- case "isolate":
- break;
- }
- }
- /**
- * Checks if validation errors exceed configured thresholds
- * @private
- */
- checkValidationThresholds(validationErrors, totalRecords, options) {
- const validationFailedCount = validationErrors.length;
- const validationFailedRatio = validationFailedCount / totalRecords;
- if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
- throw new Error(
- `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
- );
- }
- if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
- throw new Error(
- `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
- );
- }
- }
- /**
- * Optimized insert options preparation with better memory management
- * @private
- */
- prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
- const insertOptions = {
- table: quoteIdentifier(tableName),
- format: "JSONEachRow",
- clickhouse_settings: {
- date_time_input_format: "best_effort",
- wait_end_of_query: 1,
- // Ensure at least once delivery for INSERT operations
- // Performance optimizations
- max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
- max_block_size: 65536,
- // Use async inserts for better performance with large datasets
- async_insert: validatedData.length > 1e3 ? 1 : 0,
- wait_for_async_insert: 1
- // For at least once delivery
- }
- };
- if (isStream) {
- insertOptions.values = data;
- } else {
- insertOptions.values = validatedData;
- }
- if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
- if (options.allowErrors !== void 0) {
- insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
- }
- if (options.allowErrorsRatio !== void 0) {
- insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
+ successful.push(record);
+ } catch (error) {
+ failed.push({
+ record,
+ error: error instanceof Error ? error.message : String(error),
+ index: i + j
+ });
  }
  }
- return insertOptions;
- }
- /**
- * Creates success result for completed insertions
- * @private
- */
- createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
- if (isStream) {
- return {
- successful: -1,
- // -1 indicates stream mode where count is unknown
- failed: 0,
- total: -1
- };
- }
- const insertedCount = validatedData.length;
- const totalProcessed = shouldValidate ? data.length : insertedCount;
- const result = {
- successful: insertedCount,
- failed: shouldValidate ? validationErrors.length : 0,
- total: totalProcessed
- };
- if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
- result.failedRecords = validationErrors.map((ve) => ({
- record: ve.record,
- error: `Validation error: ${ve.error}`,
- index: ve.index
- }));
- }
- return result;
  }
- /**
- * Handles insertion errors based on the specified strategy
- * @private
- */
- async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
+ }
+ return { successful, failed };
+ }
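Editor's note: the retry path trades round trips against failure isolation — it retries in sub-batches of `RETRY_BATCH_SIZE` (10) and only falls back to row-by-row inserts for a sub-batch that fails. A generic, self-contained sketch of that pattern, independent of the ClickHouse client (`insertMany` is a stand-in assumption, not an API from this package):

```typescript
// Sketch of the batch-then-individual retry strategy used above.
async function retryInBatches<T>(
  rows: T[],
  insertMany: (rows: T[]) => Promise<void>, // hypothetical insert call
  batchSize = 10,
): Promise<{ successful: T[]; failed: T[] }> {
  const successful: T[] = [];
  const failed: T[] = [];
  for (let i = 0; i < rows.length; i += batchSize) {
    const batch = rows.slice(i, i + batchSize);
    try {
      await insertMany(batch); // one round trip for the whole sub-batch
      successful.push(...batch);
    } catch {
      // Sub-batch failed: isolate the offending row(s) one by one.
      for (const row of batch) {
        try {
          await insertMany([row]);
          successful.push(row);
        } catch {
          failed.push(row);
        }
      }
    }
  }
  return { successful, failed };
}
```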
+ /**
+ * Validates input parameters and strategy compatibility
+ * @private
+ */
+ validateInsertParameters(data, options) {
+ const isStream = data instanceof import_node_stream.Readable;
+ const strategy = options?.strategy || "fail-fast";
+ const shouldValidate = options?.validate !== false;
+ if (isStream && strategy === "isolate") {
+ throw new Error(
+ "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
+ );
+ }
+ if (isStream && shouldValidate) {
+ console.warn(
+ "Validation is not supported with stream input. Validation will be skipped."
+ );
+ }
+ return { isStream, strategy, shouldValidate };
+ }
+ /**
+ * Handles early return cases for empty data
+ * @private
+ */
+ handleEmptyData(data, isStream) {
+ if (isStream && !data) {
+ return {
+ successful: 0,
+ failed: 0,
+ total: 0
+ };
+ }
+ if (!isStream && (!data || data.length === 0)) {
+ return {
+ successful: 0,
+ failed: 0,
+ total: 0
+ };
+ }
+ return null;
+ }
+ /**
+ * Performs pre-insertion validation for array data
+ * @private
+ */
+ async performPreInsertionValidation(data, shouldValidate, strategy, options) {
+ if (!shouldValidate) {
+ return { validatedData: data, validationErrors: [] };
+ }
+ try {
+ const validationResult = await this.validateRecords(data);
+ const validatedData = validationResult.valid;
+ const validationErrors = validationResult.invalid;
+ if (validationErrors.length > 0) {
+ this.handleValidationErrors(validationErrors, strategy, data, options);
  switch (strategy) {
- case "fail-fast":
- throw new Error(
- `Failed to insert data into table ${tableName}: ${batchError}`
- );
- case "discard":
- throw new Error(
- `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
- );
- case "isolate":
- return await this.handleIsolateStrategy(
- batchError,
- tableName,
- data,
- validatedData,
- validationErrors,
- isStream,
- shouldValidate,
- options
- );
- default:
- throw new Error(`Unknown error strategy: ${strategy}`);
- }
- }
- /**
- * Handles the isolate strategy for insertion errors
- * @private
- */
- async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
- if (isStream) {
- throw new Error(
- `Isolate strategy is not supported with stream input: ${batchError}`
- );
- }
- try {
- const { client } = await this.getMemoizedClient();
- const skipValidationOnRetry = options?.skipValidationOnRetry || false;
- const retryData = skipValidationOnRetry ? data : validatedData;
- const { successful, failed } = await this.retryIndividualRecords(
- client,
- tableName,
- retryData
- );
- const allFailedRecords = [
- // Validation errors (if any and not skipping validation on retry)
- ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
- record: ve.record,
- error: `Validation error: ${ve.error}`,
- index: ve.index
- })) : [],
- // Insertion errors
- ...failed
- ];
- this.checkInsertionThresholds(
- allFailedRecords,
- data.length,
- options
- );
- return {
- successful: successful.length,
- failed: allFailedRecords.length,
- total: data.length,
- failedRecords: allFailedRecords
- };
- } catch (isolationError) {
- throw new Error(
- `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
- );
- }
- }
- /**
- * Checks if insertion errors exceed configured thresholds
- * @private
- */
- checkInsertionThresholds(failedRecords, totalRecords, options) {
- const totalFailed = failedRecords.length;
- const failedRatio = totalFailed / totalRecords;
- if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
- throw new Error(
- `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
- );
- }
- if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
- throw new Error(
- `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
- );
- }
- }
- /**
- * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
- *
- * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
- * - For every Nested struct (not array), it recurses into the struct.
- * - This ensures compatibility with kafka_clickhouse_sync
- *
- * @param record The input record to transform (may be deeply nested)
- * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
- * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
- */
- mapToClickhouseRecord(record, columns = this.columnArray) {
- const result = { ...record };
- for (const col of columns) {
- const value = record[col.name];
- const dt = col.data_type;
- if (isArrayNestedType(dt)) {
- if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
- result[col.name] = value.map((item) => [
- this.mapToClickhouseRecord(item, dt.elementType.columns)
- ]);
- }
- } else if (isNestedType(dt)) {
- if (value && typeof value === "object") {
- result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
- }
- }
+ case "discard":
+ return { validatedData, validationErrors };
+ case "isolate":
+ return { validatedData: data, validationErrors };
+ default:
+ return { validatedData, validationErrors };
  }
- return result;
  }
- /**
- * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
- * This method establishes a direct connection to ClickHouse using the project configuration
- * and inserts the provided data into the versioned table.
- *
- * PERFORMANCE OPTIMIZATIONS:
- * - Memoized client connections with fast config hashing
- * - Single-pass validation with pre-allocated arrays
- * - Batch-optimized retry strategy (batches of 10, then individual)
- * - Optimized ClickHouse settings for large datasets
- * - Reduced memory allocations and object creation
- *
- * Uses advanced typia validation when available for comprehensive type checking,
- * with fallback to basic validation for compatibility.
- *
- * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
- * If the configuration changes, a new client will be automatically created.
- *
- * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
- * @param options Optional configuration for error handling, validation, and insertion behavior
- * @returns Promise resolving to detailed insertion results
- * @throws {ConfigError} When configuration cannot be read or parsed
- * @throws {ClickHouseError} When insertion fails based on the error strategy
- * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
- *
- * @example
- * ```typescript
- * // Create an OlapTable instance (typia validators auto-injected)
- * const userTable = new OlapTable<User>('users');
- *
- * // Insert with comprehensive typia validation
- * const result1 = await userTable.insert([
- * { id: 1, name: 'John', email: 'john@example.com' },
- * { id: 2, name: 'Jane', email: 'jane@example.com' }
- * ]);
- *
- * // Insert data with stream input (validation not available for streams)
- * const dataStream = new Readable({
- * objectMode: true,
- * read() { // Stream implementation }
- * });
- * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
- *
- * // Insert with validation disabled for performance
- * const result3 = await userTable.insert(data, { validate: false });
- *
- * // Insert with error handling strategies
- * const result4 = await userTable.insert(mixedData, {
- * strategy: 'isolate',
- * allowErrorsRatio: 0.1,
- * validate: true // Use typia validation (default)
- * });
- *
- * // Optional: Clean up connection when completely done
- * await userTable.closeClient();
- * ```
- */
- async insert(data, options) {
- const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
- const emptyResult = this.handleEmptyData(data, isStream);
- if (emptyResult) {
- return emptyResult;
- }
- let validatedData = [];
- let validationErrors = [];
- if (!isStream && shouldValidate) {
- const validationResult = await this.performPreInsertionValidation(
- data,
- shouldValidate,
- strategy,
- options
- );
- validatedData = validationResult.validatedData;
- validationErrors = validationResult.validationErrors;
- } else {
- validatedData = isStream ? [] : data;
- }
- const { client } = await this.getMemoizedClient();
- const tableName = this.generateTableName();
- try {
- const insertOptions = this.prepareInsertOptions(
- tableName,
- data,
- validatedData,
- isStream,
- strategy,
- options
- );
- await client.insert(insertOptions);
- return this.createSuccessResult(
- data,
- validatedData,
- validationErrors,
- isStream,
- shouldValidate,
- strategy
- );
- } catch (batchError) {
- return await this.handleInsertionError(
- batchError,
- strategy,
- tableName,
- data,
- validatedData,
- validationErrors,
- isStream,
- shouldValidate,
- options
- );
- }
+ return { validatedData, validationErrors };
+ } catch (validationError) {
+ if (strategy === "fail-fast") {
+ throw validationError;
  }
- // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
- // were removed in ENG-856. Use direct configuration instead, e.g.:
- // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
- };
+ console.warn("Validation error:", validationError);
+ return { validatedData: data, validationErrors: [] };
+ }
  }
- });
-
- // src/dmv2/sdk/stream.ts
- function attachTypeGuard(dl, typeGuard) {
- dl.asTyped = () => typeGuard(dl.originalRecord);
- }
- var import_node_crypto3, RoutedMessage, Stream, DeadLetterQueue;
- var init_stream = __esm({
- "src/dmv2/sdk/stream.ts"() {
- "use strict";
- init_typedBase();
- init_internal();
- import_node_crypto3 = require("crypto");
- init_stackTrace();
- RoutedMessage = class {
- /** The destination stream for the message */
- destination;
- /** The message value(s) to send */
- values;
- /**
- * Creates a new routed message.
- *
- * @param destination The target stream
- * @param values The message(s) to route
- */
- constructor(destination, values) {
- this.destination = destination;
- this.values = values;
+ /**
+ * Handles validation errors based on the specified strategy
+ * @private
+ */
+ handleValidationErrors(validationErrors, strategy, data, options) {
+ switch (strategy) {
+ case "fail-fast":
+ const firstError = validationErrors[0];
+ throw new Error(
+ `Validation failed for record at index ${firstError.index}: ${firstError.error}`
+ );
+ case "discard":
+ this.checkValidationThresholds(validationErrors, data.length, options);
+ break;
+ case "isolate":
+ break;
+ }
+ }
+ /**
+ * Checks if validation errors exceed configured thresholds
+ * @private
+ */
+ checkValidationThresholds(validationErrors, totalRecords, options) {
+ const validationFailedCount = validationErrors.length;
+ const validationFailedRatio = validationFailedCount / totalRecords;
+ if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
+ throw new Error(
+ `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
+ );
+ }
+ if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
+ throw new Error(
+ `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
+ );
+ }
+ }
+ /**
+ * Optimized insert options preparation with better memory management
+ * @private
+ */
+ prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
+ const insertOptions = {
+ table: quoteIdentifier(tableName),
+ format: "JSONEachRow",
+ clickhouse_settings: {
+ date_time_input_format: "best_effort",
+ wait_end_of_query: 1,
+ // Ensure at least once delivery for INSERT operations
+ // Performance optimizations
+ max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
+ max_block_size: 65536,
+ // Use async inserts for better performance with large datasets
+ async_insert: validatedData.length > 1e3 ? 1 : 0,
+ wait_for_async_insert: 1
+ // For at least once delivery
  }
  };
- Stream = class extends TypedBase {
- defaultDeadLetterQueue;
- /** @internal Memoized KafkaJS producer for reusing connections across sends */
- _memoizedProducer;
- /** @internal Hash of the configuration used to create the memoized Kafka producer */
- _kafkaConfigHash;
- constructor(name, config, schema, columns, validators, allowExtraFields) {
- super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
- const streams = getMooseInternal().streams;
- if (streams.has(name)) {
- throw new Error(`Stream with name ${name} already exists`);
- }
- streams.set(name, this);
- this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
- }
- /**
- * Internal map storing transformation configurations.
- * Maps destination stream names to arrays of transformation functions and their configs.
- *
- * @internal
- */
- _transformations = /* @__PURE__ */ new Map();
- /**
- * Internal function for multi-stream transformations.
- * Allows a single transformation to route messages to multiple destinations.
- *
- * @internal
- */
- _multipleTransformations;
- /**
- * Internal array storing consumer configurations.
- *
- * @internal
- */
- _consumers = new Array();
- /**
- * Builds the full Kafka topic name including optional namespace and version suffix.
- * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
- */
- buildFullTopicName(namespace) {
- const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
- const base = `${this.name}${versionSuffix}`;
- return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
+ if (isStream) {
+ insertOptions.values = data;
+ } else {
+ insertOptions.values = validatedData;
+ }
+ if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
+ if (options.allowErrors !== void 0) {
+ insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
  }
- /**
- * Creates a fast hash string from relevant Kafka configuration fields.
- */
- createConfigHash(kafkaConfig) {
- const configString = [
- kafkaConfig.broker,
- kafkaConfig.messageTimeoutMs,
- kafkaConfig.saslUsername,
- kafkaConfig.saslPassword,
- kafkaConfig.saslMechanism,
- kafkaConfig.securityProtocol,
- kafkaConfig.namespace
- ].join(":");
- return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
+ if (options.allowErrorsRatio !== void 0) {
+ insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
  }
- /**
- * Gets or creates a memoized KafkaJS producer using runtime configuration.
- */
- async getMemoizedProducer() {
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
- const configRegistry = globalThis._mooseConfigRegistry;
- const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
- const kafkaConfig = await configRegistry.getKafkaConfig();
- const currentHash = this.createConfigHash(kafkaConfig);
- if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
- return { producer: this._memoizedProducer, kafkaConfig };
- }
- if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
- try {
- await this._memoizedProducer.disconnect();
- } catch {
- }
- this._memoizedProducer = void 0;
- }
- const clientId = `moose-sdk-stream-${this.name}`;
- const logger = {
- logPrefix: clientId,
- log: (message) => {
- console.log(`${clientId}: ${message}`);
- },
- error: (message) => {
- console.error(`${clientId}: ${message}`);
- },
- warn: (message) => {
- console.warn(`${clientId}: ${message}`);
- }
- };
- const producer = await getKafkaProducer2(
- {
- clientId,
- broker: kafkaConfig.broker,
- securityProtocol: kafkaConfig.securityProtocol,
- saslUsername: kafkaConfig.saslUsername,
- saslPassword: kafkaConfig.saslPassword,
- saslMechanism: kafkaConfig.saslMechanism
- },
- logger
+ }
+ return insertOptions;
+ }
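Editor's note: for a concrete sense of what `prepareInsertOptions` produces — with a validated array batch of 5,000 rows, no error thresholds, the options handed to `client.insert` come out roughly as below (values derived from the logic above; the table name is a placeholder). With `strategy: "discard"` plus `allowErrors`/`allowErrorsRatio`, the corresponding `input_format_allow_errors_*` settings are added as well.

```typescript
// Illustrative shape only, for a 5,000-row validated batch:
const insertOptions = {
  table: '"users"',               // quoted identifier; "users" is a placeholder
  format: "JSONEachRow",
  values: validatedData,          // the 5,000 validated records
  clickhouse_settings: {
    date_time_input_format: "best_effort",
    wait_end_of_query: 1,         // at-least-once delivery
    max_insert_block_size: 5000,  // Math.min(5000, 100000)
    max_block_size: 65536,
    async_insert: 1,              // enabled because 5000 > 1000
    wait_for_async_insert: 1,
  },
};
```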
+ /**
+ * Creates success result for completed insertions
+ * @private
+ */
+ createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
+ if (isStream) {
+ return {
+ successful: -1,
+ // -1 indicates stream mode where count is unknown
+ failed: 0,
+ total: -1
+ };
+ }
+ const insertedCount = validatedData.length;
+ const totalProcessed = shouldValidate ? data.length : insertedCount;
+ const result = {
+ successful: insertedCount,
+ failed: shouldValidate ? validationErrors.length : 0,
+ total: totalProcessed
+ };
+ if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
+ result.failedRecords = validationErrors.map((ve) => ({
+ record: ve.record,
+ error: `Validation error: ${ve.error}`,
+ index: ve.index
+ }));
+ }
+ return result;
+ }
+ /**
+ * Handles insertion errors based on the specified strategy
+ * @private
+ */
+ async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
+ switch (strategy) {
+ case "fail-fast":
+ throw new Error(
+ `Failed to insert data into table ${tableName}: ${batchError}`
  );
- this._memoizedProducer = producer;
- this._kafkaConfigHash = currentHash;
- return { producer, kafkaConfig };
- }
- /**
- * Closes the memoized Kafka producer if it exists.
- */
- async closeProducer() {
- if (this._memoizedProducer) {
- try {
- await this._memoizedProducer.disconnect();
- } catch {
- } finally {
- this._memoizedProducer = void 0;
- this._kafkaConfigHash = void 0;
- }
- }
- }
- /**
- * Sends one or more records to this stream's Kafka topic.
- * Values are JSON-serialized as message values.
- */
- async send(values) {
- const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
- if (flat.length === 0) return;
- const { producer, kafkaConfig } = await this.getMemoizedProducer();
- const topic = this.buildFullTopicName(kafkaConfig.namespace);
- const sr = this.config.schemaConfig;
- if (sr && sr.kind === "JSON") {
- const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
- if (!schemaRegistryUrl) {
- throw new Error("Schema Registry URL not configured");
- }
- const {
- default: { SchemaRegistry }
- } = await import("@kafkajs/confluent-schema-registry");
- const registry = new SchemaRegistry({ host: schemaRegistryUrl });
- let schemaId = void 0;
- if ("id" in sr.reference) {
- schemaId = sr.reference.id;
- } else if ("subjectLatest" in sr.reference) {
- schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
- } else if ("subject" in sr.reference) {
- schemaId = await registry.getRegistryId(
- sr.reference.subject,
- sr.reference.version
- );
- }
- if (schemaId === void 0) {
- throw new Error("Malformed schema reference.");
- }
- const encoded = await Promise.all(
- flat.map(
- (v) => registry.encode(schemaId, v)
- )
- );
- await producer.send({
- topic,
- messages: encoded.map((value) => ({ value }))
- });
- return;
- } else if (sr !== void 0) {
- throw new Error("Currently only JSON Schema is supported.");
- }
- await producer.send({
- topic,
- messages: flat.map((v) => ({ value: JSON.stringify(v) }))
- });
- }
- /**
- * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
- * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
- *
- * @template U The data type of the messages in the destination stream.
- * @param destination The destination stream for the transformed messages.
- * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
- * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
- * @param config Optional configuration for this specific transformation step, like a version.
- */
- addTransform(destination, transformation, config) {
- const sourceFile = getSourceFileFromStack(new Error().stack);
- const transformConfig = {
- ...config ?? {},
- sourceFile
- };
- if (transformConfig.deadLetterQueue === void 0) {
- transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
- }
- if (this._transformations.has(destination.name)) {
- const existingTransforms = this._transformations.get(destination.name);
- const hasVersion = existingTransforms.some(
- ([_, __, cfg]) => cfg.version === transformConfig.version
- );
- if (!hasVersion) {
- existingTransforms.push([destination, transformation, transformConfig]);
- }
- } else {
- this._transformations.set(destination.name, [
- [destination, transformation, transformConfig]
+ case "discard":
+ throw new Error(
+ `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
+ );
+ case "isolate":
+ return await this.handleIsolateStrategy(
+ batchError,
+ tableName,
+ data,
+ validatedData,
+ validationErrors,
+ isStream,
+ shouldValidate,
+ options
+ );
+ default:
+ throw new Error(`Unknown error strategy: ${strategy}`);
+ }
+ }
+ /**
+ * Handles the isolate strategy for insertion errors
+ * @private
+ */
+ async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
+ if (isStream) {
+ throw new Error(
+ `Isolate strategy is not supported with stream input: ${batchError}`
+ );
+ }
+ try {
+ const { client } = await this.getMemoizedClient();
+ const skipValidationOnRetry = options?.skipValidationOnRetry || false;
+ const retryData = skipValidationOnRetry ? data : validatedData;
+ const { successful, failed } = await this.retryIndividualRecords(
+ client,
+ tableName,
+ retryData
+ );
+ const allFailedRecords = [
+ // Validation errors (if any and not skipping validation on retry)
+ ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
+ record: ve.record,
+ error: `Validation error: ${ve.error}`,
+ index: ve.index
+ })) : [],
+ // Insertion errors
+ ...failed
+ ];
+ this.checkInsertionThresholds(
+ allFailedRecords,
+ data.length,
+ options
+ );
+ return {
+ successful: successful.length,
+ failed: allFailedRecords.length,
+ total: data.length,
+ failedRecords: allFailedRecords
+ };
+ } catch (isolationError) {
+ throw new Error(
+ `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
+ );
+ }
+ }
+ /**
+ * Checks if insertion errors exceed configured thresholds
+ * @private
+ */
+ checkInsertionThresholds(failedRecords, totalRecords, options) {
+ const totalFailed = failedRecords.length;
+ const failedRatio = totalFailed / totalRecords;
+ if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
+ throw new Error(
+ `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
+ );
+ }
+ if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
+ throw new Error(
+ `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
+ );
+ }
+ }
+ /**
+ * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
+ *
+ * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
+ * - For every Nested struct (not array), it recurses into the struct.
+ * - This ensures compatibility with kafka_clickhouse_sync
+ *
+ * @param record The input record to transform (may be deeply nested)
+ * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
+ * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
+ */
+ mapToClickhouseRecord(record, columns = this.columnArray) {
+ const result = { ...record };
+ for (const col of columns) {
+ const value = record[col.name];
+ const dt = col.data_type;
+ if (isArrayNestedType(dt)) {
+ if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
+ result[col.name] = value.map((item) => [
+ this.mapToClickhouseRecord(item, dt.elementType.columns)
  ]);
  }
- }
- /**
- * Adds a consumer function that processes messages from this stream.
- * Multiple consumers can be added if they have distinct `version` identifiers in their config.
- *
- * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
- * @param config Optional configuration for this specific consumer, like a version.
- */
- addConsumer(consumer, config) {
- const sourceFile = getSourceFileFromStack(new Error().stack);
- const consumerConfig = {
- ...config ?? {},
- sourceFile
- };
- if (consumerConfig.deadLetterQueue === void 0) {
- consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
- }
- const hasVersion = this._consumers.some(
- (existing) => existing.config.version === consumerConfig.version
- );
- if (!hasVersion) {
- this._consumers.push({ consumer, config: consumerConfig });
+ } else if (isNestedType(dt)) {
+ if (value && typeof value === "object") {
+ result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
  }
  }
- /**
- * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
- * @param values The value or values to send to this stream.
- * @returns A `RoutedMessage` object associating the values with this stream.
- *
- * @example
- * ```typescript
- * sourceStream.addMultiTransform((record) => [
- * destinationStream1.routed(transformedRecord1),
- * destinationStream2.routed([record2a, record2b])
- * ]);
- * ```
- */
- routed = (values) => new RoutedMessage(this, values);
- /**
- * Adds a single transformation function that can route messages to multiple destination streams.
- * This is an alternative to adding multiple individual `addTransform` calls.
- * Only one multi-transform function can be added per stream.
- *
- * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
- * each specifying a destination stream and the message(s) to send to it.
- */
- addMultiTransform(transformation) {
- this._multipleTransformations = transformation;
+ }
+ return result;
+ }
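Editor's note: a worked example of the `Array(Nested(...))` wrapping that `mapToClickhouseRecord` performs, assuming a hypothetical schema with an `items` column of type `Array(Nested(sku String, qty UInt32))` (the schema and field names are illustrative):

```typescript
// Input record as the application produces it:
const input = {
  orderId: 42,
  items: [
    { sku: "A-1", qty: 2 },
    { sku: "B-7", qty: 1 },
  ],
};

// After mapToClickhouseRecord, each nested item is wrapped in its own
// single-element array, as JSONEachRow expects for Array(Nested(...)):
const mapped = {
  orderId: 42,
  items: [
    [{ sku: "A-1", qty: 2 }],
    [{ sku: "B-7", qty: 1 }],
  ],
};
```

Plain `Nested` struct columns (not wrapped in an array) are instead recursed into in place, so only their inner `Array(Nested(...))` fields, if any, get this treatment.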
+ /**
+ * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
+ * This method establishes a direct connection to ClickHouse using the project configuration
+ * and inserts the provided data into the versioned table.
+ *
+ * PERFORMANCE OPTIMIZATIONS:
+ * - Memoized client connections with fast config hashing
+ * - Single-pass validation with pre-allocated arrays
+ * - Batch-optimized retry strategy (batches of 10, then individual)
+ * - Optimized ClickHouse settings for large datasets
+ * - Reduced memory allocations and object creation
+ *
+ * Uses advanced typia validation when available for comprehensive type checking,
+ * with fallback to basic validation for compatibility.
+ *
+ * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
+ * If the configuration changes, a new client will be automatically created.
+ *
+ * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
+ * @param options Optional configuration for error handling, validation, and insertion behavior
+ * @returns Promise resolving to detailed insertion results
+ * @throws {ConfigError} When configuration cannot be read or parsed
+ * @throws {ClickHouseError} When insertion fails based on the error strategy
+ * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
+ *
+ * @example
+ * ```typescript
+ * // Create an OlapTable instance (typia validators auto-injected)
+ * const userTable = new OlapTable<User>('users');
+ *
+ * // Insert with comprehensive typia validation
+ * const result1 = await userTable.insert([
+ * { id: 1, name: 'John', email: 'john@example.com' },
+ * { id: 2, name: 'Jane', email: 'jane@example.com' }
+ * ]);
+ *
+ * // Insert data with stream input (validation not available for streams)
+ * const dataStream = new Readable({
+ * objectMode: true,
+ * read() { // Stream implementation }
+ * });
+ * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
+ *
+ * // Insert with validation disabled for performance
+ * const result3 = await userTable.insert(data, { validate: false });
+ *
+ * // Insert with error handling strategies
+ * const result4 = await userTable.insert(mixedData, {
+ * strategy: 'isolate',
+ * allowErrorsRatio: 0.1,
+ * validate: true // Use typia validation (default)
+ * });
+ *
+ * // Optional: Clean up connection when completely done
+ * await userTable.closeClient();
+ * ```
+ */
+ async insert(data, options) {
+ const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
+ const emptyResult = this.handleEmptyData(data, isStream);
+ if (emptyResult) {
+ return emptyResult;
+ }
+ let validatedData = [];
+ let validationErrors = [];
+ if (!isStream && shouldValidate) {
+ const validationResult = await this.performPreInsertionValidation(
+ data,
+ shouldValidate,
+ strategy,
+ options
+ );
+ validatedData = validationResult.validatedData;
+ validationErrors = validationResult.validationErrors;
+ } else {
+ validatedData = isStream ? [] : data;
+ }
+ const { client } = await this.getMemoizedClient();
+ const tableName = this.generateTableName();
+ try {
+ const insertOptions = this.prepareInsertOptions(
+ tableName,
+ data,
+ validatedData,
+ isStream,
+ strategy,
+ options
+ );
+ await client.insert(insertOptions);
+ return this.createSuccessResult(
+ data,
+ validatedData,
+ validationErrors,
+ isStream,
+ shouldValidate,
+ strategy
+ );
+ } catch (batchError) {
+ return await this.handleInsertionError(
+ batchError,
+ strategy,
+ tableName,
+ data,
+ validatedData,
+ validationErrors,
+ isStream,
+ shouldValidate,
+ options
+ );
+ }
+ }
+ // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
+ // were removed in ENG-856. Use direct configuration instead, e.g.:
+ // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
+ };
+
+ // src/dmv2/sdk/stream.ts
+ var import_node_crypto3 = require("crypto");
+ var RoutedMessage = class {
+ /** The destination stream for the message */
+ destination;
+ /** The message value(s) to send */
+ values;
+ /**
+ * Creates a new routed message.
+ *
+ * @param destination The target stream
+ * @param values The message(s) to route
+ */
+ constructor(destination, values) {
+ this.destination = destination;
+ this.values = values;
+ }
+ };
+ var Stream = class extends TypedBase {
+ defaultDeadLetterQueue;
+ /** @internal Memoized KafkaJS producer for reusing connections across sends */
+ _memoizedProducer;
+ /** @internal Hash of the configuration used to create the memoized Kafka producer */
+ _kafkaConfigHash;
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
+ super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
+ const streams = getMooseInternal().streams;
+ if (streams.has(name)) {
+ throw new Error(`Stream with name ${name} already exists`);
+ }
+ streams.set(name, this);
+ this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
+ }
+ /**
+ * Internal map storing transformation configurations.
+ * Maps destination stream names to arrays of transformation functions and their configs.
+ *
+ * @internal
+ */
+ _transformations = /* @__PURE__ */ new Map();
+ /**
+ * Internal function for multi-stream transformations.
+ * Allows a single transformation to route messages to multiple destinations.
+ *
+ * @internal
+ */
+ _multipleTransformations;
+ /**
+ * Internal array storing consumer configurations.
+ *
+ * @internal
+ */
+ _consumers = new Array();
+ /**
+ * Builds the full Kafka topic name including optional namespace and version suffix.
+ * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
+ */
+ buildFullTopicName(namespace) {
+ const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
+ const base = `${this.name}${versionSuffix}`;
+ return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
+ }
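Editor's note: the topic-naming rules are easiest to see by example (stream name and namespace below are illustrative):

```typescript
// How buildFullTopicName composes names for a stream "pageViews"
// with config.version = "1.2.0":
//   version suffix:        "_1_2_0"  (dots replaced with underscores)
//   no namespace:          "pageViews_1_2_0"
//   namespace "analytics": "analytics.pageViews_1_2_0"
```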
+ /**
1773
+ * Creates a fast hash string from relevant Kafka configuration fields.
1774
+ */
1775
+ createConfigHash(kafkaConfig) {
1776
+ const configString = [
1777
+ kafkaConfig.broker,
1778
+ kafkaConfig.messageTimeoutMs,
1779
+ kafkaConfig.saslUsername,
1780
+ kafkaConfig.saslPassword,
1781
+ kafkaConfig.saslMechanism,
1782
+ kafkaConfig.securityProtocol,
1783
+ kafkaConfig.namespace
1784
+ ].join(":");
1785
+ return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1786
+ }
1787
+ /**
1788
+ * Gets or creates a memoized KafkaJS producer using runtime configuration.
1789
+ */
1790
+ async getMemoizedProducer() {
1791
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1792
+ const configRegistry = globalThis._mooseConfigRegistry;
1793
+ const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1794
+ const kafkaConfig = await configRegistry.getKafkaConfig();
1795
+ const currentHash = this.createConfigHash(kafkaConfig);
1796
+ if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1797
+ return { producer: this._memoizedProducer, kafkaConfig };
1798
+ }
1799
+ if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1800
+ try {
1801
+ await this._memoizedProducer.disconnect();
1802
+ } catch {
1803
+ }
1804
+ this._memoizedProducer = void 0;
1805
+ }
1806
+ const clientId = `moose-sdk-stream-${this.name}`;
1807
+ const logger = {
1808
+ logPrefix: clientId,
1809
+ log: (message) => {
1810
+ console.log(`${clientId}: ${message}`);
1811
+ },
1812
+ error: (message) => {
1813
+ console.error(`${clientId}: ${message}`);
1814
+ },
1815
+ warn: (message) => {
1816
+ console.warn(`${clientId}: ${message}`);
2108
1817
  }
2109
1818
  };
2110
- DeadLetterQueue = class extends Stream {
2111
- constructor(name, config, typeGuard) {
2112
- if (typeGuard === void 0) {
2113
- throw new Error(
2114
- "Supply the type param T so that the schema is inserted by the compiler plugin."
2115
- );
2116
- }
2117
- super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
2118
- this.typeGuard = typeGuard;
2119
- getMooseInternal().streams.set(name, this);
1819
+ const producer = await getKafkaProducer2(
1820
+ {
1821
+ clientId,
1822
+ broker: kafkaConfig.broker,
1823
+ securityProtocol: kafkaConfig.securityProtocol,
1824
+ saslUsername: kafkaConfig.saslUsername,
1825
+ saslPassword: kafkaConfig.saslPassword,
1826
+ saslMechanism: kafkaConfig.saslMechanism
1827
+ },
1828
+ logger
1829
+ );
1830
+ this._memoizedProducer = producer;
1831
+ this._kafkaConfigHash = currentHash;
1832
+ return { producer, kafkaConfig };
1833
+ }
1834
+ /**
1835
+ * Closes the memoized Kafka producer if it exists.
1836
+ */
1837
+ async closeProducer() {
1838
+ if (this._memoizedProducer) {
1839
+ try {
1840
+ await this._memoizedProducer.disconnect();
1841
+ } catch {
1842
+ } finally {
1843
+ this._memoizedProducer = void 0;
1844
+ this._kafkaConfigHash = void 0;
2120
1845
  }
2121
- /**
2122
- * Internal type guard function for validating and casting original records.
2123
- *
2124
- * @internal
2125
- */
2126
- typeGuard;
2127
- /**
2128
- * Adds a transformation step for dead letter records.
2129
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
2130
- *
2131
- * @template U The output type for the transformation
2132
- * @param destination The destination stream for transformed messages
2133
- * @param transformation Function to transform dead letter records
2134
- * @param config Optional transformation configuration
2135
- */
2136
- addTransform(destination, transformation, config) {
2137
- const withValidate = (deadLetter) => {
2138
- attachTypeGuard(deadLetter, this.typeGuard);
2139
- return transformation(deadLetter);
2140
- };
2141
- super.addTransform(destination, withValidate, config);
1846
+ }
1847
+ }
1848
+ /**
1849
+ * Sends one or more records to this stream's Kafka topic.
1850
+ * Values are JSON-serialized as message values.
1851
+ */
1852
+ async send(values) {
1853
+ const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1854
+ if (flat.length === 0) return;
1855
+ const { producer, kafkaConfig } = await this.getMemoizedProducer();
1856
+ const topic = this.buildFullTopicName(kafkaConfig.namespace);
1857
+ const sr = this.config.schemaConfig;
1858
+ if (sr && sr.kind === "JSON") {
1859
+ const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1860
+ if (!schemaRegistryUrl) {
1861
+ throw new Error("Schema Registry URL not configured");
1862
+ }
1863
+ const {
1864
+ default: { SchemaRegistry }
1865
+ } = await import("@kafkajs/confluent-schema-registry");
1866
+ const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1867
+ let schemaId = void 0;
1868
+ if ("id" in sr.reference) {
1869
+ schemaId = sr.reference.id;
1870
+ } else if ("subjectLatest" in sr.reference) {
1871
+ schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1872
+ } else if ("subject" in sr.reference) {
1873
+ schemaId = await registry.getRegistryId(
1874
+ sr.reference.subject,
1875
+ sr.reference.version
1876
+ );
2142
1877
  }
2143
- /**
2144
- * Adds a consumer for dead letter records.
2145
- * The consumer function receives a DeadLetter<T> with type recovery capabilities.
2146
- *
2147
- * @param consumer Function to process dead letter records
2148
- * @param config Optional consumer configuration
2149
- */
2150
- addConsumer(consumer, config) {
2151
- const withValidate = (deadLetter) => {
2152
- attachTypeGuard(deadLetter, this.typeGuard);
2153
- return consumer(deadLetter);
2154
- };
2155
- super.addConsumer(withValidate, config);
1878
+ if (schemaId === void 0) {
1879
+ throw new Error("Malformed schema reference.");
2156
1880
  }
2157
- /**
2158
- * Adds a multi-stream transformation for dead letter records.
2159
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
2160
- *
2161
- * @param transformation Function to route dead letter records to multiple destinations
2162
- */
2163
- addMultiTransform(transformation) {
2164
- const withValidate = (deadLetter) => {
2165
- attachTypeGuard(deadLetter, this.typeGuard);
2166
- return transformation(deadLetter);
2167
- };
2168
- super.addMultiTransform(withValidate);
1881
+ const encoded = await Promise.all(
1882
+ flat.map(
1883
+ (v) => registry.encode(schemaId, v)
1884
+ )
1885
+ );
1886
+ await producer.send({
1887
+ topic,
1888
+ messages: encoded.map((value) => ({ value }))
1889
+ });
1890
+ return;
1891
+ } else if (sr !== void 0) {
1892
+ throw new Error("Currently only JSON Schema is supported.");
1893
+ }
1894
+ await producer.send({
1895
+ topic,
1896
+ messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1897
+ });
1898
+ }
1899
+ /**
+ * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
+ * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
+ *
+ * @template U The data type of the messages in the destination stream.
+ * @param destination The destination stream for the transformed messages.
+ * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
+ * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
+ * @param config Optional configuration for this specific transformation step, like a version.
+ */
+ addTransform(destination, transformation, config) {
+ const sourceFile = getSourceFileFromStack(new Error().stack);
+ const transformConfig = {
+ ...config ?? {},
+ sourceFile
+ };
+ if (transformConfig.deadLetterQueue === void 0) {
+ transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
+ }
+ if (this._transformations.has(destination.name)) {
+ const existingTransforms = this._transformations.get(destination.name);
+ const hasVersion = existingTransforms.some(
+ ([_, __, cfg]) => cfg.version === transformConfig.version
+ );
+ if (!hasVersion) {
+ existingTransforms.push([destination, transformation, transformConfig]);
  }
+ } else {
+ this._transformations.set(destination.name, [
+ [destination, transformation, transformConfig]
+ ]);
+ }
+ }
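As the JSDoc above notes, a transform can drop a message (return `null`, `undefined`, or `[]`) or fan out (return an array), and distinct `version` identifiers allow parallel transforms to the same destination. A minimal sketch, assuming hypothetical `rawEvents` and `cleanEvents` streams whose schemas were injected by the compiler plugin:

```typescript
rawEvents.addTransform(
  cleanEvents,
  // Returning [] filters the message out; an array of several items fans out.
  (event) => (event.isValid ? [{ ...event, seenAt: new Date() }] : []),
  { version: "1.0" }
);
```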
1932
+ /**
+ * Adds a consumer function that processes messages from this stream.
+ * Multiple consumers can be added if they have distinct `version` identifiers in their config.
+ *
+ * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
+ * @param config Optional configuration for this specific consumer, like a version.
+ */
+ addConsumer(consumer, config) {
+ const sourceFile = getSourceFileFromStack(new Error().stack);
+ const consumerConfig = {
+ ...config ?? {},
+ sourceFile
  };
+ if (consumerConfig.deadLetterQueue === void 0) {
+ consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
+ }
+ const hasVersion = this._consumers.some(
+ (existing) => existing.config.version === consumerConfig.version
+ );
+ if (!hasVersion) {
+ this._consumers.push({ consumer, config: consumerConfig });
+ }
  }
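Consumers are side-effect subscribers; note that a second `addConsumer` with the same `version` is silently ignored by the dedup check above. A sketch, again assuming the hypothetical `cleanEvents` stream:

```typescript
cleanEvents.addConsumer(
  async (event) => {
    console.log("processed:", event); // side effect only; the return value is ignored
  },
  { version: "1.0" }
);
```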
2172
- });
+ /**
+ * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
+ * @param values The value or values to send to this stream.
+ * @returns A `RoutedMessage` object associating the values with this stream.
+ *
+ * @example
+ * ```typescript
+ * sourceStream.addMultiTransform((record) => [
+ * destinationStream1.routed(transformedRecord1),
+ * destinationStream2.routed([record2a, record2b])
+ * ]);
+ * ```
+ */
+ routed = (values) => new RoutedMessage(this, values);
+ /**
+ * Adds a single transformation function that can route messages to multiple destination streams.
+ * This is an alternative to adding multiple individual `addTransform` calls.
+ * Only one multi-transform function can be added per stream.
+ *
+ * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
+ * each specifying a destination stream and the message(s) to send to it.
+ */
+ addMultiTransform(transformation) {
+ this._multipleTransformations = transformation;
+ }
+ };
+ function attachTypeGuard(dl, typeGuard) {
+ dl.asTyped = () => typeGuard(dl.originalRecord);
+ }
+ var DeadLetterQueue = class extends Stream {
+ constructor(name, config, typeGuard) {
+ if (typeGuard === void 0) {
+ throw new Error(
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
+ );
+ }
+ super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
+ this.typeGuard = typeGuard;
+ getMooseInternal().streams.set(name, this);
+ }
+ /**
+ * Internal type guard function for validating and casting original records.
+ *
+ * @internal
+ */
+ typeGuard;
+ /**
+ * Adds a transformation step for dead letter records.
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
+ *
+ * @template U The output type for the transformation
+ * @param destination The destination stream for transformed messages
+ * @param transformation Function to transform dead letter records
+ * @param config Optional transformation configuration
+ */
+ addTransform(destination, transformation, config) {
+ const withValidate = (deadLetter) => {
+ attachTypeGuard(deadLetter, this.typeGuard);
+ return transformation(deadLetter);
+ };
+ super.addTransform(destination, withValidate, config);
+ }
+ /**
+ * Adds a consumer for dead letter records.
+ * The consumer function receives a DeadLetter<T> with type recovery capabilities.
+ *
+ * @param consumer Function to process dead letter records
+ * @param config Optional consumer configuration
+ */
+ addConsumer(consumer, config) {
+ const withValidate = (deadLetter) => {
+ attachTypeGuard(deadLetter, this.typeGuard);
+ return consumer(deadLetter);
+ };
+ super.addConsumer(withValidate, config);
+ }
+ /**
+ * Adds a multi-stream transformation for dead letter records.
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
+ *
+ * @param transformation Function to route dead letter records to multiple destinations
+ */
+ addMultiTransform(transformation) {
+ const withValidate = (deadLetter) => {
+ attachTypeGuard(deadLetter, this.typeGuard);
+ return transformation(deadLetter);
+ };
+ super.addMultiTransform(withValidate);
+ }
+ };
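`attachTypeGuard` is what gives each dead letter an `asTyped()` that re-runs the compiler-injected validator over `originalRecord`. A hedged sketch of consuming from a pipeline's DLQ, assuming a hypothetical `eventsPipeline` built with `deadLetterQueue: true`:

```typescript
eventsPipeline.deadLetterQueue?.addConsumer((deadLetter) => {
  // asTyped() re-validates originalRecord back to the source type T,
  // throwing if the record no longer conforms.
  const recovered = deadLetter.asTyped();
  console.warn("dead letter recovered:", recovered);
});
```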

  // src/dmv2/sdk/workflow.ts
- var Task, Workflow;
- var init_workflow = __esm({
- "src/dmv2/sdk/workflow.ts"() {
- "use strict";
- init_internal();
- Task = class {
- /**
- * Creates a new Task instance.
- *
- * @param name - Unique identifier for the task
- * @param config - Configuration object defining the task behavior
- *
- * @example
- * ```typescript
- * // No input, no output
- * const task1 = new Task<null, void>("task1", {
- * run: async () => {
- * console.log("No input/output");
- * }
- * });
- *
- * // No input, but has output
- * const task2 = new Task<null, OutputType>("task2", {
- * run: async () => {
- * return someOutput;
- * }
- * });
- *
- * // Has input, no output
- * const task3 = new Task<InputType, void>("task3", {
- * run: async (input: InputType) => {
- * // process input but return nothing
- * }
- * });
- *
- * // Has both input and output
- * const task4 = new Task<InputType, OutputType>("task4", {
- * run: async (input: InputType) => {
- * return process(input);
- * }
- * });
- * ```
- */
- constructor(name, config) {
- this.name = name;
- this.config = config;
+ var Task = class {
+ /**
+ * Creates a new Task instance.
+ *
+ * @param name - Unique identifier for the task
+ * @param config - Configuration object defining the task behavior
+ *
+ * @example
+ * ```typescript
+ * // No input, no output
+ * const task1 = new Task<null, void>("task1", {
+ * run: async () => {
+ * console.log("No input/output");
+ * }
+ * });
+ *
+ * // No input, but has output
+ * const task2 = new Task<null, OutputType>("task2", {
+ * run: async () => {
+ * return someOutput;
+ * }
+ * });
+ *
+ * // Has input, no output
+ * const task3 = new Task<InputType, void>("task3", {
+ * run: async (input: InputType) => {
+ * // process input but return nothing
+ * }
+ * });
+ *
+ * // Has both input and output
+ * const task4 = new Task<InputType, OutputType>("task4", {
+ * run: async (input: InputType) => {
+ * return process(input);
+ * }
+ * });
+ * ```
+ */
+ constructor(name, config) {
+ this.name = name;
+ this.config = config;
+ }
+ };
+ var Workflow = class {
+ /**
+ * Creates a new Workflow instance and registers it with the Moose system.
+ *
+ * @param name - Unique identifier for the workflow
+ * @param config - Configuration object defining the workflow behavior and task orchestration
+ * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
+ */
+ constructor(name, config) {
+ this.name = name;
+ this.config = config;
+ const workflows = getMooseInternal().workflows;
+ if (workflows.has(name)) {
+ throw new Error(`Workflow with name ${name} already exists`);
+ }
+ this.validateTaskGraph(config.startingTask, name);
+ workflows.set(name, this);
+ }
+ /**
+ * Validates the task graph to ensure there are no null tasks or infinite loops.
+ *
+ * @private
+ * @param startingTask - The starting task to begin validation from
+ * @param workflowName - The name of the workflow being validated (for error messages)
+ * @throws {Error} When null/undefined tasks are found or infinite loops are detected
+ */
+ validateTaskGraph(startingTask, workflowName) {
+ if (startingTask === null || startingTask === void 0) {
+ throw new Error(
+ `Workflow "${workflowName}" has a null or undefined starting task`
+ );
+ }
+ const visited = /* @__PURE__ */ new Set();
+ const recursionStack = /* @__PURE__ */ new Set();
+ const validateTask = (task, currentPath) => {
+ if (task === null || task === void 0) {
+ const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
+ throw new Error(
+ `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
+ );
  }
- };
- Workflow = class {
- /**
- * Creates a new Workflow instance and registers it with the Moose system.
- *
- * @param name - Unique identifier for the workflow
- * @param config - Configuration object defining the workflow behavior and task orchestration
- * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
- */
- constructor(name, config) {
- this.name = name;
- this.config = config;
- const workflows = getMooseInternal().workflows;
- if (workflows.has(name)) {
- throw new Error(`Workflow with name ${name} already exists`);
- }
- this.validateTaskGraph(config.startingTask, name);
- workflows.set(name, this);
+ const taskName = task.name;
+ if (recursionStack.has(taskName)) {
+ const cycleStartIndex = currentPath.indexOf(taskName);
+ const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
+ throw new Error(
+ `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
+ );
  }
- /**
- * Validates the task graph to ensure there are no null tasks or infinite loops.
- *
- * @private
- * @param startingTask - The starting task to begin validation from
- * @param workflowName - The name of the workflow being validated (for error messages)
- * @throws {Error} When null/undefined tasks are found or infinite loops are detected
- */
- validateTaskGraph(startingTask, workflowName) {
- if (startingTask === null || startingTask === void 0) {
- throw new Error(
- `Workflow "${workflowName}" has a null or undefined starting task`
- );
+ if (visited.has(taskName)) {
+ return;
+ }
+ visited.add(taskName);
+ recursionStack.add(taskName);
+ if (task.config.onComplete) {
+ for (const nextTask of task.config.onComplete) {
+ validateTask(nextTask, [...currentPath, taskName]);
  }
- const visited = /* @__PURE__ */ new Set();
- const recursionStack = /* @__PURE__ */ new Set();
- const validateTask = (task, currentPath) => {
- if (task === null || task === void 0) {
- const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
- throw new Error(
- `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
- );
- }
- const taskName = task.name;
- if (recursionStack.has(taskName)) {
- const cycleStartIndex = currentPath.indexOf(taskName);
- const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
- throw new Error(
- `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
- );
- }
- if (visited.has(taskName)) {
- return;
- }
- visited.add(taskName);
- recursionStack.add(taskName);
- if (task.config.onComplete) {
- for (const nextTask of task.config.onComplete) {
- validateTask(nextTask, [...currentPath, taskName]);
- }
- }
- recursionStack.delete(taskName);
- };
- validateTask(startingTask, []);
  }
+ recursionStack.delete(taskName);
  };
+ validateTask(startingTask, []);
  }
- });
+ };
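`Task` and `Workflow` are now plain top-level classes rather than values filled in by lazy `__esm` initializers, so registration and cycle validation run at construction time. A sketch of a two-task chain using hypothetical names, with the second task written in the single-parameter context style the ETL tasks below use:

```typescript
const extract = new Task<null, string[]>("extract", {
  run: async () => ["a", "b"],
});
const load = new Task<string[], void>("load", {
  run: async ({ input }) => console.log("loading", input),
});
extract.config.onComplete = [load]; // a cycle here would throw during Workflow construction
new Workflow("nightly_sync", { startingTask: extract, retries: 1, timeout: "30m" });
```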

  // src/dmv2/sdk/ingestApi.ts
- var IngestApi;
- var init_ingestApi = __esm({
- "src/dmv2/sdk/ingestApi.ts"() {
- "use strict";
- init_typedBase();
- init_internal();
- IngestApi = class extends TypedBase {
- constructor(name, config, schema, columns, validators, allowExtraFields) {
- super(name, config, schema, columns, void 0, allowExtraFields);
- const ingestApis = getMooseInternal().ingestApis;
- if (ingestApis.has(name)) {
- throw new Error(`Ingest API with name ${name} already exists`);
- }
- ingestApis.set(name, this);
- }
- };
+ var IngestApi = class extends TypedBase {
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
+ super(name, config, schema, columns, void 0, allowExtraFields);
+ const ingestApis = getMooseInternal().ingestApis;
+ if (ingestApis.has(name)) {
+ throw new Error(`Ingest API with name ${name} already exists`);
+ }
+ ingestApis.set(name, this);
  }
- });
+ };
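In user code the trailing schema/column parameters are injected by the compiler plugin from the type parameter, so constructing an ingest endpoint is typically just a name plus a destination. A hedged sketch with a hypothetical `eventsStream`:

```typescript
interface UserEvent { id: string; ts: Date }
// Schema and columns come from the compiler plugin; a duplicate name throws.
const ingest = new IngestApi<UserEvent>("user_events", { destination: eventsStream });
```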

  // src/dmv2/sdk/consumptionApi.ts
- var Api, ConsumptionApi;
- var init_consumptionApi = __esm({
- "src/dmv2/sdk/consumptionApi.ts"() {
- "use strict";
- init_typedBase();
- init_internal();
- Api = class extends TypedBase {
- /** @internal The handler function that processes requests and generates responses. */
- _handler;
- /** @internal The JSON schema definition for the response type R. */
- responseSchema;
- constructor(name, handler, config, schema, columns, responseSchema) {
- super(name, config ?? {}, schema, columns);
- this._handler = handler;
- this.responseSchema = responseSchema ?? {
- version: "3.1",
- schemas: [{ type: "array", items: { type: "object" } }],
- components: { schemas: {} }
- };
- const apis = getMooseInternal().apis;
- const key = `${name}${config?.version ? `:${config.version}` : ""}`;
- if (apis.has(key)) {
- throw new Error(
- `Consumption API with name ${name} and version ${config?.version} already exists`
- );
- }
- apis.set(key, this);
- if (config?.path) {
- if (config.version) {
- const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
- if (pathEndsWithVersion) {
- if (apis.has(config.path)) {
- const existing = apis.get(config.path);
- throw new Error(
- `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
- );
- }
- apis.set(config.path, this);
- } else {
- const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
- if (apis.has(versionedPath)) {
- const existing = apis.get(versionedPath);
- throw new Error(
- `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
- );
- }
- apis.set(versionedPath, this);
- if (!apis.has(config.path)) {
- apis.set(config.path, this);
- }
- }
- } else {
- if (apis.has(config.path)) {
- const existing = apis.get(config.path);
- throw new Error(
- `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
- );
- }
- apis.set(config.path, this);
- }
- }
- }
- /**
- * Retrieves the handler function associated with this Consumption API.
- * @returns The handler function.
- */
- getHandler = () => {
- return this._handler;
- };
- async call(baseUrl, queryParams) {
- let path2;
- if (this.config?.path) {
- if (this.config.version) {
- const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
- if (pathEndsWithVersion) {
- path2 = this.config.path;
- } else {
- path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
- }
- } else {
- path2 = this.config.path;
+ var Api = class extends TypedBase {
+ /** @internal The handler function that processes requests and generates responses. */
+ _handler;
+ /** @internal The JSON schema definition for the response type R. */
+ responseSchema;
+ constructor(name, handler, config, schema, columns, responseSchema) {
+ super(name, config ?? {}, schema, columns);
+ this._handler = handler;
+ this.responseSchema = responseSchema ?? {
+ version: "3.1",
+ schemas: [{ type: "array", items: { type: "object" } }],
+ components: { schemas: {} }
+ };
+ const apis = getMooseInternal().apis;
+ const key = `${name}${config?.version ? `:${config.version}` : ""}`;
+ if (apis.has(key)) {
+ throw new Error(
+ `Consumption API with name ${name} and version ${config?.version} already exists`
+ );
+ }
+ apis.set(key, this);
+ if (config?.path) {
+ if (config.version) {
+ const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
+ if (pathEndsWithVersion) {
+ if (apis.has(config.path)) {
+ const existing = apis.get(config.path);
+ throw new Error(
+ `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
+ );
  }
+ apis.set(config.path, this);
  } else {
- path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
- }
- const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
- const searchParams = url.searchParams;
- for (const [key, value] of Object.entries(queryParams)) {
- if (Array.isArray(value)) {
- for (const item of value) {
- if (item !== null && item !== void 0) {
- searchParams.append(key, String(item));
- }
- }
- } else if (value !== null && value !== void 0) {
- searchParams.append(key, String(value));
- }
- }
- const response = await fetch(url, {
- method: "GET",
- headers: {
- Accept: "application/json"
+ const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
+ if (apis.has(versionedPath)) {
+ const existing = apis.get(versionedPath);
+ throw new Error(
+ `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
+ );
  }
- });
- if (!response.ok) {
- throw new Error(`HTTP error! status: ${response.status}`);
- }
- const data = await response.json();
- return data;
- }
- };
- ConsumptionApi = Api;
- }
- });
-
- // src/dmv2/sdk/ingestPipeline.ts
- var IngestPipeline;
- var init_ingestPipeline = __esm({
- "src/dmv2/sdk/ingestPipeline.ts"() {
- "use strict";
- init_typedBase();
- init_stream();
- init_olapTable();
- init_ingestApi();
- init_types();
- IngestPipeline = class extends TypedBase {
- /**
- * The OLAP table component of the pipeline, if configured.
- * Provides analytical query capabilities for the ingested data.
- * Only present when `config.table` is not `false`.
- */
- table;
- /**
- * The stream component of the pipeline, if configured.
- * Handles real-time data flow and processing between components.
- * Only present when `config.stream` is not `false`.
- */
- stream;
- /**
- * The ingest API component of the pipeline, if configured.
- * Provides HTTP endpoints for data ingestion.
- * Only present when `config.ingestApi` is not `false`.
- */
- ingestApi;
- /** The dead letter queue of the pipeline, if configured. */
- deadLetterQueue;
- constructor(name, config, schema, columns, validators, allowExtraFields) {
- super(name, config, schema, columns, validators, allowExtraFields);
- if (config.ingest !== void 0) {
- console.warn(
- "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
- );
- if (config.ingestApi === void 0) {
- config.ingestApi = config.ingest;
+ apis.set(versionedPath, this);
+ if (!apis.has(config.path)) {
+ apis.set(config.path, this);
  }
  }
- if (config.table) {
- const tableConfig = typeof config.table === "object" ? {
- ...config.table,
- lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
- ...config.version && { version: config.version }
- } : {
- lifeCycle: config.lifeCycle,
- engine: "MergeTree" /* MergeTree */,
- ...config.version && { version: config.version }
- };
- this.table = new OlapTable(
- name,
- tableConfig,
- this.schema,
- this.columnArray,
- this.validators
- );
- }
- if (config.deadLetterQueue) {
- const streamConfig = {
- destination: void 0,
- ...typeof config.deadLetterQueue === "object" ? {
- ...config.deadLetterQueue,
- lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
- } : { lifeCycle: config.lifeCycle },
- ...config.version && { version: config.version }
- };
- this.deadLetterQueue = new DeadLetterQueue(
- `${name}DeadLetterQueue`,
- streamConfig,
- validators.assert
+ } else {
+ if (apis.has(config.path)) {
+ const existing = apis.get(config.path);
+ throw new Error(
+ `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
  );
  }
- if (config.stream) {
- const streamConfig = {
- destination: this.table,
- defaultDeadLetterQueue: this.deadLetterQueue,
- ...typeof config.stream === "object" ? {
- ...config.stream,
- lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
- } : { lifeCycle: config.lifeCycle },
- ...config.version && { version: config.version }
- };
- this.stream = new Stream(
- name,
- streamConfig,
- this.schema,
- this.columnArray,
- void 0,
- this.allowExtraFields
- );
- this.stream.pipelineParent = this;
+ apis.set(config.path, this);
+ }
+ }
+ }
+ /**
+ * Retrieves the handler function associated with this Consumption API.
+ * @returns The handler function.
+ */
+ getHandler = () => {
+ return this._handler;
+ };
+ async call(baseUrl, queryParams) {
+ let path2;
+ if (this.config?.path) {
+ if (this.config.version) {
+ const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
+ if (pathEndsWithVersion) {
+ path2 = this.config.path;
+ } else {
+ path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
  }
- const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
- if (effectiveIngestAPI) {
- if (!this.stream) {
- throw new Error("Ingest API needs a stream to write to.");
- }
- const ingestConfig = {
- destination: this.stream,
- deadLetterQueue: this.deadLetterQueue,
- ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
- ...config.version && { version: config.version },
- ...config.path && { path: config.path }
- };
- this.ingestApi = new IngestApi(
- name,
- ingestConfig,
- this.schema,
- this.columnArray,
- void 0,
- this.allowExtraFields
- );
- this.ingestApi.pipelineParent = this;
+ } else {
+ path2 = this.config.path;
+ }
+ } else {
+ path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
+ }
+ const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
+ const searchParams = url.searchParams;
+ for (const [key, value] of Object.entries(queryParams)) {
+ if (Array.isArray(value)) {
+ for (const item of value) {
+ if (item !== null && item !== void 0) {
+ searchParams.append(key, String(item));
+ }
  }
+ } else if (value !== null && value !== void 0) {
+ searchParams.append(key, String(value));
  }
- };
+ }
+ const response = await fetch(url, {
+ method: "GET",
+ headers: {
+ Accept: "application/json"
+ }
+ });
+ if (!response.ok) {
+ throw new Error(`HTTP error! status: ${response.status}`);
+ }
+ const data = await response.json();
+ return data;
  }
- });
+ };
+ var ConsumptionApi = Api;
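`call` resolves the same path the server registers: a custom `path` (with the version appended unless the path already ends with it), else `name/version`, else just `name`, then issues a GET with the query params appended. A client-side sketch against a hypothetical deployment:

```typescript
// For an Api named "daily_active_users" with version "1.0" and no custom path,
// this issues GET {baseUrl}/api/daily_active_users/1.0?minCount=10
const rows = await dailyActiveUsers.call("http://localhost:4000", { minCount: 10 });
```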
2546
2274
 
2547
- // src/dmv2/sdk/etlPipeline.ts
2548
- var InternalBatcher, ETLPipeline;
2549
- var init_etlPipeline = __esm({
2550
- "src/dmv2/sdk/etlPipeline.ts"() {
2551
- "use strict";
2552
- init_workflow();
2553
- InternalBatcher = class {
2554
- iterator;
2555
- batchSize;
2556
- constructor(asyncIterable, batchSize = 20) {
2557
- this.iterator = asyncIterable[Symbol.asyncIterator]();
2558
- this.batchSize = batchSize;
2559
- }
2560
- async getNextBatch() {
2561
- const items = [];
2562
- for (let i = 0; i < this.batchSize; i++) {
2563
- const { value, done } = await this.iterator.next();
2564
- if (done) {
2565
- return { items, hasMore: false };
2566
- }
2567
- items.push(value);
2568
- }
2569
- return { items, hasMore: true };
2275
+ // src/dmv2/sdk/ingestPipeline.ts
2276
+ var IngestPipeline = class extends TypedBase {
2277
+ /**
2278
+ * The OLAP table component of the pipeline, if configured.
2279
+ * Provides analytical query capabilities for the ingested data.
2280
+ * Only present when `config.table` is not `false`.
2281
+ */
2282
+ table;
2283
+ /**
2284
+ * The stream component of the pipeline, if configured.
2285
+ * Handles real-time data flow and processing between components.
2286
+ * Only present when `config.stream` is not `false`.
2287
+ */
2288
+ stream;
2289
+ /**
2290
+ * The ingest API component of the pipeline, if configured.
2291
+ * Provides HTTP endpoints for data ingestion.
2292
+ * Only present when `config.ingestApi` is not `false`.
2293
+ */
2294
+ ingestApi;
2295
+ /** The dead letter queue of the pipeline, if configured. */
2296
+ deadLetterQueue;
2297
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2298
+ super(name, config, schema, columns, validators, allowExtraFields);
2299
+ if (config.ingest !== void 0) {
2300
+ console.warn(
2301
+ "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2302
+ );
2303
+ if (config.ingestApi === void 0) {
2304
+ config.ingestApi = config.ingest;
2570
2305
  }
2306
+ }
2307
+ if (config.table) {
2308
+ const tableConfig = typeof config.table === "object" ? {
2309
+ ...config.table,
2310
+ lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2311
+ ...config.version && { version: config.version }
2312
+ } : {
2313
+ lifeCycle: config.lifeCycle,
2314
+ engine: "MergeTree" /* MergeTree */,
2315
+ ...config.version && { version: config.version }
2316
+ };
2317
+ this.table = new OlapTable(
2318
+ name,
2319
+ tableConfig,
2320
+ this.schema,
2321
+ this.columnArray,
2322
+ this.validators
2323
+ );
2324
+ }
2325
+ if (config.deadLetterQueue) {
2326
+ const streamConfig = {
2327
+ destination: void 0,
2328
+ ...typeof config.deadLetterQueue === "object" ? {
2329
+ ...config.deadLetterQueue,
2330
+ lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2331
+ } : { lifeCycle: config.lifeCycle },
2332
+ ...config.version && { version: config.version }
2333
+ };
2334
+ this.deadLetterQueue = new DeadLetterQueue(
2335
+ `${name}DeadLetterQueue`,
2336
+ streamConfig,
2337
+ validators.assert
2338
+ );
2339
+ }
2340
+ if (config.stream) {
2341
+ const streamConfig = {
2342
+ destination: this.table,
2343
+ defaultDeadLetterQueue: this.deadLetterQueue,
2344
+ ...typeof config.stream === "object" ? {
2345
+ ...config.stream,
2346
+ lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2347
+ } : { lifeCycle: config.lifeCycle },
2348
+ ...config.version && { version: config.version }
2349
+ };
2350
+ this.stream = new Stream(
2351
+ name,
2352
+ streamConfig,
2353
+ this.schema,
2354
+ this.columnArray,
2355
+ void 0,
2356
+ this.allowExtraFields
2357
+ );
2358
+ this.stream.pipelineParent = this;
2359
+ }
2360
+ const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2361
+ if (effectiveIngestAPI) {
2362
+ if (!this.stream) {
2363
+ throw new Error("Ingest API needs a stream to write to.");
2364
+ }
2365
+ const ingestConfig = {
2366
+ destination: this.stream,
2367
+ deadLetterQueue: this.deadLetterQueue,
2368
+ ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2369
+ ...config.version && { version: config.version },
2370
+ ...config.path && { path: config.path }
2371
+ };
2372
+ this.ingestApi = new IngestApi(
2373
+ name,
2374
+ ingestConfig,
2375
+ this.schema,
2376
+ this.columnArray,
2377
+ void 0,
2378
+ this.allowExtraFields
2379
+ );
2380
+ this.ingestApi.pipelineParent = this;
2381
+ }
2382
+ }
2383
+ };
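The constructor wires up to four resources from one config and forwards `lifeCycle` and `version` into each; the deprecated `ingest` flag still works but warns. A hedged sketch (the type parameter drives the compiler-injected schema):

```typescript
interface PageView { id: string; url: string; viewedAt: Date }
const pageViews = new IngestPipeline<PageView>("page_views", {
  table: true,           // OlapTable; defaults to the MergeTree engine
  stream: true,          // stream whose destination is the table
  ingestApi: true,       // HTTP ingest endpoint writing to the stream
  deadLetterQueue: true, // registers "page_viewsDeadLetterQueue"
});
```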
+
+ // src/dmv2/sdk/etlPipeline.ts
+ var InternalBatcher = class {
+ iterator;
+ batchSize;
+ constructor(asyncIterable, batchSize = 20) {
+ this.iterator = asyncIterable[Symbol.asyncIterator]();
+ this.batchSize = batchSize;
+ }
+ async getNextBatch() {
+ const items = [];
+ for (let i = 0; i < this.batchSize; i++) {
+ const { value, done } = await this.iterator.next();
+ if (done) {
+ return { items, hasMore: false };
+ }
+ items.push(value);
+ }
+ return { items, hasMore: true };
+ }
+ };
+ var ETLPipeline = class {
+ constructor(name, config) {
+ this.name = name;
+ this.config = config;
+ this.setupPipeline();
+ }
+ batcher;
+ setupPipeline() {
+ this.batcher = this.createBatcher();
+ const tasks = this.createAllTasks();
+ tasks.extract.config.onComplete = [tasks.transform];
+ tasks.transform.config.onComplete = [tasks.load];
+ new Workflow(this.name, {
+ startingTask: tasks.extract,
+ retries: 1,
+ timeout: "30m"
+ });
+ }
+ createBatcher() {
+ const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
+ return new InternalBatcher(iterable);
+ }
+ getDefaultTaskConfig() {
+ return {
+ retries: 1,
+ timeout: "30m"
  };
- ETLPipeline = class {
- constructor(name, config) {
- this.name = name;
- this.config = config;
- this.setupPipeline();
- }
- batcher;
- setupPipeline() {
- this.batcher = this.createBatcher();
- const tasks = this.createAllTasks();
- tasks.extract.config.onComplete = [tasks.transform];
- tasks.transform.config.onComplete = [tasks.load];
- new Workflow(this.name, {
- startingTask: tasks.extract,
- retries: 1,
- timeout: "30m"
- });
- }
- createBatcher() {
- const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
- return new InternalBatcher(iterable);
- }
- getDefaultTaskConfig() {
- return {
- retries: 1,
- timeout: "30m"
- };
- }
- createAllTasks() {
- const taskConfig = this.getDefaultTaskConfig();
- return {
- extract: this.createExtractTask(taskConfig),
- transform: this.createTransformTask(taskConfig),
- load: this.createLoadTask(taskConfig)
- };
- }
- createExtractTask(taskConfig) {
- return new Task(`${this.name}_extract`, {
- run: async ({}) => {
- console.log(`Running extract task for ${this.name}...`);
- const batch = await this.batcher.getNextBatch();
- console.log(`Extract task completed with ${batch.items.length} items`);
- return batch;
- },
- retries: taskConfig.retries,
- timeout: taskConfig.timeout
- });
- }
- createTransformTask(taskConfig) {
- return new Task(
- `${this.name}_transform`,
- {
- // Use new single-parameter context API for handlers
- run: async ({ input }) => {
- const batch = input;
- console.log(
- `Running transform task for ${this.name} with ${batch.items.length} items...`
- );
- const transformedItems = [];
- for (const item of batch.items) {
- const transformed = await this.config.transform(item);
- transformedItems.push(transformed);
- }
- console.log(
- `Transform task completed with ${transformedItems.length} items`
- );
- return { items: transformedItems };
- },
- retries: taskConfig.retries,
- timeout: taskConfig.timeout
- }
- );
- }
- createLoadTask(taskConfig) {
- return new Task(`${this.name}_load`, {
- run: async ({ input: transformedItems }) => {
- console.log(
- `Running load task for ${this.name} with ${transformedItems.items.length} items...`
- );
- if ("insert" in this.config.load) {
- await this.config.load.insert(transformedItems.items);
- } else {
- await this.config.load(transformedItems.items);
- }
- console.log(`Load task completed`);
- },
- retries: taskConfig.retries,
- timeout: taskConfig.timeout
- });
- }
- // Execute the entire ETL pipeline
- async run() {
- console.log(`Starting ETL Pipeline: ${this.name}`);
- let batchNumber = 1;
- do {
- console.log(`Processing batch ${batchNumber}...`);
- const batch = await this.batcher.getNextBatch();
- if (batch.items.length === 0) {
- break;
- }
+ }
+ createAllTasks() {
+ const taskConfig = this.getDefaultTaskConfig();
+ return {
+ extract: this.createExtractTask(taskConfig),
+ transform: this.createTransformTask(taskConfig),
+ load: this.createLoadTask(taskConfig)
+ };
+ }
+ createExtractTask(taskConfig) {
+ return new Task(`${this.name}_extract`, {
+ run: async ({}) => {
+ console.log(`Running extract task for ${this.name}...`);
+ const batch = await this.batcher.getNextBatch();
+ console.log(`Extract task completed with ${batch.items.length} items`);
+ return batch;
+ },
+ retries: taskConfig.retries,
+ timeout: taskConfig.timeout
+ });
+ }
+ createTransformTask(taskConfig) {
+ return new Task(
+ `${this.name}_transform`,
+ {
+ // Use new single-parameter context API for handlers
+ run: async ({ input }) => {
+ const batch = input;
+ console.log(
+ `Running transform task for ${this.name} with ${batch.items.length} items...`
+ );
  const transformedItems = [];
- for (const extractedData of batch.items) {
- const transformedData = await this.config.transform(extractedData);
- transformedItems.push(transformedData);
- }
- if ("insert" in this.config.load) {
- await this.config.load.insert(transformedItems);
- } else {
- await this.config.load(transformedItems);
+ for (const item of batch.items) {
+ const transformed = await this.config.transform(item);
+ transformedItems.push(transformed);
  }
  console.log(
- `Completed batch ${batchNumber} with ${batch.items.length} items`
+ `Transform task completed with ${transformedItems.length} items`
  );
- batchNumber++;
- if (!batch.hasMore) {
- break;
- }
- } while (true);
- console.log(`Completed ETL Pipeline: ${this.name}`);
+ return { items: transformedItems };
+ },
+ retries: taskConfig.retries,
+ timeout: taskConfig.timeout
  }
- };
+ );
  }
- });
+ createLoadTask(taskConfig) {
+ return new Task(`${this.name}_load`, {
+ run: async ({ input: transformedItems }) => {
+ console.log(
+ `Running load task for ${this.name} with ${transformedItems.items.length} items...`
+ );
+ if ("insert" in this.config.load) {
+ await this.config.load.insert(transformedItems.items);
+ } else {
+ await this.config.load(transformedItems.items);
+ }
+ console.log(`Load task completed`);
+ },
+ retries: taskConfig.retries,
+ timeout: taskConfig.timeout
+ });
+ }
+ // Execute the entire ETL pipeline
+ async run() {
+ console.log(`Starting ETL Pipeline: ${this.name}`);
+ let batchNumber = 1;
+ do {
+ console.log(`Processing batch ${batchNumber}...`);
+ const batch = await this.batcher.getNextBatch();
+ if (batch.items.length === 0) {
+ break;
+ }
+ const transformedItems = [];
+ for (const extractedData of batch.items) {
+ const transformedData = await this.config.transform(extractedData);
+ transformedItems.push(transformedData);
+ }
+ if ("insert" in this.config.load) {
+ await this.config.load.insert(transformedItems);
+ } else {
+ await this.config.load(transformedItems);
+ }
+ console.log(
+ `Completed batch ${batchNumber} with ${batch.items.length} items`
+ );
+ batchNumber++;
+ if (!batch.hasMore) {
+ break;
+ }
+ } while (true);
+ console.log(`Completed ETL Pipeline: ${this.name}`);
+ }
+ };
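`extract` may be an async iterable or a function returning one, `transform` runs per item, and `load` is either a plain function or anything with an `insert` method (such as an OlapTable). `run()` drains the source in batches of 20 until exhausted. A hedged sketch with hypothetical `Raw`/`Clean` types and a `usersTable` sink:

```typescript
const etl = new ETLPipeline<Raw, Clean>("backfill_users", {
  extract: async function* () {
    yield { id: "1", email: " A@B.COM " };
  },
  transform: async (raw) => ({ id: raw.id, email: raw.email.trim().toLowerCase() }),
  load: usersTable, // anything exposing insert(items); a bare async function also works
});
await etl.run();
```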

  // src/dmv2/sdk/materializedView.ts
  function formatTableReference(table) {
@@ -2701,147 +2532,127 @@ function formatTableReference(table) {
  }
  return `\`${table.name}\``;
  }
- var requireTargetTableName, MaterializedView;
- var init_materializedView = __esm({
- "src/dmv2/sdk/materializedView.ts"() {
- "use strict";
- init_types();
- init_sqlHelpers();
- init_olapTable();
- init_internal();
- init_stackTrace();
- requireTargetTableName = (tableName) => {
- if (typeof tableName === "string") {
- return tableName;
- } else {
- throw new Error("Name of targetTable is not specified.");
- }
- };
- MaterializedView = class {
- /** @internal */
- kind = "MaterializedView";
- /** The name of the materialized view */
- name;
- /** The target OlapTable instance where the materialized data is stored. */
- targetTable;
- /** The SELECT SQL statement */
- selectSql;
- /** Names of source tables that the SELECT reads from */
- sourceTables;
- /** Optional metadata for the materialized view */
- metadata;
- constructor(options, targetSchema, targetColumns) {
- let selectStatement = options.selectStatement;
- if (typeof selectStatement !== "string") {
- selectStatement = toStaticQuery(selectStatement);
- }
- if (targetSchema === void 0 || targetColumns === void 0) {
- throw new Error(
- "Supply the type param T so that the schema is inserted by the compiler plugin."
- );
- }
- const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
- requireTargetTableName(
- options.targetTable?.name ?? options.tableName
- ),
- {
- orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
- engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
- },
- targetSchema,
- targetColumns
- );
- if (targetTable.name === options.materializedViewName) {
- throw new Error(
- "Materialized view name cannot be the same as the target table name."
- );
- }
- this.name = options.materializedViewName;
- this.targetTable = targetTable;
- this.selectSql = selectStatement;
- this.sourceTables = options.selectTables.map(
- (t) => formatTableReference(t)
- );
- this.metadata = options.metadata ? { ...options.metadata } : {};
- if (!this.metadata.source) {
- const stack = new Error().stack;
- const sourceInfo = getSourceFileFromStack(stack);
- if (sourceInfo) {
- this.metadata.source = { file: sourceInfo };
- }
- }
- const materializedViews = getMooseInternal().materializedViews;
- if (!isClientOnlyMode() && materializedViews.has(this.name)) {
- throw new Error(`MaterializedView with name ${this.name} already exists`);
- }
- materializedViews.set(this.name, this);
+ var requireTargetTableName = (tableName) => {
+ if (typeof tableName === "string") {
+ return tableName;
+ } else {
+ throw new Error("Name of targetTable is not specified.");
+ }
+ };
+ var MaterializedView = class {
+ /** @internal */
+ kind = "MaterializedView";
+ /** The name of the materialized view */
+ name;
+ /** The target OlapTable instance where the materialized data is stored. */
+ targetTable;
+ /** The SELECT SQL statement */
+ selectSql;
+ /** Names of source tables that the SELECT reads from */
+ sourceTables;
+ /** Optional metadata for the materialized view */
+ metadata;
+ constructor(options, targetSchema, targetColumns) {
+ let selectStatement = options.selectStatement;
+ if (typeof selectStatement !== "string") {
+ selectStatement = toStaticQuery(selectStatement);
+ }
+ if (targetSchema === void 0 || targetColumns === void 0) {
+ throw new Error(
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
+ );
+ }
+ const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
+ requireTargetTableName(
+ options.targetTable?.name ?? options.tableName
+ ),
+ {
+ orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
+ engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
+ },
+ targetSchema,
+ targetColumns
+ );
+ if (targetTable.name === options.materializedViewName) {
+ throw new Error(
+ "Materialized view name cannot be the same as the target table name."
+ );
+ }
+ this.name = options.materializedViewName;
+ this.targetTable = targetTable;
+ this.selectSql = selectStatement;
+ this.sourceTables = options.selectTables.map(
+ (t) => formatTableReference(t)
+ );
+ this.metadata = options.metadata ? { ...options.metadata } : {};
+ if (!this.metadata.source) {
+ const stack = new Error().stack;
+ const sourceInfo = getSourceFileFromStack(stack);
+ if (sourceInfo) {
+ this.metadata.source = { file: sourceInfo };
  }
- };
+ }
+ const materializedViews = getMooseInternal().materializedViews;
+ if (!isClientOnlyMode() && materializedViews.has(this.name)) {
+ throw new Error(`MaterializedView with name ${this.name} already exists`);
+ }
+ materializedViews.set(this.name, this);
  }
- });
+ };
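The options object carries the view name, the SELECT, the tables it reads, and either an existing target `OlapTable` or enough detail (`tableName`, `orderByFields`, `engine`) to create one; as the constructor enforces, the view and target names must differ. A hedged sketch assuming a hypothetical `pageViews` table and the library's `sql` template helper:

```typescript
interface DailyViews { day: Date; url: string; views: number }
new MaterializedView<DailyViews>({
  materializedViewName: "daily_views_mv",
  tableName: "daily_views", // target table, created as MergeTree by default
  orderByFields: ["day", "url"],
  selectStatement: sql`SELECT toDate(viewedAt) AS day, url, count() AS views
                       FROM ${pageViews} GROUP BY day, url`,
  selectTables: [pageViews],
});
```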

  // src/dmv2/sdk/sqlResource.ts
- var SqlResource;
- var init_sqlResource = __esm({
- "src/dmv2/sdk/sqlResource.ts"() {
- "use strict";
- init_internal();
- init_sqlHelpers();
- init_stackTrace();
- SqlResource = class {
- /** @internal */
- kind = "SqlResource";
- /** Array of SQL statements to execute for setting up the resource. */
- setup;
- /** Array of SQL statements to execute for tearing down the resource. */
- teardown;
- /** The name of the SQL resource (e.g., view name, materialized view name). */
- name;
- /** List of OlapTables or Views that this resource reads data from. */
- pullsDataFrom;
- /** List of OlapTables or Views that this resource writes data to. */
- pushesDataTo;
- /** @internal Source file path where this resource was defined */
- sourceFile;
- /** @internal Source line number where this resource was defined */
- sourceLine;
- /** @internal Source column number where this resource was defined */
- sourceColumn;
- /**
- * Creates a new SqlResource instance.
- * @param name The name of the resource.
- * @param setup An array of SQL DDL statements to create the resource.
- * @param teardown An array of SQL DDL statements to drop the resource.
- * @param options Optional configuration for specifying data dependencies.
- * @param options.pullsDataFrom Tables/Views this resource reads from.
- * @param options.pushesDataTo Tables/Views this resource writes to.
- */
- constructor(name, setup, teardown, options) {
- const sqlResources = getMooseInternal().sqlResources;
- if (!isClientOnlyMode() && sqlResources.has(name)) {
- throw new Error(`SqlResource with name ${name} already exists`);
- }
- sqlResources.set(name, this);
- this.name = name;
- this.setup = setup.map(
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
- );
- this.teardown = teardown.map(
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
- );
- this.pullsDataFrom = options?.pullsDataFrom ?? [];
- this.pushesDataTo = options?.pushesDataTo ?? [];
- const stack = new Error().stack;
- const location = getSourceLocationFromStack(stack);
- if (location) {
- this.sourceFile = location.file;
- this.sourceLine = location.line;
- this.sourceColumn = location.column;
- }
- }
- };
+ var SqlResource = class {
+ /** @internal */
+ kind = "SqlResource";
+ /** Array of SQL statements to execute for setting up the resource. */
+ setup;
+ /** Array of SQL statements to execute for tearing down the resource. */
+ teardown;
+ /** The name of the SQL resource (e.g., view name, materialized view name). */
+ name;
+ /** List of OlapTables or Views that this resource reads data from. */
+ pullsDataFrom;
+ /** List of OlapTables or Views that this resource writes data to. */
+ pushesDataTo;
+ /** @internal Source file path where this resource was defined */
+ sourceFile;
+ /** @internal Source line number where this resource was defined */
+ sourceLine;
+ /** @internal Source column number where this resource was defined */
+ sourceColumn;
+ /**
+ * Creates a new SqlResource instance.
+ * @param name The name of the resource.
+ * @param setup An array of SQL DDL statements to create the resource.
+ * @param teardown An array of SQL DDL statements to drop the resource.
+ * @param options Optional configuration for specifying data dependencies.
+ * @param options.pullsDataFrom Tables/Views this resource reads from.
+ * @param options.pushesDataTo Tables/Views this resource writes to.
+ */
+ constructor(name, setup, teardown, options) {
+ const sqlResources = getMooseInternal().sqlResources;
+ if (!isClientOnlyMode() && sqlResources.has(name)) {
+ throw new Error(`SqlResource with name ${name} already exists`);
+ }
+ sqlResources.set(name, this);
+ this.name = name;
+ this.setup = setup.map(
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
+ );
+ this.teardown = teardown.map(
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
+ );
+ this.pullsDataFrom = options?.pullsDataFrom ?? [];
+ this.pushesDataTo = options?.pushesDataTo ?? [];
+ const stack = new Error().stack;
+ const location = getSourceLocationFromStack(stack);
+ if (location) {
+ this.sourceFile = location.file;
+ this.sourceLine = location.line;
+ this.sourceColumn = location.column;
+ }
  }
- });
+ };
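`SqlResource` pairs raw DDL with its inverse and records read/write dependencies so setup and teardown can be ordered. A sketch with a hypothetical `usersTable`:

```typescript
new SqlResource(
  "active_users_view",
  ["CREATE VIEW active_users_view AS SELECT * FROM users WHERE active = 1"],
  ["DROP VIEW IF EXISTS active_users_view"],
  { pullsDataFrom: [usersTable] } // dependency tracking for migration ordering
);
```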

  // src/dmv2/sdk/view.ts
  function formatTableReference2(table) {
@@ -2851,171 +2662,150 @@ function formatTableReference2(table) {
  }
  return `\`${table.name}\``;
  }
- var View;
- var init_view = __esm({
- "src/dmv2/sdk/view.ts"() {
- "use strict";
- init_sqlHelpers();
- init_olapTable();
- init_internal();
- init_stackTrace();
- View = class {
- /** @internal */
- kind = "View";
- /** The name of the view */
- name;
- /** The SELECT SQL statement that defines the view */
- selectSql;
- /** Names of source tables/views that the SELECT reads from */
- sourceTables;
- /** Optional metadata for the view */
- metadata;
- /**
- * Creates a new View instance.
- * @param name The name of the view to be created.
- * @param selectStatement The SQL SELECT statement that defines the view's logic.
- * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
- * @param metadata Optional metadata for the view (e.g., description, source file).
- */
- constructor(name, selectStatement, baseTables, metadata) {
- if (typeof selectStatement !== "string") {
- selectStatement = toStaticQuery(selectStatement);
- }
- this.name = name;
- this.selectSql = selectStatement;
- this.sourceTables = baseTables.map((t) => formatTableReference2(t));
- this.metadata = metadata ? { ...metadata } : {};
- if (!this.metadata.source) {
- const stack = new Error().stack;
- const sourceInfo = getSourceFileFromStack(stack);
- if (sourceInfo) {
- this.metadata.source = { file: sourceInfo };
- }
- }
- const views = getMooseInternal().views;
- if (!isClientOnlyMode() && views.has(this.name)) {
- throw new Error(`View with name ${this.name} already exists`);
- }
- views.set(this.name, this);
+ var View = class {
+ /** @internal */
+ kind = "View";
+ /** The name of the view */
+ name;
+ /** The SELECT SQL statement that defines the view */
+ selectSql;
+ /** Names of source tables/views that the SELECT reads from */
+ sourceTables;
+ /** Optional metadata for the view */
+ metadata;
+ /**
+ * Creates a new View instance.
+ * @param name The name of the view to be created.
+ * @param selectStatement The SQL SELECT statement that defines the view's logic.
+ * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
+ * @param metadata Optional metadata for the view (e.g., description, source file).
+ */
+ constructor(name, selectStatement, baseTables, metadata) {
+ if (typeof selectStatement !== "string") {
+ selectStatement = toStaticQuery(selectStatement);
+ }
+ this.name = name;
+ this.selectSql = selectStatement;
+ this.sourceTables = baseTables.map((t) => formatTableReference2(t));
+ this.metadata = metadata ? { ...metadata } : {};
+ if (!this.metadata.source) {
+ const stack = new Error().stack;
+ const sourceInfo = getSourceFileFromStack(stack);
+ if (sourceInfo) {
+ this.metadata.source = { file: sourceInfo };
  }
- };
+ }
+ const views = getMooseInternal().views;
+ if (!isClientOnlyMode() && views.has(this.name)) {
+ throw new Error(`View with name ${this.name} already exists`);
+ }
+ views.set(this.name, this);
  }
- });
+ };
2705
 
2905
2706
  // src/dmv2/sdk/lifeCycle.ts
2906
- var LifeCycle;
2907
- var init_lifeCycle = __esm({
2908
- "src/dmv2/sdk/lifeCycle.ts"() {
2909
- "use strict";
2910
- LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2911
- LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2912
- LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2913
- LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2914
- return LifeCycle2;
2915
- })(LifeCycle || {});
2916
- }
2917
- });
2707
+ var LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2708
+ LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2709
+ LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2710
+ LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2711
+ return LifeCycle2;
2712
+ })(LifeCycle || {});
2918
2713
 
  // src/dmv2/sdk/webApp.ts
- var RESERVED_MOUNT_PATHS, WebApp;
- var init_webApp = __esm({
- "src/dmv2/sdk/webApp.ts"() {
- "use strict";
- init_internal();
- RESERVED_MOUNT_PATHS = [
- "/admin",
- "/api",
- "/consumption",
- "/health",
- "/ingest",
- "/moose",
- // reserved for future use
- "/ready",
- "/workflows"
- ];
- WebApp = class {
- name;
- handler;
- config;
- _rawApp;
- constructor(name, appOrHandler, config) {
- this.name = name;
- this.config = config;
- if (!this.config.mountPath) {
- throw new Error(
- `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
- );
- }
- const mountPath = this.config.mountPath;
- if (mountPath === "/") {
- throw new Error(
- `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
- );
- }
- if (mountPath.endsWith("/")) {
+ var RESERVED_MOUNT_PATHS = [
+ "/admin",
+ "/api",
+ "/consumption",
+ "/health",
+ "/ingest",
+ "/moose",
+ // reserved for future use
+ "/ready",
+ "/workflows"
+ ];
+ var WebApp = class {
+ name;
+ handler;
+ config;
+ _rawApp;
+ constructor(name, appOrHandler, config) {
+ this.name = name;
+ this.config = config;
+ if (!this.config.mountPath) {
+ throw new Error(
+ `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
+ );
+ }
+ const mountPath = this.config.mountPath;
+ if (mountPath === "/") {
+ throw new Error(
+ `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
+ );
+ }
+ if (mountPath.endsWith("/")) {
+ throw new Error(
+ `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
+ );
+ }
+ for (const reserved of RESERVED_MOUNT_PATHS) {
+ if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
+ throw new Error(
+ `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
+ );
+ }
+ }
+ this.handler = this.toHandler(appOrHandler);
+ this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
+ const webApps = getMooseInternal().webApps;
+ if (webApps.has(name)) {
+ throw new Error(`WebApp with name ${name} already exists`);
+ }
+ if (this.config.mountPath) {
+ for (const [existingName, existingApp] of webApps) {
+ if (existingApp.config.mountPath === this.config.mountPath) {
  throw new Error(
- `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
+ `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
  );
  }
- for (const reserved of RESERVED_MOUNT_PATHS) {
- if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
- throw new Error(
- `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
- );
- }
- }
- this.handler = this.toHandler(appOrHandler);
- this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
- const webApps = getMooseInternal().webApps;
- if (webApps.has(name)) {
- throw new Error(`WebApp with name ${name} already exists`);
- }
- if (this.config.mountPath) {
- for (const [existingName, existingApp] of webApps) {
- if (existingApp.config.mountPath === this.config.mountPath) {
- throw new Error(
- `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
- );
- }
- }
- }
- webApps.set(name, this);
  }
- toHandler(appOrHandler) {
- if (typeof appOrHandler === "function") {
- return appOrHandler;
- }
- const app = appOrHandler;
- if (typeof app.handle === "function") {
- return (req, res) => {
- app.handle(req, res, (err) => {
- if (err) {
- console.error("WebApp handler error:", err);
- if (!res.headersSent) {
- res.writeHead(500, { "Content-Type": "application/json" });
- res.end(JSON.stringify({ error: "Internal Server Error" }));
- }
- }
- });
- };
- }
- if (typeof app.callback === "function") {
- return app.callback();
- }
- if (typeof app.routing === "function") {
- const routing = app.routing;
- const appWithReady = app;
- let readyPromise = null;
- return async (req, res) => {
- if (readyPromise === null) {
- readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
+ }
+ webApps.set(name, this);
+ }
+ toHandler(appOrHandler) {
+ if (typeof appOrHandler === "function") {
+ return appOrHandler;
+ }
+ const app = appOrHandler;
+ if (typeof app.handle === "function") {
+ return (req, res) => {
+ app.handle(req, res, (err) => {
+ if (err) {
+ console.error("WebApp handler error:", err);
+ if (!res.headersSent) {
+ res.writeHead(500, { "Content-Type": "application/json" });
+ res.end(JSON.stringify({ error: "Internal Server Error" }));
  }
- await readyPromise;
- routing(req, res);
- };
- }
- throw new Error(
- `Unable to convert app to handler. The provided object must be:
+ }
+ });
+ };
+ }
+ if (typeof app.callback === "function") {
+ return app.callback();
+ }
+ if (typeof app.routing === "function") {
+ const routing = app.routing;
+ const appWithReady = app;
+ let readyPromise = null;
+ return async (req, res) => {
+ if (readyPromise === null) {
+ readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
+ }
+ await readyPromise;
+ routing(req, res);
+ };
+ }
+ throw new Error(
+ `Unable to convert app to handler. The provided object must be:
  - A function (raw Node.js handler)
  - An object with .handle() method (Express, Connect)
  - An object with .callback() method (Koa)
@@ -3027,14 +2817,12 @@ Examples:
  Fastify: new WebApp("name", fastifyApp)
  Raw: new WebApp("name", (req, res) => { ... })
  `
- );
- }
- getRawApp() {
- return this._rawApp;
- }
- };
+ );
  }
- });
+ getRawApp() {
+ return this._rawApp;
+ }
+ };
 
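The WebApp class itself is unchanged between the two versions; only the lazy `init_webApp` wrapper around it was removed. For orientation, a usage sketch against the validation rules above, assuming an Express app (Express exposes `.handle()`, so `toHandler` wraps it as a plain Node handler):

    import express from "express";
    import { WebApp } from "@514labs/moose-lib";

    const app = express();
    app.get("/hello", (_req, res) => res.json({ ok: true }));

    // Valid: a non-root, non-reserved mount path with no trailing slash.
    new WebApp("hello-app", app, { mountPath: "/myapi" });

    // Each of these would throw per the constructor checks above:
    //   mountPath: "/"          -> would overlap the reserved paths
    //   mountPath: "/myapi/"    -> trailing slash
    //   mountPath: "/api/stuff" -> begins with a reserved path
    //   reusing "hello-app" or "/myapi" -> duplicate name / mount path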
  // src/dmv2/registry.ts
  function getTables() {
@@ -3055,7 +2843,7 @@ function getIngestApis() {
  function getIngestApi(name) {
  return getMooseInternal().ingestApis.get(name);
  }
- function getApis2() {
+ function getApis() {
  return getMooseInternal().apis;
  }
  function getApi(nameOrPath) {
@@ -3094,7 +2882,7 @@ function getWorkflows2() {
  function getWorkflow(name) {
  return getMooseInternal().workflows.get(name);
  }
- function getWebApps2() {
+ function getWebApps() {
  return getMooseInternal().webApps;
  }
  function getWebApp(name) {
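The `2` suffixes on `getApis2` and `getWebApps2` were esbuild-generated renames to avoid symbol collisions inside the old bundle; the public API was already `getApis`/`getWebApps` through the export map (note `getApis: () => getApis2` in the block removed below). With the indirection gone, the functions keep their source names. A usage sketch; the Map value types are inferred from the surrounding registry code rather than stated in this diff:

    import { getApis, getWebApps, getWebApp } from "@514labs/moose-lib";

    const apis = getApis();       // the registry's Map of Api instances
    const webApps = getWebApps(); // the registry's Map of WebApp instances

    // Single lookups read the same Maps:
    const hello = getWebApp("hello-app"); // WebApp | undefined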
@@ -3112,90 +2900,6 @@ function getViews() {
  function getView(name) {
  return getMooseInternal().views.get(name);
  }
- var init_registry = __esm({
- "src/dmv2/registry.ts"() {
- "use strict";
- init_internal();
- }
- });
-
- // src/dmv2/index.ts
- var init_dmv2 = __esm({
- "src/dmv2/index.ts"() {
- "use strict";
- init_olapTable();
- init_types();
- init_stream();
- init_workflow();
- init_ingestApi();
- init_consumptionApi();
- init_ingestPipeline();
- init_etlPipeline();
- init_materializedView();
- init_sqlResource();
- init_view();
- init_lifeCycle();
- init_webApp();
- init_registry();
- }
- });
-
- // src/browserCompatible.ts
- var browserCompatible_exports = {};
- __export(browserCompatible_exports, {
- Api: () => Api,
- ClickHouseEngines: () => ClickHouseEngines,
- ConsumptionApi: () => ConsumptionApi,
- DeadLetterQueue: () => DeadLetterQueue,
- ETLPipeline: () => ETLPipeline,
- IngestApi: () => IngestApi,
- IngestPipeline: () => IngestPipeline,
- LifeCycle: () => LifeCycle,
- MaterializedView: () => MaterializedView,
- OlapTable: () => OlapTable,
- Sql: () => Sql,
- SqlResource: () => SqlResource,
- Stream: () => Stream,
- Task: () => Task,
- View: () => View,
- WebApp: () => WebApp,
- Workflow: () => Workflow,
- createClickhouseParameter: () => createClickhouseParameter,
- getApi: () => getApi,
- getApis: () => getApis2,
- getIngestApi: () => getIngestApi,
- getIngestApis: () => getIngestApis,
- getMaterializedView: () => getMaterializedView,
- getMaterializedViews: () => getMaterializedViews,
- getSqlResource: () => getSqlResource,
- getSqlResources: () => getSqlResources,
- getStream: () => getStream,
- getStreams: () => getStreams,
- getTable: () => getTable,
- getTables: () => getTables,
- getValueFromParameter: () => getValueFromParameter,
- getView: () => getView,
- getViews: () => getViews,
- getWebApp: () => getWebApp,
- getWebApps: () => getWebApps2,
- getWorkflow: () => getWorkflow,
- getWorkflows: () => getWorkflows2,
- mapToClickHouseType: () => mapToClickHouseType,
- quoteIdentifier: () => quoteIdentifier,
- sql: () => sql,
- toQuery: () => toQuery,
- toQueryPreview: () => toQueryPreview,
- toStaticQuery: () => toStaticQuery
- });
- module.exports = __toCommonJS(browserCompatible_exports);
- var init_browserCompatible = __esm({
- "src/browserCompatible.ts"() {
- init_dmv2();
- init_types();
- init_sqlHelpers();
- }
- });
- init_browserCompatible();
  // Annotate the CommonJS export names for ESM import in node:
  0 && (module.exports = {
  Api,