@514labs/moose-lib 0.6.290-ci-13-g7dbff416 → 0.6.290

This diff shows the content of publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
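The main change visible below is structural: the dmv2 entry point is no longer wrapped in lazy `__esm` init blocks, but is bundled as a flat CommonJS module that assigns `module.exports = __toCommonJS(dmv2_exports)` eagerly. One piece of behavior that is unchanged and easy to read off both sides of the diff is the versioned table naming in `OlapTable.generateTableName()`: a version suffix has its dots replaced with underscores and is appended to the table name. A minimal TypeScript sketch of that rule, for illustration only (`versionedTableName` is a hypothetical helper, not a package export):

    // Mirrors the naming rule in OlapTable.generateTableName() below:
    // unversioned tables keep their plain name; versioned tables get a
    // suffix with the dots replaced by underscores.
    function versionedTableName(name: string, version?: string): string {
      if (!version) return name;
      return `${name}_${version.replace(/\./g, "_")}`;
    }

    console.log(versionedTableName("events"));            // "events"
    console.log(versionedTableName("events", "0.6.290")); // "events_0_6_290"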
@@ -30,247 +30,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
  ));
  var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
- // src/dmv2/utils/stackTrace.ts
- function shouldSkipStackLine(line) {
- return line.includes("node_modules") || // Skip npm installed packages (prod)
- line.includes("node:internal") || // Skip Node.js internals (modern format)
- line.includes("internal/modules") || // Skip Node.js internals (older format)
- line.includes("ts-node") || // Skip TypeScript execution
- line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
- line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
- line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
- line.includes("\\ts-moose-lib\\dist\\");
- }
- function parseStackLine(line) {
- const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
- if (match && match[1]) {
- return {
- file: match[1],
- line: match[2]
- };
- }
- return void 0;
- }
- function getSourceFileInfo(stack) {
- if (!stack) return {};
- const lines = stack.split("\n");
- for (const line of lines) {
- if (shouldSkipStackLine(line)) continue;
- const info = parseStackLine(line);
- if (info) return info;
- }
- return {};
- }
- function getSourceLocationFromStack(stack) {
- if (!stack) return void 0;
- const lines = stack.split("\n");
- for (const line of lines.slice(1)) {
- if (shouldSkipStackLine(line)) {
- continue;
- }
- const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
- if (v8Match) {
- return {
- file: v8Match[1],
- line: parseInt(v8Match[2], 10),
- column: parseInt(v8Match[3], 10)
- };
- }
- const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
- if (smMatch) {
- return {
- file: smMatch[1],
- line: parseInt(smMatch[2], 10),
- column: parseInt(smMatch[3], 10)
- };
- }
- }
- return void 0;
- }
- function getSourceFileFromStack(stack) {
- const location = getSourceLocationFromStack(stack);
- return location?.file;
- }
- var init_stackTrace = __esm({
- "src/dmv2/utils/stackTrace.ts"() {
- "use strict";
- }
- });
-
- // src/dmv2/typedBase.ts
- var TypedBase;
- var init_typedBase = __esm({
- "src/dmv2/typedBase.ts"() {
- "use strict";
- init_stackTrace();
- TypedBase = class {
- /** The JSON schema representation of type T. Injected by the compiler plugin. */
- schema;
- /** The name assigned to this resource instance. */
- name;
- /** A dictionary mapping column names (keys of T) to their Column definitions. */
- columns;
- /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
- columnArray;
- /** The configuration object specific to this resource type. */
- config;
- /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
- validators;
- /** Optional metadata for the resource, always present as an object. */
- metadata;
- /**
- * Whether this resource allows extra fields beyond the defined columns.
- * When true, extra fields in payloads are passed through to streaming functions.
- * Injected by the compiler plugin when the type has an index signature.
- */
- allowExtraFields;
- /**
- * @internal Constructor intended for internal use by subclasses and the compiler plugin.
- * It expects the schema and columns to be provided, typically injected by the compiler.
- *
- * @param name The name for the resource instance.
- * @param config The configuration object for the resource.
- * @param schema The JSON schema for the resource's data type T (injected).
- * @param columns The array of Column definitions for T (injected).
- * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
- */
- constructor(name, config, schema, columns, validators, allowExtraFields) {
- if (schema === void 0 || columns === void 0) {
- throw new Error(
- "Supply the type param T so that the schema is inserted by the compiler plugin."
- );
- }
- this.schema = schema;
- this.columnArray = columns;
- const columnsObj = {};
- columns.forEach((column) => {
- columnsObj[column.name] = column;
- });
- this.columns = columnsObj;
- this.name = name;
- this.config = config;
- this.validators = validators;
- this.allowExtraFields = allowExtraFields ?? false;
- this.metadata = config?.metadata ? { ...config.metadata } : {};
- if (!this.metadata.source) {
- const stack = new Error().stack;
- if (stack) {
- const info = getSourceFileInfo(stack);
- this.metadata.source = { file: info.file, line: info.line };
- }
- }
- }
- };
- }
- });
-
- // src/dataModels/dataModelTypes.ts
- function isArrayNestedType(dt) {
- return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
- }
- function isNestedType(dt) {
- return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
- }
- var init_dataModelTypes = __esm({
- "src/dataModels/dataModelTypes.ts"() {
- "use strict";
- }
- });
-
- // src/sqlHelpers.ts
- function createClickhouseParameter(parameterIndex, value) {
- return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
- }
- function emptyIfUndefined(value) {
- return value === void 0 ? "" : value;
- }
- var quoteIdentifier, toStaticQuery, toQuery, getValueFromParameter, mapToClickHouseType;
- var init_sqlHelpers = __esm({
- "src/sqlHelpers.ts"() {
- "use strict";
- quoteIdentifier = (name) => {
- return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
- };
- toStaticQuery = (sql3) => {
- const [query, params] = toQuery(sql3);
- if (Object.keys(params).length !== 0) {
- throw new Error(
- "Dynamic SQL is not allowed in the select statement in view creation."
- );
- }
- return query;
- };
- toQuery = (sql3) => {
- const parameterizedStubs = sql3.values.map(
- (v, i) => createClickhouseParameter(i, v)
- );
- const query = sql3.strings.map(
- (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
- ).join("");
- const query_params = sql3.values.reduce(
- (acc, v, i) => ({
- ...acc,
- [`p${i}`]: getValueFromParameter(v)
- }),
- {}
- );
- return [query, query_params];
- };
- getValueFromParameter = (value) => {
- if (Array.isArray(value)) {
- const [type, val] = value;
- if (type === "Identifier") return val;
- }
- return value;
- };
- mapToClickHouseType = (value) => {
- if (typeof value === "number") {
- return Number.isInteger(value) ? "Int" : "Float";
- }
- if (typeof value === "boolean") return "Bool";
- if (value instanceof Date) return "DateTime";
- if (Array.isArray(value)) {
- const [type, _] = value;
- return type;
- }
- return "String";
- };
- }
- });
-
- // src/blocks/helpers.ts
- function dropView(name) {
- return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
- }
- function createMaterializedView(options) {
- return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
- TO ${quoteIdentifier(options.destinationTable)}
- AS ${options.select}`.trim();
- }
- var init_helpers = __esm({
- "src/blocks/helpers.ts"() {
- "use strict";
- init_sqlHelpers();
- }
- });
-
- // src/dataModels/types.ts
- var init_types = __esm({
- "src/dataModels/types.ts"() {
- "use strict";
- }
- });
-
- // src/browserCompatible.ts
- var init_browserCompatible = __esm({
- "src/browserCompatible.ts"() {
- "use strict";
- init_dmv2();
- init_types();
- init_sqlHelpers();
- }
- });
-
  // src/commons.ts
  var commons_exports = {};
  __export(commons_exports, {
@@ -438,299 +197,6 @@ var init_commons = __esm({
  }
  });
 
- // src/secrets.ts
- var init_secrets = __esm({
- "src/secrets.ts"() {
- "use strict";
- }
- });
-
- // src/consumption-apis/helpers.ts
- var import_client2, import_node_crypto;
- var init_helpers2 = __esm({
- "src/consumption-apis/helpers.ts"() {
- "use strict";
- import_client2 = require("@temporalio/client");
- import_node_crypto = require("crypto");
- init_internal();
- init_sqlHelpers();
- }
- });
-
- // src/consumption-apis/webAppHelpers.ts
- var init_webAppHelpers = __esm({
- "src/consumption-apis/webAppHelpers.ts"() {
- "use strict";
- }
- });
-
- // src/scripts/task.ts
- var init_task = __esm({
- "src/scripts/task.ts"() {
- "use strict";
- }
- });
-
- // src/cluster-utils.ts
- var import_node_cluster, import_node_os, import_node_process;
- var init_cluster_utils = __esm({
- "src/cluster-utils.ts"() {
- "use strict";
- import_node_cluster = __toESM(require("cluster"));
- import_node_os = require("os");
- import_node_process = require("process");
- }
- });
-
- // src/consumption-apis/runner.ts
- var jose;
- var init_runner = __esm({
- "src/consumption-apis/runner.ts"() {
- "use strict";
- init_commons();
- init_helpers2();
- jose = __toESM(require("jose"));
- init_cluster_utils();
- init_sqlHelpers();
- init_internal();
- }
- });
-
- // src/clients/redisClient.ts
- var import_redis;
- var init_redisClient = __esm({
- "src/clients/redisClient.ts"() {
- "use strict";
- import_redis = require("redis");
- }
- });
-
- // src/consumption-apis/standalone.ts
- var init_standalone = __esm({
- "src/consumption-apis/standalone.ts"() {
- "use strict";
- init_helpers2();
- init_commons();
- init_sqlHelpers();
- }
- });
-
- // src/utilities/json.ts
- var init_json = __esm({
- "src/utilities/json.ts"() {
- "use strict";
- }
- });
-
- // src/utilities/dataParser.ts
- var import_csv_parse, CSV_DELIMITERS, DEFAULT_CSV_CONFIG;
- var init_dataParser = __esm({
- "src/utilities/dataParser.ts"() {
- "use strict";
- import_csv_parse = require("csv-parse");
- init_json();
- CSV_DELIMITERS = {
- COMMA: ",",
- TAB: "\t",
- SEMICOLON: ";",
- PIPE: "|"
- };
- DEFAULT_CSV_CONFIG = {
- delimiter: CSV_DELIMITERS.COMMA,
- columns: true,
- skipEmptyLines: true,
- trim: true
- };
- }
- });
-
- // src/utilities/index.ts
- var init_utilities = __esm({
- "src/utilities/index.ts"() {
- "use strict";
- init_dataParser();
- }
- });
-
- // src/connectors/dataSource.ts
- var init_dataSource = __esm({
- "src/connectors/dataSource.ts"() {
- "use strict";
- }
- });
-
- // src/index.ts
- var init_index = __esm({
- "src/index.ts"() {
- "use strict";
- init_browserCompatible();
- init_helpers();
- init_commons();
- init_secrets();
- init_helpers2();
- init_webAppHelpers();
- init_task();
- init_runner();
- init_redisClient();
- init_helpers2();
- init_standalone();
- init_sqlHelpers();
- init_utilities();
- init_dataSource();
- init_types();
- }
- });
-
- // src/dmv2/internal.ts
- var import_process, isClientOnlyMode, moose_internal, defaultRetentionPeriod, getMooseInternal, dlqSchema, dlqColumns;
- var init_internal = __esm({
- "src/dmv2/internal.ts"() {
- "use strict";
- import_process = __toESM(require("process"));
- init_index();
- init_commons();
- isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
- moose_internal = {
- tables: /* @__PURE__ */ new Map(),
- streams: /* @__PURE__ */ new Map(),
- ingestApis: /* @__PURE__ */ new Map(),
- apis: /* @__PURE__ */ new Map(),
- sqlResources: /* @__PURE__ */ new Map(),
- workflows: /* @__PURE__ */ new Map(),
- webApps: /* @__PURE__ */ new Map()
- };
- defaultRetentionPeriod = 60 * 60 * 24 * 7;
- getMooseInternal = () => globalThis.moose_internal;
- if (getMooseInternal() === void 0) {
- globalThis.moose_internal = moose_internal;
- }
- dlqSchema = {
- version: "3.1",
- components: {
- schemas: {
- DeadLetterModel: {
- type: "object",
- properties: {
- originalRecord: {
- $ref: "#/components/schemas/Recordstringany"
- },
- errorMessage: {
- type: "string"
- },
- errorType: {
- type: "string"
- },
- failedAt: {
- type: "string",
- format: "date-time"
- },
- source: {
- oneOf: [
- {
- const: "api"
- },
- {
- const: "transform"
- },
- {
- const: "table"
- }
- ]
- }
- },
- required: [
- "originalRecord",
- "errorMessage",
- "errorType",
- "failedAt",
- "source"
- ]
- },
- Recordstringany: {
- type: "object",
- properties: {},
- required: [],
- description: "Construct a type with a set of properties K of type T",
- additionalProperties: {}
- }
- }
- },
- schemas: [
- {
- $ref: "#/components/schemas/DeadLetterModel"
- }
- ]
- };
- dlqColumns = [
- {
- name: "originalRecord",
- data_type: "Json",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "errorMessage",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "errorType",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "failedAt",
- data_type: "DateTime",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "source",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- }
- ];
- }
- });
-
  // src/config/configFile.ts
  async function findConfigFile(startDir = process.cwd()) {
  const fs = await import("fs");
@@ -915,1849 +381,2161 @@ var init_runtime = __esm({
  }
  });
 
- // src/dmv2/sdk/olapTable.ts
- var import_node_stream, import_node_crypto2, OlapTable;
- var init_olapTable = __esm({
- "src/dmv2/sdk/olapTable.ts"() {
- "use strict";
- init_typedBase();
- init_dataModelTypes();
- init_helpers();
- init_internal();
- import_node_stream = require("stream");
- import_node_crypto2 = require("crypto");
- init_sqlHelpers();
- OlapTable = class extends TypedBase {
- name;
- /** @internal */
- kind = "OlapTable";
- /** @internal Memoized ClickHouse client for reusing connections across insert calls */
- _memoizedClient;
- /** @internal Hash of the configuration used to create the memoized client */
- _configHash;
- /** @internal Cached table name to avoid repeated generation */
- _cachedTableName;
- constructor(name, config, schema, columns, validators) {
- const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
- const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
- const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
- if (hasFields && hasExpr) {
- throw new Error(
- `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
- );
- }
- const hasCluster = typeof resolvedConfig.cluster === "string";
- const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
- const hasReplicaName = typeof resolvedConfig.replicaName === "string";
- if (hasCluster && (hasKeeperPath || hasReplicaName)) {
- throw new Error(
- `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
- );
- }
- super(name, resolvedConfig, schema, columns, validators);
- this.name = name;
- const tables = getMooseInternal().tables;
- const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
- if (!isClientOnlyMode() && tables.has(registryKey)) {
- throw new Error(
- `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
- );
- }
- tables.set(registryKey, this);
- }
- /**
- * Generates the versioned table name following Moose's naming convention
- * Format: {tableName}_{version_with_dots_replaced_by_underscores}
- */
- generateTableName() {
- if (this._cachedTableName) {
- return this._cachedTableName;
- }
- const tableVersion = this.config.version;
- if (!tableVersion) {
- this._cachedTableName = this.name;
- } else {
- const versionSuffix = tableVersion.replace(/\./g, "_");
- this._cachedTableName = `${this.name}_${versionSuffix}`;
- }
- return this._cachedTableName;
- }
- /**
- * Creates a fast hash of the ClickHouse configuration.
- * Uses crypto.createHash for better performance than JSON.stringify.
- *
- * @private
- */
- createConfigHash(clickhouseConfig) {
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
- const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
- return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
- }
- /**
- * Gets or creates a memoized ClickHouse client.
- * The client is cached and reused across multiple insert calls for better performance.
- * If the configuration changes, a new client will be created.
- *
- * @private
- */
- async getMemoizedClient() {
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
- const configRegistry = globalThis._mooseConfigRegistry;
- const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
- const clickhouseConfig = await configRegistry.getClickHouseConfig();
- const currentConfigHash = this.createConfigHash(clickhouseConfig);
- if (this._memoizedClient && this._configHash === currentConfigHash) {
- return { client: this._memoizedClient, config: clickhouseConfig };
- }
- if (this._memoizedClient && this._configHash !== currentConfigHash) {
- try {
- await this._memoizedClient.close();
- } catch (error) {
- }
- }
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
- const client = getClickhouseClient2({
- username: clickhouseConfig.username,
- password: clickhouseConfig.password,
- database: effectiveDatabase,
- useSSL: clickhouseConfig.useSSL ? "true" : "false",
- host: clickhouseConfig.host,
- port: clickhouseConfig.port
- });
- this._memoizedClient = client;
- this._configHash = currentConfigHash;
- return { client, config: clickhouseConfig };
- }
- /**
- * Closes the memoized ClickHouse client if it exists.
- * This is useful for cleaning up connections when the table instance is no longer needed.
- * The client will be automatically recreated on the next insert call if needed.
- */
- async closeClient() {
- if (this._memoizedClient) {
- try {
- await this._memoizedClient.close();
- } catch (error) {
- } finally {
- this._memoizedClient = void 0;
- this._configHash = void 0;
- }
- }
- }
- /**
- * Validates a single record using typia's comprehensive type checking.
- * This provides the most accurate validation as it uses the exact TypeScript type information.
- *
- * @param record The record to validate
- * @returns Validation result with detailed error information
- */
- validateRecord(record) {
- if (this.validators?.validate) {
- try {
- const result = this.validators.validate(record);
- return {
- success: result.success,
- data: result.data,
- errors: result.errors?.map(
- (err) => typeof err === "string" ? err : JSON.stringify(err)
- )
- };
- } catch (error) {
- return {
- success: false,
- errors: [error instanceof Error ? error.message : String(error)]
- };
+ // src/dmv2/index.ts
+ var dmv2_exports = {};
+ __export(dmv2_exports, {
+ Api: () => Api,
+ ConsumptionApi: () => ConsumptionApi,
+ DeadLetterQueue: () => DeadLetterQueue,
+ ETLPipeline: () => ETLPipeline,
+ IngestApi: () => IngestApi,
+ IngestPipeline: () => IngestPipeline,
+ LifeCycle: () => LifeCycle,
+ MaterializedView: () => MaterializedView,
+ OlapTable: () => OlapTable,
+ SqlResource: () => SqlResource,
+ Stream: () => Stream,
+ Task: () => Task,
+ View: () => View,
+ WebApp: () => WebApp,
+ Workflow: () => Workflow,
+ getApi: () => getApi,
+ getApis: () => getApis,
+ getIngestApi: () => getIngestApi,
+ getIngestApis: () => getIngestApis,
+ getSqlResource: () => getSqlResource,
+ getSqlResources: () => getSqlResources,
+ getStream: () => getStream,
+ getStreams: () => getStreams,
+ getTable: () => getTable,
+ getTables: () => getTables,
+ getWebApp: () => getWebApp,
+ getWebApps: () => getWebApps,
+ getWorkflow: () => getWorkflow,
+ getWorkflows: () => getWorkflows
+ });
+ module.exports = __toCommonJS(dmv2_exports);
+
+ // src/dmv2/utils/stackTrace.ts
+ function shouldSkipStackLine(line) {
+ return line.includes("node_modules") || // Skip npm installed packages (prod)
+ line.includes("node:internal") || // Skip Node.js internals (modern format)
+ line.includes("internal/modules") || // Skip Node.js internals (older format)
+ line.includes("ts-node") || // Skip TypeScript execution
+ line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
+ line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
+ line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
+ line.includes("\\ts-moose-lib\\dist\\");
+ }
+ function parseStackLine(line) {
+ const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
+ if (match && match[1]) {
+ return {
+ file: match[1],
+ line: match[2]
+ };
+ }
+ return void 0;
+ }
+ function getSourceFileInfo(stack) {
+ if (!stack) return {};
+ const lines = stack.split("\n");
+ for (const line of lines) {
+ if (shouldSkipStackLine(line)) continue;
+ const info = parseStackLine(line);
+ if (info) return info;
+ }
+ return {};
+ }
+ function getSourceLocationFromStack(stack) {
+ if (!stack) return void 0;
+ const lines = stack.split("\n");
+ for (const line of lines.slice(1)) {
+ if (shouldSkipStackLine(line)) {
+ continue;
+ }
+ const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
+ if (v8Match) {
+ return {
+ file: v8Match[1],
+ line: parseInt(v8Match[2], 10),
+ column: parseInt(v8Match[3], 10)
+ };
+ }
+ const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
+ if (smMatch) {
+ return {
+ file: smMatch[1],
+ line: parseInt(smMatch[2], 10),
+ column: parseInt(smMatch[3], 10)
+ };
+ }
+ }
+ return void 0;
+ }
+ function getSourceFileFromStack(stack) {
+ const location = getSourceLocationFromStack(stack);
+ return location?.file;
+ }
+
+ // src/dmv2/typedBase.ts
+ var TypedBase = class {
+ /** The JSON schema representation of type T. Injected by the compiler plugin. */
+ schema;
+ /** The name assigned to this resource instance. */
+ name;
+ /** A dictionary mapping column names (keys of T) to their Column definitions. */
+ columns;
+ /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
+ columnArray;
+ /** The configuration object specific to this resource type. */
+ config;
+ /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
+ validators;
+ /** Optional metadata for the resource, always present as an object. */
+ metadata;
+ /**
+ * Whether this resource allows extra fields beyond the defined columns.
+ * When true, extra fields in payloads are passed through to streaming functions.
+ * Injected by the compiler plugin when the type has an index signature.
+ */
+ allowExtraFields;
+ /**
+ * @internal Constructor intended for internal use by subclasses and the compiler plugin.
+ * It expects the schema and columns to be provided, typically injected by the compiler.
+ *
+ * @param name The name for the resource instance.
+ * @param config The configuration object for the resource.
+ * @param schema The JSON schema for the resource's data type T (injected).
+ * @param columns The array of Column definitions for T (injected).
+ * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
+ */
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
+ if (schema === void 0 || columns === void 0) {
+ throw new Error(
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
+ );
+ }
+ this.schema = schema;
+ this.columnArray = columns;
+ const columnsObj = {};
+ columns.forEach((column) => {
+ columnsObj[column.name] = column;
+ });
+ this.columns = columnsObj;
+ this.name = name;
+ this.config = config;
+ this.validators = validators;
+ this.allowExtraFields = allowExtraFields ?? false;
+ this.metadata = config?.metadata ? { ...config.metadata } : {};
+ if (!this.metadata.source) {
+ const stack = new Error().stack;
+ if (stack) {
+ const info = getSourceFileInfo(stack);
+ this.metadata.source = { file: info.file, line: info.line };
+ }
+ }
+ }
+ };
+
+ // src/dataModels/dataModelTypes.ts
+ function isArrayNestedType(dt) {
+ return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
+ }
+ function isNestedType(dt) {
+ return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
+ }
+
+ // src/sqlHelpers.ts
+ var quoteIdentifier = (name) => {
+ return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
+ };
+ var toStaticQuery = (sql3) => {
+ const [query, params] = toQuery(sql3);
+ if (Object.keys(params).length !== 0) {
+ throw new Error(
+ "Dynamic SQL is not allowed in the select statement in view creation."
+ );
+ }
+ return query;
+ };
+ var toQuery = (sql3) => {
+ const parameterizedStubs = sql3.values.map(
+ (v, i) => createClickhouseParameter(i, v)
+ );
+ const query = sql3.strings.map(
+ (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
+ ).join("");
+ const query_params = sql3.values.reduce(
+ (acc, v, i) => ({
+ ...acc,
+ [`p${i}`]: getValueFromParameter(v)
+ }),
+ {}
+ );
+ return [query, query_params];
+ };
+ var getValueFromParameter = (value) => {
+ if (Array.isArray(value)) {
+ const [type, val] = value;
+ if (type === "Identifier") return val;
+ }
+ return value;
+ };
+ function createClickhouseParameter(parameterIndex, value) {
+ return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
+ }
+ var mapToClickHouseType = (value) => {
+ if (typeof value === "number") {
+ return Number.isInteger(value) ? "Int" : "Float";
+ }
+ if (typeof value === "boolean") return "Bool";
+ if (value instanceof Date) return "DateTime";
+ if (Array.isArray(value)) {
+ const [type, _] = value;
+ return type;
+ }
+ return "String";
+ };
+ function emptyIfUndefined(value) {
+ return value === void 0 ? "" : value;
+ }
+
+ // src/blocks/helpers.ts
+ function dropView(name) {
+ return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
+ }
+ function createMaterializedView(options) {
+ return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
+ TO ${quoteIdentifier(options.destinationTable)}
+ AS ${options.select}`.trim();
+ }
+
+ // src/dmv2/internal.ts
+ var import_process = __toESM(require("process"));
+
+ // src/index.ts
+ init_commons();
+
+ // src/consumption-apis/helpers.ts
+ var import_client2 = require("@temporalio/client");
+ var import_node_crypto = require("crypto");
+
+ // src/consumption-apis/runner.ts
+ init_commons();
+ var jose = __toESM(require("jose"));
+
+ // src/cluster-utils.ts
+ var import_node_cluster = __toESM(require("cluster"));
+ var import_node_os = require("os");
+ var import_node_process = require("process");
+
+ // src/clients/redisClient.ts
+ var import_redis = require("redis");
+
+ // src/consumption-apis/standalone.ts
+ init_commons();
+
+ // src/utilities/dataParser.ts
+ var import_csv_parse = require("csv-parse");
+ var CSV_DELIMITERS = {
+ COMMA: ",",
+ TAB: "\t",
+ SEMICOLON: ";",
+ PIPE: "|"
+ };
+ var DEFAULT_CSV_CONFIG = {
+ delimiter: CSV_DELIMITERS.COMMA,
+ columns: true,
+ skipEmptyLines: true,
+ trim: true
+ };
+
+ // src/dmv2/internal.ts
+ init_commons();
+ var isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
+ var moose_internal = {
+ tables: /* @__PURE__ */ new Map(),
+ streams: /* @__PURE__ */ new Map(),
+ ingestApis: /* @__PURE__ */ new Map(),
+ apis: /* @__PURE__ */ new Map(),
+ sqlResources: /* @__PURE__ */ new Map(),
+ workflows: /* @__PURE__ */ new Map(),
+ webApps: /* @__PURE__ */ new Map()
+ };
+ var defaultRetentionPeriod = 60 * 60 * 24 * 7;
+ var getMooseInternal = () => globalThis.moose_internal;
+ if (getMooseInternal() === void 0) {
+ globalThis.moose_internal = moose_internal;
+ }
+ var dlqSchema = {
+ version: "3.1",
+ components: {
+ schemas: {
+ DeadLetterModel: {
+ type: "object",
+ properties: {
+ originalRecord: {
+ $ref: "#/components/schemas/Recordstringany"
+ },
+ errorMessage: {
+ type: "string"
+ },
+ errorType: {
+ type: "string"
+ },
+ failedAt: {
+ type: "string",
+ format: "date-time"
+ },
+ source: {
+ oneOf: [
+ {
+ const: "api"
+ },
+ {
+ const: "transform"
+ },
+ {
+ const: "table"
+ }
+ ]
  }
- }
- throw new Error("No typia validator found");
+ },
+ required: [
+ "originalRecord",
+ "errorMessage",
+ "errorType",
+ "failedAt",
+ "source"
+ ]
+ },
+ Recordstringany: {
+ type: "object",
+ properties: {},
+ required: [],
+ description: "Construct a type with a set of properties K of type T",
+ additionalProperties: {}
  }
- /**
- * Type guard function using typia's is() function.
- * Provides compile-time type narrowing for TypeScript.
- *
- * @param record The record to check
- * @returns True if record matches type T, with type narrowing
- */
- isValidRecord(record) {
- if (this.validators?.is) {
- return this.validators.is(record);
- }
- throw new Error("No typia validator found");
+ }
+ },
+ schemas: [
+ {
+ $ref: "#/components/schemas/DeadLetterModel"
+ }
+ ]
+ };
+ var dlqColumns = [
+ {
+ name: "originalRecord",
+ data_type: "Json",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "errorMessage",
+ data_type: "String",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "errorType",
+ data_type: "String",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "failedAt",
+ data_type: "DateTime",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ },
+ {
+ name: "source",
+ data_type: "String",
+ primary_key: false,
+ required: true,
+ unique: false,
+ default: null,
+ annotations: [],
+ ttl: null,
+ codec: null,
+ materialized: null,
+ comment: null
+ }
+ ];
+
+ // src/dmv2/sdk/olapTable.ts
+ var import_node_stream = require("stream");
+ var import_node_crypto2 = require("crypto");
+ var OlapTable = class extends TypedBase {
+ name;
+ /** @internal */
+ kind = "OlapTable";
+ /** @internal Memoized ClickHouse client for reusing connections across insert calls */
+ _memoizedClient;
+ /** @internal Hash of the configuration used to create the memoized client */
+ _configHash;
+ /** @internal Cached table name to avoid repeated generation */
+ _cachedTableName;
+ constructor(name, config, schema, columns, validators) {
+ const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
+ const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
+ const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
+ if (hasFields && hasExpr) {
+ throw new Error(
+ `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
+ );
+ }
+ const hasCluster = typeof resolvedConfig.cluster === "string";
+ const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
+ const hasReplicaName = typeof resolvedConfig.replicaName === "string";
+ if (hasCluster && (hasKeeperPath || hasReplicaName)) {
+ throw new Error(
+ `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
+ );
+ }
+ super(name, resolvedConfig, schema, columns, validators);
+ this.name = name;
+ const tables = getMooseInternal().tables;
+ const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
+ if (!isClientOnlyMode() && tables.has(registryKey)) {
+ throw new Error(
+ `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
+ );
+ }
+ tables.set(registryKey, this);
+ }
+ /**
+ * Generates the versioned table name following Moose's naming convention
+ * Format: {tableName}_{version_with_dots_replaced_by_underscores}
+ */
+ generateTableName() {
+ if (this._cachedTableName) {
+ return this._cachedTableName;
+ }
+ const tableVersion = this.config.version;
+ if (!tableVersion) {
+ this._cachedTableName = this.name;
+ } else {
+ const versionSuffix = tableVersion.replace(/\./g, "_");
+ this._cachedTableName = `${this.name}_${versionSuffix}`;
+ }
+ return this._cachedTableName;
+ }
+ /**
+ * Creates a fast hash of the ClickHouse configuration.
+ * Uses crypto.createHash for better performance than JSON.stringify.
+ *
+ * @private
+ */
+ createConfigHash(clickhouseConfig) {
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
+ const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
+ return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
+ }
+ /**
+ * Gets or creates a memoized ClickHouse client.
+ * The client is cached and reused across multiple insert calls for better performance.
+ * If the configuration changes, a new client will be created.
+ *
+ * @private
+ */
+ async getMemoizedClient() {
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
+ const configRegistry = globalThis._mooseConfigRegistry;
+ const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
+ const clickhouseConfig = await configRegistry.getClickHouseConfig();
+ const currentConfigHash = this.createConfigHash(clickhouseConfig);
+ if (this._memoizedClient && this._configHash === currentConfigHash) {
+ return { client: this._memoizedClient, config: clickhouseConfig };
+ }
+ if (this._memoizedClient && this._configHash !== currentConfigHash) {
+ try {
+ await this._memoizedClient.close();
+ } catch (error) {
  }
- /**
- * Assert that a record matches type T, throwing detailed errors if not.
- * Uses typia's assert() function for the most detailed error reporting.
- *
- * @param record The record to assert
- * @returns The validated and typed record
- * @throws Detailed validation error if record doesn't match type T
- */
- assertValidRecord(record) {
- if (this.validators?.assert) {
- return this.validators.assert(record);
- }
- throw new Error("No typia validator found");
+ }
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
+ const client = getClickhouseClient2({
+ username: clickhouseConfig.username,
+ password: clickhouseConfig.password,
+ database: effectiveDatabase,
+ useSSL: clickhouseConfig.useSSL ? "true" : "false",
+ host: clickhouseConfig.host,
+ port: clickhouseConfig.port
+ });
+ this._memoizedClient = client;
+ this._configHash = currentConfigHash;
+ return { client, config: clickhouseConfig };
+ }
+ /**
+ * Closes the memoized ClickHouse client if it exists.
+ * This is useful for cleaning up connections when the table instance is no longer needed.
+ * The client will be automatically recreated on the next insert call if needed.
+ */
+ async closeClient() {
+ if (this._memoizedClient) {
+ try {
+ await this._memoizedClient.close();
+ } catch (error) {
+ } finally {
+ this._memoizedClient = void 0;
+ this._configHash = void 0;
  }
- /**
- * Validates an array of records with comprehensive error reporting.
- * Uses the most appropriate validation method available (typia or basic).
- *
- * @param data Array of records to validate
- * @returns Detailed validation results
- */
- async validateRecords(data) {
- const valid = [];
- const invalid = [];
- valid.length = 0;
- invalid.length = 0;
- const dataLength = data.length;
- for (let i = 0; i < dataLength; i++) {
- const record = data[i];
- try {
- if (this.isValidRecord(record)) {
- valid.push(this.mapToClickhouseRecord(record));
- } else {
- const result = this.validateRecord(record);
- if (result.success) {
- valid.push(this.mapToClickhouseRecord(record));
- } else {
- invalid.push({
- record,
- error: result.errors?.join(", ") || "Validation failed",
- index: i,
- path: "root"
- });
- }
- }
- } catch (error) {
+ }
+ }
+ /**
+ * Validates a single record using typia's comprehensive type checking.
+ * This provides the most accurate validation as it uses the exact TypeScript type information.
+ *
+ * @param record The record to validate
+ * @returns Validation result with detailed error information
+ */
+ validateRecord(record) {
+ if (this.validators?.validate) {
+ try {
+ const result = this.validators.validate(record);
+ return {
+ success: result.success,
+ data: result.data,
+ errors: result.errors?.map(
+ (err) => typeof err === "string" ? err : JSON.stringify(err)
+ )
+ };
+ } catch (error) {
+ return {
+ success: false,
+ errors: [error instanceof Error ? error.message : String(error)]
+ };
+ }
+ }
+ throw new Error("No typia validator found");
+ }
+ /**
+ * Type guard function using typia's is() function.
+ * Provides compile-time type narrowing for TypeScript.
+ *
+ * @param record The record to check
+ * @returns True if record matches type T, with type narrowing
+ */
+ isValidRecord(record) {
+ if (this.validators?.is) {
+ return this.validators.is(record);
+ }
+ throw new Error("No typia validator found");
+ }
+ /**
+ * Assert that a record matches type T, throwing detailed errors if not.
+ * Uses typia's assert() function for the most detailed error reporting.
+ *
+ * @param record The record to assert
+ * @returns The validated and typed record
+ * @throws Detailed validation error if record doesn't match type T
+ */
+ assertValidRecord(record) {
+ if (this.validators?.assert) {
+ return this.validators.assert(record);
+ }
+ throw new Error("No typia validator found");
+ }
+ /**
+ * Validates an array of records with comprehensive error reporting.
+ * Uses the most appropriate validation method available (typia or basic).
+ *
+ * @param data Array of records to validate
+ * @returns Detailed validation results
+ */
+ async validateRecords(data) {
+ const valid = [];
+ const invalid = [];
+ valid.length = 0;
+ invalid.length = 0;
+ const dataLength = data.length;
+ for (let i = 0; i < dataLength; i++) {
+ const record = data[i];
+ try {
+ if (this.isValidRecord(record)) {
+ valid.push(this.mapToClickhouseRecord(record));
+ } else {
+ const result = this.validateRecord(record);
+ if (result.success) {
+ valid.push(this.mapToClickhouseRecord(record));
+ } else {
  invalid.push({
  record,
- error: error instanceof Error ? error.message : String(error),
+ error: result.errors?.join(", ") || "Validation failed",
  index: i,
  path: "root"
  });
  }
  }
- return {
- valid,
- invalid,
- total: dataLength
- };
+ } catch (error) {
+ invalid.push({
+ record,
+ error: error instanceof Error ? error.message : String(error),
+ index: i,
+ path: "root"
+ });
  }
- /**
- * Optimized batch retry that minimizes individual insert operations.
- * Groups records into smaller batches to reduce round trips while still isolating failures.
- *
- * @private
- */
- async retryIndividualRecords(client, tableName, records) {
- const successful = [];
- const failed = [];
- const RETRY_BATCH_SIZE = 10;
- const totalRecords = records.length;
- for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
- const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
- const batch = records.slice(i, batchEnd);
+ }
+ return {
+ valid,
+ invalid,
+ total: dataLength
+ };
+ }
+ /**
+ * Optimized batch retry that minimizes individual insert operations.
+ * Groups records into smaller batches to reduce round trips while still isolating failures.
+ *
+ * @private
+ */
+ async retryIndividualRecords(client, tableName, records) {
+ const successful = [];
+ const failed = [];
+ const RETRY_BATCH_SIZE = 10;
+ const totalRecords = records.length;
+ for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
+ const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
+ const batch = records.slice(i, batchEnd);
+ try {
+ await client.insert({
+ table: quoteIdentifier(tableName),
+ values: batch,
+ format: "JSONEachRow",
+ clickhouse_settings: {
+ date_time_input_format: "best_effort",
+ // Add performance settings for retries
+ max_insert_block_size: RETRY_BATCH_SIZE,
+ max_block_size: RETRY_BATCH_SIZE
+ }
+ });
+ successful.push(...batch);
+ } catch (batchError) {
+ for (let j = 0; j < batch.length; j++) {
+ const record = batch[j];
  try {
  await client.insert({
  table: quoteIdentifier(tableName),
- values: batch,
+ values: [record],
  format: "JSONEachRow",
  clickhouse_settings: {
- date_time_input_format: "best_effort",
- // Add performance settings for retries
- max_insert_block_size: RETRY_BATCH_SIZE,
- max_block_size: RETRY_BATCH_SIZE
+ date_time_input_format: "best_effort"
  }
  });
- successful.push(...batch);
- } catch (batchError) {
- for (let j = 0; j < batch.length; j++) {
- const record = batch[j];
- try {
- await client.insert({
- table: quoteIdentifier(tableName),
- values: [record],
- format: "JSONEachRow",
- clickhouse_settings: {
- date_time_input_format: "best_effort"
- }
- });
- successful.push(record);
- } catch (error) {
- failed.push({
- record,
- error: error instanceof Error ? error.message : String(error),
- index: i + j
- });
- }
- }
- }
- }
- return { successful, failed };
- }
- /**
- * Validates input parameters and strategy compatibility
- * @private
- */
- validateInsertParameters(data, options) {
- const isStream = data instanceof import_node_stream.Readable;
- const strategy = options?.strategy || "fail-fast";
- const shouldValidate = options?.validate !== false;
- if (isStream && strategy === "isolate") {
- throw new Error(
- "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
- );
- }
- if (isStream && shouldValidate) {
- console.warn(
- "Validation is not supported with stream input. Validation will be skipped."
- );
- }
- return { isStream, strategy, shouldValidate };
- }
- /**
- * Handles early return cases for empty data
- * @private
- */
- handleEmptyData(data, isStream) {
- if (isStream && !data) {
- return {
- successful: 0,
- failed: 0,
- total: 0
- };
- }
- if (!isStream && (!data || data.length === 0)) {
- return {
- successful: 0,
- failed: 0,
- total: 0
- };
- }
- return null;
- }
- /**
- * Performs pre-insertion validation for array data
- * @private
- */
- async performPreInsertionValidation(data, shouldValidate, strategy, options) {
- if (!shouldValidate) {
- return { validatedData: data, validationErrors: [] };
- }
- try {
- const validationResult = await this.validateRecords(data);
- const validatedData = validationResult.valid;
- const validationErrors = validationResult.invalid;
- if (validationErrors.length > 0) {
- this.handleValidationErrors(validationErrors, strategy, data, options);
- switch (strategy) {
- case "discard":
- return { validatedData, validationErrors };
- case "isolate":
- return { validatedData: data, validationErrors };
- default:
- return { validatedData, validationErrors };
- }
- }
- return { validatedData, validationErrors };
- } catch (validationError) {
- if (strategy === "fail-fast") {
- throw validationError;
- }
- console.warn("Validation error:", validationError);
- return { validatedData: data, validationErrors: [] };
- }
- }
- /**
- * Handles validation errors based on the specified strategy
- * @private
- */
- handleValidationErrors(validationErrors, strategy, data, options) {
- switch (strategy) {
- case "fail-fast":
- const firstError = validationErrors[0];
- throw new Error(
- `Validation failed for record at index ${firstError.index}: ${firstError.error}`
- );
- case "discard":
- this.checkValidationThresholds(validationErrors, data.length, options);
- break;
- case "isolate":
- break;
- }
- }
- /**
- * Checks if validation errors exceed configured thresholds
- * @private
- */
- checkValidationThresholds(validationErrors, totalRecords, options) {
- const validationFailedCount = validationErrors.length;
- const validationFailedRatio = validationFailedCount / totalRecords;
- if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
- throw new Error(
- `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
- );
- }
- if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
- throw new Error(
- `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
- );
- }
- }
- /**
- * Optimized insert options preparation with better memory management
- * @private
- */
- prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
- const insertOptions = {
- table: quoteIdentifier(tableName),
- format: "JSONEachRow",
- clickhouse_settings: {
- date_time_input_format: "best_effort",
- wait_end_of_query: 1,
- // Ensure at least once delivery for INSERT operations
- // Performance optimizations
- max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
- max_block_size: 65536,
- // Use async inserts for better performance with large datasets
- async_insert: validatedData.length > 1e3 ? 1 : 0,
- wait_for_async_insert: 1
- // For at least once delivery
- }
- };
- if (isStream) {
- insertOptions.values = data;
- } else {
- insertOptions.values = validatedData;
- }
- if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
- if (options.allowErrors !== void 0) {
- insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
- }
- if (options.allowErrorsRatio !== void 0) {
- insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
+ successful.push(record);
+ } catch (error) {
+ failed.push({
+ record,
+ error: error instanceof Error ? error.message : String(error),
+ index: i + j
+ });
  }
  }
- return insertOptions;
- }
- /**
- * Creates success result for completed insertions
- * @private
- */
- createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
- if (isStream) {
- return {
- successful: -1,
- // -1 indicates stream mode where count is unknown
- failed: 0,
- total: -1
- };
- }
- const insertedCount = validatedData.length;
- const totalProcessed = shouldValidate ? data.length : insertedCount;
- const result = {
- successful: insertedCount,
- failed: shouldValidate ? validationErrors.length : 0,
- total: totalProcessed
- };
- if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
- result.failedRecords = validationErrors.map((ve) => ({
- record: ve.record,
- error: `Validation error: ${ve.error}`,
- index: ve.index
- }));
- }
- return result;
  }
- /**
- * Handles insertion errors based on the specified strategy
- * @private
- */
- async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
+ }
+ return { successful, failed };
+ }
+ /**
+ * Validates input parameters and strategy compatibility
+ * @private
+ */
+ validateInsertParameters(data, options) {
+ const isStream = data instanceof import_node_stream.Readable;
+ const strategy = options?.strategy || "fail-fast";
+ const shouldValidate = options?.validate !== false;
+ if (isStream && strategy === "isolate") {
+ throw new Error(
+ "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
+ );
+ }
+ if (isStream && shouldValidate) {
+ console.warn(
+ "Validation is not supported with stream input. Validation will be skipped."
+ );
+ }
+ return { isStream, strategy, shouldValidate };
+ }
+ /**
+ * Handles early return cases for empty data
+ * @private
+ */
+ handleEmptyData(data, isStream) {
+ if (isStream && !data) {
+ return {
+ successful: 0,
+ failed: 0,
+ total: 0
+ };
+ }
+ if (!isStream && (!data || data.length === 0)) {
+ return {
+ successful: 0,
+ failed: 0,
+ total: 0
+ };
+ }
+ return null;
+ }
+ /**
+ * Performs pre-insertion validation for array data
+ * @private
+ */
+ async performPreInsertionValidation(data, shouldValidate, strategy, options) {
+ if (!shouldValidate) {
+ return { validatedData: data, validationErrors: [] };
+ }
+ try {
+ const validationResult = await this.validateRecords(data);
+ const validatedData = validationResult.valid;
+ const validationErrors = validationResult.invalid;
+ if (validationErrors.length > 0) {
+ this.handleValidationErrors(validationErrors, strategy, data, options);
  switch (strategy) {
- case "fail-fast":
- throw new Error(
- `Failed to insert data into table ${tableName}: ${batchError}`
- );
  case "discard":
- throw new Error(
- `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
- );
+ return { validatedData, validationErrors };
  case "isolate":
- return await this.handleIsolateStrategy(
- batchError,
- tableName,
- data,
- validatedData,
- validationErrors,
- isStream,
- shouldValidate,
- options
- );
+ return { validatedData: data, validationErrors };
  default:
- throw new Error(`Unknown error strategy: ${strategy}`);
- }
- }
- /**
- * Handles the isolate strategy for insertion errors
- * @private
- */
- async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
- if (isStream) {
- throw new Error(
- `Isolate strategy is not supported with stream input: ${batchError}`
- );
- }
- try {
- const { client } = await this.getMemoizedClient();
- const skipValidationOnRetry = options?.skipValidationOnRetry || false;
- const retryData = skipValidationOnRetry ? data : validatedData;
- const { successful, failed } = await this.retryIndividualRecords(
- client,
- tableName,
- retryData
- );
- const allFailedRecords = [
- // Validation errors (if any and not skipping validation on retry)
- ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
- record: ve.record,
- error: `Validation error: ${ve.error}`,
- index: ve.index
- })) : [],
- // Insertion errors
- ...failed
- ];
- this.checkInsertionThresholds(
- allFailedRecords,
- data.length,
- options
- );
- return {
- successful: successful.length,
- failed: allFailedRecords.length,
- total: data.length,
- failedRecords: allFailedRecords
- };
- } catch (isolationError) {
- throw new Error(
- `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
- );
- }
- }
- /**
- * Checks if insertion errors exceed configured thresholds
- * @private
- */
- checkInsertionThresholds(failedRecords, totalRecords, options) {
- const totalFailed = failedRecords.length;
- const failedRatio = totalFailed / totalRecords;
- if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
- throw new Error(
- `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
- );
- }
- if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
- throw new Error(
- `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1463
- );
1464
- }
1465
- }
1466
- /**
1467
- * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1468
- *
1469
- * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1470
- * - For every Nested struct (not array), it recurses into the struct.
1471
- * - This ensures compatibility with kafka_clickhouse_sync
1472
- *
1473
- * @param record The input record to transform (may be deeply nested)
1474
- * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1475
- * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1476
- */
1477
- mapToClickhouseRecord(record, columns = this.columnArray) {
1478
- const result = { ...record };
1479
- for (const col of columns) {
1480
- const value = record[col.name];
1481
- const dt = col.data_type;
1482
- if (isArrayNestedType(dt)) {
1483
- if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1484
- result[col.name] = value.map((item) => [
1485
- this.mapToClickhouseRecord(item, dt.elementType.columns)
1486
- ]);
1487
- }
1488
- } else if (isNestedType(dt)) {
1489
- if (value && typeof value === "object") {
1490
- result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1491
- }
1492
- }
1129
+ return { validatedData, validationErrors };
1493
1130
  }
1494
- return result;
1495
1131
  }
1496
- /**
1497
- * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1498
- * This method establishes a direct connection to ClickHouse using the project configuration
1499
- * and inserts the provided data into the versioned table.
1500
- *
1501
- * PERFORMANCE OPTIMIZATIONS:
1502
- * - Memoized client connections with fast config hashing
1503
- * - Single-pass validation with pre-allocated arrays
1504
- * - Batch-optimized retry strategy (batches of 10, then individual)
1505
- * - Optimized ClickHouse settings for large datasets
1506
- * - Reduced memory allocations and object creation
1507
- *
1508
- * Uses advanced typia validation when available for comprehensive type checking,
1509
- * with fallback to basic validation for compatibility.
1510
- *
1511
- * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1512
- * If the configuration changes, a new client will be automatically created.
1513
- *
1514
- * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1515
- * @param options Optional configuration for error handling, validation, and insertion behavior
1516
- * @returns Promise resolving to detailed insertion results
1517
- * @throws {ConfigError} When configuration cannot be read or parsed
1518
- * @throws {ClickHouseError} When insertion fails based on the error strategy
1519
- * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1520
- *
1521
- * @example
1522
- * ```typescript
1523
- * // Create an OlapTable instance (typia validators auto-injected)
1524
- * const userTable = new OlapTable<User>('users');
1525
- *
1526
- * // Insert with comprehensive typia validation
1527
- * const result1 = await userTable.insert([
1528
- * { id: 1, name: 'John', email: 'john@example.com' },
1529
- * { id: 2, name: 'Jane', email: 'jane@example.com' }
1530
- * ]);
1531
- *
1532
- * // Insert data with stream input (validation not available for streams)
1533
- * const dataStream = new Readable({
1534
- * objectMode: true,
1535
- * read() {} // stream implementation elided
1536
- * });
1537
- * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1538
- *
1539
- * // Insert with validation disabled for performance
1540
- * const result3 = await userTable.insert(data, { validate: false });
1541
- *
1542
- * // Insert with error handling strategies
1543
- * const result4 = await userTable.insert(mixedData, {
1544
- * strategy: 'isolate',
1545
- * allowErrorsRatio: 0.1,
1546
- * validate: true // Use typia validation (default)
1547
- * });
1548
- *
1549
- * // Optional: Clean up connection when completely done
1550
- * await userTable.closeClient();
1551
- * ```
1552
- */
1553
- async insert(data, options) {
1554
- const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1555
- const emptyResult = this.handleEmptyData(data, isStream);
1556
- if (emptyResult) {
1557
- return emptyResult;
1558
- }
1559
- let validatedData = [];
1560
- let validationErrors = [];
1561
- if (!isStream && shouldValidate) {
1562
- const validationResult = await this.performPreInsertionValidation(
1563
- data,
1564
- shouldValidate,
1565
- strategy,
1566
- options
1567
- );
1568
- validatedData = validationResult.validatedData;
1569
- validationErrors = validationResult.validationErrors;
1570
- } else {
1571
- validatedData = isStream ? [] : data;
1572
- }
1573
- const { client } = await this.getMemoizedClient();
1574
- const tableName = this.generateTableName();
1575
- try {
1576
- const insertOptions = this.prepareInsertOptions(
1577
- tableName,
1578
- data,
1579
- validatedData,
1580
- isStream,
1581
- strategy,
1582
- options
1583
- );
1584
- await client.insert(insertOptions);
1585
- return this.createSuccessResult(
1586
- data,
1587
- validatedData,
1588
- validationErrors,
1589
- isStream,
1590
- shouldValidate,
1591
- strategy
1592
- );
1593
- } catch (batchError) {
1594
- return await this.handleInsertionError(
1595
- batchError,
1596
- strategy,
1597
- tableName,
1598
- data,
1599
- validatedData,
1600
- validationErrors,
1601
- isStream,
1602
- shouldValidate,
1603
- options
1604
- );
1605
- }
1132
+ return { validatedData, validationErrors };
1133
+ } catch (validationError) {
1134
+ if (strategy === "fail-fast") {
1135
+ throw validationError;
1606
1136
  }
1607
- // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1608
- // were removed in ENG-856. Use direct configuration instead, e.g.:
1609
- // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1610
- };
1137
+ console.warn("Validation error:", validationError);
1138
+ return { validatedData: data, validationErrors: [] };
1139
+ }
1611
1140
  }
1612
- });
1613
-
1614
- // src/dmv2/sdk/stream.ts
1615
- function attachTypeGuard(dl, typeGuard) {
1616
- dl.asTyped = () => typeGuard(dl.originalRecord);
1617
- }
1618
- var import_node_crypto3, RoutedMessage, Stream, DeadLetterQueue;
1619
- var init_stream = __esm({
1620
- "src/dmv2/sdk/stream.ts"() {
1621
- "use strict";
1622
- init_typedBase();
1623
- init_internal();
1624
- import_node_crypto3 = require("crypto");
1625
- init_stackTrace();
1626
- RoutedMessage = class {
1627
- /** The destination stream for the message */
1628
- destination;
1629
- /** The message value(s) to send */
1630
- values;
1631
- /**
1632
- * Creates a new routed message.
1633
- *
1634
- * @param destination The target stream
1635
- * @param values The message(s) to route
1636
- */
1637
- constructor(destination, values) {
1638
- this.destination = destination;
1639
- this.values = values;
1141
+ /**
1142
+ * Handles validation errors based on the specified strategy
1143
+ * @private
1144
+ */
1145
+ handleValidationErrors(validationErrors, strategy, data, options) {
1146
+ switch (strategy) {
1147
+ case "fail-fast":
1148
+ const firstError = validationErrors[0];
1149
+ throw new Error(
1150
+ `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1151
+ );
1152
+ case "discard":
1153
+ this.checkValidationThresholds(validationErrors, data.length, options);
1154
+ break;
1155
+ case "isolate":
1156
+ break;
1157
+ }
1158
+ }
1159
+ /**
1160
+ * Checks if validation errors exceed configured thresholds
1161
+ * @private
1162
+ */
1163
+ checkValidationThresholds(validationErrors, totalRecords, options) {
1164
+ const validationFailedCount = validationErrors.length;
1165
+ const validationFailedRatio = validationFailedCount / totalRecords;
1166
+ if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1167
+ throw new Error(
1168
+ `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1169
+ );
1170
+ }
1171
+ if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1172
+ throw new Error(
1173
+ `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1174
+ );
1175
+ }
1176
+ }
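
Under the 'discard' strategy these thresholds give invalid records a budget: `allowErrors` caps the absolute count and `allowErrorsRatio` the failed fraction of the batch, and crossing either aborts the whole insert. A hedged usage sketch (table and data names are placeholders):

```typescript
// Drop invalid rows silently, but abort if more than 5 rows or 10% of the batch fail validation.
const result = await users.insert(records, {
  strategy: "discard",
  allowErrors: 5,
  allowErrorsRatio: 0.1,
});
console.log(`inserted ${result.successful}, discarded ${result.failed}`);
```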
1177
+ /**
1178
+ * Optimized insert options preparation with better memory management
1179
+ * @private
1180
+ */
1181
+ prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1182
+ const insertOptions = {
1183
+ table: quoteIdentifier(tableName),
1184
+ format: "JSONEachRow",
1185
+ clickhouse_settings: {
1186
+ date_time_input_format: "best_effort",
1187
+ wait_end_of_query: 1,
1188
+ // Ensure at-least-once delivery for INSERT operations
1189
+ // Performance optimizations
1190
+ max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1191
+ max_block_size: 65536,
1192
+ // Use async inserts for better performance with large datasets
1193
+ async_insert: validatedData.length > 1e3 ? 1 : 0,
1194
+ wait_for_async_insert: 1
1195
+ // For at-least-once delivery
1640
1196
  }
1641
1197
  };
1642
- Stream = class extends TypedBase {
1643
- defaultDeadLetterQueue;
1644
- /** @internal Memoized KafkaJS producer for reusing connections across sends */
1645
- _memoizedProducer;
1646
- /** @internal Hash of the configuration used to create the memoized Kafka producer */
1647
- _kafkaConfigHash;
1648
- constructor(name, config, schema, columns, validators, allowExtraFields) {
1649
- super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1650
- const streams = getMooseInternal().streams;
1651
- if (streams.has(name)) {
1652
- throw new Error(`Stream with name ${name} already exists`);
1653
- }
1654
- streams.set(name, this);
1655
- this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1656
- }
1657
- /**
1658
- * Internal map storing transformation configurations.
1659
- * Maps destination stream names to arrays of transformation functions and their configs.
1660
- *
1661
- * @internal
1662
- */
1663
- _transformations = /* @__PURE__ */ new Map();
1664
- /**
1665
- * Internal function for multi-stream transformations.
1666
- * Allows a single transformation to route messages to multiple destinations.
1667
- *
1668
- * @internal
1669
- */
1670
- _multipleTransformations;
1671
- /**
1672
- * Internal array storing consumer configurations.
1673
- *
1674
- * @internal
1675
- */
1676
- _consumers = new Array();
1677
- /**
1678
- * Builds the full Kafka topic name including optional namespace and version suffix.
1679
- * The version suffix is appended as _x_y_z, with dots in the version replaced by underscores.
1680
- */
1681
- buildFullTopicName(namespace) {
1682
- const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1683
- const base = `${this.name}${versionSuffix}`;
1684
- return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1198
+ if (isStream) {
1199
+ insertOptions.values = data;
1200
+ } else {
1201
+ insertOptions.values = validatedData;
1202
+ }
1203
+ if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1204
+ if (options.allowErrors !== void 0) {
1205
+ insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1685
1206
  }
1686
- /**
1687
- * Creates a fast hash string from relevant Kafka configuration fields.
1688
- */
1689
- createConfigHash(kafkaConfig) {
1690
- const configString = [
1691
- kafkaConfig.broker,
1692
- kafkaConfig.messageTimeoutMs,
1693
- kafkaConfig.saslUsername,
1694
- kafkaConfig.saslPassword,
1695
- kafkaConfig.saslMechanism,
1696
- kafkaConfig.securityProtocol,
1697
- kafkaConfig.namespace
1698
- ].join(":");
1699
- return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1207
+ if (options.allowErrorsRatio !== void 0) {
1208
+ insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
1700
1209
  }
1701
- /**
1702
- * Gets or creates a memoized KafkaJS producer using runtime configuration.
1703
- */
1704
- async getMemoizedProducer() {
1705
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1706
- const configRegistry = globalThis._mooseConfigRegistry;
1707
- const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1708
- const kafkaConfig = await configRegistry.getKafkaConfig();
1709
- const currentHash = this.createConfigHash(kafkaConfig);
1710
- if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1711
- return { producer: this._memoizedProducer, kafkaConfig };
1712
- }
1713
- if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1714
- try {
1715
- await this._memoizedProducer.disconnect();
1716
- } catch {
1717
- }
1718
- this._memoizedProducer = void 0;
1719
- }
1720
- const clientId = `moose-sdk-stream-${this.name}`;
1721
- const logger = {
1722
- logPrefix: clientId,
1723
- log: (message) => {
1724
- console.log(`${clientId}: ${message}`);
1725
- },
1726
- error: (message) => {
1727
- console.error(`${clientId}: ${message}`);
1728
- },
1729
- warn: (message) => {
1730
- console.warn(`${clientId}: ${message}`);
1731
- }
1732
- };
1733
- const producer = await getKafkaProducer2(
1734
- {
1735
- clientId,
1736
- broker: kafkaConfig.broker,
1737
- securityProtocol: kafkaConfig.securityProtocol,
1738
- saslUsername: kafkaConfig.saslUsername,
1739
- saslPassword: kafkaConfig.saslPassword,
1740
- saslMechanism: kafkaConfig.saslMechanism
1741
- },
1742
- logger
1210
+ }
1211
+ return insertOptions;
1212
+ }
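
To make the option-building above concrete: a 5,000-row array batch under the 'discard' strategy with `allowErrors: 5` would come out roughly as follows. This is a sketch of the computed values, not a documented contract:

```typescript
declare const batch: Record<string, unknown>[]; // the 5,000 validated records

const insertOptions = {
  table: `"users"`,                       // quoteIdentifier(tableName)
  format: "JSONEachRow",
  values: batch,                          // arrays use validated records; streams pass through
  clickhouse_settings: {
    date_time_input_format: "best_effort",
    wait_end_of_query: 1,                 // at-least-once delivery
    max_insert_block_size: 5000,          // min(batch length, 100_000)
    max_block_size: 65536,
    async_insert: 1,                      // batch larger than 1,000 rows
    wait_for_async_insert: 1,
    input_format_allow_errors_num: 5,     // only set for 'discard' with thresholds
  },
};
```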
1213
+ /**
1214
+ * Creates a success result for completed insertions
1215
+ * @private
1216
+ */
1217
+ createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1218
+ if (isStream) {
1219
+ return {
1220
+ successful: -1,
1221
+ // -1 indicates stream mode where count is unknown
1222
+ failed: 0,
1223
+ total: -1
1224
+ };
1225
+ }
1226
+ const insertedCount = validatedData.length;
1227
+ const totalProcessed = shouldValidate ? data.length : insertedCount;
1228
+ const result = {
1229
+ successful: insertedCount,
1230
+ failed: shouldValidate ? validationErrors.length : 0,
1231
+ total: totalProcessed
1232
+ };
1233
+ if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1234
+ result.failedRecords = validationErrors.map((ve) => ({
1235
+ record: ve.record,
1236
+ error: `Validation error: ${ve.error}`,
1237
+ index: ve.index
1238
+ }));
1239
+ }
1240
+ return result;
1241
+ }
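
Note the sentinel in the stream branch above: callers should treat `-1` as "count unknown", not as a failure. A small guard over the result shape this method produces:

```typescript
const result = await users.insert(input);
if (result.total === -1) {
  console.log("stream insert completed; row counts are not tracked");
} else {
  console.log(`${result.successful}/${result.total} inserted, ${result.failed} failed`);
}
```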
1242
+ /**
1243
+ * Handles insertion errors based on the specified strategy
1244
+ * @private
1245
+ */
1246
+ async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1247
+ switch (strategy) {
1248
+ case "fail-fast":
1249
+ throw new Error(
1250
+ `Failed to insert data into table ${tableName}: ${batchError}`
1743
1251
  );
1744
- this._memoizedProducer = producer;
1745
- this._kafkaConfigHash = currentHash;
1746
- return { producer, kafkaConfig };
1747
- }
1748
- /**
1749
- * Closes the memoized Kafka producer if it exists.
1750
- */
1751
- async closeProducer() {
1752
- if (this._memoizedProducer) {
1753
- try {
1754
- await this._memoizedProducer.disconnect();
1755
- } catch {
1756
- } finally {
1757
- this._memoizedProducer = void 0;
1758
- this._kafkaConfigHash = void 0;
1759
- }
1760
- }
1761
- }
1762
- /**
1763
- * Sends one or more records to this stream's Kafka topic.
1764
- * Values are JSON-serialized as message values.
1765
- */
1766
- async send(values) {
1767
- const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1768
- if (flat.length === 0) return;
1769
- const { producer, kafkaConfig } = await this.getMemoizedProducer();
1770
- const topic = this.buildFullTopicName(kafkaConfig.namespace);
1771
- const sr = this.config.schemaConfig;
1772
- if (sr && sr.kind === "JSON") {
1773
- const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1774
- if (!schemaRegistryUrl) {
1775
- throw new Error("Schema Registry URL not configured");
1776
- }
1777
- const {
1778
- default: { SchemaRegistry }
1779
- } = await import("@kafkajs/confluent-schema-registry");
1780
- const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1781
- let schemaId = void 0;
1782
- if ("id" in sr.reference) {
1783
- schemaId = sr.reference.id;
1784
- } else if ("subjectLatest" in sr.reference) {
1785
- schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1786
- } else if ("subject" in sr.reference) {
1787
- schemaId = await registry.getRegistryId(
1788
- sr.reference.subject,
1789
- sr.reference.version
1790
- );
1791
- }
1792
- if (schemaId === void 0) {
1793
- throw new Error("Malformed schema reference.");
1794
- }
1795
- const encoded = await Promise.all(
1796
- flat.map(
1797
- (v) => registry.encode(schemaId, v)
1798
- )
1799
- );
1800
- await producer.send({
1801
- topic,
1802
- messages: encoded.map((value) => ({ value }))
1803
- });
1804
- return;
1805
- } else if (sr !== void 0) {
1806
- throw new Error("Currently only JSON Schema is supported.");
1807
- }
1808
- await producer.send({
1809
- topic,
1810
- messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1811
- });
1812
- }
1813
- /**
1814
- * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1815
- * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1816
- *
1817
- * @template U The data type of the messages in the destination stream.
1818
- * @param destination The destination stream for the transformed messages.
1819
- * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1820
- * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1821
- * @param config Optional configuration for this specific transformation step, like a version.
1822
- */
1823
- addTransform(destination, transformation, config) {
1824
- const sourceFile = getSourceFileFromStack(new Error().stack);
1825
- const transformConfig = {
1826
- ...config ?? {},
1827
- sourceFile
1828
- };
1829
- if (transformConfig.deadLetterQueue === void 0) {
1830
- transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1831
- }
1832
- if (this._transformations.has(destination.name)) {
1833
- const existingTransforms = this._transformations.get(destination.name);
1834
- const hasVersion = existingTransforms.some(
1835
- ([_, __, cfg]) => cfg.version === transformConfig.version
1836
- );
1837
- if (!hasVersion) {
1838
- existingTransforms.push([destination, transformation, transformConfig]);
1839
- }
1840
- } else {
1841
- this._transformations.set(destination.name, [
1842
- [destination, transformation, transformConfig]
1252
+ case "discard":
1253
+ throw new Error(
1254
+ `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1255
+ );
1256
+ case "isolate":
1257
+ return await this.handleIsolateStrategy(
1258
+ batchError,
1259
+ tableName,
1260
+ data,
1261
+ validatedData,
1262
+ validationErrors,
1263
+ isStream,
1264
+ shouldValidate,
1265
+ options
1266
+ );
1267
+ default:
1268
+ throw new Error(`Unknown error strategy: ${strategy}`);
1269
+ }
1270
+ }
1271
+ /**
1272
+ * Handles the isolate strategy for insertion errors
1273
+ * @private
1274
+ */
1275
+ async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1276
+ if (isStream) {
1277
+ throw new Error(
1278
+ `Isolate strategy is not supported with stream input: ${batchError}`
1279
+ );
1280
+ }
1281
+ try {
1282
+ const { client } = await this.getMemoizedClient();
1283
+ const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1284
+ const retryData = skipValidationOnRetry ? data : validatedData;
1285
+ const { successful, failed } = await this.retryIndividualRecords(
1286
+ client,
1287
+ tableName,
1288
+ retryData
1289
+ );
1290
+ const allFailedRecords = [
1291
+ // Validation errors (if any and not skipping validation on retry)
1292
+ ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1293
+ record: ve.record,
1294
+ error: `Validation error: ${ve.error}`,
1295
+ index: ve.index
1296
+ })) : [],
1297
+ // Insertion errors
1298
+ ...failed
1299
+ ];
1300
+ this.checkInsertionThresholds(
1301
+ allFailedRecords,
1302
+ data.length,
1303
+ options
1304
+ );
1305
+ return {
1306
+ successful: successful.length,
1307
+ failed: allFailedRecords.length,
1308
+ total: data.length,
1309
+ failedRecords: allFailedRecords
1310
+ };
1311
+ } catch (isolationError) {
1312
+ throw new Error(
1313
+ `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1314
+ );
1315
+ }
1316
+ }
1317
+ /**
1318
+ * Checks if insertion errors exceed configured thresholds
1319
+ * @private
1320
+ */
1321
+ checkInsertionThresholds(failedRecords, totalRecords, options) {
1322
+ const totalFailed = failedRecords.length;
1323
+ const failedRatio = totalFailed / totalRecords;
1324
+ if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1325
+ throw new Error(
1326
+ `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1327
+ );
1328
+ }
1329
+ if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1330
+ throw new Error(
1331
+ `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1332
+ );
1333
+ }
1334
+ }
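
Putting the isolate path together: a failed batch is retried record by record, validation and insertion failures are merged, the thresholds above are applied, and a per-record report comes back. A usage sketch (names are illustrative):

```typescript
const result = await users.insert(mixedBatch, {
  strategy: "isolate",
  allowErrorsRatio: 0.2,        // give up if more than 20% of the batch fails overall
  skipValidationOnRetry: false, // retry only the records that passed validation
});

for (const failure of result.failedRecords ?? []) {
  // Each entry carries the original record, an error message, and its batch index.
  console.warn(`record ${failure.index} failed: ${failure.error}`);
}
```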
1335
+ /**
1336
+ * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1337
+ *
1338
+ * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1339
+ * - For every Nested struct (not array), it recurses into the struct.
1340
+ * - This ensures compatibility with kafka_clickhouse_sync
1341
+ *
1342
+ * @param record The input record to transform (may be deeply nested)
1343
+ * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1344
+ * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1345
+ */
1346
+ mapToClickhouseRecord(record, columns = this.columnArray) {
1347
+ const result = { ...record };
1348
+ for (const col of columns) {
1349
+ const value = record[col.name];
1350
+ const dt = col.data_type;
1351
+ if (isArrayNestedType(dt)) {
1352
+ if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1353
+ result[col.name] = value.map((item) => [
1354
+ this.mapToClickhouseRecord(item, dt.elementType.columns)
1843
1355
  ]);
1844
1356
  }
1845
- }
1846
- /**
1847
- * Adds a consumer function that processes messages from this stream.
1848
- * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1849
- *
1850
- * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1851
- * @param config Optional configuration for this specific consumer, like a version.
1852
- */
1853
- addConsumer(consumer, config) {
1854
- const sourceFile = getSourceFileFromStack(new Error().stack);
1855
- const consumerConfig = {
1856
- ...config ?? {},
1857
- sourceFile
1858
- };
1859
- if (consumerConfig.deadLetterQueue === void 0) {
1860
- consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1861
- }
1862
- const hasVersion = this._consumers.some(
1863
- (existing) => existing.config.version === consumerConfig.version
1864
- );
1865
- if (!hasVersion) {
1866
- this._consumers.push({ consumer, config: consumerConfig });
1357
+ } else if (isNestedType(dt)) {
1358
+ if (value && typeof value === "object") {
1359
+ result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1867
1360
  }
1868
1361
  }
1869
- /**
1870
- * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1871
- * @param values The value or values to send to this stream.
1872
- * @returns A `RoutedMessage` object associating the values with this stream.
1873
- *
1874
- * @example
1875
- * ```typescript
1876
- * sourceStream.addMultiTransform((record) => [
1877
- * destinationStream1.routed(transformedRecord1),
1878
- * destinationStream2.routed([record2a, record2b])
1879
- * ]);
1880
- * ```
1881
- */
1882
- routed = (values) => new RoutedMessage(this, values);
1883
- /**
1884
- * Adds a single transformation function that can route messages to multiple destination streams.
1885
- * This is an alternative to adding multiple individual `addTransform` calls.
1886
- * Only one multi-transform function can be added per stream.
1887
- *
1888
- * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1889
- * each specifying a destination stream and the message(s) to send to it.
1890
- */
1891
- addMultiTransform(transformation) {
1892
- this._multipleTransformations = transformation;
1362
+ }
1363
+ return result;
1364
+ }
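
The reshaping above is easiest to see on data. For a schema where `tags` is `Array(Nested(...))` and `address` is a plain `Nested` struct, each array element is wrapped in its own single-element array while the struct is recursed into in place (illustrative shapes, assuming matching column metadata):

```typescript
// Input record as the application sees it
const input = {
  id: 1,
  address: { city: "Berlin", zip: "10115" },
  tags: [{ key: "a", value: 1 }, { key: "b", value: 2 }],
};

// Shape after mapToClickhouseRecord: every Array(Nested) item gains its own
// wrapping array so ClickHouse's JSONEachRow reader accepts it.
const output = {
  id: 1,
  address: { city: "Berlin", zip: "10115" },
  tags: [[{ key: "a", value: 1 }], [{ key: "b", value: 2 }]],
};
```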
1365
+ /**
1366
+ * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1367
+ * This method establishes a direct connection to ClickHouse using the project configuration
1368
+ * and inserts the provided data into the versioned table.
1369
+ *
1370
+ * PERFORMANCE OPTIMIZATIONS:
1371
+ * - Memoized client connections with fast config hashing
1372
+ * - Single-pass validation with pre-allocated arrays
1373
+ * - Batch-optimized retry strategy (batches of 10, then individual)
1374
+ * - Optimized ClickHouse settings for large datasets
1375
+ * - Reduced memory allocations and object creation
1376
+ *
1377
+ * Uses advanced typia validation when available for comprehensive type checking,
1378
+ * with fallback to basic validation for compatibility.
1379
+ *
1380
+ * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1381
+ * If the configuration changes, a new client will be automatically created.
1382
+ *
1383
+ * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1384
+ * @param options Optional configuration for error handling, validation, and insertion behavior
1385
+ * @returns Promise resolving to detailed insertion results
1386
+ * @throws {ConfigError} When configuration cannot be read or parsed
1387
+ * @throws {ClickHouseError} When insertion fails based on the error strategy
1388
+ * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1389
+ *
1390
+ * @example
1391
+ * ```typescript
1392
+ * // Create an OlapTable instance (typia validators auto-injected)
1393
+ * const userTable = new OlapTable<User>('users');
1394
+ *
1395
+ * // Insert with comprehensive typia validation
1396
+ * const result1 = await userTable.insert([
1397
+ * { id: 1, name: 'John', email: 'john@example.com' },
1398
+ * { id: 2, name: 'Jane', email: 'jane@example.com' }
1399
+ * ]);
1400
+ *
1401
+ * // Insert data with stream input (validation not available for streams)
1402
+ * const dataStream = new Readable({
1403
+ * objectMode: true,
1404
+ * read() {} // stream implementation elided
1405
+ * });
1406
+ * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1407
+ *
1408
+ * // Insert with validation disabled for performance
1409
+ * const result3 = await userTable.insert(data, { validate: false });
1410
+ *
1411
+ * // Insert with error handling strategies
1412
+ * const result4 = await userTable.insert(mixedData, {
1413
+ * strategy: 'isolate',
1414
+ * allowErrorsRatio: 0.1,
1415
+ * validate: true // Use typia validation (default)
1416
+ * });
1417
+ *
1418
+ * // Optional: Clean up connection when completely done
1419
+ * await userTable.closeClient();
1420
+ * ```
1421
+ */
1422
+ async insert(data, options) {
1423
+ const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1424
+ const emptyResult = this.handleEmptyData(data, isStream);
1425
+ if (emptyResult) {
1426
+ return emptyResult;
1427
+ }
1428
+ let validatedData = [];
1429
+ let validationErrors = [];
1430
+ if (!isStream && shouldValidate) {
1431
+ const validationResult = await this.performPreInsertionValidation(
1432
+ data,
1433
+ shouldValidate,
1434
+ strategy,
1435
+ options
1436
+ );
1437
+ validatedData = validationResult.validatedData;
1438
+ validationErrors = validationResult.validationErrors;
1439
+ } else {
1440
+ validatedData = isStream ? [] : data;
1441
+ }
1442
+ const { client } = await this.getMemoizedClient();
1443
+ const tableName = this.generateTableName();
1444
+ try {
1445
+ const insertOptions = this.prepareInsertOptions(
1446
+ tableName,
1447
+ data,
1448
+ validatedData,
1449
+ isStream,
1450
+ strategy,
1451
+ options
1452
+ );
1453
+ await client.insert(insertOptions);
1454
+ return this.createSuccessResult(
1455
+ data,
1456
+ validatedData,
1457
+ validationErrors,
1458
+ isStream,
1459
+ shouldValidate,
1460
+ strategy
1461
+ );
1462
+ } catch (batchError) {
1463
+ return await this.handleInsertionError(
1464
+ batchError,
1465
+ strategy,
1466
+ tableName,
1467
+ data,
1468
+ validatedData,
1469
+ validationErrors,
1470
+ isStream,
1471
+ shouldValidate,
1472
+ options
1473
+ );
1474
+ }
1475
+ }
1476
+ // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1477
+ // were removed in ENG-856. Use direct configuration instead, e.g.:
1478
+ // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1479
+ };
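
Expanding the note above into a full call: the removed factories map onto plain constructor configuration. The field names follow the note's own example; `User` and the import path are placeholders:

```typescript
import { OlapTable, ClickHouseEngines } from "@514labs/moose-lib"; // assumed import path

const dedupedUsers = new OlapTable<User>("users", {
  engine: ClickHouseEngines.ReplacingMergeTree,
  orderByFields: ["id"],
  ver: "updated_at", // the row with the highest updated_at survives a merge
});
```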
1480
+
1481
+ // src/dmv2/sdk/stream.ts
1482
+ var import_node_crypto3 = require("crypto");
1483
+ var RoutedMessage = class {
1484
+ /** The destination stream for the message */
1485
+ destination;
1486
+ /** The message value(s) to send */
1487
+ values;
1488
+ /**
1489
+ * Creates a new routed message.
1490
+ *
1491
+ * @param destination The target stream
1492
+ * @param values The message(s) to route
1493
+ */
1494
+ constructor(destination, values) {
1495
+ this.destination = destination;
1496
+ this.values = values;
1497
+ }
1498
+ };
1499
+ var Stream = class extends TypedBase {
1500
+ defaultDeadLetterQueue;
1501
+ /** @internal Memoized KafkaJS producer for reusing connections across sends */
1502
+ _memoizedProducer;
1503
+ /** @internal Hash of the configuration used to create the memoized Kafka producer */
1504
+ _kafkaConfigHash;
1505
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
1506
+ super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1507
+ const streams = getMooseInternal().streams;
1508
+ if (streams.has(name)) {
1509
+ throw new Error(`Stream with name ${name} already exists`);
1510
+ }
1511
+ streams.set(name, this);
1512
+ this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1513
+ }
1514
+ /**
1515
+ * Internal map storing transformation configurations.
1516
+ * Maps destination stream names to arrays of transformation functions and their configs.
1517
+ *
1518
+ * @internal
1519
+ */
1520
+ _transformations = /* @__PURE__ */ new Map();
1521
+ /**
1522
+ * Internal function for multi-stream transformations.
1523
+ * Allows a single transformation to route messages to multiple destinations.
1524
+ *
1525
+ * @internal
1526
+ */
1527
+ _multipleTransformations;
1528
+ /**
1529
+ * Internal array storing consumer configurations.
1530
+ *
1531
+ * @internal
1532
+ */
1533
+ _consumers = new Array();
1534
+ /**
1535
+ * Builds the full Kafka topic name including optional namespace and version suffix.
1536
+ * The version suffix is appended as _x_y_z, with dots in the version replaced by underscores.
1537
+ */
1538
+ buildFullTopicName(namespace) {
1539
+ const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1540
+ const base = `${this.name}${versionSuffix}`;
1541
+ return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1542
+ }
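
Concretely, the naming rules above compose like this (assumed values, shown for illustration):

```typescript
// config.version = "1.2.0"  ->  versionSuffix = "_1_2_0"
// name = "events"           ->  base          = "events_1_2_0"
// namespace = "prod"        ->  topic         = "prod.events_1_2_0"
// no namespace              ->  topic         = "events_1_2_0"
```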
1543
+ /**
1544
+ * Creates a fast hash string from relevant Kafka configuration fields.
1545
+ */
1546
+ createConfigHash(kafkaConfig) {
1547
+ const configString = [
1548
+ kafkaConfig.broker,
1549
+ kafkaConfig.messageTimeoutMs,
1550
+ kafkaConfig.saslUsername,
1551
+ kafkaConfig.saslPassword,
1552
+ kafkaConfig.saslMechanism,
1553
+ kafkaConfig.securityProtocol,
1554
+ kafkaConfig.namespace
1555
+ ].join(":");
1556
+ return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
1557
+ }
1558
+ /**
1559
+ * Gets or creates a memoized KafkaJS producer using runtime configuration.
1560
+ */
1561
+ async getMemoizedProducer() {
1562
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1563
+ const configRegistry = globalThis._mooseConfigRegistry;
1564
+ const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1565
+ const kafkaConfig = await configRegistry.getKafkaConfig();
1566
+ const currentHash = this.createConfigHash(kafkaConfig);
1567
+ if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1568
+ return { producer: this._memoizedProducer, kafkaConfig };
1569
+ }
1570
+ if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1571
+ try {
1572
+ await this._memoizedProducer.disconnect();
1573
+ } catch {
1574
+ }
1575
+ this._memoizedProducer = void 0;
1576
+ }
1577
+ const clientId = `moose-sdk-stream-${this.name}`;
1578
+ const logger = {
1579
+ logPrefix: clientId,
1580
+ log: (message) => {
1581
+ console.log(`${clientId}: ${message}`);
1582
+ },
1583
+ error: (message) => {
1584
+ console.error(`${clientId}: ${message}`);
1585
+ },
1586
+ warn: (message) => {
1587
+ console.warn(`${clientId}: ${message}`);
1893
1588
  }
1894
1589
  };
1895
- DeadLetterQueue = class extends Stream {
1896
- constructor(name, config, typeGuard) {
1897
- if (typeGuard === void 0) {
1898
- throw new Error(
1899
- "Supply the type param T so that the schema is inserted by the compiler plugin."
1900
- );
1901
- }
1902
- super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1903
- this.typeGuard = typeGuard;
1904
- getMooseInternal().streams.set(name, this);
1590
+ const producer = await getKafkaProducer2(
1591
+ {
1592
+ clientId,
1593
+ broker: kafkaConfig.broker,
1594
+ securityProtocol: kafkaConfig.securityProtocol,
1595
+ saslUsername: kafkaConfig.saslUsername,
1596
+ saslPassword: kafkaConfig.saslPassword,
1597
+ saslMechanism: kafkaConfig.saslMechanism
1598
+ },
1599
+ logger
1600
+ );
1601
+ this._memoizedProducer = producer;
1602
+ this._kafkaConfigHash = currentHash;
1603
+ return { producer, kafkaConfig };
1604
+ }
1605
+ /**
1606
+ * Closes the memoized Kafka producer if it exists.
1607
+ */
1608
+ async closeProducer() {
1609
+ if (this._memoizedProducer) {
1610
+ try {
1611
+ await this._memoizedProducer.disconnect();
1612
+ } catch {
1613
+ } finally {
1614
+ this._memoizedProducer = void 0;
1615
+ this._kafkaConfigHash = void 0;
1905
1616
  }
1906
- /**
1907
- * Internal type guard function for validating and casting original records.
1908
- *
1909
- * @internal
1910
- */
1911
- typeGuard;
1912
- /**
1913
- * Adds a transformation step for dead letter records.
1914
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1915
- *
1916
- * @template U The output type for the transformation
1917
- * @param destination The destination stream for transformed messages
1918
- * @param transformation Function to transform dead letter records
1919
- * @param config Optional transformation configuration
1920
- */
1921
- addTransform(destination, transformation, config) {
1922
- const withValidate = (deadLetter) => {
1923
- attachTypeGuard(deadLetter, this.typeGuard);
1924
- return transformation(deadLetter);
1925
- };
1926
- super.addTransform(destination, withValidate, config);
1617
+ }
1618
+ }
1619
+ /**
1620
+ * Sends one or more records to this stream's Kafka topic.
1621
+ * Values are JSON-serialized as message values.
1622
+ */
1623
+ async send(values) {
1624
+ const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1625
+ if (flat.length === 0) return;
1626
+ const { producer, kafkaConfig } = await this.getMemoizedProducer();
1627
+ const topic = this.buildFullTopicName(kafkaConfig.namespace);
1628
+ const sr = this.config.schemaConfig;
1629
+ if (sr && sr.kind === "JSON") {
1630
+ const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1631
+ if (!schemaRegistryUrl) {
1632
+ throw new Error("Schema Registry URL not configured");
1633
+ }
1634
+ const {
1635
+ default: { SchemaRegistry }
1636
+ } = await import("@kafkajs/confluent-schema-registry");
1637
+ const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1638
+ let schemaId = void 0;
1639
+ if ("id" in sr.reference) {
1640
+ schemaId = sr.reference.id;
1641
+ } else if ("subjectLatest" in sr.reference) {
1642
+ schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1643
+ } else if ("subject" in sr.reference) {
1644
+ schemaId = await registry.getRegistryId(
1645
+ sr.reference.subject,
1646
+ sr.reference.version
1647
+ );
1927
1648
  }
1928
- /**
1929
- * Adds a consumer for dead letter records.
1930
- * The consumer function receives a DeadLetter<T> with type recovery capabilities.
1931
- *
1932
- * @param consumer Function to process dead letter records
1933
- * @param config Optional consumer configuration
1934
- */
1935
- addConsumer(consumer, config) {
1936
- const withValidate = (deadLetter) => {
1937
- attachTypeGuard(deadLetter, this.typeGuard);
1938
- return consumer(deadLetter);
1939
- };
1940
- super.addConsumer(withValidate, config);
1649
+ if (schemaId === void 0) {
1650
+ throw new Error("Malformed schema reference.");
1941
1651
  }
1942
- /**
1943
- * Adds a multi-stream transformation for dead letter records.
1944
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1945
- *
1946
- * @param transformation Function to route dead letter records to multiple destinations
1947
- */
1948
- addMultiTransform(transformation) {
1949
- const withValidate = (deadLetter) => {
1950
- attachTypeGuard(deadLetter, this.typeGuard);
1951
- return transformation(deadLetter);
1952
- };
1953
- super.addMultiTransform(withValidate);
1652
+ const encoded = await Promise.all(
1653
+ flat.map(
1654
+ (v) => registry.encode(schemaId, v)
1655
+ )
1656
+ );
1657
+ await producer.send({
1658
+ topic,
1659
+ messages: encoded.map((value) => ({ value }))
1660
+ });
1661
+ return;
1662
+ } else if (sr !== void 0) {
1663
+ throw new Error("Currently only JSON Schema is supported.");
1664
+ }
1665
+ await producer.send({
1666
+ topic,
1667
+ messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1668
+ });
1669
+ }
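
In the common case (no `schemaConfig`) `send` JSON-serializes each record onto the derived topic; with a JSON Schema Registry reference it encodes through the registry instead. A minimal usage sketch (the stream name and record shape are illustrative):

```typescript
interface UserEvent { userId: number; kind: string; }
const events = new Stream<UserEvent>("user_events");

// Single values and arrays both work; null/undefined are ignored.
await events.send({ userId: 1, kind: "signup" });
await events.send([
  { userId: 2, kind: "login" },
  { userId: 3, kind: "logout" },
]);

// Optional: release the memoized Kafka producer when the process is done.
await events.closeProducer();
```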
1670
+ /**
1671
+ * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1672
+ * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1673
+ *
1674
+ * @template U The data type of the messages in the destination stream.
1675
+ * @param destination The destination stream for the transformed messages.
1676
+ * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1677
+ * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1678
+ * @param config Optional configuration for this specific transformation step, like a version.
1679
+ */
1680
+ addTransform(destination, transformation, config) {
1681
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1682
+ const transformConfig = {
1683
+ ...config ?? {},
1684
+ sourceFile
1685
+ };
1686
+ if (transformConfig.deadLetterQueue === void 0) {
1687
+ transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1688
+ }
1689
+ if (this._transformations.has(destination.name)) {
1690
+ const existingTransforms = this._transformations.get(destination.name);
1691
+ const hasVersion = existingTransforms.some(
1692
+ ([_, __, cfg]) => cfg.version === transformConfig.version
1693
+ );
1694
+ if (!hasVersion) {
1695
+ existingTransforms.push([destination, transformation, transformConfig]);
1954
1696
  }
1697
+ } else {
1698
+ this._transformations.set(destination.name, [
1699
+ [destination, transformation, transformConfig]
1700
+ ]);
1701
+ }
1702
+ }
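
A short sketch of the contract above: returning `null`/`undefined`/`[]` filters a message out, returning an array fans it out, and a second transform to the same destination is only registered when its `version` differs (types and stream names are placeholders):

```typescript
interface RawEvent { userId?: number; ts: number; }
interface CleanEvent { userId: number; ts: number; }

const raw = new Stream<RawEvent>("raw_events");
const clean = new Stream<CleanEvent>("clean_events");

raw.addTransform(
  clean,
  (event) => {
    if (!event.userId) return null;                 // drop malformed records
    return { userId: event.userId, ts: event.ts };  // return an array to emit several
  },
  { version: "1.0.0" }, // another transform to `clean` must use a different version
);
```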
1703
+ /**
1704
+ * Adds a consumer function that processes messages from this stream.
1705
+ * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1706
+ *
1707
+ * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1708
+ * @param config Optional configuration for this specific consumer, like a version.
1709
+ */
1710
+ addConsumer(consumer, config) {
1711
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1712
+ const consumerConfig = {
1713
+ ...config ?? {},
1714
+ sourceFile
1955
1715
  };
1716
+ if (consumerConfig.deadLetterQueue === void 0) {
1717
+ consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1718
+ }
1719
+ const hasVersion = this._consumers.some(
1720
+ (existing) => existing.config.version === consumerConfig.version
1721
+ );
1722
+ if (!hasVersion) {
1723
+ this._consumers.push({ consumer, config: consumerConfig });
1724
+ }
1956
1725
  }
1957
- });
1726
+ /**
1727
+ * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1728
+ * @param values The value or values to send to this stream.
1729
+ * @returns A `RoutedMessage` object associating the values with this stream.
1730
+ *
1731
+ * @example
1732
+ * ```typescript
1733
+ * sourceStream.addMultiTransform((record) => [
1734
+ * destinationStream1.routed(transformedRecord1),
1735
+ * destinationStream2.routed([record2a, record2b])
1736
+ * ]);
1737
+ * ```
1738
+ */
1739
+ routed = (values) => new RoutedMessage(this, values);
1740
+ /**
1741
+ * Adds a single transformation function that can route messages to multiple destination streams.
1742
+ * This is an alternative to adding multiple individual `addTransform` calls.
1743
+ * Only one multi-transform function can be added per stream.
1744
+ *
1745
+ * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1746
+ * each specifying a destination stream and the message(s) to send to it.
1747
+ */
1748
+ addMultiTransform(transformation) {
1749
+ this._multipleTransformations = transformation;
1750
+ }
1751
+ };
1752
+ function attachTypeGuard(dl, typeGuard) {
1753
+ dl.asTyped = () => typeGuard(dl.originalRecord);
1754
+ }
1755
+ var DeadLetterQueue = class extends Stream {
1756
+ constructor(name, config, typeGuard) {
1757
+ if (typeGuard === void 0) {
1758
+ throw new Error(
1759
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
1760
+ );
1761
+ }
1762
+ super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1763
+ this.typeGuard = typeGuard;
1764
+ getMooseInternal().streams.set(name, this);
1765
+ }
1766
+ /**
1767
+ * Internal type guard function for validating and casting original records.
1768
+ *
1769
+ * @internal
1770
+ */
1771
+ typeGuard;
1772
+ /**
1773
+ * Adds a transformation step for dead letter records.
1774
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1775
+ *
1776
+ * @template U The output type for the transformation
1777
+ * @param destination The destination stream for transformed messages
1778
+ * @param transformation Function to transform dead letter records
1779
+ * @param config Optional transformation configuration
1780
+ */
1781
+ addTransform(destination, transformation, config) {
1782
+ const withValidate = (deadLetter) => {
1783
+ attachTypeGuard(deadLetter, this.typeGuard);
1784
+ return transformation(deadLetter);
1785
+ };
1786
+ super.addTransform(destination, withValidate, config);
1787
+ }
1788
+ /**
1789
+ * Adds a consumer for dead letter records.
1790
+ * The consumer function receives a DeadLetter<T> with type recovery capabilities.
1791
+ *
1792
+ * @param consumer Function to process dead letter records
1793
+ * @param config Optional consumer configuration
1794
+ */
1795
+ addConsumer(consumer, config) {
1796
+ const withValidate = (deadLetter) => {
1797
+ attachTypeGuard(deadLetter, this.typeGuard);
1798
+ return consumer(deadLetter);
1799
+ };
1800
+ super.addConsumer(withValidate, config);
1801
+ }
1802
+ /**
1803
+ * Adds a multi-stream transformation for dead letter records.
1804
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1805
+ *
1806
+ * @param transformation Function to route dead letter records to multiple destinations
1807
+ */
1808
+ addMultiTransform(transformation) {
1809
+ const withValidate = (deadLetter) => {
1810
+ attachTypeGuard(deadLetter, this.typeGuard);
1811
+ return transformation(deadLetter);
1812
+ };
1813
+ super.addMultiTransform(withValidate);
1814
+ }
1815
+ };
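
The `attachTypeGuard` wiring above is what gives dead-letter handlers their `asTyped()` helper: it runs the compiler-injected guard over `originalRecord` to recover the original type `T`. A hedged consumer sketch (the queue name and record type are illustrative, and `asTyped` is assumed to throw when the payload no longer matches):

```typescript
interface UserEvent { userId: number; kind: string; }
const dlq = new DeadLetterQueue<UserEvent>("user_events_dlq");

dlq.addConsumer((deadLetter) => {
  try {
    const original = deadLetter.asTyped(); // re-validate back into UserEvent
    console.log("recoverable record for user", original.userId);
  } catch {
    console.warn("payload no longer matches the schema; skipping");
  }
});
```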
1958
1816
 
1959
1817
  // src/dmv2/sdk/workflow.ts
1960
- var Task, Workflow;
1961
- var init_workflow = __esm({
1962
- "src/dmv2/sdk/workflow.ts"() {
1963
- "use strict";
1964
- init_internal();
1965
- Task = class {
1966
- /**
1967
- * Creates a new Task instance.
1968
- *
1969
- * @param name - Unique identifier for the task
1970
- * @param config - Configuration object defining the task behavior
1971
- *
1972
- * @example
1973
- * ```typescript
1974
- * // No input, no output
1975
- * const task1 = new Task<null, void>("task1", {
1976
- * run: async () => {
1977
- * console.log("No input/output");
1978
- * }
1979
- * });
1980
- *
1981
- * // No input, but has output
1982
- * const task2 = new Task<null, OutputType>("task2", {
1983
- * run: async () => {
1984
- * return someOutput;
1985
- * }
1986
- * });
1987
- *
1988
- * // Has input, no output
1989
- * const task3 = new Task<InputType, void>("task3", {
1990
- * run: async (input: InputType) => {
1991
- * // process input but return nothing
1992
- * }
1993
- * });
1994
- *
1995
- * // Has both input and output
1996
- * const task4 = new Task<InputType, OutputType>("task4", {
1997
- * run: async (input: InputType) => {
1998
- * return process(input);
1999
- * }
2000
- * });
2001
- * ```
2002
- */
2003
- constructor(name, config) {
2004
- this.name = name;
2005
- this.config = config;
1818
+ var Task = class {
1819
+ /**
1820
+ * Creates a new Task instance.
1821
+ *
1822
+ * @param name - Unique identifier for the task
1823
+ * @param config - Configuration object defining the task behavior
1824
+ *
1825
+ * @example
1826
+ * ```typescript
1827
+ * // No input, no output
1828
+ * const task1 = new Task<null, void>("task1", {
1829
+ * run: async () => {
1830
+ * console.log("No input/output");
1831
+ * }
1832
+ * });
1833
+ *
1834
+ * // No input, but has output
1835
+ * const task2 = new Task<null, OutputType>("task2", {
1836
+ * run: async () => {
1837
+ * return someOutput;
1838
+ * }
1839
+ * });
1840
+ *
1841
+ * // Has input, no output
1842
+ * const task3 = new Task<InputType, void>("task3", {
1843
+ * run: async (input: InputType) => {
1844
+ * // process input but return nothing
1845
+ * }
1846
+ * });
1847
+ *
1848
+ * // Has both input and output
1849
+ * const task4 = new Task<InputType, OutputType>("task4", {
1850
+ * run: async (input: InputType) => {
1851
+ * return process(input);
1852
+ * }
1853
+ * });
1854
+ * ```
1855
+ */
1856
+ constructor(name, config) {
1857
+ this.name = name;
1858
+ this.config = config;
1859
+ }
1860
+ };
1861
+ var Workflow = class {
1862
+ /**
1863
+ * Creates a new Workflow instance and registers it with the Moose system.
1864
+ *
1865
+ * @param name - Unique identifier for the workflow
1866
+ * @param config - Configuration object defining the workflow behavior and task orchestration
1867
+ * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
1868
+ */
1869
+ constructor(name, config) {
1870
+ this.name = name;
1871
+ this.config = config;
1872
+ const workflows = getMooseInternal().workflows;
1873
+ if (workflows.has(name)) {
1874
+ throw new Error(`Workflow with name ${name} already exists`);
1875
+ }
1876
+ this.validateTaskGraph(config.startingTask, name);
1877
+ workflows.set(name, this);
1878
+ }
1879
+ /**
1880
+ * Validates the task graph to ensure there are no null tasks or infinite loops.
1881
+ *
1882
+ * @private
1883
+ * @param startingTask - The starting task to begin validation from
1884
+ * @param workflowName - The name of the workflow being validated (for error messages)
1885
+ * @throws {Error} When null/undefined tasks are found or infinite loops are detected
1886
+ */
1887
+ validateTaskGraph(startingTask, workflowName) {
1888
+ if (startingTask === null || startingTask === void 0) {
1889
+ throw new Error(
1890
+ `Workflow "${workflowName}" has a null or undefined starting task`
1891
+ );
1892
+ }
1893
+ const visited = /* @__PURE__ */ new Set();
1894
+ const recursionStack = /* @__PURE__ */ new Set();
1895
+ const validateTask = (task, currentPath) => {
1896
+ if (task === null || task === void 0) {
1897
+ const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
1898
+ throw new Error(
1899
+ `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
1900
+ );
2006
1901
  }
2007
- };
2008
- Workflow = class {
2009
- /**
2010
- * Creates a new Workflow instance and registers it with the Moose system.
2011
- *
2012
- * @param name - Unique identifier for the workflow
2013
- * @param config - Configuration object defining the workflow behavior and task orchestration
2014
- * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
2015
- */
2016
- constructor(name, config) {
2017
- this.name = name;
2018
- this.config = config;
2019
- const workflows = getMooseInternal().workflows;
2020
- if (workflows.has(name)) {
2021
- throw new Error(`Workflow with name ${name} already exists`);
2022
- }
2023
- this.validateTaskGraph(config.startingTask, name);
2024
- workflows.set(name, this);
1902
+ const taskName = task.name;
1903
+ if (recursionStack.has(taskName)) {
1904
+ const cycleStartIndex = currentPath.indexOf(taskName);
1905
+ const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
1906
+ throw new Error(
1907
+ `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
1908
+ );
2025
1909
  }
2026
- /**
2027
- * Validates the task graph to ensure there are no null tasks or infinite loops.
2028
- *
2029
- * @private
2030
- * @param startingTask - The starting task to begin validation from
2031
- * @param workflowName - The name of the workflow being validated (for error messages)
2032
- * @throws {Error} When null/undefined tasks are found or infinite loops are detected
2033
- */
2034
- validateTaskGraph(startingTask, workflowName) {
2035
- if (startingTask === null || startingTask === void 0) {
2036
- throw new Error(
2037
- `Workflow "${workflowName}" has a null or undefined starting task`
2038
- );
1910
+ if (visited.has(taskName)) {
1911
+ return;
1912
+ }
1913
+ visited.add(taskName);
1914
+ recursionStack.add(taskName);
1915
+ if (task.config.onComplete) {
1916
+ for (const nextTask of task.config.onComplete) {
1917
+ validateTask(nextTask, [...currentPath, taskName]);
2039
1918
  }
2040
- const visited = /* @__PURE__ */ new Set();
2041
- const recursionStack = /* @__PURE__ */ new Set();
2042
- const validateTask = (task, currentPath) => {
2043
- if (task === null || task === void 0) {
2044
- const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
2045
- throw new Error(
2046
- `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
2047
- );
2048
- }
2049
- const taskName = task.name;
2050
- if (recursionStack.has(taskName)) {
2051
- const cycleStartIndex = currentPath.indexOf(taskName);
2052
- const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
2053
- throw new Error(
2054
- `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
2055
- );
2056
- }
2057
- if (visited.has(taskName)) {
2058
- return;
2059
- }
2060
- visited.add(taskName);
2061
- recursionStack.add(taskName);
2062
- if (task.config.onComplete) {
2063
- for (const nextTask of task.config.onComplete) {
2064
- validateTask(nextTask, [...currentPath, taskName]);
2065
- }
2066
- }
2067
- recursionStack.delete(taskName);
2068
- };
2069
- validateTask(startingTask, []);
2070
1919
  }
1920
+ recursionStack.delete(taskName);
2071
1921
  };
1922
+ validateTask(startingTask, []);
2072
1923
  }
2073
- });
1924
+ };
2074
1925
 
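A usage sketch of the cycle check above, in TypeScript. The user-facing Task and Workflow signatures are assumed from the compiled calls visible later in this diff (ETLPipeline constructs tasks the same way); the import path assumes these classes are re-exported from the package root.

import { Task, Workflow } from "@514labs/moose-lib";

const extract = new Task("etl_extract", {
  run: async () => ["a", "b", "c"],
});
const load = new Task("etl_load", {
  run: async ({ input }: { input: string[] }) => {
    console.log("loading", input);
  },
});

// validateTaskGraph walks config.onComplete edges depth-first, keeping a
// recursion stack so a back-edge is reported with its cycle path.
extract.config.onComplete = [load];

// Adding a back-edge before constructing the Workflow would make validation
// throw: Workflow "etl" contains an infinite loop in task chain:
//   etl_extract -> etl_load -> etl_extract
// load.config.onComplete = [extract];

new Workflow("etl", { startingTask: extract, retries: 1, timeout: "30m" });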
2075
1926
  // src/dmv2/sdk/ingestApi.ts
2076
- var IngestApi;
2077
- var init_ingestApi = __esm({
2078
- "src/dmv2/sdk/ingestApi.ts"() {
2079
- "use strict";
2080
- init_typedBase();
2081
- init_internal();
2082
- IngestApi = class extends TypedBase {
2083
- constructor(name, config, schema, columns, validators, allowExtraFields) {
2084
- super(name, config, schema, columns, void 0, allowExtraFields);
2085
- const ingestApis = getMooseInternal().ingestApis;
2086
- if (ingestApis.has(name)) {
2087
- throw new Error(`Ingest API with name ${name} already exists`);
2088
- }
2089
- ingestApis.set(name, this);
2090
- }
2091
- };
1927
+ var IngestApi = class extends TypedBase {
1928
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
1929
+ super(name, config, schema, columns, void 0, allowExtraFields);
1930
+ const ingestApis = getMooseInternal().ingestApis;
1931
+ if (ingestApis.has(name)) {
1932
+ throw new Error(`Ingest API with name ${name} already exists`);
1933
+ }
1934
+ ingestApis.set(name, this);
2092
1935
  }
2093
- });
1936
+ };
2094
1937
 
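For the registry guard above, a sketch of the user-facing form. In application code the schema, columns, and validator arguments are injected by the Moose compiler plugin, so the constructor call assumed here takes only a name and config; `events` is a hypothetical Stream defined elsewhere.

import { IngestApi, Stream } from "@514labs/moose-lib";

interface Event { id: string; value: number }

declare const events: Stream<Event>; // hypothetical, defined elsewhere

const ingest = new IngestApi<Event>("events", { destination: events });

// The constructor registers itself by name; a duplicate throws:
// new IngestApi<Event>("events", { destination: events });
// -> Error: Ingest API with name events already exists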
2095
1938
  // src/dmv2/sdk/consumptionApi.ts
2096
- var Api, ConsumptionApi;
2097
- var init_consumptionApi = __esm({
2098
- "src/dmv2/sdk/consumptionApi.ts"() {
2099
- "use strict";
2100
- init_typedBase();
2101
- init_internal();
2102
- Api = class extends TypedBase {
2103
- /** @internal The handler function that processes requests and generates responses. */
2104
- _handler;
2105
- /** @internal The JSON schema definition for the response type R. */
2106
- responseSchema;
2107
- constructor(name, handler, config, schema, columns, responseSchema) {
2108
- super(name, config ?? {}, schema, columns);
2109
- this._handler = handler;
2110
- this.responseSchema = responseSchema ?? {
2111
- version: "3.1",
2112
- schemas: [{ type: "array", items: { type: "object" } }],
2113
- components: { schemas: {} }
2114
- };
2115
- const apis = getMooseInternal().apis;
2116
- const key = `${name}${config?.version ? `:${config.version}` : ""}`;
2117
- if (apis.has(key)) {
2118
- throw new Error(
2119
- `Consumption API with name ${name} and version ${config?.version} already exists`
2120
- );
2121
- }
2122
- apis.set(key, this);
2123
- if (config?.path) {
2124
- if (config.version) {
2125
- const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
2126
- if (pathEndsWithVersion) {
2127
- if (apis.has(config.path)) {
2128
- const existing = apis.get(config.path);
2129
- throw new Error(
2130
- `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2131
- );
2132
- }
2133
- apis.set(config.path, this);
2134
- } else {
2135
- const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
2136
- if (apis.has(versionedPath)) {
2137
- const existing = apis.get(versionedPath);
2138
- throw new Error(
2139
- `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
2140
- );
2141
- }
2142
- apis.set(versionedPath, this);
2143
- if (!apis.has(config.path)) {
2144
- apis.set(config.path, this);
2145
- }
2146
- }
2147
- } else {
2148
- if (apis.has(config.path)) {
2149
- const existing = apis.get(config.path);
2150
- throw new Error(
2151
- `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2152
- );
2153
- }
2154
- apis.set(config.path, this);
2155
- }
2156
- }
2157
- }
2158
- /**
2159
- * Retrieves the handler function associated with this Consumption API.
2160
- * @returns The handler function.
2161
- */
2162
- getHandler = () => {
2163
- return this._handler;
2164
- };
2165
- async call(baseUrl, queryParams) {
2166
- let path2;
2167
- if (this.config?.path) {
2168
- if (this.config.version) {
2169
- const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2170
- if (pathEndsWithVersion) {
2171
- path2 = this.config.path;
2172
- } else {
2173
- path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2174
- }
2175
- } else {
2176
- path2 = this.config.path;
1939
+ var Api = class extends TypedBase {
1940
+ /** @internal The handler function that processes requests and generates responses. */
1941
+ _handler;
1942
+ /** @internal The JSON schema definition for the response type R. */
1943
+ responseSchema;
1944
+ constructor(name, handler, config, schema, columns, responseSchema) {
1945
+ super(name, config ?? {}, schema, columns);
1946
+ this._handler = handler;
1947
+ this.responseSchema = responseSchema ?? {
1948
+ version: "3.1",
1949
+ schemas: [{ type: "array", items: { type: "object" } }],
1950
+ components: { schemas: {} }
1951
+ };
1952
+ const apis = getMooseInternal().apis;
1953
+ const key = `${name}${config?.version ? `:${config.version}` : ""}`;
1954
+ if (apis.has(key)) {
1955
+ throw new Error(
1956
+ `Consumption API with name ${name} and version ${config?.version} already exists`
1957
+ );
1958
+ }
1959
+ apis.set(key, this);
1960
+ if (config?.path) {
1961
+ if (config.version) {
1962
+ const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
1963
+ if (pathEndsWithVersion) {
1964
+ if (apis.has(config.path)) {
1965
+ const existing = apis.get(config.path);
1966
+ throw new Error(
1967
+ `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
1968
+ );
2177
1969
  }
1970
+ apis.set(config.path, this);
2178
1971
  } else {
2179
- path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2180
- }
2181
- const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2182
- const searchParams = url.searchParams;
2183
- for (const [key, value] of Object.entries(queryParams)) {
2184
- if (Array.isArray(value)) {
2185
- for (const item of value) {
2186
- if (item !== null && item !== void 0) {
2187
- searchParams.append(key, String(item));
2188
- }
2189
- }
2190
- } else if (value !== null && value !== void 0) {
2191
- searchParams.append(key, String(value));
2192
- }
2193
- }
2194
- const response = await fetch(url, {
2195
- method: "GET",
2196
- headers: {
2197
- Accept: "application/json"
2198
- }
2199
- });
2200
- if (!response.ok) {
2201
- throw new Error(`HTTP error! status: ${response.status}`);
2202
- }
2203
- const data = await response.json();
2204
- return data;
2205
- }
2206
- };
2207
- ConsumptionApi = Api;
2208
- }
2209
- });
2210
-
2211
- // src/dmv2/sdk/ingestPipeline.ts
2212
- var IngestPipeline;
2213
- var init_ingestPipeline = __esm({
2214
- "src/dmv2/sdk/ingestPipeline.ts"() {
2215
- "use strict";
2216
- init_typedBase();
2217
- init_stream();
2218
- init_olapTable();
2219
- init_ingestApi();
2220
- init_helpers();
2221
- IngestPipeline = class extends TypedBase {
2222
- /**
2223
- * The OLAP table component of the pipeline, if configured.
2224
- * Provides analytical query capabilities for the ingested data.
2225
- * Only present when `config.table` is not `false`.
2226
- */
2227
- table;
2228
- /**
2229
- * The stream component of the pipeline, if configured.
2230
- * Handles real-time data flow and processing between components.
2231
- * Only present when `config.stream` is not `false`.
2232
- */
2233
- stream;
2234
- /**
2235
- * The ingest API component of the pipeline, if configured.
2236
- * Provides HTTP endpoints for data ingestion.
2237
- * Only present when `config.ingestApi` is not `false`.
2238
- */
2239
- ingestApi;
2240
- /** The dead letter queue of the pipeline, if configured. */
2241
- deadLetterQueue;
2242
- constructor(name, config, schema, columns, validators, allowExtraFields) {
2243
- super(name, config, schema, columns, validators, allowExtraFields);
2244
- if (config.ingest !== void 0) {
2245
- console.warn(
2246
- "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2247
- );
2248
- if (config.ingestApi === void 0) {
2249
- config.ingestApi = config.ingest;
1972
+ const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
1973
+ if (apis.has(versionedPath)) {
1974
+ const existing = apis.get(versionedPath);
1975
+ throw new Error(
1976
+ `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
1977
+ );
1978
+ }
1979
+ apis.set(versionedPath, this);
1980
+ if (!apis.has(config.path)) {
1981
+ apis.set(config.path, this);
2250
1982
  }
2251
1983
  }
2252
- if (config.table) {
2253
- const tableConfig = typeof config.table === "object" ? {
2254
- ...config.table,
2255
- lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2256
- ...config.version && { version: config.version }
2257
- } : {
2258
- lifeCycle: config.lifeCycle,
2259
- engine: "MergeTree" /* MergeTree */,
2260
- ...config.version && { version: config.version }
2261
- };
2262
- this.table = new OlapTable(
2263
- name,
2264
- tableConfig,
2265
- this.schema,
2266
- this.columnArray,
2267
- this.validators
2268
- );
2269
- }
2270
- if (config.deadLetterQueue) {
2271
- const streamConfig = {
2272
- destination: void 0,
2273
- ...typeof config.deadLetterQueue === "object" ? {
2274
- ...config.deadLetterQueue,
2275
- lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2276
- } : { lifeCycle: config.lifeCycle },
2277
- ...config.version && { version: config.version }
2278
- };
2279
- this.deadLetterQueue = new DeadLetterQueue(
2280
- `${name}DeadLetterQueue`,
2281
- streamConfig,
2282
- validators.assert
1984
+ } else {
1985
+ if (apis.has(config.path)) {
1986
+ const existing = apis.get(config.path);
1987
+ throw new Error(
1988
+ `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2283
1989
  );
2284
1990
  }
2285
- if (config.stream) {
2286
- const streamConfig = {
2287
- destination: this.table,
2288
- defaultDeadLetterQueue: this.deadLetterQueue,
2289
- ...typeof config.stream === "object" ? {
2290
- ...config.stream,
2291
- lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2292
- } : { lifeCycle: config.lifeCycle },
2293
- ...config.version && { version: config.version }
2294
- };
2295
- this.stream = new Stream(
2296
- name,
2297
- streamConfig,
2298
- this.schema,
2299
- this.columnArray,
2300
- void 0,
2301
- this.allowExtraFields
2302
- );
2303
- this.stream.pipelineParent = this;
1991
+ apis.set(config.path, this);
1992
+ }
1993
+ }
1994
+ }
1995
+ /**
1996
+ * Retrieves the handler function associated with this Consumption API.
1997
+ * @returns The handler function.
1998
+ */
1999
+ getHandler = () => {
2000
+ return this._handler;
2001
+ };
2002
+ async call(baseUrl, queryParams) {
2003
+ let path2;
2004
+ if (this.config?.path) {
2005
+ if (this.config.version) {
2006
+ const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2007
+ if (pathEndsWithVersion) {
2008
+ path2 = this.config.path;
2009
+ } else {
2010
+ path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2304
2011
  }
2305
- const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2306
- if (effectiveIngestAPI) {
2307
- if (!this.stream) {
2308
- throw new Error("Ingest API needs a stream to write to.");
2012
+ } else {
2013
+ path2 = this.config.path;
2014
+ }
2015
+ } else {
2016
+ path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2017
+ }
2018
+ const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2019
+ const searchParams = url.searchParams;
2020
+ for (const [key, value] of Object.entries(queryParams)) {
2021
+ if (Array.isArray(value)) {
2022
+ for (const item of value) {
2023
+ if (item !== null && item !== void 0) {
2024
+ searchParams.append(key, String(item));
2309
2025
  }
2310
- const ingestConfig = {
2311
- destination: this.stream,
2312
- deadLetterQueue: this.deadLetterQueue,
2313
- ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2314
- ...config.version && { version: config.version },
2315
- ...config.path && { path: config.path }
2316
- };
2317
- this.ingestApi = new IngestApi(
2318
- name,
2319
- ingestConfig,
2320
- this.schema,
2321
- this.columnArray,
2322
- void 0,
2323
- this.allowExtraFields
2324
- );
2325
- this.ingestApi.pipelineParent = this;
2326
2026
  }
2027
+ } else if (value !== null && value !== void 0) {
2028
+ searchParams.append(key, String(value));
2327
2029
  }
2328
- };
2030
+ }
2031
+ const response = await fetch(url, {
2032
+ method: "GET",
2033
+ headers: {
2034
+ Accept: "application/json"
2035
+ }
2036
+ });
2037
+ if (!response.ok) {
2038
+ throw new Error(`HTTP error! status: ${response.status}`);
2039
+ }
2040
+ const data = await response.json();
2041
+ return data;
2329
2042
  }
2330
- });
2043
+ };
2044
+ var ConsumptionApi = Api;
2331
2045
 
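To make the path bookkeeping above concrete: a sketch of the dual registration (the `name:version` key plus the custom path, with the version appended when the path does not already end in it) and of call(), whose URL and query-parameter behavior is shown verbatim in the diff. The handler signature and type parameters are assumptions about the plugin-injected surface; the snippet uses top-level await, so it belongs in an ES module.

import { Api } from "@514labs/moose-lib";

interface Params { userId: string; tags?: string[] }
type Row = { day: string; count: number };

const dailyStats = new Api<Params, Row[]>(
  "dailyStats",
  async (params) => [{ day: "2024-01-01", count: 42 }],
  { version: "1.2", path: "stats/daily" },
);
// Registered under "dailyStats:1.2", "stats/daily/1.2", and "stats/daily".

// call() builds <base>/api/stats/daily/1.2 (version appended because the
// custom path does not already end with it) and repeats array params:
const rows = await dailyStats.call("http://localhost:4000", {
  userId: "u1",
  tags: ["a", "b"], // -> ?userId=u1&tags=a&tags=b
});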
2332
- // src/dmv2/sdk/etlPipeline.ts
2333
- var InternalBatcher, ETLPipeline;
2334
- var init_etlPipeline = __esm({
2335
- "src/dmv2/sdk/etlPipeline.ts"() {
2336
- "use strict";
2337
- init_workflow();
2338
- InternalBatcher = class {
2339
- iterator;
2340
- batchSize;
2341
- constructor(asyncIterable, batchSize = 20) {
2342
- this.iterator = asyncIterable[Symbol.asyncIterator]();
2343
- this.batchSize = batchSize;
2344
- }
2345
- async getNextBatch() {
2346
- const items = [];
2347
- for (let i = 0; i < this.batchSize; i++) {
2348
- const { value, done } = await this.iterator.next();
2349
- if (done) {
2350
- return { items, hasMore: false };
2351
- }
2352
- items.push(value);
2353
- }
2354
- return { items, hasMore: true };
2046
+ // src/dmv2/sdk/ingestPipeline.ts
2047
+ var IngestPipeline = class extends TypedBase {
2048
+ /**
2049
+ * The OLAP table component of the pipeline, if configured.
2050
+ * Provides analytical query capabilities for the ingested data.
2051
+ * Only present when `config.table` is not `false`.
2052
+ */
2053
+ table;
2054
+ /**
2055
+ * The stream component of the pipeline, if configured.
2056
+ * Handles real-time data flow and processing between components.
2057
+ * Only present when `config.stream` is not `false`.
2058
+ */
2059
+ stream;
2060
+ /**
2061
+ * The ingest API component of the pipeline, if configured.
2062
+ * Provides HTTP endpoints for data ingestion.
2063
+ * Only present when `config.ingestApi` is not `false`.
2064
+ */
2065
+ ingestApi;
2066
+ /** The dead letter queue of the pipeline, if configured. */
2067
+ deadLetterQueue;
2068
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2069
+ super(name, config, schema, columns, validators, allowExtraFields);
2070
+ if (config.ingest !== void 0) {
2071
+ console.warn(
2072
+ "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2073
+ );
2074
+ if (config.ingestApi === void 0) {
2075
+ config.ingestApi = config.ingest;
2355
2076
  }
2077
+ }
2078
+ if (config.table) {
2079
+ const tableConfig = typeof config.table === "object" ? {
2080
+ ...config.table,
2081
+ lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2082
+ ...config.version && { version: config.version }
2083
+ } : {
2084
+ lifeCycle: config.lifeCycle,
2085
+ engine: "MergeTree" /* MergeTree */,
2086
+ ...config.version && { version: config.version }
2087
+ };
2088
+ this.table = new OlapTable(
2089
+ name,
2090
+ tableConfig,
2091
+ this.schema,
2092
+ this.columnArray,
2093
+ this.validators
2094
+ );
2095
+ }
2096
+ if (config.deadLetterQueue) {
2097
+ const streamConfig = {
2098
+ destination: void 0,
2099
+ ...typeof config.deadLetterQueue === "object" ? {
2100
+ ...config.deadLetterQueue,
2101
+ lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2102
+ } : { lifeCycle: config.lifeCycle },
2103
+ ...config.version && { version: config.version }
2104
+ };
2105
+ this.deadLetterQueue = new DeadLetterQueue(
2106
+ `${name}DeadLetterQueue`,
2107
+ streamConfig,
2108
+ validators.assert
2109
+ );
2110
+ }
2111
+ if (config.stream) {
2112
+ const streamConfig = {
2113
+ destination: this.table,
2114
+ defaultDeadLetterQueue: this.deadLetterQueue,
2115
+ ...typeof config.stream === "object" ? {
2116
+ ...config.stream,
2117
+ lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2118
+ } : { lifeCycle: config.lifeCycle },
2119
+ ...config.version && { version: config.version }
2120
+ };
2121
+ this.stream = new Stream(
2122
+ name,
2123
+ streamConfig,
2124
+ this.schema,
2125
+ this.columnArray,
2126
+ void 0,
2127
+ this.allowExtraFields
2128
+ );
2129
+ this.stream.pipelineParent = this;
2130
+ }
2131
+ const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2132
+ if (effectiveIngestAPI) {
2133
+ if (!this.stream) {
2134
+ throw new Error("Ingest API needs a stream to write to.");
2135
+ }
2136
+ const ingestConfig = {
2137
+ destination: this.stream,
2138
+ deadLetterQueue: this.deadLetterQueue,
2139
+ ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2140
+ ...config.version && { version: config.version },
2141
+ ...config.path && { path: config.path }
2142
+ };
2143
+ this.ingestApi = new IngestApi(
2144
+ name,
2145
+ ingestConfig,
2146
+ this.schema,
2147
+ this.columnArray,
2148
+ void 0,
2149
+ this.allowExtraFields
2150
+ );
2151
+ this.ingestApi.pipelineParent = this;
2152
+ }
2153
+ }
2154
+ };
2155
+
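A sketch of what the constructor above wires together for a typical config. The type parameter is filled in by the compiler plugin, and `orderByFields` is assumed to be a valid OlapTable config key (it appears in the MaterializedView options later in this diff).

import { IngestPipeline, LifeCycle } from "@514labs/moose-lib";

interface PageView { url: string; ts: Date }

const pageViews = new IngestPipeline<PageView>("PageViews", {
  table: { orderByFields: ["ts"] },        // OlapTable, MergeTree by default
  stream: true,                            // Stream with destination = the table
  ingestApi: true,                         // IngestApi writing to the stream
  deadLetterQueue: true,                   // creates "PageViewsDeadLetterQueue"
  lifeCycle: LifeCycle.DELETION_PROTECTED, // inherited by each component
});

// ingestApi: true with stream: false would throw
// "Ingest API needs a stream to write to."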
2156
+ // src/dmv2/sdk/etlPipeline.ts
2157
+ var InternalBatcher = class {
2158
+ iterator;
2159
+ batchSize;
2160
+ constructor(asyncIterable, batchSize = 20) {
2161
+ this.iterator = asyncIterable[Symbol.asyncIterator]();
2162
+ this.batchSize = batchSize;
2163
+ }
2164
+ async getNextBatch() {
2165
+ const items = [];
2166
+ for (let i = 0; i < this.batchSize; i++) {
2167
+ const { value, done } = await this.iterator.next();
2168
+ if (done) {
2169
+ return { items, hasMore: false };
2170
+ }
2171
+ items.push(value);
2172
+ }
2173
+ return { items, hasMore: true };
2174
+ }
2175
+ };
2176
+ var ETLPipeline = class {
2177
+ constructor(name, config) {
2178
+ this.name = name;
2179
+ this.config = config;
2180
+ this.setupPipeline();
2181
+ }
2182
+ batcher;
2183
+ setupPipeline() {
2184
+ this.batcher = this.createBatcher();
2185
+ const tasks = this.createAllTasks();
2186
+ tasks.extract.config.onComplete = [tasks.transform];
2187
+ tasks.transform.config.onComplete = [tasks.load];
2188
+ new Workflow(this.name, {
2189
+ startingTask: tasks.extract,
2190
+ retries: 1,
2191
+ timeout: "30m"
2192
+ });
2193
+ }
2194
+ createBatcher() {
2195
+ const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2196
+ return new InternalBatcher(iterable);
2197
+ }
2198
+ getDefaultTaskConfig() {
2199
+ return {
2200
+ retries: 1,
2201
+ timeout: "30m"
2356
2202
  };
2357
- ETLPipeline = class {
2358
- constructor(name, config) {
2359
- this.name = name;
2360
- this.config = config;
2361
- this.setupPipeline();
2362
- }
2363
- batcher;
2364
- setupPipeline() {
2365
- this.batcher = this.createBatcher();
2366
- const tasks = this.createAllTasks();
2367
- tasks.extract.config.onComplete = [tasks.transform];
2368
- tasks.transform.config.onComplete = [tasks.load];
2369
- new Workflow(this.name, {
2370
- startingTask: tasks.extract,
2371
- retries: 1,
2372
- timeout: "30m"
2373
- });
2374
- }
2375
- createBatcher() {
2376
- const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2377
- return new InternalBatcher(iterable);
2378
- }
2379
- getDefaultTaskConfig() {
2380
- return {
2381
- retries: 1,
2382
- timeout: "30m"
2383
- };
2384
- }
2385
- createAllTasks() {
2386
- const taskConfig = this.getDefaultTaskConfig();
2387
- return {
2388
- extract: this.createExtractTask(taskConfig),
2389
- transform: this.createTransformTask(taskConfig),
2390
- load: this.createLoadTask(taskConfig)
2391
- };
2392
- }
2393
- createExtractTask(taskConfig) {
2394
- return new Task(`${this.name}_extract`, {
2395
- run: async ({}) => {
2396
- console.log(`Running extract task for ${this.name}...`);
2397
- const batch = await this.batcher.getNextBatch();
2398
- console.log(`Extract task completed with ${batch.items.length} items`);
2399
- return batch;
2400
- },
2401
- retries: taskConfig.retries,
2402
- timeout: taskConfig.timeout
2403
- });
2404
- }
2405
- createTransformTask(taskConfig) {
2406
- return new Task(
2407
- `${this.name}_transform`,
2408
- {
2409
- // Use new single-parameter context API for handlers
2410
- run: async ({ input }) => {
2411
- const batch = input;
2412
- console.log(
2413
- `Running transform task for ${this.name} with ${batch.items.length} items...`
2414
- );
2415
- const transformedItems = [];
2416
- for (const item of batch.items) {
2417
- const transformed = await this.config.transform(item);
2418
- transformedItems.push(transformed);
2419
- }
2420
- console.log(
2421
- `Transform task completed with ${transformedItems.length} items`
2422
- );
2423
- return { items: transformedItems };
2424
- },
2425
- retries: taskConfig.retries,
2426
- timeout: taskConfig.timeout
2427
- }
2428
- );
2429
- }
2430
- createLoadTask(taskConfig) {
2431
- return new Task(`${this.name}_load`, {
2432
- run: async ({ input: transformedItems }) => {
2433
- console.log(
2434
- `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2435
- );
2436
- if ("insert" in this.config.load) {
2437
- await this.config.load.insert(transformedItems.items);
2438
- } else {
2439
- await this.config.load(transformedItems.items);
2440
- }
2441
- console.log(`Load task completed`);
2442
- },
2443
- retries: taskConfig.retries,
2444
- timeout: taskConfig.timeout
2445
- });
2446
- }
2447
- // Execute the entire ETL pipeline
2448
- async run() {
2449
- console.log(`Starting ETL Pipeline: ${this.name}`);
2450
- let batchNumber = 1;
2451
- do {
2452
- console.log(`Processing batch ${batchNumber}...`);
2453
- const batch = await this.batcher.getNextBatch();
2454
- if (batch.items.length === 0) {
2455
- break;
2456
- }
2203
+ }
2204
+ createAllTasks() {
2205
+ const taskConfig = this.getDefaultTaskConfig();
2206
+ return {
2207
+ extract: this.createExtractTask(taskConfig),
2208
+ transform: this.createTransformTask(taskConfig),
2209
+ load: this.createLoadTask(taskConfig)
2210
+ };
2211
+ }
2212
+ createExtractTask(taskConfig) {
2213
+ return new Task(`${this.name}_extract`, {
2214
+ run: async ({}) => {
2215
+ console.log(`Running extract task for ${this.name}...`);
2216
+ const batch = await this.batcher.getNextBatch();
2217
+ console.log(`Extract task completed with ${batch.items.length} items`);
2218
+ return batch;
2219
+ },
2220
+ retries: taskConfig.retries,
2221
+ timeout: taskConfig.timeout
2222
+ });
2223
+ }
2224
+ createTransformTask(taskConfig) {
2225
+ return new Task(
2226
+ `${this.name}_transform`,
2227
+ {
2228
+ // Use new single-parameter context API for handlers
2229
+ run: async ({ input }) => {
2230
+ const batch = input;
2231
+ console.log(
2232
+ `Running transform task for ${this.name} with ${batch.items.length} items...`
2233
+ );
2457
2234
  const transformedItems = [];
2458
- for (const extractedData of batch.items) {
2459
- const transformedData = await this.config.transform(extractedData);
2460
- transformedItems.push(transformedData);
2461
- }
2462
- if ("insert" in this.config.load) {
2463
- await this.config.load.insert(transformedItems);
2464
- } else {
2465
- await this.config.load(transformedItems);
2235
+ for (const item of batch.items) {
2236
+ const transformed = await this.config.transform(item);
2237
+ transformedItems.push(transformed);
2466
2238
  }
2467
2239
  console.log(
2468
- `Completed batch ${batchNumber} with ${batch.items.length} items`
2240
+ `Transform task completed with ${transformedItems.length} items`
2469
2241
  );
2470
- batchNumber++;
2471
- if (!batch.hasMore) {
2472
- break;
2473
- }
2474
- } while (true);
2475
- console.log(`Completed ETL Pipeline: ${this.name}`);
2242
+ return { items: transformedItems };
2243
+ },
2244
+ retries: taskConfig.retries,
2245
+ timeout: taskConfig.timeout
2476
2246
  }
2477
- };
2247
+ );
2478
2248
  }
2479
- });
2480
-
2481
- // src/dmv2/sdk/sqlResource.ts
2482
- var SqlResource;
2483
- var init_sqlResource = __esm({
2484
- "src/dmv2/sdk/sqlResource.ts"() {
2485
- "use strict";
2486
- init_internal();
2487
- init_sqlHelpers();
2488
- init_stackTrace();
2489
- SqlResource = class {
2490
- /** @internal */
2491
- kind = "SqlResource";
2492
- /** Array of SQL statements to execute for setting up the resource. */
2493
- setup;
2494
- /** Array of SQL statements to execute for tearing down the resource. */
2495
- teardown;
2496
- /** The name of the SQL resource (e.g., view name, materialized view name). */
2497
- name;
2498
- /** List of OlapTables or Views that this resource reads data from. */
2499
- pullsDataFrom;
2500
- /** List of OlapTables or Views that this resource writes data to. */
2501
- pushesDataTo;
2502
- /** @internal Source file path where this resource was defined */
2503
- sourceFile;
2504
- /** @internal Source line number where this resource was defined */
2505
- sourceLine;
2506
- /** @internal Source column number where this resource was defined */
2507
- sourceColumn;
2508
- /**
2509
- * Creates a new SqlResource instance.
2510
- * @param name The name of the resource.
2511
- * @param setup An array of SQL DDL statements to create the resource.
2512
- * @param teardown An array of SQL DDL statements to drop the resource.
2513
- * @param options Optional configuration for specifying data dependencies.
2514
- * @param options.pullsDataFrom Tables/Views this resource reads from.
2515
- * @param options.pushesDataTo Tables/Views this resource writes to.
2516
- */
2517
- constructor(name, setup, teardown, options) {
2518
- const sqlResources = getMooseInternal().sqlResources;
2519
- if (!isClientOnlyMode() && sqlResources.has(name)) {
2520
- throw new Error(`SqlResource with name ${name} already exists`);
2521
- }
2522
- sqlResources.set(name, this);
2523
- this.name = name;
2524
- this.setup = setup.map(
2525
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2249
+ createLoadTask(taskConfig) {
2250
+ return new Task(`${this.name}_load`, {
2251
+ run: async ({ input: transformedItems }) => {
2252
+ console.log(
2253
+ `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2526
2254
  );
2527
- this.teardown = teardown.map(
2528
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2529
- );
2530
- this.pullsDataFrom = options?.pullsDataFrom ?? [];
2531
- this.pushesDataTo = options?.pushesDataTo ?? [];
2532
- const stack = new Error().stack;
2533
- const location = getSourceLocationFromStack(stack);
2534
- if (location) {
2535
- this.sourceFile = location.file;
2536
- this.sourceLine = location.line;
2537
- this.sourceColumn = location.column;
2255
+ if ("insert" in this.config.load) {
2256
+ await this.config.load.insert(transformedItems.items);
2257
+ } else {
2258
+ await this.config.load(transformedItems.items);
2538
2259
  }
2260
+ console.log(`Load task completed`);
2261
+ },
2262
+ retries: taskConfig.retries,
2263
+ timeout: taskConfig.timeout
2264
+ });
2265
+ }
2266
+ // Execute the entire ETL pipeline
2267
+ async run() {
2268
+ console.log(`Starting ETL Pipeline: ${this.name}`);
2269
+ let batchNumber = 1;
2270
+ do {
2271
+ console.log(`Processing batch ${batchNumber}...`);
2272
+ const batch = await this.batcher.getNextBatch();
2273
+ if (batch.items.length === 0) {
2274
+ break;
2275
+ }
2276
+ const transformedItems = [];
2277
+ for (const extractedData of batch.items) {
2278
+ const transformedData = await this.config.transform(extractedData);
2279
+ transformedItems.push(transformedData);
2280
+ }
2281
+ if ("insert" in this.config.load) {
2282
+ await this.config.load.insert(transformedItems);
2283
+ } else {
2284
+ await this.config.load(transformedItems);
2539
2285
  }
2540
- };
2286
+ console.log(
2287
+ `Completed batch ${batchNumber} with ${batch.items.length} items`
2288
+ );
2289
+ batchNumber++;
2290
+ if (!batch.hasMore) {
2291
+ break;
2292
+ }
2293
+ } while (true);
2294
+ console.log(`Completed ETL Pipeline: ${this.name}`);
2541
2295
  }
2542
- });
2296
+ };
2297
+
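A runnable sketch of the batching behavior above, assuming ETLPipeline is exported from the package root: run() pulls batches of 20 items (the InternalBatcher default) until the extractor reports done, transforming and loading each batch as it goes.

import { ETLPipeline } from "@514labs/moose-lib";

async function* source() {
  for (let i = 0; i < 45; i++) yield { id: i };
}

const pipeline = new ETLPipeline("numbers", {
  extract: source, // a function returning an AsyncIterable, or the iterable itself
  transform: async (row: { id: number }) => ({ id: row.id, squared: row.id * row.id }),
  load: async (rows: { id: number; squared: number }[]) => {
    console.log(`loaded ${rows.length} rows`); // or anything exposing .insert()
  },
});

// The constructor also registered a Workflow "numbers" with the task chain
// numbers_extract -> numbers_transform -> numbers_load.
await pipeline.run(); // processes batches of 20, 20, then 5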
2298
+ // src/dmv2/sdk/sqlResource.ts
2299
+ var SqlResource = class {
2300
+ /** @internal */
2301
+ kind = "SqlResource";
2302
+ /** Array of SQL statements to execute for setting up the resource. */
2303
+ setup;
2304
+ /** Array of SQL statements to execute for tearing down the resource. */
2305
+ teardown;
2306
+ /** The name of the SQL resource (e.g., view name, materialized view name). */
2307
+ name;
2308
+ /** List of OlapTables or Views that this resource reads data from. */
2309
+ pullsDataFrom;
2310
+ /** List of OlapTables or Views that this resource writes data to. */
2311
+ pushesDataTo;
2312
+ /** @internal Source file path where this resource was defined */
2313
+ sourceFile;
2314
+ /** @internal Source line number where this resource was defined */
2315
+ sourceLine;
2316
+ /** @internal Source column number where this resource was defined */
2317
+ sourceColumn;
2318
+ /**
2319
+ * Creates a new SqlResource instance.
2320
+ * @param name The name of the resource.
2321
+ * @param setup An array of SQL DDL statements to create the resource.
2322
+ * @param teardown An array of SQL DDL statements to drop the resource.
2323
+ * @param options Optional configuration for specifying data dependencies.
2324
+ * @param options.pullsDataFrom Tables/Views this resource reads from.
2325
+ * @param options.pushesDataTo Tables/Views this resource writes to.
2326
+ */
2327
+ constructor(name, setup, teardown, options) {
2328
+ const sqlResources = getMooseInternal().sqlResources;
2329
+ if (!isClientOnlyMode() && sqlResources.has(name)) {
2330
+ throw new Error(`SqlResource with name ${name} already exists`);
2331
+ }
2332
+ sqlResources.set(name, this);
2333
+ this.name = name;
2334
+ this.setup = setup.map(
2335
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2336
+ );
2337
+ this.teardown = teardown.map(
2338
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2339
+ );
2340
+ this.pullsDataFrom = options?.pullsDataFrom ?? [];
2341
+ this.pushesDataTo = options?.pushesDataTo ?? [];
2342
+ const stack = new Error().stack;
2343
+ const location = getSourceLocationFromStack(stack);
2344
+ if (location) {
2345
+ this.sourceFile = location.file;
2346
+ this.sourceLine = location.line;
2347
+ this.sourceColumn = location.column;
2348
+ }
2349
+ }
2350
+ };
2543
2351
 
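A sketch of direct SqlResource use for DDL Moose does not model natively. The constructor arguments match the compiled code above; the ClickHouse row-policy statements are illustrative only.

import { SqlResource } from "@514labs/moose-lib";

// setup/teardown accept plain strings or static sql queries; the constructor
// also records the call site (file/line/column) via the stack-trace helpers.
const tenantPolicy = new SqlResource(
  "events_tenant_policy",
  [
    "CREATE ROW POLICY IF NOT EXISTS tenant_filter ON events FOR SELECT USING tenant_id = currentUser() TO ALL",
  ],
  ["DROP ROW POLICY IF EXISTS tenant_filter ON events"],
  // { pullsDataFrom: [...], pushesDataTo: [...] } for dependency tracking
);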
2544
2352
  // src/dmv2/sdk/materializedView.ts
2545
- var requireTargetTableName, MaterializedView;
2546
- var init_materializedView = __esm({
2547
- "src/dmv2/sdk/materializedView.ts"() {
2548
- "use strict";
2549
- init_helpers();
2550
- init_sqlHelpers();
2551
- init_olapTable();
2552
- init_sqlResource();
2553
- requireTargetTableName = (tableName) => {
2554
- if (typeof tableName === "string") {
2555
- return tableName;
2556
- } else {
2557
- throw new Error("Name of targetTable is not specified.");
2558
- }
2559
- };
2560
- MaterializedView = class extends SqlResource {
2561
- /** The target OlapTable instance where the materialized data is stored. */
2562
- targetTable;
2563
- constructor(options, targetSchema, targetColumns) {
2564
- let selectStatement = options.selectStatement;
2565
- if (typeof selectStatement !== "string") {
2566
- selectStatement = toStaticQuery(selectStatement);
2567
- }
2568
- if (targetSchema === void 0 || targetColumns === void 0) {
2569
- throw new Error(
2570
- "Supply the type param T so that the schema is inserted by the compiler plugin."
2571
- );
2572
- }
2573
- const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2574
- requireTargetTableName(
2575
- options.targetTable?.name ?? options.tableName
2576
- ),
2577
- {
2578
- orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2579
- engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2580
- },
2581
- targetSchema,
2582
- targetColumns
2583
- );
2584
- if (targetTable.name === options.materializedViewName) {
2585
- throw new Error(
2586
- "Materialized view name cannot be the same as the target table name."
2587
- );
2588
- }
2589
- super(
2590
- options.materializedViewName,
2591
- [
2592
- createMaterializedView({
2593
- name: options.materializedViewName,
2594
- destinationTable: targetTable.name,
2595
- select: selectStatement
2596
- })
2597
- // Population is now handled automatically by Rust infrastructure
2598
- // based on table engine type and whether this is a new or updated view
2599
- ],
2600
- [dropView(options.materializedViewName)],
2601
- {
2602
- pullsDataFrom: options.selectTables,
2603
- pushesDataTo: [targetTable]
2604
- }
2605
- );
2606
- this.targetTable = targetTable;
2353
+ var requireTargetTableName = (tableName) => {
2354
+ if (typeof tableName === "string") {
2355
+ return tableName;
2356
+ } else {
2357
+ throw new Error("Name of targetTable is not specified.");
2358
+ }
2359
+ };
2360
+ var MaterializedView = class extends SqlResource {
2361
+ /** The target OlapTable instance where the materialized data is stored. */
2362
+ targetTable;
2363
+ constructor(options, targetSchema, targetColumns) {
2364
+ let selectStatement = options.selectStatement;
2365
+ if (typeof selectStatement !== "string") {
2366
+ selectStatement = toStaticQuery(selectStatement);
2367
+ }
2368
+ if (targetSchema === void 0 || targetColumns === void 0) {
2369
+ throw new Error(
2370
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
2371
+ );
2372
+ }
2373
+ const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2374
+ requireTargetTableName(
2375
+ options.targetTable?.name ?? options.tableName
2376
+ ),
2377
+ {
2378
+ orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2379
+ engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2380
+ },
2381
+ targetSchema,
2382
+ targetColumns
2383
+ );
2384
+ if (targetTable.name === options.materializedViewName) {
2385
+ throw new Error(
2386
+ "Materialized view name cannot be the same as the target table name."
2387
+ );
2388
+ }
2389
+ super(
2390
+ options.materializedViewName,
2391
+ [
2392
+ createMaterializedView({
2393
+ name: options.materializedViewName,
2394
+ destinationTable: targetTable.name,
2395
+ select: selectStatement
2396
+ })
2397
+ // Population is now handled automatically by Rust infrastructure
2398
+ // based on table engine type and whether this is a new or updated view
2399
+ ],
2400
+ [dropView(options.materializedViewName)],
2401
+ {
2402
+ pullsDataFrom: options.selectTables,
2403
+ pushesDataTo: [targetTable]
2607
2404
  }
2608
- };
2405
+ );
2406
+ this.targetTable = targetTable;
2609
2407
  }
2610
- });
2408
+ };
2611
2409
 
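A sketch of the constructor contract above: the type parameter (supplied by the compiler plugin) provides the target schema, and the target table name must differ from the view name or the constructor throws. `pageViewsTable` is a hypothetical existing table.

import { MaterializedView, OlapTable } from "@514labs/moose-lib";

interface DailyCount { day: Date; hits: number }

declare const pageViewsTable: OlapTable<{ url: string; ts: Date }>; // hypothetical

const dailyCounts = new MaterializedView<DailyCount>({
  materializedViewName: "page_views_daily_mv",
  selectStatement:
    "SELECT toDate(ts) AS day, count() AS hits FROM PageViews GROUP BY day",
  selectTables: [pageViewsTable], // -> pullsDataFrom
  tableName: "page_views_daily",  // target table, MergeTree by default
  orderByFields: ["day"],
});
// dailyCounts.targetTable is the created OlapTable<DailyCount>; initial
// population is handled by the Rust infrastructure, per the comment above.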
2612
2410
  // src/dmv2/sdk/view.ts
2613
- var View;
2614
- var init_view = __esm({
2615
- "src/dmv2/sdk/view.ts"() {
2616
- "use strict";
2617
- init_helpers();
2618
- init_sqlHelpers();
2619
- init_sqlResource();
2620
- View = class extends SqlResource {
2621
- /**
2622
- * Creates a new View instance.
2623
- * @param name The name of the view to be created.
2624
- * @param selectStatement The SQL SELECT statement that defines the view's logic.
2625
- * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2626
- */
2627
- constructor(name, selectStatement, baseTables) {
2628
- if (typeof selectStatement !== "string") {
2629
- selectStatement = toStaticQuery(selectStatement);
2630
- }
2631
- super(
2632
- name,
2633
- [
2634
- `CREATE VIEW IF NOT EXISTS ${name}
2411
+ var View = class extends SqlResource {
2412
+ /**
2413
+ * Creates a new View instance.
2414
+ * @param name The name of the view to be created.
2415
+ * @param selectStatement The SQL SELECT statement that defines the view's logic.
2416
+ * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2417
+ */
2418
+ constructor(name, selectStatement, baseTables) {
2419
+ if (typeof selectStatement !== "string") {
2420
+ selectStatement = toStaticQuery(selectStatement);
2421
+ }
2422
+ super(
2423
+ name,
2424
+ [
2425
+ `CREATE VIEW IF NOT EXISTS ${name}
2635
2426
  AS ${selectStatement}`.trim()
2636
- ],
2637
- [dropView(name)],
2638
- {
2639
- pullsDataFrom: baseTables
2640
- }
2641
- );
2427
+ ],
2428
+ [dropView(name)],
2429
+ {
2430
+ pullsDataFrom: baseTables
2642
2431
  }
2643
- };
2432
+ );
2644
2433
  }
2645
- });
2434
+ };
2646
2435
 
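And the simpler, non-materialized counterpart, whose constructor is fully visible above; `eventsTable` is again a hypothetical base table.

import { View, OlapTable } from "@514labs/moose-lib";

declare const eventsTable: OlapTable<{ id: string; ts: Date }>; // hypothetical

const recentEvents = new View(
  "recent_events",
  "SELECT * FROM events WHERE ts > now() - INTERVAL 1 DAY",
  [eventsTable], // dependency tracking only; not interpolated into the SQL
);
// setup:    CREATE VIEW IF NOT EXISTS recent_events AS SELECT ...
// teardown: dropView("recent_events")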
2647
2436
  // src/dmv2/sdk/lifeCycle.ts
2648
- var LifeCycle;
2649
- var init_lifeCycle = __esm({
2650
- "src/dmv2/sdk/lifeCycle.ts"() {
2651
- "use strict";
2652
- LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2653
- LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2654
- LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2655
- LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2656
- return LifeCycle2;
2657
- })(LifeCycle || {});
2658
- }
2659
- });
2437
+ var LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2438
+ LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2439
+ LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2440
+ LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2441
+ return LifeCycle2;
2442
+ })(LifeCycle || {});
2660
2443
 
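How the enum above is typically threaded through a config. The precedence shown (component setting over pipeline setting) follows the `?? config.lifeCycle` fallbacks in the IngestPipeline constructor earlier in this diff.

import { IngestPipeline, LifeCycle } from "@514labs/moose-lib";

new IngestPipeline<{ id: string }>("AuditLog", {
  table: { lifeCycle: LifeCycle.DELETION_PROTECTED }, // wins for the table
  stream: true,    // inherits the pipeline-level value below
  ingestApi: true,
  lifeCycle: LifeCycle.FULLY_MANAGED,
});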
2661
2444
  // src/dmv2/sdk/webApp.ts
2662
- var RESERVED_MOUNT_PATHS, WebApp;
2663
- var init_webApp = __esm({
2664
- "src/dmv2/sdk/webApp.ts"() {
2665
- "use strict";
2666
- init_internal();
2667
- RESERVED_MOUNT_PATHS = [
2668
- "/admin",
2669
- "/api",
2670
- "/consumption",
2671
- "/health",
2672
- "/ingest",
2673
- "/moose",
2674
- // reserved for future use
2675
- "/ready",
2676
- "/workflows"
2677
- ];
2678
- WebApp = class {
2679
- name;
2680
- handler;
2681
- config;
2682
- _rawApp;
2683
- constructor(name, appOrHandler, config) {
2684
- this.name = name;
2685
- this.config = config;
2686
- if (!this.config.mountPath) {
2687
- throw new Error(
2688
- `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2689
- );
2690
- }
2691
- const mountPath = this.config.mountPath;
2692
- if (mountPath === "/") {
2693
- throw new Error(
2694
- `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2695
- );
2696
- }
2697
- if (mountPath.endsWith("/")) {
2445
+ var RESERVED_MOUNT_PATHS = [
2446
+ "/admin",
2447
+ "/api",
2448
+ "/consumption",
2449
+ "/health",
2450
+ "/ingest",
2451
+ "/moose",
2452
+ // reserved for future use
2453
+ "/ready",
2454
+ "/workflows"
2455
+ ];
2456
+ var WebApp = class {
2457
+ name;
2458
+ handler;
2459
+ config;
2460
+ _rawApp;
2461
+ constructor(name, appOrHandler, config) {
2462
+ this.name = name;
2463
+ this.config = config;
2464
+ if (!this.config.mountPath) {
2465
+ throw new Error(
2466
+ `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2467
+ );
2468
+ }
2469
+ const mountPath = this.config.mountPath;
2470
+ if (mountPath === "/") {
2471
+ throw new Error(
2472
+ `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2473
+ );
2474
+ }
2475
+ if (mountPath.endsWith("/")) {
2476
+ throw new Error(
2477
+ `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2478
+ );
2479
+ }
2480
+ for (const reserved of RESERVED_MOUNT_PATHS) {
2481
+ if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2482
+ throw new Error(
2483
+ `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2484
+ );
2485
+ }
2486
+ }
2487
+ this.handler = this.toHandler(appOrHandler);
2488
+ this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2489
+ const webApps = getMooseInternal().webApps;
2490
+ if (webApps.has(name)) {
2491
+ throw new Error(`WebApp with name ${name} already exists`);
2492
+ }
2493
+ if (this.config.mountPath) {
2494
+ for (const [existingName, existingApp] of webApps) {
2495
+ if (existingApp.config.mountPath === this.config.mountPath) {
2698
2496
  throw new Error(
2699
- `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2497
+ `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2700
2498
  );
2701
2499
  }
2702
- for (const reserved of RESERVED_MOUNT_PATHS) {
2703
- if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2704
- throw new Error(
2705
- `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2706
- );
2707
- }
2708
- }
2709
- this.handler = this.toHandler(appOrHandler);
2710
- this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2711
- const webApps = getMooseInternal().webApps;
2712
- if (webApps.has(name)) {
2713
- throw new Error(`WebApp with name ${name} already exists`);
2714
- }
2715
- if (this.config.mountPath) {
2716
- for (const [existingName, existingApp] of webApps) {
2717
- if (existingApp.config.mountPath === this.config.mountPath) {
2718
- throw new Error(
2719
- `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2720
- );
2721
- }
2722
- }
2723
- }
2724
- webApps.set(name, this);
2725
2500
  }
2726
- toHandler(appOrHandler) {
2727
- if (typeof appOrHandler === "function") {
2728
- return appOrHandler;
2729
- }
2730
- const app = appOrHandler;
2731
- if (typeof app.handle === "function") {
2732
- return (req, res) => {
2733
- app.handle(req, res, (err) => {
2734
- if (err) {
2735
- console.error("WebApp handler error:", err);
2736
- if (!res.headersSent) {
2737
- res.writeHead(500, { "Content-Type": "application/json" });
2738
- res.end(JSON.stringify({ error: "Internal Server Error" }));
2739
- }
2740
- }
2741
- });
2742
- };
2743
- }
2744
- if (typeof app.callback === "function") {
2745
- return app.callback();
2746
- }
2747
- if (typeof app.routing === "function") {
2748
- const routing = app.routing;
2749
- const appWithReady = app;
2750
- let readyPromise = null;
2751
- return async (req, res) => {
2752
- if (readyPromise === null) {
2753
- readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2501
+ }
2502
+ webApps.set(name, this);
2503
+ }
2504
+ toHandler(appOrHandler) {
2505
+ if (typeof appOrHandler === "function") {
2506
+ return appOrHandler;
2507
+ }
2508
+ const app = appOrHandler;
2509
+ if (typeof app.handle === "function") {
2510
+ return (req, res) => {
2511
+ app.handle(req, res, (err) => {
2512
+ if (err) {
2513
+ console.error("WebApp handler error:", err);
2514
+ if (!res.headersSent) {
2515
+ res.writeHead(500, { "Content-Type": "application/json" });
2516
+ res.end(JSON.stringify({ error: "Internal Server Error" }));
2754
2517
  }
2755
- await readyPromise;
2756
- routing(req, res);
2757
- };
2758
- }
2759
- throw new Error(
2760
- `Unable to convert app to handler. The provided object must be:
2518
+ }
2519
+ });
2520
+ };
2521
+ }
2522
+ if (typeof app.callback === "function") {
2523
+ return app.callback();
2524
+ }
2525
+ if (typeof app.routing === "function") {
2526
+ const routing = app.routing;
2527
+ const appWithReady = app;
2528
+ let readyPromise = null;
2529
+ return async (req, res) => {
2530
+ if (readyPromise === null) {
2531
+ readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2532
+ }
2533
+ await readyPromise;
2534
+ routing(req, res);
2535
+ };
2536
+ }
2537
+ throw new Error(
2538
+ `Unable to convert app to handler. The provided object must be:
2761
2539
  - A function (raw Node.js handler)
2762
2540
  - An object with .handle() method (Express, Connect)
2763
2541
  - An object with .callback() method (Koa)
@@ -2769,14 +2547,12 @@ Examples:
2769
2547
  Fastify: new WebApp("name", fastifyApp)
2770
2548
  Raw: new WebApp("name", (req, res) => { ... })
2771
2549
  `
2772
- );
2773
- }
2774
- getRawApp() {
2775
- return this._rawApp;
2776
- }
2777
- };
2550
+ );
2778
2551
  }
2779
- });
2552
+ getRawApp() {
2553
+ return this._rawApp;
2554
+ }
2555
+ };
2780
2556
 
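A sketch of the adapter logic above with an Express app, which is routed through the `.handle()` branch; Express itself is an assumed dependency, and a raw Node handler works the same way.

import express from "express";
import { WebApp } from "@514labs/moose-lib";

const app = express();
app.get("/hello", (_req, res) => {
  res.json({ ok: true });
});

// mountPath is mandatory, cannot be "/", cannot end with "/", and cannot sit
// under a reserved prefix (/admin, /api, /consumption, /health, /ingest,
// /moose, /ready, /workflows).
new WebApp("hello", app, { mountPath: "/hello-app" });

// Equivalent raw-handler form:
// new WebApp("raw", (req, res) => res.end("hi"), { mountPath: "/raw" });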
2781
2557
  // src/dmv2/registry.ts
2782
2558
  function getTables() {
@@ -2842,65 +2618,6 @@ function getWebApps() {
2842
2618
  function getWebApp(name) {
2843
2619
  return getMooseInternal().webApps.get(name);
2844
2620
  }
2845
- var init_registry = __esm({
2846
- "src/dmv2/registry.ts"() {
2847
- "use strict";
2848
- init_internal();
2849
- }
2850
- });
2851
-
2852
- // src/dmv2/index.ts
2853
- var dmv2_exports = {};
2854
- __export(dmv2_exports, {
2855
- Api: () => Api,
2856
- ConsumptionApi: () => ConsumptionApi,
2857
- DeadLetterQueue: () => DeadLetterQueue,
2858
- ETLPipeline: () => ETLPipeline,
2859
- IngestApi: () => IngestApi,
2860
- IngestPipeline: () => IngestPipeline,
2861
- LifeCycle: () => LifeCycle,
2862
- MaterializedView: () => MaterializedView,
2863
- OlapTable: () => OlapTable,
2864
- SqlResource: () => SqlResource,
2865
- Stream: () => Stream,
2866
- Task: () => Task,
2867
- View: () => View,
2868
- WebApp: () => WebApp,
2869
- Workflow: () => Workflow,
2870
- getApi: () => getApi,
2871
- getApis: () => getApis,
2872
- getIngestApi: () => getIngestApi,
2873
- getIngestApis: () => getIngestApis,
2874
- getSqlResource: () => getSqlResource,
2875
- getSqlResources: () => getSqlResources,
2876
- getStream: () => getStream,
2877
- getStreams: () => getStreams,
2878
- getTable: () => getTable,
2879
- getTables: () => getTables,
2880
- getWebApp: () => getWebApp,
2881
- getWebApps: () => getWebApps,
2882
- getWorkflow: () => getWorkflow,
2883
- getWorkflows: () => getWorkflows
2884
- });
2885
- module.exports = __toCommonJS(dmv2_exports);
2886
- var init_dmv2 = __esm({
2887
- "src/dmv2/index.ts"() {
2888
- init_olapTable();
2889
- init_stream();
2890
- init_workflow();
2891
- init_ingestApi();
2892
- init_consumptionApi();
2893
- init_ingestPipeline();
2894
- init_etlPipeline();
2895
- init_materializedView();
2896
- init_sqlResource();
2897
- init_view();
2898
- init_lifeCycle();
2899
- init_webApp();
2900
- init_registry();
2901
- }
2902
- });
2903
- init_dmv2();
2904
2621
  // Annotate the CommonJS export names for ESM import in node:
2905
2622
  0 && (module.exports = {
2906
2623
  Api,