@514labs/moose-lib 0.6.295-ci-15-gfb3b651b → 0.6.295-ci-17-g70d560ac

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
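Note on reading the diff: the content below is esbuild bundle output. Throughout it, module bodies wrapped in esbuild's lazy `__esm` initializers (a deferred `init_*` function that runs the module body on first use) are removed and re-emitted as hoisted top-level statements that run when the bundle is evaluated. A minimal sketch of the two shapes, using a hypothetical `src/example.ts` module and `computeValue` stand-in (illustrative only, not code from this package):

  // Lazy form (old build): the body runs only when init_example() is first called.
  var value;
  var init_example = __esm({
    "src/example.ts"() {
      "use strict";
      value = computeValue(); // stand-in for the module's real work
    }
  });

  // Hoisted form (new build): the body runs immediately at bundle evaluation.
  var value = computeValue();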
@@ -8,247 +8,6 @@ var __export = (target, all) => {
  __defProp(target, name, { get: all[name], enumerable: true });
  };

- // src/dmv2/utils/stackTrace.ts
- function shouldSkipStackLine(line) {
- return line.includes("node_modules") || // Skip npm installed packages (prod)
- line.includes("node:internal") || // Skip Node.js internals (modern format)
- line.includes("internal/modules") || // Skip Node.js internals (older format)
- line.includes("ts-node") || // Skip TypeScript execution
- line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
- line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
- line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
- line.includes("\\ts-moose-lib\\dist\\");
- }
- function parseStackLine(line) {
- const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
- if (match && match[1]) {
- return {
- file: match[1],
- line: match[2]
- };
- }
- return void 0;
- }
- function getSourceFileInfo(stack) {
- if (!stack) return {};
- const lines = stack.split("\n");
- for (const line of lines) {
- if (shouldSkipStackLine(line)) continue;
- const info = parseStackLine(line);
- if (info) return info;
- }
- return {};
- }
- function getSourceLocationFromStack(stack) {
- if (!stack) return void 0;
- const lines = stack.split("\n");
- for (const line of lines.slice(1)) {
- if (shouldSkipStackLine(line)) {
- continue;
- }
- const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
- if (v8Match) {
- return {
- file: v8Match[1],
- line: parseInt(v8Match[2], 10),
- column: parseInt(v8Match[3], 10)
- };
- }
- const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
- if (smMatch) {
- return {
- file: smMatch[1],
- line: parseInt(smMatch[2], 10),
- column: parseInt(smMatch[3], 10)
- };
- }
- }
- return void 0;
- }
- function getSourceFileFromStack(stack) {
- const location = getSourceLocationFromStack(stack);
- return location?.file;
- }
- var init_stackTrace = __esm({
- "src/dmv2/utils/stackTrace.ts"() {
- "use strict";
- }
- });
-
- // src/dmv2/typedBase.ts
- var TypedBase;
- var init_typedBase = __esm({
- "src/dmv2/typedBase.ts"() {
- "use strict";
- init_stackTrace();
- TypedBase = class {
- /** The JSON schema representation of type T. Injected by the compiler plugin. */
- schema;
- /** The name assigned to this resource instance. */
- name;
- /** A dictionary mapping column names (keys of T) to their Column definitions. */
- columns;
- /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
- columnArray;
- /** The configuration object specific to this resource type. */
- config;
- /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
- validators;
- /** Optional metadata for the resource, always present as an object. */
- metadata;
- /**
- * Whether this resource allows extra fields beyond the defined columns.
- * When true, extra fields in payloads are passed through to streaming functions.
- * Injected by the compiler plugin when the type has an index signature.
- */
- allowExtraFields;
- /**
- * @internal Constructor intended for internal use by subclasses and the compiler plugin.
- * It expects the schema and columns to be provided, typically injected by the compiler.
- *
- * @param name The name for the resource instance.
- * @param config The configuration object for the resource.
- * @param schema The JSON schema for the resource's data type T (injected).
- * @param columns The array of Column definitions for T (injected).
- * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
- */
- constructor(name, config, schema, columns, validators, allowExtraFields) {
- if (schema === void 0 || columns === void 0) {
- throw new Error(
- "Supply the type param T so that the schema is inserted by the compiler plugin."
- );
- }
- this.schema = schema;
- this.columnArray = columns;
- const columnsObj = {};
- columns.forEach((column) => {
- columnsObj[column.name] = column;
- });
- this.columns = columnsObj;
- this.name = name;
- this.config = config;
- this.validators = validators;
- this.allowExtraFields = allowExtraFields ?? false;
- this.metadata = config?.metadata ? { ...config.metadata } : {};
- if (!this.metadata.source) {
- const stack = new Error().stack;
- if (stack) {
- const info = getSourceFileInfo(stack);
- this.metadata.source = { file: info.file, line: info.line };
- }
- }
- }
- };
- }
- });
-
- // src/dataModels/dataModelTypes.ts
- function isArrayNestedType(dt) {
- return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
- }
- function isNestedType(dt) {
- return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
- }
- var init_dataModelTypes = __esm({
- "src/dataModels/dataModelTypes.ts"() {
- "use strict";
- }
- });
-
- // src/sqlHelpers.ts
- function createClickhouseParameter(parameterIndex, value) {
- return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
- }
- function emptyIfUndefined(value) {
- return value === void 0 ? "" : value;
- }
- var quoteIdentifier, toStaticQuery, toQuery, getValueFromParameter, mapToClickHouseType;
- var init_sqlHelpers = __esm({
- "src/sqlHelpers.ts"() {
- "use strict";
- quoteIdentifier = (name) => {
- return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
- };
- toStaticQuery = (sql3) => {
- const [query, params] = toQuery(sql3);
- if (Object.keys(params).length !== 0) {
- throw new Error(
- "Dynamic SQL is not allowed in the select statement in view creation."
- );
- }
- return query;
- };
- toQuery = (sql3) => {
- const parameterizedStubs = sql3.values.map(
- (v, i) => createClickhouseParameter(i, v)
- );
- const query = sql3.strings.map(
- (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
- ).join("");
- const query_params = sql3.values.reduce(
- (acc, v, i) => ({
- ...acc,
- [`p${i}`]: getValueFromParameter(v)
- }),
- {}
- );
- return [query, query_params];
- };
- getValueFromParameter = (value) => {
- if (Array.isArray(value)) {
- const [type, val] = value;
- if (type === "Identifier") return val;
- }
- return value;
- };
- mapToClickHouseType = (value) => {
- if (typeof value === "number") {
- return Number.isInteger(value) ? "Int" : "Float";
- }
- if (typeof value === "boolean") return "Bool";
- if (value instanceof Date) return "DateTime";
- if (Array.isArray(value)) {
- const [type, _] = value;
- return type;
- }
- return "String";
- };
- }
- });
-
- // src/blocks/helpers.ts
- function dropView(name) {
- return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
- }
- function createMaterializedView(options) {
- return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
- TO ${quoteIdentifier(options.destinationTable)}
- AS ${options.select}`.trim();
- }
- var init_helpers = __esm({
- "src/blocks/helpers.ts"() {
- "use strict";
- init_sqlHelpers();
- }
- });
-
- // src/dataModels/types.ts
- var init_types = __esm({
- "src/dataModels/types.ts"() {
- "use strict";
- }
- });
-
- // src/browserCompatible.ts
- var init_browserCompatible = __esm({
- "src/browserCompatible.ts"() {
- "use strict";
- init_dmv2();
- init_types();
- init_sqlHelpers();
- }
- });
-
  // src/commons.ts
  var commons_exports = {};
  __export(commons_exports, {
@@ -416,298 +175,6 @@ var init_commons = __esm({
  }
  });

- // src/secrets.ts
- var init_secrets = __esm({
- "src/secrets.ts"() {
- "use strict";
- }
- });
-
- // src/consumption-apis/helpers.ts
- import {
- Client as TemporalClient,
- Connection
- } from "@temporalio/client";
- import { createHash, randomUUID } from "crypto";
- var init_helpers2 = __esm({
- "src/consumption-apis/helpers.ts"() {
- "use strict";
- init_internal();
- init_sqlHelpers();
- }
- });
-
- // src/consumption-apis/webAppHelpers.ts
- var init_webAppHelpers = __esm({
- "src/consumption-apis/webAppHelpers.ts"() {
- "use strict";
- }
- });
-
- // src/scripts/task.ts
- var init_task = __esm({
- "src/scripts/task.ts"() {
- "use strict";
- }
- });
-
- // src/cluster-utils.ts
- import cluster from "cluster";
- import { availableParallelism } from "os";
- import { exit } from "process";
- var init_cluster_utils = __esm({
- "src/cluster-utils.ts"() {
- "use strict";
- }
- });
-
- // src/consumption-apis/runner.ts
- import * as jose from "jose";
- var init_runner = __esm({
- "src/consumption-apis/runner.ts"() {
- "use strict";
- init_commons();
- init_helpers2();
- init_cluster_utils();
- init_sqlHelpers();
- init_internal();
- }
- });
-
- // src/clients/redisClient.ts
- import { createClient as createClient2 } from "redis";
- var init_redisClient = __esm({
- "src/clients/redisClient.ts"() {
- "use strict";
- }
- });
-
- // src/consumption-apis/standalone.ts
- var init_standalone = __esm({
- "src/consumption-apis/standalone.ts"() {
- "use strict";
- init_helpers2();
- init_commons();
- init_sqlHelpers();
- }
- });
-
- // src/utilities/json.ts
- var init_json = __esm({
- "src/utilities/json.ts"() {
- "use strict";
- }
- });
-
- // src/utilities/dataParser.ts
- import { parse } from "csv-parse";
- var CSV_DELIMITERS, DEFAULT_CSV_CONFIG;
- var init_dataParser = __esm({
- "src/utilities/dataParser.ts"() {
- "use strict";
- init_json();
- CSV_DELIMITERS = {
- COMMA: ",",
- TAB: "	",
- SEMICOLON: ";",
- PIPE: "|"
- };
- DEFAULT_CSV_CONFIG = {
- delimiter: CSV_DELIMITERS.COMMA,
- columns: true,
- skipEmptyLines: true,
- trim: true
- };
- }
- });
-
- // src/utilities/index.ts
- var init_utilities = __esm({
- "src/utilities/index.ts"() {
- "use strict";
- init_dataParser();
- }
- });
-
- // src/connectors/dataSource.ts
- var init_dataSource = __esm({
- "src/connectors/dataSource.ts"() {
- "use strict";
- }
- });
-
- // src/index.ts
- var init_index = __esm({
- "src/index.ts"() {
- "use strict";
- init_browserCompatible();
- init_helpers();
- init_commons();
- init_secrets();
- init_helpers2();
- init_webAppHelpers();
- init_task();
- init_runner();
- init_redisClient();
- init_helpers2();
- init_standalone();
- init_sqlHelpers();
- init_utilities();
- init_dataSource();
- init_types();
- }
- });
-
- // src/dmv2/internal.ts
- import process2 from "process";
- var isClientOnlyMode, moose_internal, defaultRetentionPeriod, getMooseInternal, dlqSchema, dlqColumns;
- var init_internal = __esm({
- "src/dmv2/internal.ts"() {
- "use strict";
- init_index();
- init_commons();
- isClientOnlyMode = () => process2.env.MOOSE_CLIENT_ONLY === "true";
- moose_internal = {
- tables: /* @__PURE__ */ new Map(),
- streams: /* @__PURE__ */ new Map(),
- ingestApis: /* @__PURE__ */ new Map(),
- apis: /* @__PURE__ */ new Map(),
- sqlResources: /* @__PURE__ */ new Map(),
- workflows: /* @__PURE__ */ new Map(),
- webApps: /* @__PURE__ */ new Map()
- };
- defaultRetentionPeriod = 60 * 60 * 24 * 7;
- getMooseInternal = () => globalThis.moose_internal;
- if (getMooseInternal() === void 0) {
- globalThis.moose_internal = moose_internal;
- }
- dlqSchema = {
- version: "3.1",
- components: {
- schemas: {
- DeadLetterModel: {
- type: "object",
- properties: {
- originalRecord: {
- $ref: "#/components/schemas/Recordstringany"
- },
- errorMessage: {
- type: "string"
- },
- errorType: {
- type: "string"
- },
- failedAt: {
- type: "string",
- format: "date-time"
- },
- source: {
- oneOf: [
- {
- const: "api"
- },
- {
- const: "transform"
- },
- {
- const: "table"
- }
- ]
- }
- },
- required: [
- "originalRecord",
- "errorMessage",
- "errorType",
- "failedAt",
- "source"
- ]
- },
- Recordstringany: {
- type: "object",
- properties: {},
- required: [],
- description: "Construct a type with a set of properties K of type T",
- additionalProperties: {}
- }
- }
- },
- schemas: [
- {
- $ref: "#/components/schemas/DeadLetterModel"
- }
- ]
- };
- dlqColumns = [
- {
- name: "originalRecord",
- data_type: "Json",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "errorMessage",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "errorType",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "failedAt",
- data_type: "DateTime",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- },
- {
- name: "source",
- data_type: "String",
- primary_key: false,
- required: true,
- unique: false,
- default: null,
- annotations: [],
- ttl: null,
- codec: null,
- materialized: null,
- comment: null
- }
- ];
- }
- });
-
  // src/config/configFile.ts
  import path from "path";
  import * as toml from "toml";
@@ -892,1849 +359,2133 @@ var init_runtime = __esm({
892
359
  }
893
360
  });
894
361
 
895
- // src/dmv2/sdk/olapTable.ts
896
- import { Readable } from "stream";
897
- import { createHash as createHash2 } from "crypto";
898
- var OlapTable;
899
- var init_olapTable = __esm({
900
- "src/dmv2/sdk/olapTable.ts"() {
901
- "use strict";
902
- init_typedBase();
903
- init_dataModelTypes();
904
- init_helpers();
905
- init_internal();
906
- init_sqlHelpers();
907
- OlapTable = class extends TypedBase {
908
- name;
909
- /** @internal */
910
- kind = "OlapTable";
911
- /** @internal Memoized ClickHouse client for reusing connections across insert calls */
912
- _memoizedClient;
913
- /** @internal Hash of the configuration used to create the memoized client */
914
- _configHash;
915
- /** @internal Cached table name to avoid repeated generation */
916
- _cachedTableName;
917
- constructor(name, config, schema, columns, validators) {
918
- const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
919
- const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
920
- const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
921
- if (hasFields && hasExpr) {
922
- throw new Error(
923
- `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
924
- );
925
- }
926
- const hasCluster = typeof resolvedConfig.cluster === "string";
927
- const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
928
- const hasReplicaName = typeof resolvedConfig.replicaName === "string";
929
- if (hasCluster && (hasKeeperPath || hasReplicaName)) {
930
- throw new Error(
931
- `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
932
- );
933
- }
934
- super(name, resolvedConfig, schema, columns, validators);
935
- this.name = name;
936
- const tables = getMooseInternal().tables;
937
- const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
938
- if (!isClientOnlyMode() && tables.has(registryKey)) {
939
- throw new Error(
940
- `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
941
- );
942
- }
943
- tables.set(registryKey, this);
944
- }
945
- /**
946
- * Generates the versioned table name following Moose's naming convention
947
- * Format: {tableName}_{version_with_dots_replaced_by_underscores}
948
- */
949
- generateTableName() {
950
- if (this._cachedTableName) {
951
- return this._cachedTableName;
952
- }
953
- const tableVersion = this.config.version;
954
- if (!tableVersion) {
955
- this._cachedTableName = this.name;
956
- } else {
957
- const versionSuffix = tableVersion.replace(/\./g, "_");
958
- this._cachedTableName = `${this.name}_${versionSuffix}`;
959
- }
960
- return this._cachedTableName;
961
- }
962
- /**
963
- * Creates a fast hash of the ClickHouse configuration.
964
- * Uses crypto.createHash for better performance than JSON.stringify.
965
- *
966
- * @private
967
- */
968
- createConfigHash(clickhouseConfig) {
969
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
970
- const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
971
- return createHash2("sha256").update(configString).digest("hex").substring(0, 16);
972
- }
973
- /**
974
- * Gets or creates a memoized ClickHouse client.
975
- * The client is cached and reused across multiple insert calls for better performance.
976
- * If the configuration changes, a new client will be created.
977
- *
978
- * @private
979
- */
980
- async getMemoizedClient() {
981
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
982
- const configRegistry = globalThis._mooseConfigRegistry;
983
- const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
984
- const clickhouseConfig = await configRegistry.getClickHouseConfig();
985
- const currentConfigHash = this.createConfigHash(clickhouseConfig);
986
- if (this._memoizedClient && this._configHash === currentConfigHash) {
987
- return { client: this._memoizedClient, config: clickhouseConfig };
988
- }
989
- if (this._memoizedClient && this._configHash !== currentConfigHash) {
990
- try {
991
- await this._memoizedClient.close();
992
- } catch (error) {
993
- }
994
- }
995
- const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
996
- const client = getClickhouseClient2({
997
- username: clickhouseConfig.username,
998
- password: clickhouseConfig.password,
999
- database: effectiveDatabase,
1000
- useSSL: clickhouseConfig.useSSL ? "true" : "false",
1001
- host: clickhouseConfig.host,
1002
- port: clickhouseConfig.port
1003
- });
1004
- this._memoizedClient = client;
1005
- this._configHash = currentConfigHash;
1006
- return { client, config: clickhouseConfig };
362
+ // src/dmv2/utils/stackTrace.ts
363
+ function shouldSkipStackLine(line) {
364
+ return line.includes("node_modules") || // Skip npm installed packages (prod)
365
+ line.includes("node:internal") || // Skip Node.js internals (modern format)
366
+ line.includes("internal/modules") || // Skip Node.js internals (older format)
367
+ line.includes("ts-node") || // Skip TypeScript execution
368
+ line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
369
+ line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
370
+ line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
371
+ line.includes("\\ts-moose-lib\\dist\\");
372
+ }
373
+ function parseStackLine(line) {
374
+ const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
375
+ if (match && match[1]) {
376
+ return {
377
+ file: match[1],
378
+ line: match[2]
379
+ };
380
+ }
381
+ return void 0;
382
+ }
383
+ function getSourceFileInfo(stack) {
384
+ if (!stack) return {};
385
+ const lines = stack.split("\n");
386
+ for (const line of lines) {
387
+ if (shouldSkipStackLine(line)) continue;
388
+ const info = parseStackLine(line);
389
+ if (info) return info;
390
+ }
391
+ return {};
392
+ }
393
+ function getSourceLocationFromStack(stack) {
394
+ if (!stack) return void 0;
395
+ const lines = stack.split("\n");
396
+ for (const line of lines.slice(1)) {
397
+ if (shouldSkipStackLine(line)) {
398
+ continue;
399
+ }
400
+ const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
401
+ if (v8Match) {
402
+ return {
403
+ file: v8Match[1],
404
+ line: parseInt(v8Match[2], 10),
405
+ column: parseInt(v8Match[3], 10)
406
+ };
407
+ }
408
+ const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
409
+ if (smMatch) {
410
+ return {
411
+ file: smMatch[1],
412
+ line: parseInt(smMatch[2], 10),
413
+ column: parseInt(smMatch[3], 10)
414
+ };
415
+ }
416
+ }
417
+ return void 0;
418
+ }
419
+ function getSourceFileFromStack(stack) {
420
+ const location = getSourceLocationFromStack(stack);
421
+ return location?.file;
422
+ }
423
+
424
+ // src/dmv2/typedBase.ts
425
+ var TypedBase = class {
426
+ /** The JSON schema representation of type T. Injected by the compiler plugin. */
427
+ schema;
428
+ /** The name assigned to this resource instance. */
429
+ name;
430
+ /** A dictionary mapping column names (keys of T) to their Column definitions. */
431
+ columns;
432
+ /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
433
+ columnArray;
434
+ /** The configuration object specific to this resource type. */
435
+ config;
436
+ /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
437
+ validators;
438
+ /** Optional metadata for the resource, always present as an object. */
439
+ metadata;
440
+ /**
441
+ * Whether this resource allows extra fields beyond the defined columns.
442
+ * When true, extra fields in payloads are passed through to streaming functions.
443
+ * Injected by the compiler plugin when the type has an index signature.
444
+ */
445
+ allowExtraFields;
446
+ /**
447
+ * @internal Constructor intended for internal use by subclasses and the compiler plugin.
448
+ * It expects the schema and columns to be provided, typically injected by the compiler.
449
+ *
450
+ * @param name The name for the resource instance.
451
+ * @param config The configuration object for the resource.
452
+ * @param schema The JSON schema for the resource's data type T (injected).
453
+ * @param columns The array of Column definitions for T (injected).
454
+ * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
455
+ */
456
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
457
+ if (schema === void 0 || columns === void 0) {
458
+ throw new Error(
459
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
460
+ );
461
+ }
462
+ this.schema = schema;
463
+ this.columnArray = columns;
464
+ const columnsObj = {};
465
+ columns.forEach((column) => {
466
+ columnsObj[column.name] = column;
467
+ });
468
+ this.columns = columnsObj;
469
+ this.name = name;
470
+ this.config = config;
471
+ this.validators = validators;
472
+ this.allowExtraFields = allowExtraFields ?? false;
473
+ this.metadata = config?.metadata ? { ...config.metadata } : {};
474
+ if (!this.metadata.source) {
475
+ const stack = new Error().stack;
476
+ if (stack) {
477
+ const info = getSourceFileInfo(stack);
478
+ this.metadata.source = { file: info.file, line: info.line };
1007
479
  }
1008
- /**
1009
- * Closes the memoized ClickHouse client if it exists.
1010
- * This is useful for cleaning up connections when the table instance is no longer needed.
1011
- * The client will be automatically recreated on the next insert call if needed.
1012
- */
1013
- async closeClient() {
1014
- if (this._memoizedClient) {
1015
- try {
1016
- await this._memoizedClient.close();
1017
- } catch (error) {
1018
- } finally {
1019
- this._memoizedClient = void 0;
1020
- this._configHash = void 0;
480
+ }
481
+ }
482
+ };
483
+
484
+ // src/dataModels/dataModelTypes.ts
485
+ function isArrayNestedType(dt) {
486
+ return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
487
+ }
488
+ function isNestedType(dt) {
489
+ return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
490
+ }
491
+
492
+ // src/sqlHelpers.ts
493
+ var quoteIdentifier = (name) => {
494
+ return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
495
+ };
496
+ var toStaticQuery = (sql3) => {
497
+ const [query, params] = toQuery(sql3);
498
+ if (Object.keys(params).length !== 0) {
499
+ throw new Error(
500
+ "Dynamic SQL is not allowed in the select statement in view creation."
501
+ );
502
+ }
503
+ return query;
504
+ };
505
+ var toQuery = (sql3) => {
506
+ const parameterizedStubs = sql3.values.map(
507
+ (v, i) => createClickhouseParameter(i, v)
508
+ );
509
+ const query = sql3.strings.map(
510
+ (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
511
+ ).join("");
512
+ const query_params = sql3.values.reduce(
513
+ (acc, v, i) => ({
514
+ ...acc,
515
+ [`p${i}`]: getValueFromParameter(v)
516
+ }),
517
+ {}
518
+ );
519
+ return [query, query_params];
520
+ };
521
+ var getValueFromParameter = (value) => {
522
+ if (Array.isArray(value)) {
523
+ const [type, val] = value;
524
+ if (type === "Identifier") return val;
525
+ }
526
+ return value;
527
+ };
528
+ function createClickhouseParameter(parameterIndex, value) {
529
+ return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
530
+ }
531
+ var mapToClickHouseType = (value) => {
532
+ if (typeof value === "number") {
533
+ return Number.isInteger(value) ? "Int" : "Float";
534
+ }
535
+ if (typeof value === "boolean") return "Bool";
536
+ if (value instanceof Date) return "DateTime";
537
+ if (Array.isArray(value)) {
538
+ const [type, _] = value;
539
+ return type;
540
+ }
541
+ return "String";
542
+ };
543
+ function emptyIfUndefined(value) {
544
+ return value === void 0 ? "" : value;
545
+ }
546
+
547
+ // src/dmv2/internal.ts
548
+ import process2 from "process";
549
+
550
+ // src/index.ts
551
+ init_commons();
552
+
553
+ // src/consumption-apis/helpers.ts
554
+ import {
555
+ Client as TemporalClient,
556
+ Connection
557
+ } from "@temporalio/client";
558
+ import { createHash, randomUUID } from "crypto";
559
+
560
+ // src/consumption-apis/runner.ts
561
+ init_commons();
562
+ import * as jose from "jose";
563
+
564
+ // src/cluster-utils.ts
565
+ import cluster from "cluster";
566
+ import { availableParallelism } from "os";
567
+ import { exit } from "process";
568
+
569
+ // src/clients/redisClient.ts
570
+ import { createClient as createClient2 } from "redis";
571
+
572
+ // src/consumption-apis/standalone.ts
573
+ init_commons();
574
+
575
+ // src/utilities/dataParser.ts
576
+ import { parse } from "csv-parse";
577
+ var CSV_DELIMITERS = {
578
+ COMMA: ",",
579
+ TAB: " ",
580
+ SEMICOLON: ";",
581
+ PIPE: "|"
582
+ };
583
+ var DEFAULT_CSV_CONFIG = {
584
+ delimiter: CSV_DELIMITERS.COMMA,
585
+ columns: true,
586
+ skipEmptyLines: true,
587
+ trim: true
588
+ };
589
+
590
+ // src/dmv2/internal.ts
591
+ init_commons();
592
+ var isClientOnlyMode = () => process2.env.MOOSE_CLIENT_ONLY === "true";
593
+ var moose_internal = {
594
+ tables: /* @__PURE__ */ new Map(),
595
+ streams: /* @__PURE__ */ new Map(),
596
+ ingestApis: /* @__PURE__ */ new Map(),
597
+ apis: /* @__PURE__ */ new Map(),
598
+ sqlResources: /* @__PURE__ */ new Map(),
599
+ workflows: /* @__PURE__ */ new Map(),
600
+ webApps: /* @__PURE__ */ new Map(),
601
+ materializedViews: /* @__PURE__ */ new Map(),
602
+ customViews: /* @__PURE__ */ new Map()
603
+ };
604
+ var defaultRetentionPeriod = 60 * 60 * 24 * 7;
605
+ var getMooseInternal = () => globalThis.moose_internal;
606
+ if (getMooseInternal() === void 0) {
607
+ globalThis.moose_internal = moose_internal;
608
+ }
609
+ var dlqSchema = {
610
+ version: "3.1",
611
+ components: {
612
+ schemas: {
613
+ DeadLetterModel: {
614
+ type: "object",
615
+ properties: {
616
+ originalRecord: {
617
+ $ref: "#/components/schemas/Recordstringany"
618
+ },
619
+ errorMessage: {
620
+ type: "string"
621
+ },
622
+ errorType: {
623
+ type: "string"
624
+ },
625
+ failedAt: {
626
+ type: "string",
627
+ format: "date-time"
628
+ },
629
+ source: {
630
+ oneOf: [
631
+ {
632
+ const: "api"
633
+ },
634
+ {
635
+ const: "transform"
636
+ },
637
+ {
638
+ const: "table"
639
+ }
640
+ ]
1021
641
  }
1022
- }
642
+ },
643
+ required: [
644
+ "originalRecord",
645
+ "errorMessage",
646
+ "errorType",
647
+ "failedAt",
648
+ "source"
649
+ ]
650
+ },
651
+ Recordstringany: {
652
+ type: "object",
653
+ properties: {},
654
+ required: [],
655
+ description: "Construct a type with a set of properties K of type T",
656
+ additionalProperties: {}
1023
657
  }
1024
- /**
1025
- * Validates a single record using typia's comprehensive type checking.
1026
- * This provides the most accurate validation as it uses the exact TypeScript type information.
1027
- *
1028
- * @param record The record to validate
1029
- * @returns Validation result with detailed error information
1030
- */
1031
- validateRecord(record) {
1032
- if (this.validators?.validate) {
1033
- try {
1034
- const result = this.validators.validate(record);
1035
- return {
1036
- success: result.success,
1037
- data: result.data,
1038
- errors: result.errors?.map(
1039
- (err) => typeof err === "string" ? err : JSON.stringify(err)
1040
- )
1041
- };
1042
- } catch (error) {
1043
- return {
1044
- success: false,
1045
- errors: [error instanceof Error ? error.message : String(error)]
1046
- };
1047
- }
1048
- }
1049
- throw new Error("No typia validator found");
658
+ }
659
+ },
660
+ schemas: [
661
+ {
662
+ $ref: "#/components/schemas/DeadLetterModel"
663
+ }
664
+ ]
665
+ };
666
+ var dlqColumns = [
667
+ {
668
+ name: "originalRecord",
669
+ data_type: "Json",
670
+ primary_key: false,
671
+ required: true,
672
+ unique: false,
673
+ default: null,
674
+ annotations: [],
675
+ ttl: null,
676
+ codec: null,
677
+ materialized: null,
678
+ comment: null
679
+ },
680
+ {
681
+ name: "errorMessage",
682
+ data_type: "String",
683
+ primary_key: false,
684
+ required: true,
685
+ unique: false,
686
+ default: null,
687
+ annotations: [],
688
+ ttl: null,
689
+ codec: null,
690
+ materialized: null,
691
+ comment: null
692
+ },
693
+ {
694
+ name: "errorType",
695
+ data_type: "String",
696
+ primary_key: false,
697
+ required: true,
698
+ unique: false,
699
+ default: null,
700
+ annotations: [],
701
+ ttl: null,
702
+ codec: null,
703
+ materialized: null,
704
+ comment: null
705
+ },
706
+ {
707
+ name: "failedAt",
708
+ data_type: "DateTime",
709
+ primary_key: false,
710
+ required: true,
711
+ unique: false,
712
+ default: null,
713
+ annotations: [],
714
+ ttl: null,
715
+ codec: null,
716
+ materialized: null,
717
+ comment: null
718
+ },
719
+ {
720
+ name: "source",
721
+ data_type: "String",
722
+ primary_key: false,
723
+ required: true,
724
+ unique: false,
725
+ default: null,
726
+ annotations: [],
727
+ ttl: null,
728
+ codec: null,
729
+ materialized: null,
730
+ comment: null
731
+ }
732
+ ];
733
+
734
+ // src/dmv2/sdk/olapTable.ts
735
+ import { Readable } from "stream";
736
+ import { createHash as createHash2 } from "crypto";
737
+ var OlapTable = class extends TypedBase {
738
+ name;
739
+ /** @internal */
740
+ kind = "OlapTable";
741
+ /** @internal Memoized ClickHouse client for reusing connections across insert calls */
742
+ _memoizedClient;
743
+ /** @internal Hash of the configuration used to create the memoized client */
744
+ _configHash;
745
+ /** @internal Cached table name to avoid repeated generation */
746
+ _cachedTableName;
747
+ constructor(name, config, schema, columns, validators) {
748
+ const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
749
+ const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
750
+ const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
751
+ if (hasFields && hasExpr) {
752
+ throw new Error(
753
+ `OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
754
+ );
755
+ }
756
+ const hasCluster = typeof resolvedConfig.cluster === "string";
757
+ const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
758
+ const hasReplicaName = typeof resolvedConfig.replicaName === "string";
759
+ if (hasCluster && (hasKeeperPath || hasReplicaName)) {
760
+ throw new Error(
761
+ `OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
762
+ );
763
+ }
764
+ super(name, resolvedConfig, schema, columns, validators);
765
+ this.name = name;
766
+ const tables = getMooseInternal().tables;
767
+ const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
768
+ if (!isClientOnlyMode() && tables.has(registryKey)) {
769
+ throw new Error(
770
+ `OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
771
+ );
772
+ }
773
+ tables.set(registryKey, this);
774
+ }
775
+ /**
776
+ * Generates the versioned table name following Moose's naming convention
777
+ * Format: {tableName}_{version_with_dots_replaced_by_underscores}
778
+ */
779
+ generateTableName() {
780
+ if (this._cachedTableName) {
781
+ return this._cachedTableName;
782
+ }
783
+ const tableVersion = this.config.version;
784
+ if (!tableVersion) {
785
+ this._cachedTableName = this.name;
786
+ } else {
787
+ const versionSuffix = tableVersion.replace(/\./g, "_");
788
+ this._cachedTableName = `${this.name}_${versionSuffix}`;
789
+ }
790
+ return this._cachedTableName;
791
+ }
792
+ /**
793
+ * Creates a fast hash of the ClickHouse configuration.
794
+ * Uses crypto.createHash for better performance than JSON.stringify.
795
+ *
796
+ * @private
797
+ */
798
+ createConfigHash(clickhouseConfig) {
799
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
800
+ const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
801
+ return createHash2("sha256").update(configString).digest("hex").substring(0, 16);
802
+ }
803
+ /**
804
+ * Gets or creates a memoized ClickHouse client.
805
+ * The client is cached and reused across multiple insert calls for better performance.
806
+ * If the configuration changes, a new client will be created.
807
+ *
808
+ * @private
809
+ */
810
+ async getMemoizedClient() {
811
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
812
+ const configRegistry = globalThis._mooseConfigRegistry;
813
+ const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
814
+ const clickhouseConfig = await configRegistry.getClickHouseConfig();
815
+ const currentConfigHash = this.createConfigHash(clickhouseConfig);
816
+ if (this._memoizedClient && this._configHash === currentConfigHash) {
817
+ return { client: this._memoizedClient, config: clickhouseConfig };
818
+ }
819
+ if (this._memoizedClient && this._configHash !== currentConfigHash) {
820
+ try {
821
+ await this._memoizedClient.close();
822
+ } catch (error) {
1050
823
  }
1051
- /**
1052
- * Type guard function using typia's is() function.
1053
- * Provides compile-time type narrowing for TypeScript.
1054
- *
1055
- * @param record The record to check
1056
- * @returns True if record matches type T, with type narrowing
1057
- */
1058
- isValidRecord(record) {
1059
- if (this.validators?.is) {
1060
- return this.validators.is(record);
1061
- }
1062
- throw new Error("No typia validator found");
824
+ }
825
+ const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
826
+ const client = getClickhouseClient2({
827
+ username: clickhouseConfig.username,
828
+ password: clickhouseConfig.password,
829
+ database: effectiveDatabase,
830
+ useSSL: clickhouseConfig.useSSL ? "true" : "false",
831
+ host: clickhouseConfig.host,
832
+ port: clickhouseConfig.port
833
+ });
834
+ this._memoizedClient = client;
835
+ this._configHash = currentConfigHash;
836
+ return { client, config: clickhouseConfig };
837
+ }
838
+ /**
839
+ * Closes the memoized ClickHouse client if it exists.
840
+ * This is useful for cleaning up connections when the table instance is no longer needed.
841
+ * The client will be automatically recreated on the next insert call if needed.
842
+ */
843
+ async closeClient() {
844
+ if (this._memoizedClient) {
845
+ try {
846
+ await this._memoizedClient.close();
847
+ } catch (error) {
848
+ } finally {
849
+ this._memoizedClient = void 0;
850
+ this._configHash = void 0;
1063
851
  }
1064
- /**
1065
- * Assert that a record matches type T, throwing detailed errors if not.
1066
- * Uses typia's assert() function for the most detailed error reporting.
1067
- *
1068
- * @param record The record to assert
1069
- * @returns The validated and typed record
1070
- * @throws Detailed validation error if record doesn't match type T
1071
- */
1072
- assertValidRecord(record) {
1073
- if (this.validators?.assert) {
1074
- return this.validators.assert(record);
1075
- }
1076
- throw new Error("No typia validator found");
852
+ }
853
+ }
854
+ /**
855
+ * Validates a single record using typia's comprehensive type checking.
856
+ * This provides the most accurate validation as it uses the exact TypeScript type information.
857
+ *
858
+ * @param record The record to validate
859
+ * @returns Validation result with detailed error information
860
+ */
861
+ validateRecord(record) {
862
+ if (this.validators?.validate) {
863
+ try {
864
+ const result = this.validators.validate(record);
865
+ return {
866
+ success: result.success,
867
+ data: result.data,
868
+ errors: result.errors?.map(
869
+ (err) => typeof err === "string" ? err : JSON.stringify(err)
870
+ )
871
+ };
872
+ } catch (error) {
873
+ return {
874
+ success: false,
875
+ errors: [error instanceof Error ? error.message : String(error)]
876
+ };
1077
877
  }
1078
- /**
1079
- * Validates an array of records with comprehensive error reporting.
1080
- * Uses the most appropriate validation method available (typia or basic).
1081
- *
1082
- * @param data Array of records to validate
1083
- * @returns Detailed validation results
1084
- */
1085
- async validateRecords(data) {
1086
- const valid = [];
1087
- const invalid = [];
1088
- valid.length = 0;
1089
- invalid.length = 0;
1090
- const dataLength = data.length;
1091
- for (let i = 0; i < dataLength; i++) {
1092
- const record = data[i];
1093
- try {
1094
- if (this.isValidRecord(record)) {
1095
- valid.push(this.mapToClickhouseRecord(record));
1096
- } else {
1097
- const result = this.validateRecord(record);
1098
- if (result.success) {
1099
- valid.push(this.mapToClickhouseRecord(record));
1100
- } else {
1101
- invalid.push({
1102
- record,
1103
- error: result.errors?.join(", ") || "Validation failed",
1104
- index: i,
1105
- path: "root"
1106
- });
1107
- }
1108
- }
1109
- } catch (error) {
878
+ }
879
+ throw new Error("No typia validator found");
880
+ }
881
+ /**
882
+ * Type guard function using typia's is() function.
883
+ * Provides compile-time type narrowing for TypeScript.
884
+ *
885
+ * @param record The record to check
886
+ * @returns True if record matches type T, with type narrowing
887
+ */
888
+ isValidRecord(record) {
889
+ if (this.validators?.is) {
890
+ return this.validators.is(record);
891
+ }
892
+ throw new Error("No typia validator found");
893
+ }
894
+ /**
895
+ * Assert that a record matches type T, throwing detailed errors if not.
896
+ * Uses typia's assert() function for the most detailed error reporting.
897
+ *
898
+ * @param record The record to assert
899
+ * @returns The validated and typed record
900
+ * @throws Detailed validation error if record doesn't match type T
901
+ */
902
+ assertValidRecord(record) {
903
+ if (this.validators?.assert) {
904
+ return this.validators.assert(record);
905
+ }
906
+ throw new Error("No typia validator found");
907
+ }
908
+ /**
909
+ * Validates an array of records with comprehensive error reporting.
910
+ * Uses the most appropriate validation method available (typia or basic).
911
+ *
912
+ * @param data Array of records to validate
913
+ * @returns Detailed validation results
914
+ */
915
+ async validateRecords(data) {
916
+ const valid = [];
917
+ const invalid = [];
918
+ valid.length = 0;
919
+ invalid.length = 0;
920
+ const dataLength = data.length;
921
+ for (let i = 0; i < dataLength; i++) {
922
+ const record = data[i];
923
+ try {
924
+ if (this.isValidRecord(record)) {
925
+ valid.push(this.mapToClickhouseRecord(record));
926
+ } else {
927
+ const result = this.validateRecord(record);
928
+ if (result.success) {
929
+ valid.push(this.mapToClickhouseRecord(record));
930
+ } else {
1110
931
  invalid.push({
1111
932
  record,
1112
- error: error instanceof Error ? error.message : String(error),
933
+ error: result.errors?.join(", ") || "Validation failed",
1113
934
  index: i,
1114
935
  path: "root"
1115
936
  });
1116
937
  }
1117
938
  }
1118
- return {
1119
- valid,
1120
- invalid,
1121
- total: dataLength
1122
- };
939
+ } catch (error) {
940
+ invalid.push({
941
+ record,
942
+ error: error instanceof Error ? error.message : String(error),
943
+ index: i,
944
+ path: "root"
945
+ });
1123
946
  }
1124
- /**
1125
- * Optimized batch retry that minimizes individual insert operations.
1126
- * Groups records into smaller batches to reduce round trips while still isolating failures.
1127
- *
1128
- * @private
1129
- */
1130
- async retryIndividualRecords(client, tableName, records) {
1131
- const successful = [];
1132
- const failed = [];
1133
- const RETRY_BATCH_SIZE = 10;
1134
- const totalRecords = records.length;
1135
- for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
1136
- const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
1137
- const batch = records.slice(i, batchEnd);
947
+ }
948
+ return {
949
+ valid,
950
+ invalid,
951
+ total: dataLength
952
+ };
953
+ }
954
+ /**
955
+ * Optimized batch retry that minimizes individual insert operations.
956
+ * Groups records into smaller batches to reduce round trips while still isolating failures.
957
+ *
958
+ * @private
959
+ */
960
+ async retryIndividualRecords(client, tableName, records) {
961
+ const successful = [];
962
+ const failed = [];
963
+ const RETRY_BATCH_SIZE = 10;
964
+ const totalRecords = records.length;
965
+ for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
966
+ const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
967
+ const batch = records.slice(i, batchEnd);
968
+ try {
969
+ await client.insert({
970
+ table: quoteIdentifier(tableName),
971
+ values: batch,
972
+ format: "JSONEachRow",
973
+ clickhouse_settings: {
974
+ date_time_input_format: "best_effort",
975
+ // Add performance settings for retries
976
+ max_insert_block_size: RETRY_BATCH_SIZE,
977
+ max_block_size: RETRY_BATCH_SIZE
978
+ }
979
+ });
980
+ successful.push(...batch);
981
+ } catch (batchError) {
982
+ for (let j = 0; j < batch.length; j++) {
983
+ const record = batch[j];
1138
984
  try {
1139
985
  await client.insert({
1140
986
  table: quoteIdentifier(tableName),
1141
- values: batch,
987
+ values: [record],
1142
988
  format: "JSONEachRow",
1143
989
  clickhouse_settings: {
1144
- date_time_input_format: "best_effort",
1145
- // Add performance settings for retries
1146
- max_insert_block_size: RETRY_BATCH_SIZE,
1147
- max_block_size: RETRY_BATCH_SIZE
990
+ date_time_input_format: "best_effort"
1148
991
  }
1149
992
  });
1150
- successful.push(...batch);
1151
- } catch (batchError) {
1152
- for (let j = 0; j < batch.length; j++) {
1153
- const record = batch[j];
1154
- try {
1155
- await client.insert({
1156
- table: quoteIdentifier(tableName),
1157
- values: [record],
1158
- format: "JSONEachRow",
1159
- clickhouse_settings: {
1160
- date_time_input_format: "best_effort"
1161
- }
1162
- });
1163
- successful.push(record);
1164
- } catch (error) {
1165
- failed.push({
1166
- record,
1167
- error: error instanceof Error ? error.message : String(error),
1168
- index: i + j
1169
- });
1170
- }
1171
- }
1172
- }
1173
- }
1174
- return { successful, failed };
1175
- }
1176
- /**
1177
- * Validates input parameters and strategy compatibility
1178
- * @private
1179
- */
1180
- validateInsertParameters(data, options) {
1181
- const isStream = data instanceof Readable;
1182
- const strategy = options?.strategy || "fail-fast";
1183
- const shouldValidate = options?.validate !== false;
1184
- if (isStream && strategy === "isolate") {
1185
- throw new Error(
1186
- "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
1187
- );
1188
- }
1189
- if (isStream && shouldValidate) {
1190
- console.warn(
1191
- "Validation is not supported with stream input. Validation will be skipped."
1192
- );
1193
- }
1194
- return { isStream, strategy, shouldValidate };
1195
- }
1196
- /**
1197
- * Handles early return cases for empty data
1198
- * @private
1199
- */
1200
- handleEmptyData(data, isStream) {
1201
- if (isStream && !data) {
1202
- return {
1203
- successful: 0,
1204
- failed: 0,
1205
- total: 0
1206
- };
1207
- }
1208
- if (!isStream && (!data || data.length === 0)) {
1209
- return {
1210
- successful: 0,
1211
- failed: 0,
1212
- total: 0
1213
- };
1214
- }
1215
- return null;
1216
- }
1217
- /**
1218
- * Performs pre-insertion validation for array data
1219
- * @private
1220
- */
1221
- async performPreInsertionValidation(data, shouldValidate, strategy, options) {
1222
- if (!shouldValidate) {
1223
- return { validatedData: data, validationErrors: [] };
1224
- }
1225
- try {
1226
- const validationResult = await this.validateRecords(data);
1227
- const validatedData = validationResult.valid;
1228
- const validationErrors = validationResult.invalid;
1229
- if (validationErrors.length > 0) {
1230
- this.handleValidationErrors(validationErrors, strategy, data, options);
1231
- switch (strategy) {
1232
- case "discard":
1233
- return { validatedData, validationErrors };
1234
- case "isolate":
1235
- return { validatedData: data, validationErrors };
1236
- default:
1237
- return { validatedData, validationErrors };
1238
- }
1239
- }
1240
- return { validatedData, validationErrors };
1241
- } catch (validationError) {
1242
- if (strategy === "fail-fast") {
1243
- throw validationError;
1244
- }
1245
- console.warn("Validation error:", validationError);
1246
- return { validatedData: data, validationErrors: [] };
1247
- }
1248
- }
1249
- /**
1250
- * Handles validation errors based on the specified strategy
1251
- * @private
1252
- */
1253
- handleValidationErrors(validationErrors, strategy, data, options) {
1254
- switch (strategy) {
1255
- case "fail-fast":
1256
- const firstError = validationErrors[0];
1257
- throw new Error(
1258
- `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1259
- );
1260
- case "discard":
1261
- this.checkValidationThresholds(validationErrors, data.length, options);
1262
- break;
1263
- case "isolate":
1264
- break;
1265
- }
1266
- }
1267
- /**
1268
- * Checks if validation errors exceed configured thresholds
1269
- * @private
1270
- */
1271
- checkValidationThresholds(validationErrors, totalRecords, options) {
1272
- const validationFailedCount = validationErrors.length;
1273
- const validationFailedRatio = validationFailedCount / totalRecords;
1274
- if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1275
- throw new Error(
1276
- `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1277
- );
1278
- }
1279
- if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1280
- throw new Error(
1281
- `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1282
- );
1283
- }
1284
- }
1285
- /**
1286
- * Optimized insert options preparation with better memory management
1287
- * @private
1288
- */
1289
- prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1290
- const insertOptions = {
1291
- table: quoteIdentifier(tableName),
1292
- format: "JSONEachRow",
1293
- clickhouse_settings: {
1294
- date_time_input_format: "best_effort",
1295
- wait_end_of_query: 1,
1296
- // Ensure at least once delivery for INSERT operations
1297
- // Performance optimizations
1298
- max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1299
- max_block_size: 65536,
1300
- // Use async inserts for better performance with large datasets
1301
- async_insert: validatedData.length > 1e3 ? 1 : 0,
1302
- wait_for_async_insert: 1
1303
- // For at least once delivery
1304
- }
1305
- };
1306
- if (isStream) {
1307
- insertOptions.values = data;
1308
- } else {
1309
- insertOptions.values = validatedData;
1310
- }
1311
- if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1312
- if (options.allowErrors !== void 0) {
1313
- insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1314
- }
1315
- if (options.allowErrorsRatio !== void 0) {
1316
- insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
993
+ successful.push(record);
994
+ } catch (error) {
995
+ failed.push({
996
+ record,
997
+ error: error instanceof Error ? error.message : String(error),
998
+ index: i + j
999
+ });
1317
1000
  }
1318
1001
  }
1319
- return insertOptions;
1320
- }
1321
- /**
1322
- * Creates success result for completed insertions
1323
- * @private
1324
- */
1325
- createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1326
- if (isStream) {
1327
- return {
1328
- successful: -1,
1329
- // -1 indicates stream mode where count is unknown
1330
- failed: 0,
1331
- total: -1
1332
- };
1333
- }
1334
- const insertedCount = validatedData.length;
1335
- const totalProcessed = shouldValidate ? data.length : insertedCount;
1336
- const result = {
1337
- successful: insertedCount,
1338
- failed: shouldValidate ? validationErrors.length : 0,
1339
- total: totalProcessed
1340
- };
1341
- if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1342
- result.failedRecords = validationErrors.map((ve) => ({
1343
- record: ve.record,
1344
- error: `Validation error: ${ve.error}`,
1345
- index: ve.index
1346
- }));
1347
- }
1348
- return result;
1349
1002
  }
1350
- /**
1351
- * Handles insertion errors based on the specified strategy
1352
- * @private
1353
- */
1354
- async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1003
+ }
1004
+ return { successful, failed };
1005
+ }
1006
+ /**
1007
+ * Validates input parameters and strategy compatibility
1008
+ * @private
1009
+ */
1010
+ validateInsertParameters(data, options) {
1011
+ const isStream = data instanceof Readable;
1012
+ const strategy = options?.strategy || "fail-fast";
1013
+ const shouldValidate = options?.validate !== false;
1014
+ if (isStream && strategy === "isolate") {
1015
+ throw new Error(
1016
+ "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
1017
+ );
1018
+ }
1019
+ if (isStream && shouldValidate) {
1020
+ console.warn(
1021
+ "Validation is not supported with stream input. Validation will be skipped."
1022
+ );
1023
+ }
1024
+ return { isStream, strategy, shouldValidate };
1025
+ }
1026
+ /**
1027
+ * Handles early return cases for empty data
1028
+ * @private
1029
+ */
1030
+ handleEmptyData(data, isStream) {
1031
+ if (isStream && !data) {
1032
+ return {
1033
+ successful: 0,
1034
+ failed: 0,
1035
+ total: 0
1036
+ };
1037
+ }
1038
+ if (!isStream && (!data || data.length === 0)) {
1039
+ return {
1040
+ successful: 0,
1041
+ failed: 0,
1042
+ total: 0
1043
+ };
1044
+ }
1045
+ return null;
1046
+ }
1047
+ /**
1048
+ * Performs pre-insertion validation for array data
1049
+ * @private
1050
+ */
1051
+ async performPreInsertionValidation(data, shouldValidate, strategy, options) {
1052
+ if (!shouldValidate) {
1053
+ return { validatedData: data, validationErrors: [] };
1054
+ }
1055
+ try {
1056
+ const validationResult = await this.validateRecords(data);
1057
+ const validatedData = validationResult.valid;
1058
+ const validationErrors = validationResult.invalid;
1059
+ if (validationErrors.length > 0) {
1060
+ this.handleValidationErrors(validationErrors, strategy, data, options);
1355
1061
  switch (strategy) {
1356
- case "fail-fast":
1357
- throw new Error(
1358
- `Failed to insert data into table ${tableName}: ${batchError}`
1359
- );
1360
1062
  case "discard":
1361
- throw new Error(
1362
- `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1363
- );
1063
+ return { validatedData, validationErrors };
1364
1064
  case "isolate":
1365
- return await this.handleIsolateStrategy(
1366
- batchError,
1367
- tableName,
1368
- data,
1369
- validatedData,
1370
- validationErrors,
1371
- isStream,
1372
- shouldValidate,
1373
- options
1374
- );
1065
+ return { validatedData: data, validationErrors };
1375
1066
  default:
1376
- throw new Error(`Unknown error strategy: ${strategy}`);
1067
+ return { validatedData, validationErrors };
1377
1068
  }
1378
1069
  }
1379
- /**
1380
- * Handles the isolate strategy for insertion errors
1381
- * @private
1382
- */
1383
- async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1384
- if (isStream) {
1385
- throw new Error(
1386
- `Isolate strategy is not supported with stream input: ${batchError}`
1387
- );
1388
- }
1389
- try {
1390
- const { client } = await this.getMemoizedClient();
1391
- const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1392
- const retryData = skipValidationOnRetry ? data : validatedData;
1393
- const { successful, failed } = await this.retryIndividualRecords(
1394
- client,
1395
- tableName,
1396
- retryData
1397
- );
1398
- const allFailedRecords = [
1399
- // Validation errors (if any and not skipping validation on retry)
1400
- ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1401
- record: ve.record,
1402
- error: `Validation error: ${ve.error}`,
1403
- index: ve.index
1404
- })) : [],
1405
- // Insertion errors
1406
- ...failed
1407
- ];
1408
- this.checkInsertionThresholds(
1409
- allFailedRecords,
1410
- data.length,
1411
- options
1412
- );
1413
- return {
1414
- successful: successful.length,
1415
- failed: allFailedRecords.length,
1416
- total: data.length,
1417
- failedRecords: allFailedRecords
1418
- };
1419
- } catch (isolationError) {
1420
- throw new Error(
1421
- `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1422
- );
1423
- }
1070
+ return { validatedData, validationErrors };
1071
+ } catch (validationError) {
1072
+ if (strategy === "fail-fast") {
1073
+ throw validationError;
1424
1074
  }
1425
- /**
1426
- * Checks if insertion errors exceed configured thresholds
1427
- * @private
1428
- */
1429
- checkInsertionThresholds(failedRecords, totalRecords, options) {
1430
- const totalFailed = failedRecords.length;
1431
- const failedRatio = totalFailed / totalRecords;
1432
- if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1433
- throw new Error(
1434
- `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1435
- );
1436
- }
1437
- if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1438
- throw new Error(
1439
- `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1440
- );
1441
- }
1075
+ console.warn("Validation error:", validationError);
1076
+ return { validatedData: data, validationErrors: [] };
1077
+ }
1078
+ }
1079
+ /**
1080
+ * Handles validation errors based on the specified strategy
1081
+ * @private
1082
+ */
1083
+ handleValidationErrors(validationErrors, strategy, data, options) {
1084
+ switch (strategy) {
1085
+ case "fail-fast":
1086
+ const firstError = validationErrors[0];
1087
+ throw new Error(
1088
+ `Validation failed for record at index ${firstError.index}: ${firstError.error}`
1089
+ );
1090
+ case "discard":
1091
+ this.checkValidationThresholds(validationErrors, data.length, options);
1092
+ break;
1093
+ case "isolate":
1094
+ break;
1095
+ }
1096
+ }
1097
+ /**
1098
+ * Checks if validation errors exceed configured thresholds
1099
+ * @private
1100
+ */
1101
+ checkValidationThresholds(validationErrors, totalRecords, options) {
1102
+ const validationFailedCount = validationErrors.length;
1103
+ const validationFailedRatio = validationFailedCount / totalRecords;
1104
+ if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
1105
+ throw new Error(
1106
+ `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1107
+ );
1108
+ }
1109
+ if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
1110
+ throw new Error(
1111
+ `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
1112
+ );
1113
+ }
1114
+ }
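
A note on the thresholds above: `allowErrors` caps the absolute number of invalid records a `discard` insert may drop, `allowErrorsRatio` caps the fraction, and crossing either aborts the whole call. A minimal usage sketch, assuming `OlapTable` is exported from the package root and using an illustrative `User` type:

```typescript
import { OlapTable } from "@514labs/moose-lib";

interface User { id: number; name: string; }

const users = new OlapTable<User>("users");
const mixedBatch = [
  { id: 1, name: "ok" },
  { id: 2, name: 42 } as unknown as User, // intentionally invalid row
];

// Drop invalid rows, but abort if more than 5 records
// or more than 10% of the batch fail validation.
const result = await users.insert(mixedBatch, {
  strategy: "discard",
  allowErrors: 5,
  allowErrorsRatio: 0.1,
});
console.log(result.successful, result.failed, result.total);
```
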
1115
+ /**
1116
+ * Optimized insert options preparation with better memory management
1117
+ * @private
1118
+ */
1119
+ prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
1120
+ const insertOptions = {
1121
+ table: quoteIdentifier(tableName),
1122
+ format: "JSONEachRow",
1123
+ clickhouse_settings: {
1124
+ date_time_input_format: "best_effort",
1125
+ wait_end_of_query: 1,
1126
+ // Ensure at least once delivery for INSERT operations
1127
+ // Performance optimizations
1128
+ max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
1129
+ max_block_size: 65536,
1130
+ // Use async inserts for better performance with large datasets
1131
+ async_insert: validatedData.length > 1e3 ? 1 : 0,
1132
+ wait_for_async_insert: 1
1133
+ // For at least once delivery
1442
1134
  }
1443
- /**
1444
- * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1445
- *
1446
- * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1447
- * - For every Nested struct (not array), it recurses into the struct.
1448
- * - This ensures compatibility with kafka_clickhouse_sync
1449
- *
1450
- * @param record The input record to transform (may be deeply nested)
1451
- * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1452
- * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1453
- */
1454
- mapToClickhouseRecord(record, columns = this.columnArray) {
1455
- const result = { ...record };
1456
- for (const col of columns) {
1457
- const value = record[col.name];
1458
- const dt = col.data_type;
1459
- if (isArrayNestedType(dt)) {
1460
- if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1461
- result[col.name] = value.map((item) => [
1462
- this.mapToClickhouseRecord(item, dt.elementType.columns)
1463
- ]);
1464
- }
1465
- } else if (isNestedType(dt)) {
1466
- if (value && typeof value === "object") {
1467
- result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1468
- }
1469
- }
1470
- }
1471
- return result;
1135
+ };
1136
+ if (isStream) {
1137
+ insertOptions.values = data;
1138
+ } else {
1139
+ insertOptions.values = validatedData;
1140
+ }
1141
+ if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
1142
+ if (options.allowErrors !== void 0) {
1143
+ insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
1472
1144
  }
1473
- /**
1474
- * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1475
- * This method establishes a direct connection to ClickHouse using the project configuration
1476
- * and inserts the provided data into the versioned table.
1477
- *
1478
- * PERFORMANCE OPTIMIZATIONS:
1479
- * - Memoized client connections with fast config hashing
1480
- * - Single-pass validation with pre-allocated arrays
1481
- * - Batch-optimized retry strategy (batches of 10, then individual)
1482
- * - Optimized ClickHouse settings for large datasets
1483
- * - Reduced memory allocations and object creation
1484
- *
1485
- * Uses advanced typia validation when available for comprehensive type checking,
1486
- * with fallback to basic validation for compatibility.
1487
- *
1488
- * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1489
- * If the configuration changes, a new client will be automatically created.
1490
- *
1491
- * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1492
- * @param options Optional configuration for error handling, validation, and insertion behavior
1493
- * @returns Promise resolving to detailed insertion results
1494
- * @throws {ConfigError} When configuration cannot be read or parsed
1495
- * @throws {ClickHouseError} When insertion fails based on the error strategy
1496
- * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1497
- *
1498
- * @example
1499
- * ```typescript
1500
- * // Create an OlapTable instance (typia validators auto-injected)
1501
- * const userTable = new OlapTable<User>('users');
1502
- *
1503
- * // Insert with comprehensive typia validation
1504
- * const result1 = await userTable.insert([
1505
- * { id: 1, name: 'John', email: 'john@example.com' },
1506
- * { id: 2, name: 'Jane', email: 'jane@example.com' }
1507
- * ]);
1508
- *
1509
- * // Insert data with stream input (validation not available for streams)
1510
- * const dataStream = new Readable({
1511
- * objectMode: true,
1512
- * read() { // Stream implementation }
1513
- * });
1514
- * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1515
- *
1516
- * // Insert with validation disabled for performance
1517
- * const result3 = await userTable.insert(data, { validate: false });
1518
- *
1519
- * // Insert with error handling strategies
1520
- * const result4 = await userTable.insert(mixedData, {
1521
- * strategy: 'isolate',
1522
- * allowErrorsRatio: 0.1,
1523
- * validate: true // Use typia validation (default)
1524
- * });
1525
- *
1526
- * // Optional: Clean up connection when completely done
1527
- * await userTable.closeClient();
1528
- * ```
1529
- */
1530
- async insert(data, options) {
1531
- const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1532
- const emptyResult = this.handleEmptyData(data, isStream);
1533
- if (emptyResult) {
1534
- return emptyResult;
1535
- }
1536
- let validatedData = [];
1537
- let validationErrors = [];
1538
- if (!isStream && shouldValidate) {
1539
- const validationResult = await this.performPreInsertionValidation(
1540
- data,
1541
- shouldValidate,
1542
- strategy,
1543
- options
1544
- );
1545
- validatedData = validationResult.validatedData;
1546
- validationErrors = validationResult.validationErrors;
1547
- } else {
1548
- validatedData = isStream ? [] : data;
1145
+ if (options.allowErrorsRatio !== void 0) {
1146
+ insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
1147
+ }
1148
+ }
1149
+ return insertOptions;
1150
+ }
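
The branching above derives the ClickHouse settings from the batch shape: `JSONEachRow` everywhere, `async_insert` only once a batch exceeds 1,000 rows, and the `input_format_allow_errors_*` knobs only under the `discard` strategy. A standalone restatement of that decision table (a local sketch, not the library API):

```typescript
// Mirrors the branching in prepareInsertOptions above; names are local to this sketch.
function sketchSettings(rows: number, strategy: string, allowErrors?: number) {
  return {
    format: "JSONEachRow",
    clickhouse_settings: {
      wait_end_of_query: 1, // at-least-once delivery
      max_insert_block_size: Math.min(rows, 100_000),
      async_insert: rows > 1_000 ? 1 : 0, // batch large inserts server-side
      wait_for_async_insert: 1,
      ...(strategy === "discard" && allowErrors !== undefined
        ? { input_format_allow_errors_num: allowErrors }
        : {}),
    },
  };
}
```
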
1151
+ /**
1152
+ * Creates success result for completed insertions
1153
+ * @private
1154
+ */
1155
+ createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
1156
+ if (isStream) {
1157
+ return {
1158
+ successful: -1,
1159
+ // -1 indicates stream mode where count is unknown
1160
+ failed: 0,
1161
+ total: -1
1162
+ };
1163
+ }
1164
+ const insertedCount = validatedData.length;
1165
+ const totalProcessed = shouldValidate ? data.length : insertedCount;
1166
+ const result = {
1167
+ successful: insertedCount,
1168
+ failed: shouldValidate ? validationErrors.length : 0,
1169
+ total: totalProcessed
1170
+ };
1171
+ if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
1172
+ result.failedRecords = validationErrors.map((ve) => ({
1173
+ record: ve.record,
1174
+ error: `Validation error: ${ve.error}`,
1175
+ index: ve.index
1176
+ }));
1177
+ }
1178
+ return result;
1179
+ }
1180
+ /**
1181
+ * Handles insertion errors based on the specified strategy
1182
+ * @private
1183
+ */
1184
+ async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1185
+ switch (strategy) {
1186
+ case "fail-fast":
1187
+ throw new Error(
1188
+ `Failed to insert data into table ${tableName}: ${batchError}`
1189
+ );
1190
+ case "discard":
1191
+ throw new Error(
1192
+ `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
1193
+ );
1194
+ case "isolate":
1195
+ return await this.handleIsolateStrategy(
1196
+ batchError,
1197
+ tableName,
1198
+ data,
1199
+ validatedData,
1200
+ validationErrors,
1201
+ isStream,
1202
+ shouldValidate,
1203
+ options
1204
+ );
1205
+ default:
1206
+ throw new Error(`Unknown error strategy: ${strategy}`);
1207
+ }
1208
+ }
1209
+ /**
1210
+ * Handles the isolate strategy for insertion errors
1211
+ * @private
1212
+ */
1213
+ async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
1214
+ if (isStream) {
1215
+ throw new Error(
1216
+ `Isolate strategy is not supported with stream input: ${batchError}`
1217
+ );
1218
+ }
1219
+ try {
1220
+ const { client } = await this.getMemoizedClient();
1221
+ const skipValidationOnRetry = options?.skipValidationOnRetry || false;
1222
+ const retryData = skipValidationOnRetry ? data : validatedData;
1223
+ const { successful, failed } = await this.retryIndividualRecords(
1224
+ client,
1225
+ tableName,
1226
+ retryData
1227
+ );
1228
+ const allFailedRecords = [
1229
+ // Validation errors (if any and not skipping validation on retry)
1230
+ ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
1231
+ record: ve.record,
1232
+ error: `Validation error: ${ve.error}`,
1233
+ index: ve.index
1234
+ })) : [],
1235
+ // Insertion errors
1236
+ ...failed
1237
+ ];
1238
+ this.checkInsertionThresholds(
1239
+ allFailedRecords,
1240
+ data.length,
1241
+ options
1242
+ );
1243
+ return {
1244
+ successful: successful.length,
1245
+ failed: allFailedRecords.length,
1246
+ total: data.length,
1247
+ failedRecords: allFailedRecords
1248
+ };
1249
+ } catch (isolationError) {
1250
+ throw new Error(
1251
+ `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
1252
+ );
1253
+ }
1254
+ }
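
Under `isolate`, a failed batch is retried record by record, survivors are kept, and the rejects are returned rather than thrown. Continuing the `users` sketch from earlier, a caller can inspect exactly which rows failed (field names match the result object assembled here):

```typescript
const result = await users.insert(mixedBatch, { strategy: "isolate" });
if (result.failedRecords) {
  for (const f of result.failedRecords) {
    // Each entry carries the original record, the error text,
    // and the record's index in the input batch.
    console.warn(`row ${f.index} rejected: ${f.error}`, f.record);
  }
}
```
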
1255
+ /**
1256
+ * Checks if insertion errors exceed configured thresholds
1257
+ * @private
1258
+ */
1259
+ checkInsertionThresholds(failedRecords, totalRecords, options) {
1260
+ const totalFailed = failedRecords.length;
1261
+ const failedRatio = totalFailed / totalRecords;
1262
+ if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
1263
+ throw new Error(
1264
+ `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1265
+ );
1266
+ }
1267
+ if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
1268
+ throw new Error(
1269
+ `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
1270
+ );
1271
+ }
1272
+ }
1273
+ /**
1274
+ * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
1275
+ *
1276
+ * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
1277
+ * - For every Nested struct (not array), it recurses into the struct.
1278
+ * - This ensures compatibility with kafka_clickhouse_sync
1279
+ *
1280
+ * @param record The input record to transform (may be deeply nested)
1281
+ * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
1282
+ * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
1283
+ */
1284
+ mapToClickhouseRecord(record, columns = this.columnArray) {
1285
+ const result = { ...record };
1286
+ for (const col of columns) {
1287
+ const value = record[col.name];
1288
+ const dt = col.data_type;
1289
+ if (isArrayNestedType(dt)) {
1290
+ if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
1291
+ result[col.name] = value.map((item) => [
1292
+ this.mapToClickhouseRecord(item, dt.elementType.columns)
1293
+ ]);
1549
1294
  }
1550
- const { client } = await this.getMemoizedClient();
1551
- const tableName = this.generateTableName();
1552
- try {
1553
- const insertOptions = this.prepareInsertOptions(
1554
- tableName,
1555
- data,
1556
- validatedData,
1557
- isStream,
1558
- strategy,
1559
- options
1560
- );
1561
- await client.insert(insertOptions);
1562
- return this.createSuccessResult(
1563
- data,
1564
- validatedData,
1565
- validationErrors,
1566
- isStream,
1567
- shouldValidate,
1568
- strategy
1569
- );
1570
- } catch (batchError) {
1571
- return await this.handleInsertionError(
1572
- batchError,
1573
- strategy,
1574
- tableName,
1575
- data,
1576
- validatedData,
1577
- validationErrors,
1578
- isStream,
1579
- shouldValidate,
1580
- options
1581
- );
1295
+ } else if (isNestedType(dt)) {
1296
+ if (value && typeof value === "object") {
1297
+ result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
1582
1298
  }
1583
1299
  }
1584
- // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1585
- // were removed in ENG-856. Use direct configuration instead, e.g.:
1586
- // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1587
- };
1300
+ }
1301
+ return result;
1588
1302
  }
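
The wrapping rule above is easiest to see on a concrete record: every element of an `Array(Nested(...))` column is placed in its own single-element array, recursively. An illustrative before/after (column names invented for the example):

```typescript
// Input record for a table with a `tags: Array(Nested(key, value))` column:
const input = {
  id: 1,
  tags: [{ key: "a", value: "1" }, { key: "b", value: "2" }],
};

// After mapToClickhouseRecord, each nested item sits in its own array,
// the shape ClickHouse's JSONEachRow expects for Array(Nested):
const output = {
  id: 1,
  tags: [[{ key: "a", value: "1" }], [{ key: "b", value: "2" }]],
};
```
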
1589
- });
1303
+ /**
1304
+ * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
1305
+ * This method establishes a direct connection to ClickHouse using the project configuration
1306
+ * and inserts the provided data into the versioned table.
1307
+ *
1308
+ * PERFORMANCE OPTIMIZATIONS:
1309
+ * - Memoized client connections with fast config hashing
1310
+ * - Single-pass validation with pre-allocated arrays
1311
+ * - Batch-optimized retry strategy (batches of 10, then individual)
1312
+ * - Optimized ClickHouse settings for large datasets
1313
+ * - Reduced memory allocations and object creation
1314
+ *
1315
+ * Uses advanced typia validation when available for comprehensive type checking,
1316
+ * with fallback to basic validation for compatibility.
1317
+ *
1318
+ * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
1319
+ * If the configuration changes, a new client will be automatically created.
1320
+ *
1321
+ * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
1322
+ * @param options Optional configuration for error handling, validation, and insertion behavior
1323
+ * @returns Promise resolving to detailed insertion results
1324
+ * @throws {ConfigError} When configuration cannot be read or parsed
1325
+ * @throws {ClickHouseError} When insertion fails based on the error strategy
1326
+ * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
1327
+ *
1328
+ * @example
1329
+ * ```typescript
1330
+ * // Create an OlapTable instance (typia validators auto-injected)
1331
+ * const userTable = new OlapTable<User>('users');
1332
+ *
1333
+ * // Insert with comprehensive typia validation
1334
+ * const result1 = await userTable.insert([
1335
+ * { id: 1, name: 'John', email: 'john@example.com' },
1336
+ * { id: 2, name: 'Jane', email: 'jane@example.com' }
1337
+ * ]);
1338
+ *
1339
+ * // Insert data with stream input (validation not available for streams)
1340
+ * const dataStream = new Readable({
1341
+ * objectMode: true,
1342
+ * read() { // Stream implementation }
1343
+ * });
1344
+ * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
1345
+ *
1346
+ * // Insert with validation disabled for performance
1347
+ * const result3 = await userTable.insert(data, { validate: false });
1348
+ *
1349
+ * // Insert with error handling strategies
1350
+ * const result4 = await userTable.insert(mixedData, {
1351
+ * strategy: 'isolate',
1352
+ * allowErrorsRatio: 0.1,
1353
+ * validate: true // Use typia validation (default)
1354
+ * });
1355
+ *
1356
+ * // Optional: Clean up connection when completely done
1357
+ * await userTable.closeClient();
1358
+ * ```
1359
+ */
1360
+ async insert(data, options) {
1361
+ const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
1362
+ const emptyResult = this.handleEmptyData(data, isStream);
1363
+ if (emptyResult) {
1364
+ return emptyResult;
1365
+ }
1366
+ let validatedData = [];
1367
+ let validationErrors = [];
1368
+ if (!isStream && shouldValidate) {
1369
+ const validationResult = await this.performPreInsertionValidation(
1370
+ data,
1371
+ shouldValidate,
1372
+ strategy,
1373
+ options
1374
+ );
1375
+ validatedData = validationResult.validatedData;
1376
+ validationErrors = validationResult.validationErrors;
1377
+ } else {
1378
+ validatedData = isStream ? [] : data;
1379
+ }
1380
+ const { client } = await this.getMemoizedClient();
1381
+ const tableName = this.generateTableName();
1382
+ try {
1383
+ const insertOptions = this.prepareInsertOptions(
1384
+ tableName,
1385
+ data,
1386
+ validatedData,
1387
+ isStream,
1388
+ strategy,
1389
+ options
1390
+ );
1391
+ await client.insert(insertOptions);
1392
+ return this.createSuccessResult(
1393
+ data,
1394
+ validatedData,
1395
+ validationErrors,
1396
+ isStream,
1397
+ shouldValidate,
1398
+ strategy
1399
+ );
1400
+ } catch (batchError) {
1401
+ return await this.handleInsertionError(
1402
+ batchError,
1403
+ strategy,
1404
+ tableName,
1405
+ data,
1406
+ validatedData,
1407
+ validationErrors,
1408
+ isStream,
1409
+ shouldValidate,
1410
+ options
1411
+ );
1412
+ }
1413
+ }
1414
+ // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
1415
+ // were removed in ENG-856. Use direct configuration instead, e.g.:
1416
+ // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
1417
+ };
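
Expanding the migration note above into a full call, since the `withReplacingMergeTree` factory is gone (the `Event` type is illustrative; `ClickHouseEngines` is assumed to be exported alongside `OlapTable`):

```typescript
import { OlapTable, ClickHouseEngines } from "@514labs/moose-lib";

interface Event { id: string; updated_at: Date; }

// Deduplicating table keyed on `id`, keeping the row with the latest `updated_at`.
const events = new OlapTable<Event>("events", {
  engine: ClickHouseEngines.ReplacingMergeTree,
  orderByFields: ["id"],
  ver: "updated_at",
});
```
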
1590
1418
 
1591
1419
  // src/dmv2/sdk/stream.ts
1592
1420
  import { createHash as createHash3 } from "crypto";
1593
- function attachTypeGuard(dl, typeGuard) {
1594
- dl.asTyped = () => typeGuard(dl.originalRecord);
1595
- }
1596
- var RoutedMessage, Stream, DeadLetterQueue;
1597
- var init_stream = __esm({
1598
- "src/dmv2/sdk/stream.ts"() {
1599
- "use strict";
1600
- init_typedBase();
1601
- init_internal();
1602
- init_stackTrace();
1603
- RoutedMessage = class {
1604
- /** The destination stream for the message */
1605
- destination;
1606
- /** The message value(s) to send */
1607
- values;
1608
- /**
1609
- * Creates a new routed message.
1610
- *
1611
- * @param destination The target stream
1612
- * @param values The message(s) to route
1613
- */
1614
- constructor(destination, values) {
1615
- this.destination = destination;
1616
- this.values = values;
1617
- }
1618
- };
1619
- Stream = class extends TypedBase {
1620
- defaultDeadLetterQueue;
1621
- /** @internal Memoized KafkaJS producer for reusing connections across sends */
1622
- _memoizedProducer;
1623
- /** @internal Hash of the configuration used to create the memoized Kafka producer */
1624
- _kafkaConfigHash;
1625
- constructor(name, config, schema, columns, validators, allowExtraFields) {
1626
- super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1627
- const streams = getMooseInternal().streams;
1628
- if (streams.has(name)) {
1629
- throw new Error(`Stream with name ${name} already exists`);
1630
- }
1631
- streams.set(name, this);
1632
- this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1421
+ var RoutedMessage = class {
1422
+ /** The destination stream for the message */
1423
+ destination;
1424
+ /** The message value(s) to send */
1425
+ values;
1426
+ /**
1427
+ * Creates a new routed message.
1428
+ *
1429
+ * @param destination The target stream
1430
+ * @param values The message(s) to route
1431
+ */
1432
+ constructor(destination, values) {
1433
+ this.destination = destination;
1434
+ this.values = values;
1435
+ }
1436
+ };
1437
+ var Stream = class extends TypedBase {
1438
+ defaultDeadLetterQueue;
1439
+ /** @internal Memoized KafkaJS producer for reusing connections across sends */
1440
+ _memoizedProducer;
1441
+ /** @internal Hash of the configuration used to create the memoized Kafka producer */
1442
+ _kafkaConfigHash;
1443
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
1444
+ super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
1445
+ const streams = getMooseInternal().streams;
1446
+ if (streams.has(name)) {
1447
+ throw new Error(`Stream with name ${name} already exists`);
1448
+ }
1449
+ streams.set(name, this);
1450
+ this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
1451
+ }
1452
+ /**
1453
+ * Internal map storing transformation configurations.
1454
+ * Maps destination stream names to arrays of transformation functions and their configs.
1455
+ *
1456
+ * @internal
1457
+ */
1458
+ _transformations = /* @__PURE__ */ new Map();
1459
+ /**
1460
+ * Internal function for multi-stream transformations.
1461
+ * Allows a single transformation to route messages to multiple destinations.
1462
+ *
1463
+ * @internal
1464
+ */
1465
+ _multipleTransformations;
1466
+ /**
1467
+ * Internal array storing consumer configurations.
1468
+ *
1469
+ * @internal
1470
+ */
1471
+ _consumers = new Array();
1472
+ /**
1473
+ * Builds the full Kafka topic name including optional namespace and version suffix.
1474
+ * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
1475
+ */
1476
+ buildFullTopicName(namespace) {
1477
+ const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1478
+ const base = `${this.name}${versionSuffix}`;
1479
+ return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1480
+ }
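
Concretely, the version is folded into the topic name with dots turned into underscores, and a non-empty namespace becomes a dotted prefix. A standalone restatement of the rule (not the library API):

```typescript
function topicName(name: string, version?: string, namespace?: string): string {
  const base = version ? `${name}_${version.replace(/\./g, "_")}` : name;
  return namespace ? `${namespace}.${base}` : base;
}

topicName("pageViews", "1.2.0", "prod"); // -> "prod.pageViews_1_2_0"
topicName("clicks");                     // -> "clicks"
```
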
1481
+ /**
1482
+ * Creates a fast hash string from relevant Kafka configuration fields.
1483
+ */
1484
+ createConfigHash(kafkaConfig) {
1485
+ const configString = [
1486
+ kafkaConfig.broker,
1487
+ kafkaConfig.messageTimeoutMs,
1488
+ kafkaConfig.saslUsername,
1489
+ kafkaConfig.saslPassword,
1490
+ kafkaConfig.saslMechanism,
1491
+ kafkaConfig.securityProtocol,
1492
+ kafkaConfig.namespace
1493
+ ].join(":");
1494
+ return createHash3("sha256").update(configString).digest("hex").substring(0, 16);
1495
+ }
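
The producer is cached against this 16-hex-character digest, so changing a broker or credential transparently rebuilds the connection while identical configs reuse it. The same pattern as a generic sketch:

```typescript
import { createHash } from "crypto";

// Memoize-by-config-hash helper in the spirit of the code above.
function memoizeByConfig<C, T>(
  create: (cfg: C) => T,
  keyFields: (cfg: C) => unknown[],
): (cfg: C) => T {
  let cached: { hash: string; value: T } | undefined;
  return (cfg) => {
    const hash = createHash("sha256")
      .update(keyFields(cfg).join(":"))
      .digest("hex")
      .substring(0, 16);
    if (!cached || cached.hash !== hash) {
      cached = { hash, value: create(cfg) };
    }
    return cached.value;
  };
}
```
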
1496
+ /**
1497
+ * Gets or creates a memoized KafkaJS producer using runtime configuration.
1498
+ */
1499
+ async getMemoizedProducer() {
1500
+ await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1501
+ const configRegistry = globalThis._mooseConfigRegistry;
1502
+ const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1503
+ const kafkaConfig = await configRegistry.getKafkaConfig();
1504
+ const currentHash = this.createConfigHash(kafkaConfig);
1505
+ if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1506
+ return { producer: this._memoizedProducer, kafkaConfig };
1507
+ }
1508
+ if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1509
+ try {
1510
+ await this._memoizedProducer.disconnect();
1511
+ } catch {
1633
1512
  }
1634
- /**
1635
- * Internal map storing transformation configurations.
1636
- * Maps destination stream names to arrays of transformation functions and their configs.
1637
- *
1638
- * @internal
1639
- */
1640
- _transformations = /* @__PURE__ */ new Map();
1641
- /**
1642
- * Internal function for multi-stream transformations.
1643
- * Allows a single transformation to route messages to multiple destinations.
1644
- *
1645
- * @internal
1646
- */
1647
- _multipleTransformations;
1648
- /**
1649
- * Internal array storing consumer configurations.
1650
- *
1651
- * @internal
1652
- */
1653
- _consumers = new Array();
1654
- /**
1655
- * Builds the full Kafka topic name including optional namespace and version suffix.
1656
- * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
1657
- */
1658
- buildFullTopicName(namespace) {
1659
- const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
1660
- const base = `${this.name}${versionSuffix}`;
1661
- return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
1513
+ this._memoizedProducer = void 0;
1514
+ }
1515
+ const clientId = `moose-sdk-stream-${this.name}`;
1516
+ const logger = {
1517
+ logPrefix: clientId,
1518
+ log: (message) => {
1519
+ console.log(`${clientId}: ${message}`);
1520
+ },
1521
+ error: (message) => {
1522
+ console.error(`${clientId}: ${message}`);
1523
+ },
1524
+ warn: (message) => {
1525
+ console.warn(`${clientId}: ${message}`);
1662
1526
  }
1663
- /**
1664
- * Creates a fast hash string from relevant Kafka configuration fields.
1665
- */
1666
- createConfigHash(kafkaConfig) {
1667
- const configString = [
1668
- kafkaConfig.broker,
1669
- kafkaConfig.messageTimeoutMs,
1670
- kafkaConfig.saslUsername,
1671
- kafkaConfig.saslPassword,
1672
- kafkaConfig.saslMechanism,
1673
- kafkaConfig.securityProtocol,
1674
- kafkaConfig.namespace
1675
- ].join(":");
1676
- return createHash3("sha256").update(configString).digest("hex").substring(0, 16);
1527
+ };
1528
+ const producer = await getKafkaProducer2(
1529
+ {
1530
+ clientId,
1531
+ broker: kafkaConfig.broker,
1532
+ securityProtocol: kafkaConfig.securityProtocol,
1533
+ saslUsername: kafkaConfig.saslUsername,
1534
+ saslPassword: kafkaConfig.saslPassword,
1535
+ saslMechanism: kafkaConfig.saslMechanism
1536
+ },
1537
+ logger
1538
+ );
1539
+ this._memoizedProducer = producer;
1540
+ this._kafkaConfigHash = currentHash;
1541
+ return { producer, kafkaConfig };
1542
+ }
1543
+ /**
1544
+ * Closes the memoized Kafka producer if it exists.
1545
+ */
1546
+ async closeProducer() {
1547
+ if (this._memoizedProducer) {
1548
+ try {
1549
+ await this._memoizedProducer.disconnect();
1550
+ } catch {
1551
+ } finally {
1552
+ this._memoizedProducer = void 0;
1553
+ this._kafkaConfigHash = void 0;
1677
1554
  }
1678
- /**
1679
- * Gets or creates a memoized KafkaJS producer using runtime configuration.
1680
- */
1681
- async getMemoizedProducer() {
1682
- await Promise.resolve().then(() => (init_runtime(), runtime_exports));
1683
- const configRegistry = globalThis._mooseConfigRegistry;
1684
- const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
1685
- const kafkaConfig = await configRegistry.getKafkaConfig();
1686
- const currentHash = this.createConfigHash(kafkaConfig);
1687
- if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
1688
- return { producer: this._memoizedProducer, kafkaConfig };
1689
- }
1690
- if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
1691
- try {
1692
- await this._memoizedProducer.disconnect();
1693
- } catch {
1694
- }
1695
- this._memoizedProducer = void 0;
1696
- }
1697
- const clientId = `moose-sdk-stream-${this.name}`;
1698
- const logger = {
1699
- logPrefix: clientId,
1700
- log: (message) => {
1701
- console.log(`${clientId}: ${message}`);
1702
- },
1703
- error: (message) => {
1704
- console.error(`${clientId}: ${message}`);
1705
- },
1706
- warn: (message) => {
1707
- console.warn(`${clientId}: ${message}`);
1708
- }
1709
- };
1710
- const producer = await getKafkaProducer2(
1711
- {
1712
- clientId,
1713
- broker: kafkaConfig.broker,
1714
- securityProtocol: kafkaConfig.securityProtocol,
1715
- saslUsername: kafkaConfig.saslUsername,
1716
- saslPassword: kafkaConfig.saslPassword,
1717
- saslMechanism: kafkaConfig.saslMechanism
1718
- },
1719
- logger
1555
+ }
1556
+ }
1557
+ /**
1558
+ * Sends one or more records to this stream's Kafka topic.
1559
+ * Values are JSON-serialized as message values.
1560
+ */
1561
+ async send(values) {
1562
+ const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1563
+ if (flat.length === 0) return;
1564
+ const { producer, kafkaConfig } = await this.getMemoizedProducer();
1565
+ const topic = this.buildFullTopicName(kafkaConfig.namespace);
1566
+ const sr = this.config.schemaConfig;
1567
+ if (sr && sr.kind === "JSON") {
1568
+ const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1569
+ if (!schemaRegistryUrl) {
1570
+ throw new Error("Schema Registry URL not configured");
1571
+ }
1572
+ const {
1573
+ default: { SchemaRegistry }
1574
+ } = await import("@kafkajs/confluent-schema-registry");
1575
+ const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1576
+ let schemaId = void 0;
1577
+ if ("id" in sr.reference) {
1578
+ schemaId = sr.reference.id;
1579
+ } else if ("subjectLatest" in sr.reference) {
1580
+ schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1581
+ } else if ("subject" in sr.reference) {
1582
+ schemaId = await registry.getRegistryId(
1583
+ sr.reference.subject,
1584
+ sr.reference.version
1720
1585
  );
1721
- this._memoizedProducer = producer;
1722
- this._kafkaConfigHash = currentHash;
1723
- return { producer, kafkaConfig };
1724
1586
  }
1725
- /**
1726
- * Closes the memoized Kafka producer if it exists.
1727
- */
1728
- async closeProducer() {
1729
- if (this._memoizedProducer) {
1730
- try {
1731
- await this._memoizedProducer.disconnect();
1732
- } catch {
1733
- } finally {
1734
- this._memoizedProducer = void 0;
1735
- this._kafkaConfigHash = void 0;
1736
- }
1737
- }
1587
+ if (schemaId === void 0) {
1588
+ throw new Error("Malformed schema reference.");
1738
1589
  }
1739
- /**
1740
- * Sends one or more records to this stream's Kafka topic.
1741
- * Values are JSON-serialized as message values.
1742
- */
1743
- async send(values) {
1744
- const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
1745
- if (flat.length === 0) return;
1746
- const { producer, kafkaConfig } = await this.getMemoizedProducer();
1747
- const topic = this.buildFullTopicName(kafkaConfig.namespace);
1748
- const sr = this.config.schemaConfig;
1749
- if (sr && sr.kind === "JSON") {
1750
- const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
1751
- if (!schemaRegistryUrl) {
1752
- throw new Error("Schema Registry URL not configured");
1753
- }
1754
- const {
1755
- default: { SchemaRegistry }
1756
- } = await import("@kafkajs/confluent-schema-registry");
1757
- const registry = new SchemaRegistry({ host: schemaRegistryUrl });
1758
- let schemaId = void 0;
1759
- if ("id" in sr.reference) {
1760
- schemaId = sr.reference.id;
1761
- } else if ("subjectLatest" in sr.reference) {
1762
- schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
1763
- } else if ("subject" in sr.reference) {
1764
- schemaId = await registry.getRegistryId(
1765
- sr.reference.subject,
1766
- sr.reference.version
1767
- );
1768
- }
1769
- if (schemaId === void 0) {
1770
- throw new Error("Malformed schema reference.");
1771
- }
1772
- const encoded = await Promise.all(
1773
- flat.map(
1774
- (v) => registry.encode(schemaId, v)
1775
- )
1776
- );
1777
- await producer.send({
1778
- topic,
1779
- messages: encoded.map((value) => ({ value }))
1780
- });
1781
- return;
1782
- } else if (sr !== void 0) {
1783
- throw new Error("Currently only JSON Schema is supported.");
1784
- }
1785
- await producer.send({
1786
- topic,
1787
- messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1788
- });
1590
+ const encoded = await Promise.all(
1591
+ flat.map(
1592
+ (v) => registry.encode(schemaId, v)
1593
+ )
1594
+ );
1595
+ await producer.send({
1596
+ topic,
1597
+ messages: encoded.map((value) => ({ value }))
1598
+ });
1599
+ return;
1600
+ } else if (sr !== void 0) {
1601
+ throw new Error("Currently only JSON Schema is supported.");
1602
+ }
1603
+ await producer.send({
1604
+ topic,
1605
+ messages: flat.map((v) => ({ value: JSON.stringify(v) }))
1606
+ });
1607
+ }
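
`send` accepts a single record or an array; without a `schemaConfig`, each value is JSON-serialized, and an empty input returns before touching Kafka. A minimal sketch, assuming `Stream` is exported from the package root and the compiler plugin fills in the schema arguments:

```typescript
import { Stream } from "@514labs/moose-lib";

interface PageView { url: string; ts: number; }

const pageViews = new Stream<PageView>("pageViews");

await pageViews.send({ url: "/home", ts: Date.now() }); // single record
await pageViews.send([
  { url: "/a", ts: Date.now() },
  { url: "/b", ts: Date.now() },
]); // batch
await pageViews.send([]); // no-op: returns before connecting
```
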
1608
+ /**
1609
+ * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1610
+ * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1611
+ *
1612
+ * @template U The data type of the messages in the destination stream.
1613
+ * @param destination The destination stream for the transformed messages.
1614
+ * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1615
+ * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1616
+ * @param config Optional configuration for this specific transformation step, like a version.
1617
+ */
1618
+ addTransform(destination, transformation, config) {
1619
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1620
+ const transformConfig = {
1621
+ ...config ?? {},
1622
+ sourceFile
1623
+ };
1624
+ if (transformConfig.deadLetterQueue === void 0) {
1625
+ transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1626
+ }
1627
+ if (this._transformations.has(destination.name)) {
1628
+ const existingTransforms = this._transformations.get(destination.name);
1629
+ const hasVersion = existingTransforms.some(
1630
+ ([_, __, cfg]) => cfg.version === transformConfig.version
1631
+ );
1632
+ if (!hasVersion) {
1633
+ existingTransforms.push([destination, transformation, transformConfig]);
1789
1634
  }
1790
- /**
1791
- * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
1792
- * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
1793
- *
1794
- * @template U The data type of the messages in the destination stream.
1795
- * @param destination The destination stream for the transformed messages.
1796
- * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
1797
- * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
1798
- * @param config Optional configuration for this specific transformation step, like a version.
1799
- */
1800
- addTransform(destination, transformation, config) {
1801
- const sourceFile = getSourceFileFromStack(new Error().stack);
1802
- const transformConfig = {
1803
- ...config ?? {},
1804
- sourceFile
1805
- };
1806
- if (transformConfig.deadLetterQueue === void 0) {
1807
- transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1808
- }
1809
- if (this._transformations.has(destination.name)) {
1810
- const existingTransforms = this._transformations.get(destination.name);
1811
- const hasVersion = existingTransforms.some(
1812
- ([_, __, cfg]) => cfg.version === transformConfig.version
1813
- );
1814
- if (!hasVersion) {
1815
- existingTransforms.push([destination, transformation, transformConfig]);
1816
- }
1817
- } else {
1818
- this._transformations.set(destination.name, [
1819
- [destination, transformation, transformConfig]
1820
- ]);
1821
- }
1635
+ } else {
1636
+ this._transformations.set(destination.name, [
1637
+ [destination, transformation, transformConfig]
1638
+ ]);
1639
+ }
1640
+ }
1641
+ /**
1642
+ * Adds a consumer function that processes messages from this stream.
1643
+ * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1644
+ *
1645
+ * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1646
+ * @param config Optional configuration for this specific consumer, like a version.
1647
+ */
1648
+ addConsumer(consumer, config) {
1649
+ const sourceFile = getSourceFileFromStack(new Error().stack);
1650
+ const consumerConfig = {
1651
+ ...config ?? {},
1652
+ sourceFile
1653
+ };
1654
+ if (consumerConfig.deadLetterQueue === void 0) {
1655
+ consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1656
+ }
1657
+ const hasVersion = this._consumers.some(
1658
+ (existing) => existing.config.version === consumerConfig.version
1659
+ );
1660
+ if (!hasVersion) {
1661
+ this._consumers.push({ consumer, config: consumerConfig });
1662
+ }
1663
+ }
1664
+ /**
1665
+ * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1666
+ * @param values The value or values to send to this stream.
1667
+ * @returns A `RoutedMessage` object associating the values with this stream.
1668
+ *
1669
+ * @example
1670
+ * ```typescript
1671
+ * sourceStream.addMultiTransform((record) => [
1672
+ * destinationStream1.routed(transformedRecord1),
1673
+ * destinationStream2.routed([record2a, record2b])
1674
+ * ]);
1675
+ * ```
1676
+ */
1677
+ routed = (values) => new RoutedMessage(this, values);
1678
+ /**
1679
+ * Adds a single transformation function that can route messages to multiple destination streams.
1680
+ * This is an alternative to adding multiple individual `addTransform` calls.
1681
+ * Only one multi-transform function can be added per stream.
1682
+ *
1683
+ * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1684
+ * each specifying a destination stream and the message(s) to send to it.
1685
+ */
1686
+ addMultiTransform(transformation) {
1687
+ this._multipleTransformations = transformation;
1688
+ }
1689
+ };
1690
+ function attachTypeGuard(dl, typeGuard) {
1691
+ dl.asTyped = () => typeGuard(dl.originalRecord);
1692
+ }
1693
+ var DeadLetterQueue = class extends Stream {
1694
+ constructor(name, config, typeGuard) {
1695
+ if (typeGuard === void 0) {
1696
+ throw new Error(
1697
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
1698
+ );
1699
+ }
1700
+ super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1701
+ this.typeGuard = typeGuard;
1702
+ getMooseInternal().streams.set(name, this);
1703
+ }
1704
+ /**
1705
+ * Internal type guard function for validating and casting original records.
1706
+ *
1707
+ * @internal
1708
+ */
1709
+ typeGuard;
1710
+ /**
1711
+ * Adds a transformation step for dead letter records.
1712
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1713
+ *
1714
+ * @template U The output type for the transformation
1715
+ * @param destination The destination stream for transformed messages
1716
+ * @param transformation Function to transform dead letter records
1717
+ * @param config Optional transformation configuration
1718
+ */
1719
+ addTransform(destination, transformation, config) {
1720
+ const withValidate = (deadLetter) => {
1721
+ attachTypeGuard(deadLetter, this.typeGuard);
1722
+ return transformation(deadLetter);
1723
+ };
1724
+ super.addTransform(destination, withValidate, config);
1725
+ }
1726
+ /**
1727
+ * Adds a consumer for dead letter records.
1728
+ * The consumer function receives a DeadLetter<T> with type recovery capabilities.
1729
+ *
1730
+ * @param consumer Function to process dead letter records
1731
+ * @param config Optional consumer configuration
1732
+ */
1733
+ addConsumer(consumer, config) {
1734
+ const withValidate = (deadLetter) => {
1735
+ attachTypeGuard(deadLetter, this.typeGuard);
1736
+ return consumer(deadLetter);
1737
+ };
1738
+ super.addConsumer(withValidate, config);
1739
+ }
1740
+ /**
1741
+ * Adds a multi-stream transformation for dead letter records.
1742
+ * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1743
+ *
1744
+ * @param transformation Function to route dead letter records to multiple destinations
1745
+ */
1746
+ addMultiTransform(transformation) {
1747
+ const withValidate = (deadLetter) => {
1748
+ attachTypeGuard(deadLetter, this.typeGuard);
1749
+ return transformation(deadLetter);
1750
+ };
1751
+ super.addMultiTransform(withValidate);
1752
+ }
1753
+ };
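
A dead letter consumer receives the failed envelope and can recover the original type through `asTyped()`, which runs the compiler-injected type guard attached above. A hedged sketch (only `originalRecord`/`asTyped` are shown; other envelope fields are omitted):

```typescript
import { DeadLetterQueue } from "@514labs/moose-lib";

const pageViewDLQ = new DeadLetterQueue<PageView>("pageViewsDLQ");

pageViewDLQ.addConsumer((deadLetter) => {
  // asTyped() re-validates originalRecord against the PageView schema
  // and returns it with its original type.
  const original = deadLetter.asTyped();
  console.warn("replaying failed record", original.url);
});
```
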
1754
+
1755
+ // src/dmv2/sdk/workflow.ts
1756
+ var Task = class {
1757
+ /**
1758
+ * Creates a new Task instance.
1759
+ *
1760
+ * @param name - Unique identifier for the task
1761
+ * @param config - Configuration object defining the task behavior
1762
+ *
1763
+ * @example
1764
+ * ```typescript
1765
+ * // No input, no output
1766
+ * const task1 = new Task<null, void>("task1", {
1767
+ * run: async () => {
1768
+ * console.log("No input/output");
1769
+ * }
1770
+ * });
1771
+ *
1772
+ * // No input, but has output
1773
+ * const task2 = new Task<null, OutputType>("task2", {
1774
+ * run: async () => {
1775
+ * return someOutput;
1776
+ * }
1777
+ * });
1778
+ *
1779
+ * // Has input, no output
1780
+ * const task3 = new Task<InputType, void>("task3", {
1781
+ * run: async (input: InputType) => {
1782
+ * // process input but return nothing
1783
+ * }
1784
+ * });
1785
+ *
1786
+ * // Has both input and output
1787
+ * const task4 = new Task<InputType, OutputType>("task4", {
1788
+ * run: async (input: InputType) => {
1789
+ * return process(input);
1790
+ * }
1791
+ * });
1792
+ * ```
1793
+ */
1794
+ constructor(name, config) {
1795
+ this.name = name;
1796
+ this.config = config;
1797
+ }
1798
+ };
1799
+ var Workflow = class {
1800
+ /**
1801
+ * Creates a new Workflow instance and registers it with the Moose system.
1802
+ *
1803
+ * @param name - Unique identifier for the workflow
1804
+ * @param config - Configuration object defining the workflow behavior and task orchestration
1805
+ * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
1806
+ */
1807
+ constructor(name, config) {
1808
+ this.name = name;
1809
+ this.config = config;
1810
+ const workflows = getMooseInternal().workflows;
1811
+ if (workflows.has(name)) {
1812
+ throw new Error(`Workflow with name ${name} already exists`);
1813
+ }
1814
+ this.validateTaskGraph(config.startingTask, name);
1815
+ workflows.set(name, this);
1816
+ }
1817
+ /**
1818
+ * Validates the task graph to ensure there are no null tasks or infinite loops.
1819
+ *
1820
+ * @private
1821
+ * @param startingTask - The starting task to begin validation from
1822
+ * @param workflowName - The name of the workflow being validated (for error messages)
1823
+ * @throws {Error} When null/undefined tasks are found or infinite loops are detected
1824
+ */
1825
+ validateTaskGraph(startingTask, workflowName) {
1826
+ if (startingTask === null || startingTask === void 0) {
1827
+ throw new Error(
1828
+ `Workflow "${workflowName}" has a null or undefined starting task`
1829
+ );
1830
+ }
1831
+ const visited = /* @__PURE__ */ new Set();
1832
+ const recursionStack = /* @__PURE__ */ new Set();
1833
+ const validateTask = (task, currentPath) => {
1834
+ if (task === null || task === void 0) {
1835
+ const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
1836
+ throw new Error(
1837
+ `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
1838
+ );
1822
1839
  }
1823
- /**
1824
- * Adds a consumer function that processes messages from this stream.
1825
- * Multiple consumers can be added if they have distinct `version` identifiers in their config.
1826
- *
1827
- * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
1828
- * @param config Optional configuration for this specific consumer, like a version.
1829
- */
1830
- addConsumer(consumer, config) {
1831
- const sourceFile = getSourceFileFromStack(new Error().stack);
1832
- const consumerConfig = {
1833
- ...config ?? {},
1834
- sourceFile
1835
- };
1836
- if (consumerConfig.deadLetterQueue === void 0) {
1837
- consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
1838
- }
1839
- const hasVersion = this._consumers.some(
1840
- (existing) => existing.config.version === consumerConfig.version
1840
+ const taskName = task.name;
1841
+ if (recursionStack.has(taskName)) {
1842
+ const cycleStartIndex = currentPath.indexOf(taskName);
1843
+ const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
1844
+ throw new Error(
1845
+ `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
1841
1846
  );
1842
- if (!hasVersion) {
1843
- this._consumers.push({ consumer, config: consumerConfig });
1844
- }
1845
1847
  }
1846
- /**
1847
- * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
1848
- * @param values The value or values to send to this stream.
1849
- * @returns A `RoutedMessage` object associating the values with this stream.
1850
- *
1851
- * @example
1852
- * ```typescript
1853
- * sourceStream.addMultiTransform((record) => [
1854
- * destinationStream1.routed(transformedRecord1),
1855
- * destinationStream2.routed([record2a, record2b])
1856
- * ]);
1857
- * ```
1858
- */
1859
- routed = (values) => new RoutedMessage(this, values);
1860
- /**
1861
- * Adds a single transformation function that can route messages to multiple destination streams.
1862
- * This is an alternative to adding multiple individual `addTransform` calls.
1863
- * Only one multi-transform function can be added per stream.
1864
- *
1865
- * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
1866
- * each specifying a destination stream and the message(s) to send to it.
1867
- */
1868
- addMultiTransform(transformation) {
1869
- this._multipleTransformations = transformation;
1848
+ if (visited.has(taskName)) {
1849
+ return;
1870
1850
  }
1871
- };
1872
- DeadLetterQueue = class extends Stream {
1873
- constructor(name, config, typeGuard) {
1874
- if (typeGuard === void 0) {
1875
- throw new Error(
1876
- "Supply the type param T so that the schema is inserted by the compiler plugin."
1877
- );
1851
+ visited.add(taskName);
1852
+ recursionStack.add(taskName);
1853
+ if (task.config.onComplete) {
1854
+ for (const nextTask of task.config.onComplete) {
1855
+ validateTask(nextTask, [...currentPath, taskName]);
1878
1856
  }
1879
- super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
1880
- this.typeGuard = typeGuard;
1881
- getMooseInternal().streams.set(name, this);
1882
- }
1883
- /**
1884
- * Internal type guard function for validating and casting original records.
1885
- *
1886
- * @internal
1887
- */
1888
- typeGuard;
1889
- /**
1890
- * Adds a transformation step for dead letter records.
1891
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1892
- *
1893
- * @template U The output type for the transformation
1894
- * @param destination The destination stream for transformed messages
1895
- * @param transformation Function to transform dead letter records
1896
- * @param config Optional transformation configuration
1897
- */
1898
- addTransform(destination, transformation, config) {
1899
- const withValidate = (deadLetter) => {
1900
- attachTypeGuard(deadLetter, this.typeGuard);
1901
- return transformation(deadLetter);
1902
- };
1903
- super.addTransform(destination, withValidate, config);
1904
- }
1905
- /**
1906
- * Adds a consumer for dead letter records.
1907
- * The consumer function receives a DeadLetter<T> with type recovery capabilities.
1908
- *
1909
- * @param consumer Function to process dead letter records
1910
- * @param config Optional consumer configuration
1911
- */
1912
- addConsumer(consumer, config) {
1913
- const withValidate = (deadLetter) => {
1914
- attachTypeGuard(deadLetter, this.typeGuard);
1915
- return consumer(deadLetter);
1916
- };
1917
- super.addConsumer(withValidate, config);
1918
- }
1919
- /**
1920
- * Adds a multi-stream transformation for dead letter records.
1921
- * The transformation function receives a DeadLetter<T> with type recovery capabilities.
1922
- *
1923
- * @param transformation Function to route dead letter records to multiple destinations
1924
- */
1925
- addMultiTransform(transformation) {
1926
- const withValidate = (deadLetter) => {
1927
- attachTypeGuard(deadLetter, this.typeGuard);
1928
- return transformation(deadLetter);
1929
- };
1930
- super.addMultiTransform(withValidate);
1931
1857
  }
1858
+ recursionStack.delete(taskName);
1932
1859
  };
1860
+ validateTask(startingTask, []);
1933
1861
  }
1934
- });
1862
+ };
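
Task chaining happens through `onComplete`, and the constructor walks that graph up front to reject null tasks and cycles before registering the workflow. A small sketch, assuming `Task` and `Workflow` are exported from the package root:

```typescript
import { Task, Workflow } from "@514labs/moose-lib";

const load = new Task<string[], void>("load", {
  run: async (rows) => {
    console.log(`loaded ${rows.length} rows`);
  },
});

const extract = new Task<null, string[]>("extract", {
  run: async () => ["a", "b"],
  onComplete: [load], // the edges validateTaskGraph walks
});

// Throws here if the chain contains a cycle or a null task.
const etl = new Workflow("etl", { startingTask: extract });
```
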
1935
1863
 
1936
- // src/dmv2/sdk/workflow.ts
1937
- var Task, Workflow;
1938
- var init_workflow = __esm({
1939
- "src/dmv2/sdk/workflow.ts"() {
1940
- "use strict";
1941
- init_internal();
1942
- Task = class {
1943
- /**
1944
- * Creates a new Task instance.
1945
- *
1946
- * @param name - Unique identifier for the task
1947
- * @param config - Configuration object defining the task behavior
1948
- *
1949
- * @example
1950
- * ```typescript
1951
- * // No input, no output
1952
- * const task1 = new Task<null, void>("task1", {
1953
- * run: async () => {
1954
- * console.log("No input/output");
1955
- * }
1956
- * });
1957
- *
1958
- * // No input, but has output
1959
- * const task2 = new Task<null, OutputType>("task2", {
1960
- * run: async () => {
1961
- * return someOutput;
1962
- * }
1963
- * });
1964
- *
1965
- * // Has input, no output
1966
- * const task3 = new Task<InputType, void>("task3", {
1967
- * run: async (input: InputType) => {
1968
- * // process input but return nothing
1969
- * }
1970
- * });
1971
- *
1972
- * // Has both input and output
1973
- * const task4 = new Task<InputType, OutputType>("task4", {
1974
- * run: async (input: InputType) => {
1975
- * return process(input);
1976
- * }
1977
- * });
1978
- * ```
1979
- */
1980
- constructor(name, config) {
1981
- this.name = name;
1982
- this.config = config;
1983
- }
1864
+ // src/dmv2/sdk/ingestApi.ts
1865
+ var IngestApi = class extends TypedBase {
1866
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
1867
+ super(name, config, schema, columns, void 0, allowExtraFields);
1868
+ const ingestApis = getMooseInternal().ingestApis;
1869
+ if (ingestApis.has(name)) {
1870
+ throw new Error(`Ingest API with name ${name} already exists`);
1871
+ }
1872
+ ingestApis.set(name, this);
1873
+ }
1874
+ };
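
Like streams and workflows, ingest APIs self-register by name at construction, so a duplicate name fails immediately. A sketch of the single-argument form, on the assumption that the compiler plugin injects the schema and column arguments as the TypedBase comments describe:

```typescript
import { IngestApi } from "@514labs/moose-lib";

// Registers under "pageViews"; the PageView schema is injected at compile time.
const ingest = new IngestApi<PageView>("pageViews");

// Constructing a second IngestApi named "pageViews" would throw:
// "Ingest API with name pageViews already exists"
```
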
1875
+
1876
+ // src/dmv2/sdk/consumptionApi.ts
1877
+ var Api = class extends TypedBase {
1878
+ /** @internal The handler function that processes requests and generates responses. */
1879
+ _handler;
1880
+ /** @internal The JSON schema definition for the response type R. */
1881
+ responseSchema;
1882
+ constructor(name, handler, config, schema, columns, responseSchema) {
1883
+ super(name, config ?? {}, schema, columns);
1884
+ this._handler = handler;
1885
+ this.responseSchema = responseSchema ?? {
1886
+ version: "3.1",
1887
+ schemas: [{ type: "array", items: { type: "object" } }],
1888
+ components: { schemas: {} }
1984
1889
  };
1985
- Workflow = class {
1986
- /**
1987
- * Creates a new Workflow instance and registers it with the Moose system.
1988
- *
1989
- * @param name - Unique identifier for the workflow
1990
- * @param config - Configuration object defining the workflow behavior and task orchestration
1991
- * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
1992
- */
1993
- constructor(name, config) {
1994
- this.name = name;
1995
- this.config = config;
1996
- const workflows = getMooseInternal().workflows;
1997
- if (workflows.has(name)) {
1998
- throw new Error(`Workflow with name ${name} already exists`);
1999
- }
2000
- this.validateTaskGraph(config.startingTask, name);
2001
- workflows.set(name, this);
2002
- }
2003
- /**
2004
- * Validates the task graph to ensure there are no null tasks or infinite loops.
2005
- *
2006
- * @private
2007
- * @param startingTask - The starting task to begin validation from
2008
- * @param workflowName - The name of the workflow being validated (for error messages)
2009
- * @throws {Error} When null/undefined tasks are found or infinite loops are detected
2010
- */
2011
- validateTaskGraph(startingTask, workflowName) {
2012
- if (startingTask === null || startingTask === void 0) {
2013
- throw new Error(
2014
- `Workflow "${workflowName}" has a null or undefined starting task`
2015
- );
2016
- }
2017
- const visited = /* @__PURE__ */ new Set();
2018
- const recursionStack = /* @__PURE__ */ new Set();
2019
- const validateTask = (task, currentPath) => {
2020
- if (task === null || task === void 0) {
2021
- const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
1890
+ const apis = getMooseInternal().apis;
1891
+ const key = `${name}${config?.version ? `:${config.version}` : ""}`;
1892
+ if (apis.has(key)) {
1893
+ throw new Error(
1894
+ `Consumption API with name ${name} and version ${config?.version} already exists`
1895
+ );
1896
+ }
1897
+ apis.set(key, this);
1898
+ if (config?.path) {
1899
+ if (config.version) {
1900
+ const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
1901
+ if (pathEndsWithVersion) {
1902
+ if (apis.has(config.path)) {
1903
+ const existing = apis.get(config.path);
2022
1904
  throw new Error(
2023
- `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
1905
+ `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2024
1906
  );
2025
1907
  }
2026
- const taskName = task.name;
2027
- if (recursionStack.has(taskName)) {
2028
- const cycleStartIndex = currentPath.indexOf(taskName);
2029
- const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
1908
+ apis.set(config.path, this);
1909
+ } else {
1910
+ const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
1911
+ if (apis.has(versionedPath)) {
1912
+ const existing = apis.get(versionedPath);
2030
1913
  throw new Error(
2031
- `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
2032
- );
2033
- }
2034
- if (visited.has(taskName)) {
2035
- return;
2036
- }
2037
- visited.add(taskName);
2038
- recursionStack.add(taskName);
2039
- if (task.config.onComplete) {
2040
- for (const nextTask of task.config.onComplete) {
2041
- validateTask(nextTask, [...currentPath, taskName]);
2042
- }
2043
- }
2044
- recursionStack.delete(taskName);
2045
- };
2046
- validateTask(startingTask, []);
2047
- }
2048
- };
2049
- }
2050
- });
2051
-
2052
- // src/dmv2/sdk/ingestApi.ts
2053
- var IngestApi;
2054
- var init_ingestApi = __esm({
2055
- "src/dmv2/sdk/ingestApi.ts"() {
2056
- "use strict";
2057
- init_typedBase();
2058
- init_internal();
2059
- IngestApi = class extends TypedBase {
2060
- constructor(name, config, schema, columns, validators, allowExtraFields) {
2061
- super(name, config, schema, columns, void 0, allowExtraFields);
2062
- const ingestApis = getMooseInternal().ingestApis;
2063
- if (ingestApis.has(name)) {
2064
- throw new Error(`Ingest API with name ${name} already exists`);
1914
+ `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
1915
+ );
1916
+ }
1917
+ apis.set(versionedPath, this);
1918
+ if (!apis.has(config.path)) {
1919
+ apis.set(config.path, this);
1920
+ }
2065
1921
  }
2066
- ingestApis.set(name, this);
2067
- }
2068
- };
2069
- }
2070
- });
2071
-
2072
- // src/dmv2/sdk/consumptionApi.ts
2073
- var Api, ConsumptionApi;
2074
- var init_consumptionApi = __esm({
2075
- "src/dmv2/sdk/consumptionApi.ts"() {
2076
- "use strict";
2077
- init_typedBase();
2078
- init_internal();
2079
- Api = class extends TypedBase {
2080
- /** @internal The handler function that processes requests and generates responses. */
2081
- _handler;
2082
- /** @internal The JSON schema definition for the response type R. */
2083
- responseSchema;
2084
- constructor(name, handler, config, schema, columns, responseSchema) {
2085
- super(name, config ?? {}, schema, columns);
2086
- this._handler = handler;
2087
- this.responseSchema = responseSchema ?? {
2088
- version: "3.1",
2089
- schemas: [{ type: "array", items: { type: "object" } }],
2090
- components: { schemas: {} }
2091
- };
2092
- const apis = getMooseInternal().apis;
2093
- const key = `${name}${config?.version ? `:${config.version}` : ""}`;
2094
- if (apis.has(key)) {
1922
+ } else {
1923
+ if (apis.has(config.path)) {
1924
+ const existing = apis.get(config.path);
2095
1925
  throw new Error(
2096
- `Consumption API with name ${name} and version ${config?.version} already exists`
1926
+ `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2097
1927
  );
2098
1928
  }
2099
- apis.set(key, this);
2100
- if (config?.path) {
2101
- if (config.version) {
2102
- const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
2103
- if (pathEndsWithVersion) {
2104
- if (apis.has(config.path)) {
2105
- const existing = apis.get(config.path);
2106
- throw new Error(
2107
- `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
2108
- );
2109
- }
2110
- apis.set(config.path, this);
2111
- } else {
2112
- const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
2113
- if (apis.has(versionedPath)) {
2114
- const existing = apis.get(versionedPath);
2115
- throw new Error(
2116
- `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
2117
- );
2118
- }
2119
- apis.set(versionedPath, this);
2120
- if (!apis.has(config.path)) {
2121
- apis.set(config.path, this);
2122
- }
2123
- }
2124
- } else {
2125
- if (apis.has(config.path)) {
2126
- const existing = apis.get(config.path);
2127
- throw new Error(
2128
- `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
2129
- );
2130
- }
2131
- apis.set(config.path, this);
2132
- }
2133
- }
1929
+ apis.set(config.path, this);
2134
1930
  }
2135
- /**
2136
- * Retrieves the handler function associated with this Consumption API.
2137
- * @returns The handler function.
2138
- */
2139
- getHandler = () => {
2140
- return this._handler;
2141
- };
2142
- async call(baseUrl, queryParams) {
2143
- let path2;
2144
- if (this.config?.path) {
2145
- if (this.config.version) {
2146
- const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
2147
- if (pathEndsWithVersion) {
2148
- path2 = this.config.path;
2149
- } else {
2150
- path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2151
- }
2152
- } else {
2153
- path2 = this.config.path;
2154
- }
1931
+ }
1932
+ }
1933
+ /**
1934
+ * Retrieves the handler function associated with this Consumption API.
1935
+ * @returns The handler function.
1936
+ */
1937
+ getHandler = () => {
1938
+ return this._handler;
1939
+ };
1940
+ async call(baseUrl, queryParams) {
1941
+ let path2;
1942
+ if (this.config?.path) {
1943
+ if (this.config.version) {
1944
+ const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
1945
+ if (pathEndsWithVersion) {
1946
+ path2 = this.config.path;
2155
1947
  } else {
2156
- path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
2157
- }
2158
- const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
2159
- const searchParams = url.searchParams;
2160
- for (const [key, value] of Object.entries(queryParams)) {
2161
- if (Array.isArray(value)) {
2162
- for (const item of value) {
2163
- if (item !== null && item !== void 0) {
2164
- searchParams.append(key, String(item));
2165
- }
2166
- }
2167
- } else if (value !== null && value !== void 0) {
2168
- searchParams.append(key, String(value));
2169
- }
1948
+ path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
2170
1949
  }
2171
- const response = await fetch(url, {
2172
- method: "GET",
2173
- headers: {
2174
- Accept: "application/json"
1950
+ } else {
1951
+ path2 = this.config.path;
1952
+ }
1953
+ } else {
1954
+ path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
1955
+ }
1956
+ const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
1957
+ const searchParams = url.searchParams;
1958
+ for (const [key, value] of Object.entries(queryParams)) {
1959
+ if (Array.isArray(value)) {
1960
+ for (const item of value) {
1961
+ if (item !== null && item !== void 0) {
1962
+ searchParams.append(key, String(item));
2175
1963
  }
2176
- });
2177
- if (!response.ok) {
2178
- throw new Error(`HTTP error! status: ${response.status}`);
2179
1964
  }
2180
- const data = await response.json();
2181
- return data;
1965
+ } else if (value !== null && value !== void 0) {
1966
+ searchParams.append(key, String(value));
2182
1967
  }
2183
- };
2184
- ConsumptionApi = Api;
1968
+ }
1969
+ const response = await fetch(url, {
1970
+ method: "GET",
1971
+ headers: {
1972
+ Accept: "application/json"
1973
+ }
1974
+ });
1975
+ if (!response.ok) {
1976
+ throw new Error(`HTTP error! status: ${response.status}`);
1977
+ }
1978
+ const data = await response.json();
1979
+ return data;
2185
1980
  }
2186
- });
1981
+ };
1982
+ var ConsumptionApi = Api;
2187
1983
 
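The Api class above is stored under a `name` or `name:version` key, and a configured custom `path` is registered as an additional alias, with the version segment appended unless the path already ends in it; `call()` later rebuilds the same path before issuing a GET with the query parameters. A condensed, self-contained sketch of the resolution rule (the helper name is hypothetical; the logic is transcribed from the constructor and `call()` above, whose three-way endsWith check reduces to the two conditions below):

  function resolveApiPath(name: string, path?: string, version?: string): string {
    if (path) {
      if (version) {
        // Append the version segment unless the path already ends with it.
        const endsWithVersion = path === version || path.endsWith(`/${version}`);
        return endsWithVersion ? path : `${path.replace(/\/$/, "")}/${version}`;
      }
      return path;
    }
    return version ? `${name}/${version}` : name;
  }

  console.log(resolveApiPath("dailyStats"));                           // "dailyStats"
  console.log(resolveApiPath("dailyStats", undefined, "1.2"));         // "dailyStats/1.2"
  console.log(resolveApiPath("dailyStats", "stats/daily", "1.2"));     // "stats/daily/1.2"
  console.log(resolveApiPath("dailyStats", "stats/daily/1.2", "1.2")); // unchanged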
2188
1984
  // src/dmv2/sdk/ingestPipeline.ts
2189
- var IngestPipeline;
2190
- var init_ingestPipeline = __esm({
2191
- "src/dmv2/sdk/ingestPipeline.ts"() {
2192
- "use strict";
2193
- init_typedBase();
2194
- init_stream();
2195
- init_olapTable();
2196
- init_ingestApi();
2197
- init_helpers();
2198
- IngestPipeline = class extends TypedBase {
2199
- /**
2200
- * The OLAP table component of the pipeline, if configured.
2201
- * Provides analytical query capabilities for the ingested data.
2202
- * Only present when `config.table` is not `false`.
2203
- */
2204
- table;
2205
- /**
2206
- * The stream component of the pipeline, if configured.
2207
- * Handles real-time data flow and processing between components.
2208
- * Only present when `config.stream` is not `false`.
2209
- */
2210
- stream;
2211
- /**
2212
- * The ingest API component of the pipeline, if configured.
2213
- * Provides HTTP endpoints for data ingestion.
2214
- * Only present when `config.ingestApi` is not `false`.
2215
- */
2216
- ingestApi;
2217
- /** The dead letter queue of the pipeline, if configured. */
2218
- deadLetterQueue;
2219
- constructor(name, config, schema, columns, validators, allowExtraFields) {
2220
- super(name, config, schema, columns, validators, allowExtraFields);
2221
- if (config.ingest !== void 0) {
2222
- console.warn(
2223
- "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2224
- );
2225
- if (config.ingestApi === void 0) {
2226
- config.ingestApi = config.ingest;
2227
- }
2228
- }
2229
- if (config.table) {
2230
- const tableConfig = typeof config.table === "object" ? {
2231
- ...config.table,
2232
- lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2233
- ...config.version && { version: config.version }
2234
- } : {
2235
- lifeCycle: config.lifeCycle,
2236
- engine: "MergeTree" /* MergeTree */,
2237
- ...config.version && { version: config.version }
2238
- };
2239
- this.table = new OlapTable(
2240
- name,
2241
- tableConfig,
2242
- this.schema,
2243
- this.columnArray,
2244
- this.validators
2245
- );
2246
- }
2247
- if (config.deadLetterQueue) {
2248
- const streamConfig = {
2249
- destination: void 0,
2250
- ...typeof config.deadLetterQueue === "object" ? {
2251
- ...config.deadLetterQueue,
2252
- lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2253
- } : { lifeCycle: config.lifeCycle },
2254
- ...config.version && { version: config.version }
2255
- };
2256
- this.deadLetterQueue = new DeadLetterQueue(
2257
- `${name}DeadLetterQueue`,
2258
- streamConfig,
2259
- validators.assert
2260
- );
2261
- }
2262
- if (config.stream) {
2263
- const streamConfig = {
2264
- destination: this.table,
2265
- defaultDeadLetterQueue: this.deadLetterQueue,
2266
- ...typeof config.stream === "object" ? {
2267
- ...config.stream,
2268
- lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2269
- } : { lifeCycle: config.lifeCycle },
2270
- ...config.version && { version: config.version }
2271
- };
2272
- this.stream = new Stream(
2273
- name,
2274
- streamConfig,
2275
- this.schema,
2276
- this.columnArray,
2277
- void 0,
2278
- this.allowExtraFields
2279
- );
2280
- this.stream.pipelineParent = this;
2281
- }
2282
- const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2283
- if (effectiveIngestAPI) {
2284
- if (!this.stream) {
2285
- throw new Error("Ingest API needs a stream to write to.");
2286
- }
2287
- const ingestConfig = {
2288
- destination: this.stream,
2289
- deadLetterQueue: this.deadLetterQueue,
2290
- ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2291
- ...config.version && { version: config.version },
2292
- ...config.path && { path: config.path }
2293
- };
2294
- this.ingestApi = new IngestApi(
2295
- name,
2296
- ingestConfig,
2297
- this.schema,
2298
- this.columnArray,
2299
- void 0,
2300
- this.allowExtraFields
2301
- );
2302
- this.ingestApi.pipelineParent = this;
2303
- }
1985
+ var IngestPipeline = class extends TypedBase {
1986
+ /**
1987
+ * The OLAP table component of the pipeline, if configured.
1988
+ * Provides analytical query capabilities for the ingested data.
1989
+ * Only present when `config.table` is not `false`.
1990
+ */
1991
+ table;
1992
+ /**
1993
+ * The stream component of the pipeline, if configured.
1994
+ * Handles real-time data flow and processing between components.
1995
+ * Only present when `config.stream` is not `false`.
1996
+ */
1997
+ stream;
1998
+ /**
1999
+ * The ingest API component of the pipeline, if configured.
2000
+ * Provides HTTP endpoints for data ingestion.
2001
+ * Only present when `config.ingestApi` is not `false`.
2002
+ */
2003
+ ingestApi;
2004
+ /** The dead letter queue of the pipeline, if configured. */
2005
+ deadLetterQueue;
2006
+ constructor(name, config, schema, columns, validators, allowExtraFields) {
2007
+ super(name, config, schema, columns, validators, allowExtraFields);
2008
+ if (config.ingest !== void 0) {
2009
+ console.warn(
2010
+ "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
2011
+ );
2012
+ if (config.ingestApi === void 0) {
2013
+ config.ingestApi = config.ingest;
2304
2014
  }
2305
- };
2015
+ }
2016
+ if (config.table) {
2017
+ const tableConfig = typeof config.table === "object" ? {
2018
+ ...config.table,
2019
+ lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
2020
+ ...config.version && { version: config.version }
2021
+ } : {
2022
+ lifeCycle: config.lifeCycle,
2023
+ engine: "MergeTree" /* MergeTree */,
2024
+ ...config.version && { version: config.version }
2025
+ };
2026
+ this.table = new OlapTable(
2027
+ name,
2028
+ tableConfig,
2029
+ this.schema,
2030
+ this.columnArray,
2031
+ this.validators
2032
+ );
2033
+ }
2034
+ if (config.deadLetterQueue) {
2035
+ const streamConfig = {
2036
+ destination: void 0,
2037
+ ...typeof config.deadLetterQueue === "object" ? {
2038
+ ...config.deadLetterQueue,
2039
+ lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
2040
+ } : { lifeCycle: config.lifeCycle },
2041
+ ...config.version && { version: config.version }
2042
+ };
2043
+ this.deadLetterQueue = new DeadLetterQueue(
2044
+ `${name}DeadLetterQueue`,
2045
+ streamConfig,
2046
+ validators.assert
2047
+ );
2048
+ }
2049
+ if (config.stream) {
2050
+ const streamConfig = {
2051
+ destination: this.table,
2052
+ defaultDeadLetterQueue: this.deadLetterQueue,
2053
+ ...typeof config.stream === "object" ? {
2054
+ ...config.stream,
2055
+ lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
2056
+ } : { lifeCycle: config.lifeCycle },
2057
+ ...config.version && { version: config.version }
2058
+ };
2059
+ this.stream = new Stream(
2060
+ name,
2061
+ streamConfig,
2062
+ this.schema,
2063
+ this.columnArray,
2064
+ void 0,
2065
+ this.allowExtraFields
2066
+ );
2067
+ this.stream.pipelineParent = this;
2068
+ }
2069
+ const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
2070
+ if (effectiveIngestAPI) {
2071
+ if (!this.stream) {
2072
+ throw new Error("Ingest API needs a stream to write to.");
2073
+ }
2074
+ const ingestConfig = {
2075
+ destination: this.stream,
2076
+ deadLetterQueue: this.deadLetterQueue,
2077
+ ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
2078
+ ...config.version && { version: config.version },
2079
+ ...config.path && { path: config.path }
2080
+ };
2081
+ this.ingestApi = new IngestApi(
2082
+ name,
2083
+ ingestConfig,
2084
+ this.schema,
2085
+ this.columnArray,
2086
+ void 0,
2087
+ this.allowExtraFields
2088
+ );
2089
+ this.ingestApi.pipelineParent = this;
2090
+ }
2306
2091
  }
2307
- });
2092
+ };
2308
2093
 
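IngestPipeline, now a plain top-level class rather than a lazily initialized binding, fans a single name out into an OlapTable, a Stream, an IngestApi, and optionally a DeadLetterQueue named `<name>DeadLetterQueue`, forwarding `lifeCycle` and `version` into each component config and warning when the deprecated `ingest` flag is used instead of `ingestApi`. A hypothetical application-side sketch (import path and record shape assumed; the schema and column arguments seen in the bundled constructor are injected by the compiler plugin):

  import { IngestPipeline } from "@514labs/moose-lib"; // import path assumed

  interface PageView {
    id: string;
    url: string;
    viewedAt: Date;
  }

  const pageViews = new IngestPipeline<PageView>("pageViews", {
    table: true,           // OlapTable "pageViews", MergeTree engine by default
    stream: true,          // Stream "pageViews" with the table as destination
    ingestApi: true,       // IngestApi "pageViews" writing into the stream
    deadLetterQueue: true, // DeadLetterQueue "pageViewsDeadLetterQueue"
    version: "1.0",        // propagated into each component's config
  });

Note that enabling `ingestApi` without `stream` throws, since the ingest endpoint needs a stream to write to.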
2309
2094
  // src/dmv2/sdk/etlPipeline.ts
2310
- var InternalBatcher, ETLPipeline;
2311
- var init_etlPipeline = __esm({
2312
- "src/dmv2/sdk/etlPipeline.ts"() {
2313
- "use strict";
2314
- init_workflow();
2315
- InternalBatcher = class {
2316
- iterator;
2317
- batchSize;
2318
- constructor(asyncIterable, batchSize = 20) {
2319
- this.iterator = asyncIterable[Symbol.asyncIterator]();
2320
- this.batchSize = batchSize;
2321
- }
2322
- async getNextBatch() {
2323
- const items = [];
2324
- for (let i = 0; i < this.batchSize; i++) {
2325
- const { value, done } = await this.iterator.next();
2326
- if (done) {
2327
- return { items, hasMore: false };
2328
- }
2329
- items.push(value);
2330
- }
2331
- return { items, hasMore: true };
2332
- }
2095
+ var InternalBatcher = class {
2096
+ iterator;
2097
+ batchSize;
2098
+ constructor(asyncIterable, batchSize = 20) {
2099
+ this.iterator = asyncIterable[Symbol.asyncIterator]();
2100
+ this.batchSize = batchSize;
2101
+ }
2102
+ async getNextBatch() {
2103
+ const items = [];
2104
+ for (let i = 0; i < this.batchSize; i++) {
2105
+ const { value, done } = await this.iterator.next();
2106
+ if (done) {
2107
+ return { items, hasMore: false };
2108
+ }
2109
+ items.push(value);
2110
+ }
2111
+ return { items, hasMore: true };
2112
+ }
2113
+ };
2114
+ var ETLPipeline = class {
2115
+ constructor(name, config) {
2116
+ this.name = name;
2117
+ this.config = config;
2118
+ this.setupPipeline();
2119
+ }
2120
+ batcher;
2121
+ setupPipeline() {
2122
+ this.batcher = this.createBatcher();
2123
+ const tasks = this.createAllTasks();
2124
+ tasks.extract.config.onComplete = [tasks.transform];
2125
+ tasks.transform.config.onComplete = [tasks.load];
2126
+ new Workflow(this.name, {
2127
+ startingTask: tasks.extract,
2128
+ retries: 1,
2129
+ timeout: "30m"
2130
+ });
2131
+ }
2132
+ createBatcher() {
2133
+ const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2134
+ return new InternalBatcher(iterable);
2135
+ }
2136
+ getDefaultTaskConfig() {
2137
+ return {
2138
+ retries: 1,
2139
+ timeout: "30m"
2333
2140
  };
2334
- ETLPipeline = class {
2335
- constructor(name, config) {
2336
- this.name = name;
2337
- this.config = config;
2338
- this.setupPipeline();
2339
- }
2340
- batcher;
2341
- setupPipeline() {
2342
- this.batcher = this.createBatcher();
2343
- const tasks = this.createAllTasks();
2344
- tasks.extract.config.onComplete = [tasks.transform];
2345
- tasks.transform.config.onComplete = [tasks.load];
2346
- new Workflow(this.name, {
2347
- startingTask: tasks.extract,
2348
- retries: 1,
2349
- timeout: "30m"
2350
- });
2351
- }
2352
- createBatcher() {
2353
- const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
2354
- return new InternalBatcher(iterable);
2355
- }
2356
- getDefaultTaskConfig() {
2357
- return {
2358
- retries: 1,
2359
- timeout: "30m"
2360
- };
2361
- }
2362
- createAllTasks() {
2363
- const taskConfig = this.getDefaultTaskConfig();
2364
- return {
2365
- extract: this.createExtractTask(taskConfig),
2366
- transform: this.createTransformTask(taskConfig),
2367
- load: this.createLoadTask(taskConfig)
2368
- };
2369
- }
2370
- createExtractTask(taskConfig) {
2371
- return new Task(`${this.name}_extract`, {
2372
- run: async ({}) => {
2373
- console.log(`Running extract task for ${this.name}...`);
2374
- const batch = await this.batcher.getNextBatch();
2375
- console.log(`Extract task completed with ${batch.items.length} items`);
2376
- return batch;
2377
- },
2378
- retries: taskConfig.retries,
2379
- timeout: taskConfig.timeout
2380
- });
2381
- }
2382
- createTransformTask(taskConfig) {
2383
- return new Task(
2384
- `${this.name}_transform`,
2385
- {
2386
- // Use new single-parameter context API for handlers
2387
- run: async ({ input }) => {
2388
- const batch = input;
2389
- console.log(
2390
- `Running transform task for ${this.name} with ${batch.items.length} items...`
2391
- );
2392
- const transformedItems = [];
2393
- for (const item of batch.items) {
2394
- const transformed = await this.config.transform(item);
2395
- transformedItems.push(transformed);
2396
- }
2397
- console.log(
2398
- `Transform task completed with ${transformedItems.length} items`
2399
- );
2400
- return { items: transformedItems };
2401
- },
2402
- retries: taskConfig.retries,
2403
- timeout: taskConfig.timeout
2404
- }
2405
- );
2406
- }
2407
- createLoadTask(taskConfig) {
2408
- return new Task(`${this.name}_load`, {
2409
- run: async ({ input: transformedItems }) => {
2410
- console.log(
2411
- `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2412
- );
2413
- if ("insert" in this.config.load) {
2414
- await this.config.load.insert(transformedItems.items);
2415
- } else {
2416
- await this.config.load(transformedItems.items);
2417
- }
2418
- console.log(`Load task completed`);
2419
- },
2420
- retries: taskConfig.retries,
2421
- timeout: taskConfig.timeout
2422
- });
2423
- }
2424
- // Execute the entire ETL pipeline
2425
- async run() {
2426
- console.log(`Starting ETL Pipeline: ${this.name}`);
2427
- let batchNumber = 1;
2428
- do {
2429
- console.log(`Processing batch ${batchNumber}...`);
2430
- const batch = await this.batcher.getNextBatch();
2431
- if (batch.items.length === 0) {
2432
- break;
2433
- }
2141
+ }
2142
+ createAllTasks() {
2143
+ const taskConfig = this.getDefaultTaskConfig();
2144
+ return {
2145
+ extract: this.createExtractTask(taskConfig),
2146
+ transform: this.createTransformTask(taskConfig),
2147
+ load: this.createLoadTask(taskConfig)
2148
+ };
2149
+ }
2150
+ createExtractTask(taskConfig) {
2151
+ return new Task(`${this.name}_extract`, {
2152
+ run: async ({}) => {
2153
+ console.log(`Running extract task for ${this.name}...`);
2154
+ const batch = await this.batcher.getNextBatch();
2155
+ console.log(`Extract task completed with ${batch.items.length} items`);
2156
+ return batch;
2157
+ },
2158
+ retries: taskConfig.retries,
2159
+ timeout: taskConfig.timeout
2160
+ });
2161
+ }
2162
+ createTransformTask(taskConfig) {
2163
+ return new Task(
2164
+ `${this.name}_transform`,
2165
+ {
2166
+ // Use new single-parameter context API for handlers
2167
+ run: async ({ input }) => {
2168
+ const batch = input;
2169
+ console.log(
2170
+ `Running transform task for ${this.name} with ${batch.items.length} items...`
2171
+ );
2434
2172
  const transformedItems = [];
2435
- for (const extractedData of batch.items) {
2436
- const transformedData = await this.config.transform(extractedData);
2437
- transformedItems.push(transformedData);
2438
- }
2439
- if ("insert" in this.config.load) {
2440
- await this.config.load.insert(transformedItems);
2441
- } else {
2442
- await this.config.load(transformedItems);
2173
+ for (const item of batch.items) {
2174
+ const transformed = await this.config.transform(item);
2175
+ transformedItems.push(transformed);
2443
2176
  }
2444
2177
  console.log(
2445
- `Completed batch ${batchNumber} with ${batch.items.length} items`
2178
+ `Transform task completed with ${transformedItems.length} items`
2446
2179
  );
2447
- batchNumber++;
2448
- if (!batch.hasMore) {
2449
- break;
2450
- }
2451
- } while (true);
2452
- console.log(`Completed ETL Pipeline: ${this.name}`);
2180
+ return { items: transformedItems };
2181
+ },
2182
+ retries: taskConfig.retries,
2183
+ timeout: taskConfig.timeout
2453
2184
  }
2454
- };
2185
+ );
2455
2186
  }
2456
- });
2457
-
2458
- // src/dmv2/sdk/sqlResource.ts
2459
- var SqlResource;
2460
- var init_sqlResource = __esm({
2461
- "src/dmv2/sdk/sqlResource.ts"() {
2462
- "use strict";
2463
- init_internal();
2464
- init_sqlHelpers();
2465
- init_stackTrace();
2466
- SqlResource = class {
2467
- /** @internal */
2468
- kind = "SqlResource";
2469
- /** Array of SQL statements to execute for setting up the resource. */
2470
- setup;
2471
- /** Array of SQL statements to execute for tearing down the resource. */
2472
- teardown;
2473
- /** The name of the SQL resource (e.g., view name, materialized view name). */
2474
- name;
2475
- /** List of OlapTables or Views that this resource reads data from. */
2476
- pullsDataFrom;
2477
- /** List of OlapTables or Views that this resource writes data to. */
2478
- pushesDataTo;
2479
- /** @internal Source file path where this resource was defined */
2480
- sourceFile;
2481
- /** @internal Source line number where this resource was defined */
2482
- sourceLine;
2483
- /** @internal Source column number where this resource was defined */
2484
- sourceColumn;
2485
- /**
2486
- * Creates a new SqlResource instance.
2487
- * @param name The name of the resource.
2488
- * @param setup An array of SQL DDL statements to create the resource.
2489
- * @param teardown An array of SQL DDL statements to drop the resource.
2490
- * @param options Optional configuration for specifying data dependencies.
2491
- * @param options.pullsDataFrom Tables/Views this resource reads from.
2492
- * @param options.pushesDataTo Tables/Views this resource writes to.
2493
- */
2494
- constructor(name, setup, teardown, options) {
2495
- const sqlResources = getMooseInternal().sqlResources;
2496
- if (!isClientOnlyMode() && sqlResources.has(name)) {
2497
- throw new Error(`SqlResource with name ${name} already exists`);
2498
- }
2499
- sqlResources.set(name, this);
2500
- this.name = name;
2501
- this.setup = setup.map(
2502
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2187
+ createLoadTask(taskConfig) {
2188
+ return new Task(`${this.name}_load`, {
2189
+ run: async ({ input: transformedItems }) => {
2190
+ console.log(
2191
+ `Running load task for ${this.name} with ${transformedItems.items.length} items...`
2503
2192
  );
2504
- this.teardown = teardown.map(
2505
- (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2506
- );
2507
- this.pullsDataFrom = options?.pullsDataFrom ?? [];
2508
- this.pushesDataTo = options?.pushesDataTo ?? [];
2509
- const stack = new Error().stack;
2510
- const location = getSourceLocationFromStack(stack);
2511
- if (location) {
2512
- this.sourceFile = location.file;
2513
- this.sourceLine = location.line;
2514
- this.sourceColumn = location.column;
2193
+ if ("insert" in this.config.load) {
2194
+ await this.config.load.insert(transformedItems.items);
2195
+ } else {
2196
+ await this.config.load(transformedItems.items);
2515
2197
  }
2516
- }
2517
- };
2198
+ console.log(`Load task completed`);
2199
+ },
2200
+ retries: taskConfig.retries,
2201
+ timeout: taskConfig.timeout
2202
+ });
2518
2203
  }
2519
- });
2520
-
2521
- // src/dmv2/sdk/materializedView.ts
2522
- var requireTargetTableName, MaterializedView;
2523
- var init_materializedView = __esm({
2524
- "src/dmv2/sdk/materializedView.ts"() {
2525
- "use strict";
2526
- init_helpers();
2527
- init_sqlHelpers();
2528
- init_olapTable();
2529
- init_sqlResource();
2530
- requireTargetTableName = (tableName) => {
2531
- if (typeof tableName === "string") {
2532
- return tableName;
2204
+ // Execute the entire ETL pipeline
2205
+ async run() {
2206
+ console.log(`Starting ETL Pipeline: ${this.name}`);
2207
+ let batchNumber = 1;
2208
+ do {
2209
+ console.log(`Processing batch ${batchNumber}...`);
2210
+ const batch = await this.batcher.getNextBatch();
2211
+ if (batch.items.length === 0) {
2212
+ break;
2213
+ }
2214
+ const transformedItems = [];
2215
+ for (const extractedData of batch.items) {
2216
+ const transformedData = await this.config.transform(extractedData);
2217
+ transformedItems.push(transformedData);
2218
+ }
2219
+ if ("insert" in this.config.load) {
2220
+ await this.config.load.insert(transformedItems);
2533
2221
  } else {
2534
- throw new Error("Name of targetTable is not specified.");
2222
+ await this.config.load(transformedItems);
2535
2223
  }
2536
- };
2537
- MaterializedView = class extends SqlResource {
2538
- /** The target OlapTable instance where the materialized data is stored. */
2539
- targetTable;
2540
- constructor(options, targetSchema, targetColumns) {
2541
- let selectStatement = options.selectStatement;
2542
- if (typeof selectStatement !== "string") {
2543
- selectStatement = toStaticQuery(selectStatement);
2544
- }
2545
- if (targetSchema === void 0 || targetColumns === void 0) {
2546
- throw new Error(
2547
- "Supply the type param T so that the schema is inserted by the compiler plugin."
2548
- );
2549
- }
2550
- const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2551
- requireTargetTableName(
2552
- options.targetTable?.name ?? options.tableName
2553
- ),
2554
- {
2555
- orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2556
- engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2557
- },
2558
- targetSchema,
2559
- targetColumns
2560
- );
2561
- if (targetTable.name === options.materializedViewName) {
2562
- throw new Error(
2563
- "Materialized view name cannot be the same as the target table name."
2564
- );
2565
- }
2566
- super(
2567
- options.materializedViewName,
2568
- [
2569
- createMaterializedView({
2570
- name: options.materializedViewName,
2571
- destinationTable: targetTable.name,
2572
- select: selectStatement
2573
- })
2574
- // Population is now handled automatically by Rust infrastructure
2575
- // based on table engine type and whether this is a new or updated view
2576
- ],
2577
- [dropView(options.materializedViewName)],
2578
- {
2579
- pullsDataFrom: options.selectTables,
2580
- pushesDataTo: [targetTable]
2581
- }
2582
- );
2583
- this.targetTable = targetTable;
2224
+ console.log(
2225
+ `Completed batch ${batchNumber} with ${batch.items.length} items`
2226
+ );
2227
+ batchNumber++;
2228
+ if (!batch.hasMore) {
2229
+ break;
2584
2230
  }
2585
- };
2231
+ } while (true);
2232
+ console.log(`Completed ETL Pipeline: ${this.name}`);
2586
2233
  }
2587
- });
2234
+ };
2235
+
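ETLPipeline wires three Tasks (extract, transform, load) into a Workflow via `onComplete` chaining, and its direct `run()` drains the extract source through InternalBatcher in fixed-size batches (20 by default) until `hasMore` goes false. The drain loop, reduced to a self-contained sketch over a plain async generator (names are hypothetical):

  async function* extractRows(): AsyncGenerator<number> {
    for (let i = 0; i < 45; i++) yield i;
  }

  async function drain(batchSize = 20): Promise<void> {
    const iterator = extractRows()[Symbol.asyncIterator]();
    let batchNumber = 1;
    while (true) {
      const items: number[] = [];
      let hasMore = true;
      for (let i = 0; i < batchSize; i++) {
        const { value, done } = await iterator.next();
        if (done) { hasMore = false; break; }
        items.push(value);
      }
      if (items.length === 0) break; // nothing left to process
      console.log(`batch ${batchNumber++}: ${items.length} items`);
      if (!hasMore) break;           // source exhausted mid-batch
    }
  }

  drain().catch(console.error); // batch 1: 20, batch 2: 20, batch 3: 5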
2236
+ // src/dmv2/sdk/materializedView.ts
2237
+ var requireTargetTableName = (tableName) => {
2238
+ if (typeof tableName === "string") {
2239
+ return tableName;
2240
+ } else {
2241
+ throw new Error("Name of targetTable is not specified.");
2242
+ }
2243
+ };
2244
+ var MaterializedView = class {
2245
+ /** @internal */
2246
+ kind = "MaterializedView";
2247
+ /** The name of the materialized view */
2248
+ name;
2249
+ /** The target OlapTable instance where the materialized data is stored. */
2250
+ targetTable;
2251
+ /** The SELECT SQL statement */
2252
+ selectSql;
2253
+ /** Names of source tables that the SELECT reads from */
2254
+ sourceTables;
2255
+ /** @internal Source file path where this MV was defined */
2256
+ sourceFile;
2257
+ constructor(options, targetSchema, targetColumns) {
2258
+ let selectStatement = options.selectStatement;
2259
+ if (typeof selectStatement !== "string") {
2260
+ selectStatement = toStaticQuery(selectStatement);
2261
+ }
2262
+ if (targetSchema === void 0 || targetColumns === void 0) {
2263
+ throw new Error(
2264
+ "Supply the type param T so that the schema is inserted by the compiler plugin."
2265
+ );
2266
+ }
2267
+ const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
2268
+ requireTargetTableName(
2269
+ options.targetTable?.name ?? options.tableName
2270
+ ),
2271
+ {
2272
+ orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
2273
+ engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
2274
+ },
2275
+ targetSchema,
2276
+ targetColumns
2277
+ );
2278
+ if (targetTable.name === options.materializedViewName) {
2279
+ throw new Error(
2280
+ "Materialized view name cannot be the same as the target table name."
2281
+ );
2282
+ }
2283
+ this.name = options.materializedViewName;
2284
+ this.targetTable = targetTable;
2285
+ this.selectSql = selectStatement;
2286
+ this.sourceTables = options.selectTables.map((t) => t.name);
2287
+ const stack = new Error().stack;
2288
+ this.sourceFile = getSourceFileFromStack(stack);
2289
+ const materializedViews = getMooseInternal().materializedViews;
2290
+ if (!isClientOnlyMode() && materializedViews.has(this.name)) {
2291
+ throw new Error(`MaterializedView with name ${this.name} already exists`);
2292
+ }
2293
+ materializedViews.set(this.name, this);
2294
+ }
2295
+ };
2296
+
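This hunk carries the substantive change in the file: MaterializedView no longer extends SqlResource and no longer emits CREATE/DROP DDL itself. It now exposes `kind`, `selectSql`, and `sourceTables`, registers in the dedicated `materializedViews` map (with the duplicate check skipped in client-only mode), and leaves DDL generation and population to the Rust infrastructure, as the removed inline comment already noted. A hypothetical usage sketch (names illustrative; the target schema and columns are injected by the compiler plugin via the type parameter):

  interface DailyViews {
    day: Date;
    views: number;
  }

  const dailyViews = new MaterializedView<DailyViews>({
    materializedViewName: "pageViews_daily_mv", // must differ from the target table name
    selectStatement:
      "SELECT toDate(viewedAt) AS day, count() AS views FROM pageViews GROUP BY day",
    selectTables: [pageViews.table!], // recorded by name in sourceTables
    tableName: "pageViews_daily",     // implicit target OlapTable
    orderByFields: ["day"],
  });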
2297
+ // src/dmv2/sdk/sqlResource.ts
2298
+ var SqlResource = class {
2299
+ /** @internal */
2300
+ kind = "SqlResource";
2301
+ /** Array of SQL statements to execute for setting up the resource. */
2302
+ setup;
2303
+ /** Array of SQL statements to execute for tearing down the resource. */
2304
+ teardown;
2305
+ /** The name of the SQL resource (e.g., view name, materialized view name). */
2306
+ name;
2307
+ /** List of OlapTables or Views that this resource reads data from. */
2308
+ pullsDataFrom;
2309
+ /** List of OlapTables or Views that this resource writes data to. */
2310
+ pushesDataTo;
2311
+ /** @internal Source file path where this resource was defined */
2312
+ sourceFile;
2313
+ /** @internal Source line number where this resource was defined */
2314
+ sourceLine;
2315
+ /** @internal Source column number where this resource was defined */
2316
+ sourceColumn;
2317
+ /**
2318
+ * Creates a new SqlResource instance.
2319
+ * @param name The name of the resource.
2320
+ * @param setup An array of SQL DDL statements to create the resource.
2321
+ * @param teardown An array of SQL DDL statements to drop the resource.
2322
+ * @param options Optional configuration for specifying data dependencies.
2323
+ * @param options.pullsDataFrom Tables/Views this resource reads from.
2324
+ * @param options.pushesDataTo Tables/Views this resource writes to.
2325
+ */
2326
+ constructor(name, setup, teardown, options) {
2327
+ const sqlResources = getMooseInternal().sqlResources;
2328
+ if (!isClientOnlyMode() && sqlResources.has(name)) {
2329
+ throw new Error(`SqlResource with name ${name} already exists`);
2330
+ }
2331
+ sqlResources.set(name, this);
2332
+ this.name = name;
2333
+ this.setup = setup.map(
2334
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2335
+ );
2336
+ this.teardown = teardown.map(
2337
+ (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
2338
+ );
2339
+ this.pullsDataFrom = options?.pullsDataFrom ?? [];
2340
+ this.pushesDataTo = options?.pushesDataTo ?? [];
2341
+ const stack = new Error().stack;
2342
+ const location = getSourceLocationFromStack(stack);
2343
+ if (location) {
2344
+ this.sourceFile = location.file;
2345
+ this.sourceLine = location.line;
2346
+ this.sourceColumn = location.column;
2347
+ }
2348
+ }
2349
+ };
2588
2350
 
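SqlResource itself keeps its prior shape: paired `setup`/`teardown` statement arrays (plain strings or static queries), `pullsDataFrom`/`pushesDataTo` dependency lists, and a source-location capture taken from `new Error().stack` at construction time. A self-contained sketch of that stack-capture idiom (helper name and regex are illustrative):

  function captureSourceLocation(): { file: string; line: number } | undefined {
    const stack = new Error().stack;
    if (!stack) return undefined;
    // Skip the "Error" header line and this helper's own frame.
    for (const frame of stack.split("\n").slice(2)) {
      const match = frame.match(/\(?([^()\s]+):(\d+):(\d+)\)?$/);
      if (match) return { file: match[1], line: parseInt(match[2], 10) };
    }
    return undefined;
  }

  console.log(captureSourceLocation()); // e.g. { file: "/app/index.ts", line: 3 }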
2589
2351
  // src/dmv2/sdk/view.ts
2590
- var View;
2591
- var init_view = __esm({
2592
- "src/dmv2/sdk/view.ts"() {
2593
- "use strict";
2594
- init_helpers();
2595
- init_sqlHelpers();
2596
- init_sqlResource();
2597
- View = class extends SqlResource {
2598
- /**
2599
- * Creates a new View instance.
2600
- * @param name The name of the view to be created.
2601
- * @param selectStatement The SQL SELECT statement that defines the view's logic.
2602
- * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2603
- */
2604
- constructor(name, selectStatement, baseTables) {
2605
- if (typeof selectStatement !== "string") {
2606
- selectStatement = toStaticQuery(selectStatement);
2607
- }
2608
- super(
2609
- name,
2610
- [
2611
- `CREATE VIEW IF NOT EXISTS ${name}
2612
- AS ${selectStatement}`.trim()
2613
- ],
2614
- [dropView(name)],
2615
- {
2616
- pullsDataFrom: baseTables
2617
- }
2618
- );
2619
- }
2620
- };
2352
+ var View = class {
2353
+ /** @internal */
2354
+ kind = "CustomView";
2355
+ /** The name of the view */
2356
+ name;
2357
+ /** The SELECT SQL statement that defines the view */
2358
+ selectSql;
2359
+ /** Names of source tables/views that the SELECT reads from */
2360
+ sourceTables;
2361
+ /** @internal Source file path where this view was defined */
2362
+ sourceFile;
2363
+ /**
2364
+ * Creates a new View instance.
2365
+ * @param name The name of the view to be created.
2366
+ * @param selectStatement The SQL SELECT statement that defines the view's logic.
2367
+ * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
2368
+ */
2369
+ constructor(name, selectStatement, baseTables) {
2370
+ if (typeof selectStatement !== "string") {
2371
+ selectStatement = toStaticQuery(selectStatement);
2372
+ }
2373
+ this.name = name;
2374
+ this.selectSql = selectStatement;
2375
+ this.sourceTables = baseTables.map((t) => t.name);
2376
+ const stack = new Error().stack;
2377
+ this.sourceFile = getSourceFileFromStack(stack);
2378
+ const customViews = getMooseInternal().customViews;
2379
+ if (!isClientOnlyMode() && customViews.has(this.name)) {
2380
+ throw new Error(`View with name ${this.name} already exists`);
2381
+ }
2382
+ customViews.set(this.name, this);
2621
2383
  }
2622
- });
2384
+ };
2623
2385
 
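View gets the same treatment as MaterializedView: rather than extending SqlResource with a CREATE VIEW statement, it now records `kind: "CustomView"`, the SELECT SQL, and the base-table names, and registers in the new `customViews` map. A hypothetical sketch, resolved through the registry accessors added further down in this diff:

  // View name, SELECT, and base table are illustrative.
  const recentViews = new View(
    "recentPageViews",
    "SELECT * FROM pageViews WHERE viewedAt > now() - INTERVAL 1 DAY",
    [pageViews.table!], // stored by name in sourceTables
  );

  const found = getCustomView("recentPageViews"); // === recentViews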
2624
2386
  // src/dmv2/sdk/lifeCycle.ts
2625
- var LifeCycle;
2626
- var init_lifeCycle = __esm({
2627
- "src/dmv2/sdk/lifeCycle.ts"() {
2628
- "use strict";
2629
- LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2630
- LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2631
- LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2632
- LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2633
- return LifeCycle2;
2634
- })(LifeCycle || {});
2635
- }
2636
- });
2387
+ var LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
2388
+ LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
2389
+ LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
2390
+ LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
2391
+ return LifeCycle2;
2392
+ })(LifeCycle || {});
2637
2393
 
2638
2394
  // src/dmv2/sdk/webApp.ts
2639
- var RESERVED_MOUNT_PATHS, WebApp;
2640
- var init_webApp = __esm({
2641
- "src/dmv2/sdk/webApp.ts"() {
2642
- "use strict";
2643
- init_internal();
2644
- RESERVED_MOUNT_PATHS = [
2645
- "/admin",
2646
- "/api",
2647
- "/consumption",
2648
- "/health",
2649
- "/ingest",
2650
- "/moose",
2651
- // reserved for future use
2652
- "/ready",
2653
- "/workflows"
2654
- ];
2655
- WebApp = class {
2656
- name;
2657
- handler;
2658
- config;
2659
- _rawApp;
2660
- constructor(name, appOrHandler, config) {
2661
- this.name = name;
2662
- this.config = config;
2663
- if (!this.config.mountPath) {
2664
- throw new Error(
2665
- `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2666
- );
2667
- }
2668
- const mountPath = this.config.mountPath;
2669
- if (mountPath === "/") {
2670
- throw new Error(
2671
- `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2672
- );
2673
- }
2674
- if (mountPath.endsWith("/")) {
2395
+ var RESERVED_MOUNT_PATHS = [
2396
+ "/admin",
2397
+ "/api",
2398
+ "/consumption",
2399
+ "/health",
2400
+ "/ingest",
2401
+ "/moose",
2402
+ // reserved for future use
2403
+ "/ready",
2404
+ "/workflows"
2405
+ ];
2406
+ var WebApp = class {
2407
+ name;
2408
+ handler;
2409
+ config;
2410
+ _rawApp;
2411
+ constructor(name, appOrHandler, config) {
2412
+ this.name = name;
2413
+ this.config = config;
2414
+ if (!this.config.mountPath) {
2415
+ throw new Error(
2416
+ `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
2417
+ );
2418
+ }
2419
+ const mountPath = this.config.mountPath;
2420
+ if (mountPath === "/") {
2421
+ throw new Error(
2422
+ `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
2423
+ );
2424
+ }
2425
+ if (mountPath.endsWith("/")) {
2426
+ throw new Error(
2427
+ `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2428
+ );
2429
+ }
2430
+ for (const reserved of RESERVED_MOUNT_PATHS) {
2431
+ if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2432
+ throw new Error(
2433
+ `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2434
+ );
2435
+ }
2436
+ }
2437
+ this.handler = this.toHandler(appOrHandler);
2438
+ this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2439
+ const webApps = getMooseInternal().webApps;
2440
+ if (webApps.has(name)) {
2441
+ throw new Error(`WebApp with name ${name} already exists`);
2442
+ }
2443
+ if (this.config.mountPath) {
2444
+ for (const [existingName, existingApp] of webApps) {
2445
+ if (existingApp.config.mountPath === this.config.mountPath) {
2675
2446
  throw new Error(
2676
- `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
2447
+ `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2677
2448
  );
2678
2449
  }
2679
- for (const reserved of RESERVED_MOUNT_PATHS) {
2680
- if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
2681
- throw new Error(
2682
- `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
2683
- );
2684
- }
2685
- }
2686
- this.handler = this.toHandler(appOrHandler);
2687
- this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
2688
- const webApps = getMooseInternal().webApps;
2689
- if (webApps.has(name)) {
2690
- throw new Error(`WebApp with name ${name} already exists`);
2691
- }
2692
- if (this.config.mountPath) {
2693
- for (const [existingName, existingApp] of webApps) {
2694
- if (existingApp.config.mountPath === this.config.mountPath) {
2695
- throw new Error(
2696
- `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
2697
- );
2698
- }
2699
- }
2700
- }
2701
- webApps.set(name, this);
2702
2450
  }
2703
- toHandler(appOrHandler) {
2704
- if (typeof appOrHandler === "function") {
2705
- return appOrHandler;
2706
- }
2707
- const app = appOrHandler;
2708
- if (typeof app.handle === "function") {
2709
- return (req, res) => {
2710
- app.handle(req, res, (err) => {
2711
- if (err) {
2712
- console.error("WebApp handler error:", err);
2713
- if (!res.headersSent) {
2714
- res.writeHead(500, { "Content-Type": "application/json" });
2715
- res.end(JSON.stringify({ error: "Internal Server Error" }));
2716
- }
2717
- }
2718
- });
2719
- };
2720
- }
2721
- if (typeof app.callback === "function") {
2722
- return app.callback();
2723
- }
2724
- if (typeof app.routing === "function") {
2725
- const routing = app.routing;
2726
- const appWithReady = app;
2727
- let readyPromise = null;
2728
- return async (req, res) => {
2729
- if (readyPromise === null) {
2730
- readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2451
+ }
2452
+ webApps.set(name, this);
2453
+ }
2454
+ toHandler(appOrHandler) {
2455
+ if (typeof appOrHandler === "function") {
2456
+ return appOrHandler;
2457
+ }
2458
+ const app = appOrHandler;
2459
+ if (typeof app.handle === "function") {
2460
+ return (req, res) => {
2461
+ app.handle(req, res, (err) => {
2462
+ if (err) {
2463
+ console.error("WebApp handler error:", err);
2464
+ if (!res.headersSent) {
2465
+ res.writeHead(500, { "Content-Type": "application/json" });
2466
+ res.end(JSON.stringify({ error: "Internal Server Error" }));
2731
2467
  }
2732
- await readyPromise;
2733
- routing(req, res);
2734
- };
2735
- }
2736
- throw new Error(
2737
- `Unable to convert app to handler. The provided object must be:
2468
+ }
2469
+ });
2470
+ };
2471
+ }
2472
+ if (typeof app.callback === "function") {
2473
+ return app.callback();
2474
+ }
2475
+ if (typeof app.routing === "function") {
2476
+ const routing = app.routing;
2477
+ const appWithReady = app;
2478
+ let readyPromise = null;
2479
+ return async (req, res) => {
2480
+ if (readyPromise === null) {
2481
+ readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
2482
+ }
2483
+ await readyPromise;
2484
+ routing(req, res);
2485
+ };
2486
+ }
2487
+ throw new Error(
2488
+ `Unable to convert app to handler. The provided object must be:
2738
2489
  - A function (raw Node.js handler)
2739
2490
  - An object with .handle() method (Express, Connect)
2740
2491
  - An object with .callback() method (Koa)
@@ -2746,14 +2497,12 @@ Examples:
2746
2497
  Fastify: new WebApp("name", fastifyApp)
2747
2498
  Raw: new WebApp("name", (req, res) => { ... })
2748
2499
  `
2749
- );
2750
- }
2751
- getRawApp() {
2752
- return this._rawApp;
2753
- }
2754
- };
2500
+ );
2755
2501
  }
2756
- });
2502
+ getRawApp() {
2503
+ return this._rawApp;
2504
+ }
2505
+ };
2757
2506
 
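WebApp validates its mount path against the reserved prefixes listed above, rejects duplicates by name and by mount path, and normalizes whatever it is handed into a raw Node request handler: functions pass through, Express/Connect apps are adapted via `.handle()` (with a 500 fallback on error), Koa apps via `.callback()`, and Fastify-style apps via `.routing()` behind a memoized `.ready()` gate. A simplified, self-contained sketch of that duck-typing dispatch (types reduced; the error and readiness handling above is fuller):

  type NodeHandler = (req: unknown, res: unknown) => void | Promise<void>;

  // Illustrative framework detection, mirroring the order used above.
  function toHandler(appOrHandler: unknown): NodeHandler {
    if (typeof appOrHandler === "function") {
      return appOrHandler as NodeHandler;       // raw Node.js handler
    }
    const app = appOrHandler as Record<string, unknown>;
    if (typeof app.handle === "function") {     // Express / Connect
      return (req, res) => (app.handle as Function)(req, res, () => {});
    }
    if (typeof app.callback === "function") {   // Koa
      return (app.callback as Function)() as NodeHandler;
    }
    if (typeof app.routing === "function") {    // Fastify-style
      let ready: Promise<unknown> | null = null;
      return async (req, res) => {
        ready ??= typeof app.ready === "function"
          ? (app.ready as Function)()
          : Promise.resolve();
        await ready;
        (app.routing as Function)(req, res);
      };
    }
    throw new Error("Unsupported app type");
  }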
2758
2507
  // src/dmv2/registry.ts
2759
2508
  function getTables() {
@@ -2819,32 +2568,18 @@ function getWebApps() {
2819
2568
  function getWebApp(name) {
2820
2569
  return getMooseInternal().webApps.get(name);
2821
2570
  }
2822
- var init_registry = __esm({
2823
- "src/dmv2/registry.ts"() {
2824
- "use strict";
2825
- init_internal();
2826
- }
2827
- });
2828
-
2829
- // src/dmv2/index.ts
2830
- var init_dmv2 = __esm({
2831
- "src/dmv2/index.ts"() {
2832
- init_olapTable();
2833
- init_stream();
2834
- init_workflow();
2835
- init_ingestApi();
2836
- init_consumptionApi();
2837
- init_ingestPipeline();
2838
- init_etlPipeline();
2839
- init_materializedView();
2840
- init_sqlResource();
2841
- init_view();
2842
- init_lifeCycle();
2843
- init_webApp();
2844
- init_registry();
2845
- }
2846
- });
2847
- init_dmv2();
2571
+ function getMaterializedViews() {
2572
+ return getMooseInternal().materializedViews;
2573
+ }
2574
+ function getMaterializedView(name) {
2575
+ return getMooseInternal().materializedViews.get(name);
2576
+ }
2577
+ function getCustomViews() {
2578
+ return getMooseInternal().customViews;
2579
+ }
2580
+ function getCustomView(name) {
2581
+ return getMooseInternal().customViews.get(name);
2582
+ }
2848
2583
  export {
2849
2584
  Api,
2850
2585
  ConsumptionApi,
@@ -2863,8 +2598,12 @@ export {
2863
2598
  Workflow,
2864
2599
  getApi,
2865
2600
  getApis,
2601
+ getCustomView,
2602
+ getCustomViews,
2866
2603
  getIngestApi,
2867
2604
  getIngestApis,
2605
+ getMaterializedView,
2606
+ getMaterializedViews,
2868
2607
  getSqlResource,
2869
2608
  getSqlResources,
2870
2609
  getStream,