@514labs/moose-lib 0.6.295-ci-16-gad4ec11a → 0.6.295-ci-20-gbe187727
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{browserCompatible-FzU17dxm.d.mts → browserCompatible-DCCiBirg.d.mts} +1 -1
- package/dist/{browserCompatible-CMEunMFq.d.ts → browserCompatible-DhVPp9qX.d.ts} +1 -1
- package/dist/browserCompatible.d.mts +2 -2
- package/dist/browserCompatible.d.ts +2 -2
- package/dist/browserCompatible.js +2189 -2448
- package/dist/browserCompatible.js.map +1 -1
- package/dist/browserCompatible.mjs +2133 -2394
- package/dist/browserCompatible.mjs.map +1 -1
- package/dist/dmv2/index.d.mts +1 -1
- package/dist/dmv2/index.d.ts +1 -1
- package/dist/dmv2/index.js +2093 -2352
- package/dist/dmv2/index.js.map +1 -1
- package/dist/dmv2/index.mjs +2044 -2305
- package/dist/dmv2/index.mjs.map +1 -1
- package/dist/{index-CcHF2cVT.d.mts → index-CcZRaA0b.d.mts} +87 -56
- package/dist/{index-CcHF2cVT.d.ts → index-CcZRaA0b.d.ts} +87 -56
- package/dist/index.d.mts +6 -79
- package/dist/index.d.ts +6 -79
- package/dist/index.js +2773 -3085
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2662 -2977
- package/dist/index.mjs.map +1 -1
- package/dist/moose-runner.js +1160 -1713
- package/dist/moose-runner.js.map +1 -1
- package/dist/moose-runner.mjs +1152 -1703
- package/dist/moose-runner.mjs.map +1 -1
- package/package.json +1 -1
@@ -30,325 +30,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
 ));
 var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
 
-// src/dmv2/utils/stackTrace.ts
-function shouldSkipStackLine(line) {
-  return line.includes("node_modules") || // Skip npm installed packages (prod)
-  line.includes("node:internal") || // Skip Node.js internals (modern format)
-  line.includes("internal/modules") || // Skip Node.js internals (older format)
-  line.includes("ts-node") || // Skip TypeScript execution
-  line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
-  line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
-  line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
-  line.includes("\\ts-moose-lib\\dist\\");
-}
-function parseStackLine(line) {
-  const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
-  if (match && match[1]) {
-    return {
-      file: match[1],
-      line: match[2]
-    };
-  }
-  return void 0;
-}
-function getSourceFileInfo(stack) {
-  if (!stack) return {};
-  const lines = stack.split("\n");
-  for (const line of lines) {
-    if (shouldSkipStackLine(line)) continue;
-    const info = parseStackLine(line);
-    if (info) return info;
-  }
-  return {};
-}
-function getSourceLocationFromStack(stack) {
-  if (!stack) return void 0;
-  const lines = stack.split("\n");
-  for (const line of lines.slice(1)) {
-    if (shouldSkipStackLine(line)) {
-      continue;
-    }
-    const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
-    if (v8Match) {
-      return {
-        file: v8Match[1],
-        line: parseInt(v8Match[2], 10),
-        column: parseInt(v8Match[3], 10)
-      };
-    }
-    const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
-    if (smMatch) {
-      return {
-        file: smMatch[1],
-        line: parseInt(smMatch[2], 10),
-        column: parseInt(smMatch[3], 10)
-      };
-    }
-  }
-  return void 0;
-}
-function getSourceFileFromStack(stack) {
-  const location = getSourceLocationFromStack(stack);
-  return location?.file;
-}
-var init_stackTrace = __esm({
-  "src/dmv2/utils/stackTrace.ts"() {
-    "use strict";
-  }
-});
-
-// src/dmv2/typedBase.ts
-var TypedBase;
-var init_typedBase = __esm({
-  "src/dmv2/typedBase.ts"() {
-    "use strict";
-    init_stackTrace();
-    TypedBase = class {
-      /** The JSON schema representation of type T. Injected by the compiler plugin. */
-      schema;
-      /** The name assigned to this resource instance. */
-      name;
-      /** A dictionary mapping column names (keys of T) to their Column definitions. */
-      columns;
-      /** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
-      columnArray;
-      /** The configuration object specific to this resource type. */
-      config;
-      /** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
-      validators;
-      /** Optional metadata for the resource, always present as an object. */
-      metadata;
-      /**
-       * Whether this resource allows extra fields beyond the defined columns.
-       * When true, extra fields in payloads are passed through to streaming functions.
-       * Injected by the compiler plugin when the type has an index signature.
-       */
-      allowExtraFields;
-      /**
-       * @internal Constructor intended for internal use by subclasses and the compiler plugin.
-       * It expects the schema and columns to be provided, typically injected by the compiler.
-       *
-       * @param name The name for the resource instance.
-       * @param config The configuration object for the resource.
-       * @param schema The JSON schema for the resource's data type T (injected).
-       * @param columns The array of Column definitions for T (injected).
-       * @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
-       */
-      constructor(name, config, schema, columns, validators, allowExtraFields) {
-        if (schema === void 0 || columns === void 0) {
-          throw new Error(
-            "Supply the type param T so that the schema is inserted by the compiler plugin."
-          );
-        }
-        this.schema = schema;
-        this.columnArray = columns;
-        const columnsObj = {};
-        columns.forEach((column) => {
-          columnsObj[column.name] = column;
-        });
-        this.columns = columnsObj;
-        this.name = name;
-        this.config = config;
-        this.validators = validators;
-        this.allowExtraFields = allowExtraFields ?? false;
-        this.metadata = config?.metadata ? { ...config.metadata } : {};
-        if (!this.metadata.source) {
-          const stack = new Error().stack;
-          if (stack) {
-            const info = getSourceFileInfo(stack);
-            this.metadata.source = { file: info.file, line: info.line };
-          }
-        }
-      }
-    };
-  }
-});
-
-// src/dataModels/dataModelTypes.ts
-function isArrayNestedType(dt) {
-  return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
-}
-function isNestedType(dt) {
-  return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
-}
-var init_dataModelTypes = __esm({
-  "src/dataModels/dataModelTypes.ts"() {
-    "use strict";
-  }
-});
-
-// src/sqlHelpers.ts
-function sql(strings, ...values) {
-  return new Sql(strings, values);
-}
-function createClickhouseParameter(parameterIndex, value) {
-  return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
-}
-function emptyIfUndefined(value) {
-  return value === void 0 ? "" : value;
-}
-var quoteIdentifier, isTable, isColumn, instanceofSql, Sql, toStaticQuery, toQuery, toQueryPreview, getValueFromParameter, mapToClickHouseType;
-var init_sqlHelpers = __esm({
-  "src/sqlHelpers.ts"() {
-    "use strict";
-    quoteIdentifier = (name) => {
-      return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
-    };
-    isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
-    isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
-    instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
-    Sql = class {
-      values;
-      strings;
-      constructor(rawStrings, rawValues) {
-        if (rawStrings.length - 1 !== rawValues.length) {
-          if (rawStrings.length === 0) {
-            throw new TypeError("Expected at least 1 string");
-          }
-          throw new TypeError(
-            `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
-          );
-        }
-        const valuesLength = rawValues.reduce(
-          (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
-          0
-        );
-        this.values = new Array(valuesLength);
-        this.strings = new Array(valuesLength + 1);
-        this.strings[0] = rawStrings[0];
-        let i = 0, pos = 0;
-        while (i < rawValues.length) {
-          const child = rawValues[i++];
-          const rawString = rawStrings[i];
-          if (instanceofSql(child)) {
-            this.strings[pos] += child.strings[0];
-            let childIndex = 0;
-            while (childIndex < child.values.length) {
-              this.values[pos++] = child.values[childIndex++];
-              this.strings[pos] = child.strings[childIndex];
-            }
-            this.strings[pos] += rawString;
-          } else if (isColumn(child)) {
-            const aggregationFunction = child.annotations.find(
-              ([k, _]) => k === "aggregationFunction"
-            );
-            if (aggregationFunction !== void 0) {
-              this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
-            } else {
-              this.strings[pos] += `\`${child.name}\``;
-            }
-            this.strings[pos] += rawString;
-          } else if (isTable(child)) {
-            if (child.config.database) {
-              this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
-            } else {
-              this.strings[pos] += `\`${child.name}\``;
-            }
-            this.strings[pos] += rawString;
-          } else {
-            this.values[pos++] = child;
-            this.strings[pos] = rawString;
-          }
-        }
-      }
-    };
-    toStaticQuery = (sql3) => {
-      const [query, params] = toQuery(sql3);
-      if (Object.keys(params).length !== 0) {
-        throw new Error(
-          "Dynamic SQL is not allowed in the select statement in view creation."
-        );
-      }
-      return query;
-    };
-    toQuery = (sql3) => {
-      const parameterizedStubs = sql3.values.map(
-        (v, i) => createClickhouseParameter(i, v)
-      );
-      const query = sql3.strings.map(
-        (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
-      ).join("");
-      const query_params = sql3.values.reduce(
-        (acc, v, i) => ({
-          ...acc,
-          [`p${i}`]: getValueFromParameter(v)
-        }),
-        {}
-      );
-      return [query, query_params];
-    };
-    toQueryPreview = (sql3) => {
-      try {
-        const formatValue = (v) => {
-          if (Array.isArray(v)) {
-            const [type, val] = v;
-            if (type === "Identifier") {
-              return `\`${String(val)}\``;
-            }
-            return `[${v.map((x) => formatValue(x)).join(", ")}]`;
-          }
-          if (v === null || v === void 0) return "NULL";
-          if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
-          if (typeof v === "number") return String(v);
-          if (typeof v === "boolean") return v ? "true" : "false";
-          if (v instanceof Date)
-            return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
-          try {
-            return JSON.stringify(v);
-          } catch {
-            return String(v);
-          }
-        };
-        let out = sql3.strings[0] ?? "";
-        for (let i = 0; i < sql3.values.length; i++) {
-          const val = getValueFromParameter(sql3.values[i]);
-          out += formatValue(val);
-          out += sql3.strings[i + 1] ?? "";
-        }
-        return out.replace(/\s+/g, " ").trim();
-      } catch (error) {
-        console.log(`toQueryPreview error: ${error}`);
-        return "/* query preview unavailable */";
-      }
-    };
-    getValueFromParameter = (value) => {
-      if (Array.isArray(value)) {
-        const [type, val] = value;
-        if (type === "Identifier") return val;
-      }
-      return value;
-    };
-    mapToClickHouseType = (value) => {
-      if (typeof value === "number") {
-        return Number.isInteger(value) ? "Int" : "Float";
-      }
-      if (typeof value === "boolean") return "Bool";
-      if (value instanceof Date) return "DateTime";
-      if (Array.isArray(value)) {
-        const [type, _] = value;
-        return type;
-      }
-      return "String";
-    };
-  }
-});
-
-// src/blocks/helpers.ts
-function dropView(name) {
-  return `DROP VIEW IF EXISTS ${quoteIdentifier(name)}`.trim();
-}
-function createMaterializedView(options) {
-  return `CREATE MATERIALIZED VIEW IF NOT EXISTS ${quoteIdentifier(options.name)}
-TO ${quoteIdentifier(options.destinationTable)}
-AS ${options.select}`.trim();
-}
-var init_helpers = __esm({
-  "src/blocks/helpers.ts"() {
-    "use strict";
-    init_sqlHelpers();
-  }
-});
-
 // src/commons.ts
 var commons_exports = {};
 __export(commons_exports, {
@@ -516,306 +197,6 @@ var init_commons = __esm({
   }
 });
 
-// src/secrets.ts
-var init_secrets = __esm({
-  "src/secrets.ts"() {
-    "use strict";
-  }
-});
-
-// src/consumption-apis/helpers.ts
-var import_client2, import_node_crypto;
-var init_helpers2 = __esm({
-  "src/consumption-apis/helpers.ts"() {
-    "use strict";
-    import_client2 = require("@temporalio/client");
-    import_node_crypto = require("crypto");
-    init_internal();
-    init_sqlHelpers();
-  }
-});
-
-// src/consumption-apis/webAppHelpers.ts
-var init_webAppHelpers = __esm({
-  "src/consumption-apis/webAppHelpers.ts"() {
-    "use strict";
-  }
-});
-
-// src/scripts/task.ts
-var init_task = __esm({
-  "src/scripts/task.ts"() {
-    "use strict";
-  }
-});
-
-// src/cluster-utils.ts
-var import_node_cluster, import_node_os, import_node_process;
-var init_cluster_utils = __esm({
-  "src/cluster-utils.ts"() {
-    "use strict";
-    import_node_cluster = __toESM(require("cluster"));
-    import_node_os = require("os");
-    import_node_process = require("process");
-  }
-});
-
-// src/consumption-apis/runner.ts
-var jose;
-var init_runner = __esm({
-  "src/consumption-apis/runner.ts"() {
-    "use strict";
-    init_commons();
-    init_helpers2();
-    jose = __toESM(require("jose"));
-    init_cluster_utils();
-    init_sqlHelpers();
-    init_internal();
-  }
-});
-
-// src/clients/redisClient.ts
-var import_redis;
-var init_redisClient = __esm({
-  "src/clients/redisClient.ts"() {
-    "use strict";
-    import_redis = require("redis");
-  }
-});
-
-// src/consumption-apis/standalone.ts
-var init_standalone = __esm({
-  "src/consumption-apis/standalone.ts"() {
-    "use strict";
-    init_helpers2();
-    init_commons();
-    init_sqlHelpers();
-  }
-});
-
-// src/utilities/json.ts
-var init_json = __esm({
-  "src/utilities/json.ts"() {
-    "use strict";
-  }
-});
-
-// src/utilities/dataParser.ts
-var import_csv_parse, CSV_DELIMITERS, DEFAULT_CSV_CONFIG;
-var init_dataParser = __esm({
-  "src/utilities/dataParser.ts"() {
-    "use strict";
-    import_csv_parse = require("csv-parse");
-    init_json();
-    CSV_DELIMITERS = {
-      COMMA: ",",
-      TAB: "\t",
-      SEMICOLON: ";",
-      PIPE: "|"
-    };
-    DEFAULT_CSV_CONFIG = {
-      delimiter: CSV_DELIMITERS.COMMA,
-      columns: true,
-      skipEmptyLines: true,
-      trim: true
-    };
-  }
-});
-
-// src/utilities/index.ts
-var init_utilities = __esm({
-  "src/utilities/index.ts"() {
-    "use strict";
-    init_dataParser();
-  }
-});
-
-// src/connectors/dataSource.ts
-var init_dataSource = __esm({
-  "src/connectors/dataSource.ts"() {
-    "use strict";
-  }
-});
-
-// src/dataModels/types.ts
-var init_types = __esm({
-  "src/dataModels/types.ts"() {
-    "use strict";
-  }
-});
-
-// src/index.ts
-var init_index = __esm({
-  "src/index.ts"() {
-    "use strict";
-    init_browserCompatible();
-    init_helpers();
-    init_commons();
-    init_secrets();
-    init_helpers2();
-    init_webAppHelpers();
-    init_task();
-    init_runner();
-    init_redisClient();
-    init_helpers2();
-    init_standalone();
-    init_sqlHelpers();
-    init_utilities();
-    init_dataSource();
-    init_types();
-  }
-});
-
-// src/dmv2/internal.ts
-var import_process, isClientOnlyMode, moose_internal, defaultRetentionPeriod, getMooseInternal, dlqSchema, dlqColumns;
-var init_internal = __esm({
-  "src/dmv2/internal.ts"() {
-    "use strict";
-    import_process = __toESM(require("process"));
-    init_index();
-    init_commons();
-    isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
-    moose_internal = {
-      tables: /* @__PURE__ */ new Map(),
-      streams: /* @__PURE__ */ new Map(),
-      ingestApis: /* @__PURE__ */ new Map(),
-      apis: /* @__PURE__ */ new Map(),
-      sqlResources: /* @__PURE__ */ new Map(),
-      workflows: /* @__PURE__ */ new Map(),
-      webApps: /* @__PURE__ */ new Map()
-    };
-    defaultRetentionPeriod = 60 * 60 * 24 * 7;
-    getMooseInternal = () => globalThis.moose_internal;
-    if (getMooseInternal() === void 0) {
-      globalThis.moose_internal = moose_internal;
-    }
-    dlqSchema = {
-      version: "3.1",
-      components: {
-        schemas: {
-          DeadLetterModel: {
-            type: "object",
-            properties: {
-              originalRecord: {
-                $ref: "#/components/schemas/Recordstringany"
-              },
-              errorMessage: {
-                type: "string"
-              },
-              errorType: {
-                type: "string"
-              },
-              failedAt: {
-                type: "string",
-                format: "date-time"
-              },
-              source: {
-                oneOf: [
-                  {
-                    const: "api"
-                  },
-                  {
-                    const: "transform"
-                  },
-                  {
-                    const: "table"
-                  }
-                ]
-              }
-            },
-            required: [
-              "originalRecord",
-              "errorMessage",
-              "errorType",
-              "failedAt",
-              "source"
-            ]
-          },
-          Recordstringany: {
-            type: "object",
-            properties: {},
-            required: [],
-            description: "Construct a type with a set of properties K of type T",
-            additionalProperties: {}
-          }
-        }
-      },
-      schemas: [
-        {
-          $ref: "#/components/schemas/DeadLetterModel"
-        }
-      ]
-    };
-    dlqColumns = [
-      {
-        name: "originalRecord",
-        data_type: "Json",
-        primary_key: false,
-        required: true,
-        unique: false,
-        default: null,
-        annotations: [],
-        ttl: null,
-        codec: null,
-        materialized: null,
-        comment: null
-      },
-      {
-        name: "errorMessage",
-        data_type: "String",
-        primary_key: false,
-        required: true,
-        unique: false,
-        default: null,
-        annotations: [],
-        ttl: null,
-        codec: null,
-        materialized: null,
-        comment: null
-      },
-      {
-        name: "errorType",
-        data_type: "String",
-        primary_key: false,
-        required: true,
-        unique: false,
-        default: null,
-        annotations: [],
-        ttl: null,
-        codec: null,
-        materialized: null,
-        comment: null
-      },
-      {
-        name: "failedAt",
-        data_type: "DateTime",
-        primary_key: false,
-        required: true,
-        unique: false,
-        default: null,
-        annotations: [],
-        ttl: null,
-        codec: null,
-        materialized: null,
-        comment: null
-      },
-      {
-        name: "source",
-        data_type: "String",
-        primary_key: false,
-        required: true,
-        unique: false,
-        default: null,
-        annotations: [],
-        ttl: null,
-        codec: null,
-        materialized: null,
-        comment: null
-      }
-    ];
-  }
-});
-
 // src/config/configFile.ts
 async function findConfigFile(startDir = process.cwd()) {
   const fs = await import("fs");
@@ -1000,1849 +381,2273 @@ var init_runtime = __esm({
|
|
|
1000
381
|
}
|
|
1001
382
|
});
|
|
1002
383
|
|
|
1003
|
-
// src/
|
|
1004
|
-
var
|
|
1005
|
-
|
|
1006
|
-
|
|
1007
|
-
|
|
1008
|
-
|
|
1009
|
-
|
|
1010
|
-
|
|
1011
|
-
|
|
1012
|
-
|
|
1013
|
-
|
|
1014
|
-
|
|
1015
|
-
|
|
1016
|
-
|
|
1017
|
-
|
|
1018
|
-
|
|
1019
|
-
|
|
1020
|
-
|
|
1021
|
-
|
|
1022
|
-
|
|
1023
|
-
|
|
1024
|
-
|
|
1025
|
-
|
|
1026
|
-
|
|
1027
|
-
|
|
1028
|
-
|
|
1029
|
-
|
|
1030
|
-
|
|
1031
|
-
|
|
1032
|
-
|
|
1033
|
-
|
|
1034
|
-
|
|
1035
|
-
|
|
1036
|
-
|
|
1037
|
-
|
|
1038
|
-
|
|
1039
|
-
|
|
1040
|
-
|
|
1041
|
-
|
|
1042
|
-
|
|
1043
|
-
|
|
1044
|
-
|
|
1045
|
-
|
|
1046
|
-
|
|
1047
|
-
|
|
1048
|
-
|
|
1049
|
-
|
|
1050
|
-
|
|
1051
|
-
|
|
1052
|
-
|
|
1053
|
-
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
|
|
1057
|
-
|
|
1058
|
-
|
|
1059
|
-
|
|
1060
|
-
|
|
1061
|
-
|
|
1062
|
-
|
|
1063
|
-
|
|
1064
|
-
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1068
|
-
|
|
1069
|
-
|
|
1070
|
-
|
|
1071
|
-
|
|
1072
|
-
|
|
1073
|
-
|
|
1074
|
-
|
|
1075
|
-
|
|
1076
|
-
|
|
1077
|
-
|
|
1078
|
-
|
|
1079
|
-
|
|
384
|
+
// src/browserCompatible.ts
|
|
385
|
+
var browserCompatible_exports = {};
|
|
386
|
+
__export(browserCompatible_exports, {
|
|
387
|
+
Api: () => Api,
|
|
388
|
+
ConsumptionApi: () => ConsumptionApi,
|
|
389
|
+
DeadLetterQueue: () => DeadLetterQueue,
|
|
390
|
+
ETLPipeline: () => ETLPipeline,
|
|
391
|
+
IngestApi: () => IngestApi,
|
|
392
|
+
IngestPipeline: () => IngestPipeline,
|
|
393
|
+
LifeCycle: () => LifeCycle,
|
|
394
|
+
MaterializedView: () => MaterializedView,
|
|
395
|
+
OlapTable: () => OlapTable,
|
|
396
|
+
Sql: () => Sql,
|
|
397
|
+
SqlResource: () => SqlResource,
|
|
398
|
+
Stream: () => Stream,
|
|
399
|
+
Task: () => Task,
|
|
400
|
+
View: () => View,
|
|
401
|
+
WebApp: () => WebApp,
|
|
402
|
+
Workflow: () => Workflow,
|
|
403
|
+
createClickhouseParameter: () => createClickhouseParameter,
|
|
404
|
+
getApi: () => getApi,
|
|
405
|
+
getApis: () => getApis2,
|
|
406
|
+
getCustomView: () => getCustomView,
|
|
407
|
+
getCustomViews: () => getCustomViews,
|
|
408
|
+
getIngestApi: () => getIngestApi,
|
|
409
|
+
getIngestApis: () => getIngestApis,
|
|
410
|
+
getMaterializedView: () => getMaterializedView,
|
|
411
|
+
getMaterializedViews: () => getMaterializedViews,
|
|
412
|
+
getSqlResource: () => getSqlResource,
|
|
413
|
+
getSqlResources: () => getSqlResources,
|
|
414
|
+
getStream: () => getStream,
|
|
415
|
+
getStreams: () => getStreams,
|
|
416
|
+
getTable: () => getTable,
|
|
417
|
+
getTables: () => getTables,
|
|
418
|
+
getValueFromParameter: () => getValueFromParameter,
|
|
419
|
+
getWebApp: () => getWebApp,
|
|
420
|
+
getWebApps: () => getWebApps2,
|
|
421
|
+
getWorkflow: () => getWorkflow,
|
|
422
|
+
getWorkflows: () => getWorkflows2,
|
|
423
|
+
mapToClickHouseType: () => mapToClickHouseType,
|
|
424
|
+
quoteIdentifier: () => quoteIdentifier,
|
|
425
|
+
sql: () => sql,
|
|
426
|
+
toQuery: () => toQuery,
|
|
427
|
+
toQueryPreview: () => toQueryPreview,
|
|
428
|
+
toStaticQuery: () => toStaticQuery
|
|
429
|
+
});
|
|
430
|
+
module.exports = __toCommonJS(browserCompatible_exports);
|
|
431
|
+
|
|
432
|
+
// src/dmv2/utils/stackTrace.ts
|
|
433
|
+
function shouldSkipStackLine(line) {
|
|
434
|
+
return line.includes("node_modules") || // Skip npm installed packages (prod)
|
|
435
|
+
line.includes("node:internal") || // Skip Node.js internals (modern format)
|
|
436
|
+
line.includes("internal/modules") || // Skip Node.js internals (older format)
|
|
437
|
+
line.includes("ts-node") || // Skip TypeScript execution
|
|
438
|
+
line.includes("/ts-moose-lib/src/") || // Skip dev/linked moose-lib src (Unix)
|
|
439
|
+
line.includes("\\ts-moose-lib\\src\\") || // Skip dev/linked moose-lib src (Windows)
|
|
440
|
+
line.includes("/ts-moose-lib/dist/") || // Skip dev/linked moose-lib dist (Unix)
|
|
441
|
+
line.includes("\\ts-moose-lib\\dist\\");
|
|
442
|
+
}
|
|
443
|
+
function parseStackLine(line) {
|
|
444
|
+
const match = line.match(/\((.*):(\d+):(\d+)\)/) || line.match(/at (.*):(\d+):(\d+)/);
|
|
445
|
+
if (match && match[1]) {
|
|
446
|
+
return {
|
|
447
|
+
file: match[1],
|
|
448
|
+
line: match[2]
|
|
449
|
+
};
|
|
450
|
+
}
|
|
451
|
+
return void 0;
|
|
452
|
+
}
|
|
453
|
+
function getSourceFileInfo(stack) {
|
|
454
|
+
if (!stack) return {};
|
|
455
|
+
const lines = stack.split("\n");
|
|
456
|
+
for (const line of lines) {
|
|
457
|
+
if (shouldSkipStackLine(line)) continue;
|
|
458
|
+
const info = parseStackLine(line);
|
|
459
|
+
if (info) return info;
|
|
460
|
+
}
|
|
461
|
+
return {};
|
|
462
|
+
}
|
|
463
|
+
function getSourceLocationFromStack(stack) {
|
|
464
|
+
if (!stack) return void 0;
|
|
465
|
+
const lines = stack.split("\n");
|
|
466
|
+
for (const line of lines.slice(1)) {
|
|
467
|
+
if (shouldSkipStackLine(line)) {
|
|
468
|
+
continue;
|
|
469
|
+
}
|
|
470
|
+
const v8Match = line.match(/at\s+(?:.*?\s+\()?(.+):(\d+):(\d+)\)?/);
|
|
471
|
+
if (v8Match) {
|
|
472
|
+
return {
|
|
473
|
+
file: v8Match[1],
|
|
474
|
+
line: parseInt(v8Match[2], 10),
|
|
475
|
+
column: parseInt(v8Match[3], 10)
|
|
476
|
+
};
|
|
477
|
+
}
|
|
478
|
+
const smMatch = line.match(/(?:.*@)?(.+):(\d+):(\d+)/);
|
|
479
|
+
if (smMatch) {
|
|
480
|
+
return {
|
|
481
|
+
file: smMatch[1],
|
|
482
|
+
line: parseInt(smMatch[2], 10),
|
|
483
|
+
column: parseInt(smMatch[3], 10)
|
|
484
|
+
};
|
|
485
|
+
}
|
|
486
|
+
}
|
|
487
|
+
return void 0;
|
|
488
|
+
}
|
|
489
|
+
function getSourceFileFromStack(stack) {
|
|
490
|
+
const location = getSourceLocationFromStack(stack);
|
|
491
|
+
return location?.file;
|
|
492
|
+
}
|
|
493
|
+
|
|
494
|
+
// src/dmv2/typedBase.ts
|
|
495
|
+
var TypedBase = class {
|
|
496
|
+
/** The JSON schema representation of type T. Injected by the compiler plugin. */
|
|
497
|
+
schema;
|
|
498
|
+
/** The name assigned to this resource instance. */
|
|
499
|
+
name;
|
|
500
|
+
/** A dictionary mapping column names (keys of T) to their Column definitions. */
|
|
501
|
+
columns;
|
|
502
|
+
/** An array containing the Column definitions for this resource. Injected by the compiler plugin. */
|
|
503
|
+
columnArray;
|
|
504
|
+
/** The configuration object specific to this resource type. */
|
|
505
|
+
config;
|
|
506
|
+
/** Typia validation functions for type T. Injected by the compiler plugin for OlapTable. */
|
|
507
|
+
validators;
|
|
508
|
+
/** Optional metadata for the resource, always present as an object. */
|
|
509
|
+
metadata;
|
|
510
|
+
/**
|
|
511
|
+
* Whether this resource allows extra fields beyond the defined columns.
|
|
512
|
+
* When true, extra fields in payloads are passed through to streaming functions.
|
|
513
|
+
* Injected by the compiler plugin when the type has an index signature.
|
|
514
|
+
*/
|
|
515
|
+
allowExtraFields;
|
|
516
|
+
/**
|
|
517
|
+
* @internal Constructor intended for internal use by subclasses and the compiler plugin.
|
|
518
|
+
* It expects the schema and columns to be provided, typically injected by the compiler.
|
|
519
|
+
*
|
|
520
|
+
* @param name The name for the resource instance.
|
|
521
|
+
* @param config The configuration object for the resource.
|
|
522
|
+
* @param schema The JSON schema for the resource's data type T (injected).
|
|
523
|
+
* @param columns The array of Column definitions for T (injected).
|
|
524
|
+
* @param allowExtraFields Whether extra fields are allowed (injected when type has index signature).
|
|
525
|
+
*/
|
|
526
|
+
constructor(name, config, schema, columns, validators, allowExtraFields) {
|
|
527
|
+
if (schema === void 0 || columns === void 0) {
|
|
528
|
+
throw new Error(
|
|
529
|
+
"Supply the type param T so that the schema is inserted by the compiler plugin."
|
|
530
|
+
);
|
|
531
|
+
}
|
|
532
|
+
this.schema = schema;
|
|
533
|
+
this.columnArray = columns;
|
|
534
|
+
const columnsObj = {};
|
|
535
|
+
columns.forEach((column) => {
|
|
536
|
+
columnsObj[column.name] = column;
|
|
537
|
+
});
|
|
538
|
+
this.columns = columnsObj;
|
|
539
|
+
this.name = name;
|
|
540
|
+
this.config = config;
|
|
541
|
+
this.validators = validators;
|
|
542
|
+
this.allowExtraFields = allowExtraFields ?? false;
|
|
543
|
+
this.metadata = config?.metadata ? { ...config.metadata } : {};
|
|
544
|
+
if (!this.metadata.source) {
|
|
545
|
+
const stack = new Error().stack;
|
|
546
|
+
if (stack) {
|
|
547
|
+
const info = getSourceFileInfo(stack);
|
|
548
|
+
this.metadata.source = { file: info.file, line: info.line };
|
|
1080
549
|
}
|
|
1081
|
-
|
|
1082
|
-
|
|
1083
|
-
|
|
1084
|
-
|
|
1085
|
-
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1089
|
-
|
|
1090
|
-
|
|
1091
|
-
|
|
1092
|
-
|
|
1093
|
-
|
|
1094
|
-
|
|
1095
|
-
|
|
550
|
+
}
|
|
551
|
+
}
|
|
552
|
+
};
|
|
553
|
+
|
|
554
|
+
// src/dataModels/dataModelTypes.ts
|
|
555
|
+
function isArrayNestedType(dt) {
|
|
556
|
+
return typeof dt === "object" && dt !== null && dt.elementType !== null && typeof dt.elementType === "object" && dt.elementType.hasOwnProperty("columns") && Array.isArray(dt.elementType.columns);
|
|
557
|
+
}
|
|
558
|
+
function isNestedType(dt) {
|
|
559
|
+
return typeof dt === "object" && dt !== null && Array.isArray(dt.columns);
|
|
560
|
+
}
|
|
561
|
+
|
|
562
|
+
// src/sqlHelpers.ts
|
|
563
|
+
var quoteIdentifier = (name) => {
|
|
564
|
+
return name.startsWith("`") && name.endsWith("`") ? name : `\`${name}\``;
|
|
565
|
+
};
|
|
566
|
+
var isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
|
|
567
|
+
var isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
|
|
568
|
+
function sql(strings, ...values) {
|
|
569
|
+
return new Sql(strings, values);
|
|
570
|
+
}
|
|
571
|
+
var instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
|
|
572
|
+
var Sql = class {
|
|
573
|
+
values;
|
|
574
|
+
strings;
|
|
575
|
+
constructor(rawStrings, rawValues) {
|
|
576
|
+
if (rawStrings.length - 1 !== rawValues.length) {
|
|
577
|
+
if (rawStrings.length === 0) {
|
|
578
|
+
throw new TypeError("Expected at least 1 string");
|
|
579
|
+
}
|
|
580
|
+
throw new TypeError(
|
|
581
|
+
`Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
|
|
582
|
+
);
|
|
583
|
+
}
|
|
584
|
+
const valuesLength = rawValues.reduce(
|
|
585
|
+
(len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
|
|
586
|
+
0
|
|
587
|
+
);
|
|
588
|
+
this.values = new Array(valuesLength);
|
|
589
|
+
this.strings = new Array(valuesLength + 1);
|
|
590
|
+
this.strings[0] = rawStrings[0];
|
|
591
|
+
let i = 0, pos = 0;
|
|
592
|
+
while (i < rawValues.length) {
|
|
593
|
+
const child = rawValues[i++];
|
|
594
|
+
const rawString = rawStrings[i];
|
|
595
|
+
if (instanceofSql(child)) {
|
|
596
|
+
this.strings[pos] += child.strings[0];
|
|
597
|
+
let childIndex = 0;
|
|
598
|
+
while (childIndex < child.values.length) {
|
|
599
|
+
this.values[pos++] = child.values[childIndex++];
|
|
600
|
+
this.strings[pos] = child.strings[childIndex];
|
|
601
|
+
}
|
|
602
|
+
this.strings[pos] += rawString;
|
|
603
|
+
} else if (isColumn(child)) {
|
|
604
|
+
const aggregationFunction = child.annotations.find(
|
|
605
|
+
([k, _]) => k === "aggregationFunction"
|
|
606
|
+
);
|
|
607
|
+
if (aggregationFunction !== void 0) {
|
|
608
|
+
this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
|
|
609
|
+
} else {
|
|
610
|
+
this.strings[pos] += `\`${child.name}\``;
|
|
1096
611
|
}
|
|
1097
|
-
|
|
1098
|
-
|
|
1099
|
-
|
|
1100
|
-
|
|
1101
|
-
|
|
612
|
+
this.strings[pos] += rawString;
|
|
613
|
+
} else if (isTable(child)) {
|
|
614
|
+
if (child.config.database) {
|
|
615
|
+
this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
|
|
616
|
+
} else {
|
|
617
|
+
this.strings[pos] += `\`${child.name}\``;
|
|
1102
618
|
}
|
|
1103
|
-
|
|
1104
|
-
|
|
1105
|
-
|
|
1106
|
-
|
|
1107
|
-
database: effectiveDatabase,
|
|
1108
|
-
useSSL: clickhouseConfig.useSSL ? "true" : "false",
|
|
1109
|
-
host: clickhouseConfig.host,
|
|
1110
|
-
port: clickhouseConfig.port
|
|
1111
|
-
});
|
|
1112
|
-
this._memoizedClient = client;
|
|
1113
|
-
this._configHash = currentConfigHash;
|
|
1114
|
-
return { client, config: clickhouseConfig };
|
|
619
|
+
this.strings[pos] += rawString;
|
|
620
|
+
} else {
|
|
621
|
+
this.values[pos++] = child;
|
|
622
|
+
this.strings[pos] = rawString;
|
|
1115
623
|
}
|
|
1116
|
-
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
|
|
1122
|
-
|
|
1123
|
-
|
|
1124
|
-
|
|
1125
|
-
|
|
1126
|
-
|
|
1127
|
-
|
|
1128
|
-
|
|
1129
|
-
|
|
1130
|
-
|
|
624
|
+
}
|
|
625
|
+
}
|
|
626
|
+
};
|
|
627
|
+
var toStaticQuery = (sql3) => {
|
|
628
|
+
const [query, params] = toQuery(sql3);
|
|
629
|
+
if (Object.keys(params).length !== 0) {
|
|
630
|
+
throw new Error(
|
|
631
|
+
"Dynamic SQL is not allowed in the select statement in view creation."
|
|
632
|
+
);
|
|
633
|
+
}
|
|
634
|
+
return query;
|
|
635
|
+
};
|
|
636
|
+
var toQuery = (sql3) => {
|
|
637
|
+
const parameterizedStubs = sql3.values.map(
|
|
638
|
+
(v, i) => createClickhouseParameter(i, v)
|
|
639
|
+
);
|
|
640
|
+
const query = sql3.strings.map(
|
|
641
|
+
(s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
|
|
642
|
+
).join("");
|
|
643
|
+
const query_params = sql3.values.reduce(
|
|
644
|
+
(acc, v, i) => ({
|
|
645
|
+
...acc,
|
|
646
|
+
[`p${i}`]: getValueFromParameter(v)
|
|
647
|
+
}),
|
|
648
|
+
{}
|
|
649
|
+
);
|
|
650
|
+
return [query, query_params];
|
|
651
|
+
};
|
|
652
|
+
var toQueryPreview = (sql3) => {
|
|
653
|
+
try {
|
|
654
|
+
const formatValue = (v) => {
|
|
655
|
+
if (Array.isArray(v)) {
|
|
656
|
+
const [type, val] = v;
|
|
657
|
+
if (type === "Identifier") {
|
|
658
|
+
return `\`${String(val)}\``;
|
|
659
|
+
}
|
|
660
|
+
return `[${v.map((x) => formatValue(x)).join(", ")}]`;
|
|
661
|
+
}
|
|
662
|
+
if (v === null || v === void 0) return "NULL";
|
|
663
|
+
if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
|
|
664
|
+
if (typeof v === "number") return String(v);
|
|
665
|
+
if (typeof v === "boolean") return v ? "true" : "false";
|
|
666
|
+
if (v instanceof Date)
|
|
667
|
+
return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
|
|
668
|
+
try {
|
|
669
|
+
return JSON.stringify(v);
|
|
670
|
+
} catch {
|
|
671
|
+
return String(v);
|
|
1131
672
|
}
|
|
1132
|
-
|
|
1133
|
-
|
|
1134
|
-
|
|
1135
|
-
|
|
1136
|
-
|
|
1137
|
-
|
|
1138
|
-
|
|
1139
|
-
|
|
1140
|
-
|
|
1141
|
-
|
|
1142
|
-
|
|
1143
|
-
|
|
1144
|
-
|
|
1145
|
-
|
|
1146
|
-
|
|
1147
|
-
|
|
1148
|
-
|
|
1149
|
-
|
|
1150
|
-
|
|
1151
|
-
|
|
1152
|
-
|
|
1153
|
-
|
|
1154
|
-
|
|
673
|
+
};
|
|
674
|
+
let out = sql3.strings[0] ?? "";
|
|
675
|
+
for (let i = 0; i < sql3.values.length; i++) {
|
|
676
|
+
const val = getValueFromParameter(sql3.values[i]);
|
|
677
|
+
out += formatValue(val);
|
|
678
|
+
out += sql3.strings[i + 1] ?? "";
|
|
679
|
+
}
|
|
680
|
+
return out.replace(/\s+/g, " ").trim();
|
|
681
|
+
} catch (error) {
|
|
682
|
+
console.log(`toQueryPreview error: ${error}`);
|
|
683
|
+
return "/* query preview unavailable */";
|
|
684
|
+
}
|
|
685
|
+
};
|
|
686
|
+
var getValueFromParameter = (value) => {
|
|
687
|
+
if (Array.isArray(value)) {
|
|
688
|
+
const [type, val] = value;
|
|
689
|
+
if (type === "Identifier") return val;
|
|
690
|
+
}
|
|
691
|
+
return value;
|
|
692
|
+
};
|
|
693
|
+
function createClickhouseParameter(parameterIndex, value) {
|
|
694
|
+
return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
|
|
695
|
+
}
|
|
696
|
+
var mapToClickHouseType = (value) => {
|
|
697
|
+
if (typeof value === "number") {
|
|
698
|
+
return Number.isInteger(value) ? "Int" : "Float";
|
|
699
|
+
}
|
|
700
|
+
if (typeof value === "boolean") return "Bool";
|
|
701
|
+
if (value instanceof Date) return "DateTime";
|
|
702
|
+
if (Array.isArray(value)) {
|
|
703
|
+
const [type, _] = value;
|
|
704
|
+
return type;
|
|
705
|
+
}
|
|
706
|
+
return "String";
|
|
707
|
+
};
|
|
708
|
+
function emptyIfUndefined(value) {
|
|
709
|
+
return value === void 0 ? "" : value;
|
|
710
|
+
}
|
|
711
|
+
|
|
712
|
+
// src/dmv2/internal.ts
|
|
713
|
+
var import_process = __toESM(require("process"));
|
|
714
|
+
|
|
715
|
+
// src/index.ts
|
|
716
|
+
init_commons();
|
|
717
|
+
|
|
718
|
+
// src/consumption-apis/helpers.ts
|
|
719
|
+
var import_client2 = require("@temporalio/client");
|
|
720
|
+
var import_node_crypto = require("crypto");
|
|
721
|
+
|
|
722
|
+
// src/consumption-apis/runner.ts
|
|
723
|
+
init_commons();
|
|
724
|
+
var jose = __toESM(require("jose"));
|
|
725
|
+
|
|
726
|
+
// src/cluster-utils.ts
|
|
727
|
+
var import_node_cluster = __toESM(require("cluster"));
|
|
728
|
+
var import_node_os = require("os");
|
|
729
|
+
var import_node_process = require("process");
|
|
730
|
+
|
|
731
|
+
// src/clients/redisClient.ts
|
|
732
|
+
var import_redis = require("redis");
|
|
733
|
+
|
|
734
|
+
// src/consumption-apis/standalone.ts
|
|
735
|
+
init_commons();
|
|
736
|
+
|
|
737
|
+
// src/utilities/dataParser.ts
|
|
738
|
+
var import_csv_parse = require("csv-parse");
|
|
739
|
+
var CSV_DELIMITERS = {
|
|
740
|
+
COMMA: ",",
|
|
741
|
+
TAB: " ",
|
|
742
|
+
SEMICOLON: ";",
|
|
743
|
+
PIPE: "|"
|
|
744
|
+
};
|
|
745
|
+
var DEFAULT_CSV_CONFIG = {
|
|
746
|
+
delimiter: CSV_DELIMITERS.COMMA,
|
|
747
|
+
columns: true,
|
|
748
|
+
skipEmptyLines: true,
|
|
749
|
+
trim: true
|
|
750
|
+
};
|
|
751
|
+
|
|
752
|
+
// src/dmv2/internal.ts
|
|
753
|
+
init_commons();
|
|
754
|
+
var isClientOnlyMode = () => import_process.default.env.MOOSE_CLIENT_ONLY === "true";
|
|
755
|
+
var moose_internal = {
|
|
756
|
+
tables: /* @__PURE__ */ new Map(),
|
|
757
|
+
streams: /* @__PURE__ */ new Map(),
|
|
758
|
+
ingestApis: /* @__PURE__ */ new Map(),
|
|
759
|
+
apis: /* @__PURE__ */ new Map(),
|
|
760
|
+
sqlResources: /* @__PURE__ */ new Map(),
|
|
761
|
+
workflows: /* @__PURE__ */ new Map(),
|
|
762
|
+
webApps: /* @__PURE__ */ new Map(),
|
|
763
|
+
materializedViews: /* @__PURE__ */ new Map(),
|
|
764
|
+
customViews: /* @__PURE__ */ new Map()
|
|
765
|
+
};
|
|
766
|
+
var defaultRetentionPeriod = 60 * 60 * 24 * 7;
|
|
767
|
+
var getMooseInternal = () => globalThis.moose_internal;
|
|
768
|
+
if (getMooseInternal() === void 0) {
|
|
769
|
+
globalThis.moose_internal = moose_internal;
|
|
770
|
+
}
|
|
771
|
+
var dlqSchema = {
|
|
772
|
+
version: "3.1",
|
|
773
|
+
components: {
|
|
774
|
+
schemas: {
|
|
775
|
+
DeadLetterModel: {
|
|
776
|
+
type: "object",
|
|
777
|
+
properties: {
|
|
778
|
+
originalRecord: {
|
|
779
|
+
$ref: "#/components/schemas/Recordstringany"
|
|
780
|
+
},
|
|
781
|
+
errorMessage: {
|
|
782
|
+
type: "string"
|
|
783
|
+
},
|
|
784
|
+
errorType: {
|
|
785
|
+
type: "string"
|
|
786
|
+
},
|
|
787
|
+
failedAt: {
|
|
788
|
+
type: "string",
|
|
789
|
+
format: "date-time"
|
|
790
|
+
},
|
|
791
|
+
source: {
|
|
792
|
+
oneOf: [
|
|
793
|
+
{
|
|
794
|
+
const: "api"
|
|
795
|
+
},
|
|
796
|
+
{
|
|
797
|
+
const: "transform"
|
|
798
|
+
},
|
|
799
|
+
{
|
|
800
|
+
const: "table"
|
|
801
|
+
}
|
|
802
|
+
]
|
|
1155
803
|
}
|
|
1156
|
-
}
|
|
1157
|
-
|
|
804
|
+
},
|
|
805
|
+
required: [
|
|
806
|
+
"originalRecord",
|
|
807
|
+
"errorMessage",
|
|
808
|
+
"errorType",
|
|
809
|
+
"failedAt",
|
|
810
|
+
"source"
|
|
811
|
+
]
|
|
812
|
+
},
|
|
813
|
+
Recordstringany: {
|
|
814
|
+
type: "object",
|
|
815
|
+
properties: {},
|
|
816
|
+
required: [],
|
|
817
|
+
description: "Construct a type with a set of properties K of type T",
|
|
818
|
+
additionalProperties: {}
|
|
1158
819
|
}
|
|
1159
|
-
|
|
1160
|
-
|
|
1161
|
-
|
|
1162
|
-
|
|
1163
|
-
|
|
1164
|
-
|
|
1165
|
-
|
|
1166
|
-
|
|
1167
|
-
|
|
1168
|
-
|
|
1169
|
-
|
|
1170
|
-
|
|
820
|
+
}
|
|
821
|
+
},
|
|
822
|
+
schemas: [
|
|
823
|
+
{
|
|
824
|
+
$ref: "#/components/schemas/DeadLetterModel"
|
|
825
|
+
}
|
|
826
|
+
]
|
|
827
|
+
};
|
|
828
|
+
var dlqColumns = [
|
|
829
|
+
{
|
|
830
|
+
name: "originalRecord",
|
|
831
|
+
data_type: "Json",
|
|
832
|
+
primary_key: false,
|
|
833
|
+
required: true,
|
|
834
|
+
unique: false,
|
|
835
|
+
default: null,
|
|
836
|
+
annotations: [],
|
|
837
|
+
ttl: null,
|
|
838
|
+
codec: null,
|
|
839
|
+
materialized: null,
|
|
840
|
+
comment: null
|
|
841
|
+
},
|
|
842
|
+
{
|
|
843
|
+
name: "errorMessage",
|
|
844
|
+
data_type: "String",
|
|
845
|
+
primary_key: false,
|
|
846
|
+
required: true,
|
|
847
|
+
unique: false,
|
|
848
|
+
default: null,
|
|
849
|
+
annotations: [],
|
|
850
|
+
ttl: null,
|
|
851
|
+
codec: null,
|
|
852
|
+
materialized: null,
|
|
853
|
+
comment: null
|
|
854
|
+
},
|
|
855
|
+
{
|
|
856
|
+
name: "errorType",
|
|
857
|
+
data_type: "String",
|
|
858
|
+
primary_key: false,
|
|
859
|
+
required: true,
|
|
860
|
+
unique: false,
|
|
861
|
+
default: null,
|
|
862
|
+
annotations: [],
|
|
863
|
+
ttl: null,
|
|
864
|
+
codec: null,
|
|
865
|
+
materialized: null,
|
|
866
|
+
comment: null
|
|
867
|
+
},
|
|
868
|
+
{
|
|
869
|
+
name: "failedAt",
|
|
870
|
+
data_type: "DateTime",
|
|
871
|
+
primary_key: false,
|
|
872
|
+
required: true,
|
|
873
|
+
unique: false,
|
|
874
|
+
default: null,
|
|
875
|
+
annotations: [],
|
|
876
|
+
ttl: null,
|
|
877
|
+
codec: null,
|
|
878
|
+
materialized: null,
|
|
879
|
+
comment: null
|
|
880
|
+
},
|
|
881
|
+
{
|
|
882
|
+
name: "source",
|
|
883
|
+
data_type: "String",
|
|
884
|
+
primary_key: false,
|
|
885
|
+
required: true,
|
|
886
|
+
unique: false,
|
|
887
|
+
default: null,
|
|
888
|
+
annotations: [],
|
|
889
|
+
ttl: null,
|
|
890
|
+
codec: null,
|
|
891
|
+
materialized: null,
|
|
892
|
+
comment: null
|
|
893
|
+
}
|
|
894
|
+
];
|
|
895
|
+
|
|
896
|
+
// src/dmv2/sdk/olapTable.ts
|
|
897
|
+
var import_node_stream = require("stream");
|
|
898
|
+
var import_node_crypto2 = require("crypto");
|
|
899
|
+
var OlapTable = class extends TypedBase {
|
|
900
|
+
name;
|
|
901
|
+
/** @internal */
|
|
902
|
+
kind = "OlapTable";
|
|
903
|
+
/** @internal Memoized ClickHouse client for reusing connections across insert calls */
|
|
904
|
+
_memoizedClient;
|
|
905
|
+
/** @internal Hash of the configuration used to create the memoized client */
|
|
906
|
+
_configHash;
|
|
907
|
+
/** @internal Cached table name to avoid repeated generation */
|
|
908
|
+
_cachedTableName;
|
|
909
|
+
constructor(name, config, schema, columns, validators) {
|
|
910
|
+
const resolvedConfig = config ? "engine" in config ? config : { ...config, engine: "MergeTree" /* MergeTree */ } : { engine: "MergeTree" /* MergeTree */ };
|
|
911
|
+
const hasFields = Array.isArray(resolvedConfig.orderByFields) && resolvedConfig.orderByFields.length > 0;
|
|
912
|
+
const hasExpr = typeof resolvedConfig.orderByExpression === "string" && resolvedConfig.orderByExpression.length > 0;
|
|
913
|
+
if (hasFields && hasExpr) {
|
|
914
|
+
throw new Error(
|
|
915
|
+
`OlapTable ${name}: Provide either orderByFields or orderByExpression, not both.`
|
|
916
|
+
);
|
|
917
|
+
}
|
|
918
|
+
const hasCluster = typeof resolvedConfig.cluster === "string";
|
|
919
|
+
const hasKeeperPath = typeof resolvedConfig.keeperPath === "string";
|
|
920
|
+
const hasReplicaName = typeof resolvedConfig.replicaName === "string";
|
|
921
|
+
if (hasCluster && (hasKeeperPath || hasReplicaName)) {
|
|
922
|
+
throw new Error(
|
|
923
|
+
`OlapTable ${name}: Cannot specify both 'cluster' and explicit replication params ('keeperPath' or 'replicaName'). Use 'cluster' for auto-injected params, or use explicit 'keeperPath' and 'replicaName' without 'cluster'.`
|
|
924
|
+
);
|
|
925
|
+
}
|
|
926
|
+
super(name, resolvedConfig, schema, columns, validators);
|
|
927
|
+
this.name = name;
|
|
928
|
+
const tables = getMooseInternal().tables;
|
|
929
|
+
const registryKey = this.config.version ? `${name}_${this.config.version}` : name;
|
|
930
|
+
if (!isClientOnlyMode() && tables.has(registryKey)) {
|
|
931
|
+
throw new Error(
|
|
932
|
+
`OlapTable with name ${name} and version ${config?.version ?? "unversioned"} already exists`
|
|
933
|
+
);
|
|
934
|
+
}
|
|
935
|
+
tables.set(registryKey, this);
|
|
936
|
+
}
|
|
937
|
+
/**
|
|
938
|
+
* Generates the versioned table name following Moose's naming convention
|
|
939
|
+
* Format: {tableName}_{version_with_dots_replaced_by_underscores}
|
|
940
|
+
*/
|
|
941
|
+
generateTableName() {
|
|
942
|
+
if (this._cachedTableName) {
|
|
943
|
+
return this._cachedTableName;
|
|
944
|
+
}
|
|
945
|
+
const tableVersion = this.config.version;
|
|
946
|
+
if (!tableVersion) {
|
|
947
|
+
this._cachedTableName = this.name;
|
|
948
|
+
} else {
|
|
949
|
+
const versionSuffix = tableVersion.replace(/\./g, "_");
|
|
950
|
+
this._cachedTableName = `${this.name}_${versionSuffix}`;
|
|
951
|
+
}
|
|
952
|
+
return this._cachedTableName;
|
|
953
|
+
}
|
|
954
|
+
/**
|
|
955
|
+
* Creates a fast hash of the ClickHouse configuration.
|
|
956
|
+
* Uses crypto.createHash for better performance than JSON.stringify.
|
|
957
|
+
*
|
|
958
|
+
* @private
|
|
959
|
+
*/
|
|
960
|
+
createConfigHash(clickhouseConfig) {
|
|
961
|
+
const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
|
|
962
|
+
const configString = `${clickhouseConfig.host}:${clickhouseConfig.port}:${clickhouseConfig.username}:${clickhouseConfig.password}:${effectiveDatabase}:${clickhouseConfig.useSSL}`;
|
|
963
|
+
return (0, import_node_crypto2.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
|
|
964
|
+
}
|
|
965
|
+
/**
|
|
966
|
+
* Gets or creates a memoized ClickHouse client.
|
|
967
|
+
* The client is cached and reused across multiple insert calls for better performance.
|
|
968
|
+
* If the configuration changes, a new client will be created.
|
|
969
|
+
*
|
|
970
|
+
* @private
|
|
971
|
+
*/
|
|
972
|
+
async getMemoizedClient() {
|
|
973
|
+
await Promise.resolve().then(() => (init_runtime(), runtime_exports));
|
|
974
|
+
const configRegistry = globalThis._mooseConfigRegistry;
|
|
975
|
+
const { getClickhouseClient: getClickhouseClient2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
|
|
976
|
+
const clickhouseConfig = await configRegistry.getClickHouseConfig();
|
|
977
|
+
const currentConfigHash = this.createConfigHash(clickhouseConfig);
|
|
978
|
+
if (this._memoizedClient && this._configHash === currentConfigHash) {
|
|
979
|
+
return { client: this._memoizedClient, config: clickhouseConfig };
|
|
980
|
+
}
|
|
981
|
+
if (this._memoizedClient && this._configHash !== currentConfigHash) {
|
|
982
|
+
try {
|
|
983
|
+
await this._memoizedClient.close();
|
|
984
|
+
} catch (error) {
|
|
1171
985
|
}
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
986
|
+
}
|
|
987
|
+
const effectiveDatabase = this.config.database ?? clickhouseConfig.database;
|
|
988
|
+
const client = getClickhouseClient2({
|
|
989
|
+
username: clickhouseConfig.username,
|
|
990
|
+
password: clickhouseConfig.password,
|
|
991
|
+
database: effectiveDatabase,
|
|
992
|
+
useSSL: clickhouseConfig.useSSL ? "true" : "false",
|
|
993
|
+
host: clickhouseConfig.host,
|
|
994
|
+
port: clickhouseConfig.port
|
|
995
|
+
});
|
|
996
|
+
this._memoizedClient = client;
|
|
997
|
+
this._configHash = currentConfigHash;
|
|
998
|
+
return { client, config: clickhouseConfig };
|
|
999
|
+
}
|
|
1000
|
+
/**
|
|
1001
|
+
* Closes the memoized ClickHouse client if it exists.
|
|
1002
|
+
* This is useful for cleaning up connections when the table instance is no longer needed.
|
|
1003
|
+
* The client will be automatically recreated on the next insert call if needed.
|
|
1004
|
+
*/
|
|
1005
|
+
async closeClient() {
|
|
1006
|
+
if (this._memoizedClient) {
|
|
1007
|
+
try {
|
|
1008
|
+
await this._memoizedClient.close();
|
|
1009
|
+
} catch (error) {
|
|
1010
|
+
} finally {
|
|
1011
|
+
this._memoizedClient = void 0;
|
|
1012
|
+
this._configHash = void 0;
|
|
1185
1013
|
}
|
|
1186
|
-
|
|
1187
|
-
|
|
1188
|
-
|
|
1189
|
-
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
|
|
1193
|
-
|
|
1194
|
-
|
|
1195
|
-
|
|
1196
|
-
|
|
1197
|
-
|
|
1198
|
-
const
|
|
1199
|
-
|
|
1200
|
-
|
|
1201
|
-
|
|
1202
|
-
|
|
1203
|
-
|
|
1204
|
-
|
|
1205
|
-
|
|
1206
|
-
|
|
1207
|
-
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
|
|
1211
|
-
|
|
1212
|
-
|
|
1213
|
-
|
|
1214
|
-
|
|
1215
|
-
|
|
1216
|
-
|
|
1217
|
-
|
|
1014
|
+
}
|
|
1015
|
+
}
|
|
1016
|
+
/**
|
|
1017
|
+
* Validates a single record using typia's comprehensive type checking.
|
|
1018
|
+
* This provides the most accurate validation as it uses the exact TypeScript type information.
|
|
1019
|
+
*
|
|
1020
|
+
* @param record The record to validate
|
|
1021
|
+
* @returns Validation result with detailed error information
|
|
1022
|
+
*/
|
|
1023
|
+
validateRecord(record) {
|
|
1024
|
+
if (this.validators?.validate) {
|
|
1025
|
+
try {
|
|
1026
|
+
const result = this.validators.validate(record);
|
|
1027
|
+
return {
|
|
1028
|
+
success: result.success,
|
|
1029
|
+
data: result.data,
|
|
1030
|
+
errors: result.errors?.map(
|
|
1031
|
+
(err) => typeof err === "string" ? err : JSON.stringify(err)
|
|
1032
|
+
)
|
|
1033
|
+
};
|
|
1034
|
+
} catch (error) {
|
|
1035
|
+
return {
|
|
1036
|
+
success: false,
|
|
1037
|
+
errors: [error instanceof Error ? error.message : String(error)]
|
|
1038
|
+
};
|
|
1039
|
+
}
|
|
1040
|
+
}
|
|
1041
|
+
throw new Error("No typia validator found");
|
|
1042
|
+
}
|
|
1043
|
+
/**
|
|
1044
|
+
* Type guard function using typia's is() function.
|
|
1045
|
+
* Provides compile-time type narrowing for TypeScript.
|
|
1046
|
+
*
|
|
1047
|
+
* @param record The record to check
|
|
1048
|
+
* @returns True if record matches type T, with type narrowing
|
|
1049
|
+
*/
|
|
1050
|
+
isValidRecord(record) {
|
|
1051
|
+
if (this.validators?.is) {
|
|
1052
|
+
return this.validators.is(record);
|
|
1053
|
+
}
|
|
1054
|
+
throw new Error("No typia validator found");
|
|
1055
|
+
}
|
|
1056
|
+
/**
|
|
1057
|
+
* Assert that a record matches type T, throwing detailed errors if not.
|
|
1058
|
+
* Uses typia's assert() function for the most detailed error reporting.
|
|
1059
|
+
*
|
|
1060
|
+
* @param record The record to assert
|
|
1061
|
+
* @returns The validated and typed record
|
|
1062
|
+
* @throws Detailed validation error if record doesn't match type T
|
|
1063
|
+
*/
|
|
1064
|
+
assertValidRecord(record) {
|
|
1065
|
+
if (this.validators?.assert) {
|
|
1066
|
+
return this.validators.assert(record);
|
|
1067
|
+
}
|
|
1068
|
+
throw new Error("No typia validator found");
|
|
1069
|
+
}
+  /**
+   * Validates an array of records with comprehensive error reporting.
+   * Uses the most appropriate validation method available (typia or basic).
+   *
+   * @param data Array of records to validate
+   * @returns Detailed validation results
+   */
+  async validateRecords(data) {
+    const valid = [];
+    const invalid = [];
+    valid.length = 0;
+    invalid.length = 0;
+    const dataLength = data.length;
+    for (let i = 0; i < dataLength; i++) {
+      const record = data[i];
+      try {
+        if (this.isValidRecord(record)) {
+          valid.push(this.mapToClickhouseRecord(record));
+        } else {
+          const result = this.validateRecord(record);
+          if (result.success) {
+            valid.push(this.mapToClickhouseRecord(record));
+          } else {
             invalid.push({
               record,
-              error:
+              error: result.errors?.join(", ") || "Validation failed",
               index: i,
               path: "root"
             });
           }
         }
- … (old lines 1226-1230 not rendered in this diff view)
+      } catch (error) {
+        invalid.push({
+          record,
+          error: error instanceof Error ? error.message : String(error),
+          index: i,
+          path: "root"
+        });
       }
- … (old lines 1232-1245 not rendered in this diff view)
+    }
+    return {
+      valid,
+      invalid,
+      total: dataLength
+    };
+  }
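`validateRecords` folds per-record outcomes into a single `{ valid, invalid, total }` summary, where each `invalid` entry carries the offending record, a joined error string, its index, and a path. A small hypothetical consumer of that shape:

```typescript
// Shapes inferred from validateRecords above; the consumer is a hypothetical sketch.
type ValidationFailure = { record: unknown; error: string; index: number; path: string };
type BatchValidationResult = { valid: unknown[]; invalid: ValidationFailure[]; total: number };

function summarize(result: BatchValidationResult): string {
  const failures = result.invalid
    .map((f) => `#${f.index} (${f.path}): ${f.error}`)
    .join("; ");
  return `${result.valid.length}/${result.total} valid` + (failures ? `; failed: ${failures}` : "");
}

console.log(summarize({
  valid: [{ id: 1 }],
  invalid: [{ record: {}, error: "Validation failed", index: 1, path: "root" }],
  total: 2,
})); // "1/2 valid; failed: #1 (root): Validation failed"
```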
+  /**
+   * Optimized batch retry that minimizes individual insert operations.
+   * Groups records into smaller batches to reduce round trips while still isolating failures.
+   *
+   * @private
+   */
+  async retryIndividualRecords(client, tableName, records) {
+    const successful = [];
+    const failed = [];
+    const RETRY_BATCH_SIZE = 10;
+    const totalRecords = records.length;
+    for (let i = 0; i < totalRecords; i += RETRY_BATCH_SIZE) {
+      const batchEnd = Math.min(i + RETRY_BATCH_SIZE, totalRecords);
+      const batch = records.slice(i, batchEnd);
+      try {
+        await client.insert({
+          table: quoteIdentifier(tableName),
+          values: batch,
+          format: "JSONEachRow",
+          clickhouse_settings: {
+            date_time_input_format: "best_effort",
+            // Add performance settings for retries
+            max_insert_block_size: RETRY_BATCH_SIZE,
+            max_block_size: RETRY_BATCH_SIZE
+          }
+        });
+        successful.push(...batch);
+      } catch (batchError) {
+        for (let j = 0; j < batch.length; j++) {
+          const record = batch[j];
           try {
             await client.insert({
               table: quoteIdentifier(tableName),
-              values:
+              values: [record],
               format: "JSONEachRow",
               clickhouse_settings: {
-                date_time_input_format: "best_effort"
-                // Add performance settings for retries
-                max_insert_block_size: RETRY_BATCH_SIZE,
-                max_block_size: RETRY_BATCH_SIZE
+                date_time_input_format: "best_effort"
               }
             });
-            successful.push(
-          } catch (
- … (old lines 1260-1265 not rendered in this diff view)
-            format: "JSONEachRow",
-            clickhouse_settings: {
-              date_time_input_format: "best_effort"
-            }
-          });
-          successful.push(record);
-        } catch (error) {
-          failed.push({
-            record,
-            error: error instanceof Error ? error.message : String(error),
-            index: i + j
-          });
-        }
-      }
-    }
-  }
-    return { successful, failed };
-  }
-  /**
-   * Validates input parameters and strategy compatibility
-   * @private
-   */
-  validateInsertParameters(data, options) {
-    const isStream = data instanceof import_node_stream.Readable;
-    const strategy = options?.strategy || "fail-fast";
-    const shouldValidate = options?.validate !== false;
-    if (isStream && strategy === "isolate") {
-      throw new Error(
-        "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
-      );
-    }
-    if (isStream && shouldValidate) {
-      console.warn(
-        "Validation is not supported with stream input. Validation will be skipped."
-      );
-    }
-    return { isStream, strategy, shouldValidate };
-  }
-  /**
-   * Handles early return cases for empty data
-   * @private
-   */
-  handleEmptyData(data, isStream) {
-    if (isStream && !data) {
-      return {
-        successful: 0,
-        failed: 0,
-        total: 0
-      };
-    }
-    if (!isStream && (!data || data.length === 0)) {
-      return {
-        successful: 0,
-        failed: 0,
-        total: 0
-      };
-    }
-    return null;
-  }
-  /**
-   * Performs pre-insertion validation for array data
-   * @private
-   */
-  async performPreInsertionValidation(data, shouldValidate, strategy, options) {
-    if (!shouldValidate) {
-      return { validatedData: data, validationErrors: [] };
-    }
-    try {
-      const validationResult = await this.validateRecords(data);
-      const validatedData = validationResult.valid;
-      const validationErrors = validationResult.invalid;
-      if (validationErrors.length > 0) {
-        this.handleValidationErrors(validationErrors, strategy, data, options);
-        switch (strategy) {
-          case "discard":
-            return { validatedData, validationErrors };
-          case "isolate":
-            return { validatedData: data, validationErrors };
-          default:
-            return { validatedData, validationErrors };
-        }
-      }
-      return { validatedData, validationErrors };
-    } catch (validationError) {
-      if (strategy === "fail-fast") {
-        throw validationError;
-      }
-      console.warn("Validation error:", validationError);
-      return { validatedData: data, validationErrors: [] };
-    }
-  }
-  /**
-   * Handles validation errors based on the specified strategy
-   * @private
-   */
-  handleValidationErrors(validationErrors, strategy, data, options) {
-    switch (strategy) {
-      case "fail-fast":
-        const firstError = validationErrors[0];
-        throw new Error(
-          `Validation failed for record at index ${firstError.index}: ${firstError.error}`
-        );
-      case "discard":
-        this.checkValidationThresholds(validationErrors, data.length, options);
-        break;
-      case "isolate":
-        break;
-    }
-  }
-  /**
-   * Checks if validation errors exceed configured thresholds
-   * @private
-   */
-  checkValidationThresholds(validationErrors, totalRecords, options) {
-    const validationFailedCount = validationErrors.length;
-    const validationFailedRatio = validationFailedCount / totalRecords;
-    if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
-      throw new Error(
-        `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
-      );
-    }
-    if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
-      throw new Error(
-        `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
-      );
-    }
-  }
-  /**
-   * Optimized insert options preparation with better memory management
-   * @private
-   */
-  prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
-    const insertOptions = {
-      table: quoteIdentifier(tableName),
-      format: "JSONEachRow",
-      clickhouse_settings: {
-        date_time_input_format: "best_effort",
-        wait_end_of_query: 1,
-        // Ensure at least once delivery for INSERT operations
-        // Performance optimizations
-        max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
-        max_block_size: 65536,
-        // Use async inserts for better performance with large datasets
-        async_insert: validatedData.length > 1e3 ? 1 : 0,
-        wait_for_async_insert: 1
-        // For at least once delivery
-      }
-    };
-    if (isStream) {
-      insertOptions.values = data;
-    } else {
-      insertOptions.values = validatedData;
-    }
-    if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
-      if (options.allowErrors !== void 0) {
-        insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
-      }
-      if (options.allowErrorsRatio !== void 0) {
-        insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
-      }
-    }
-    return insertOptions;
-  }
-  /**
-   * Creates success result for completed insertions
-   * @private
-   */
-  createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
-    if (isStream) {
-      return {
-        successful: -1,
-        // -1 indicates stream mode where count is unknown
-        failed: 0,
-        total: -1
-      };
-    }
-    const insertedCount = validatedData.length;
-    const totalProcessed = shouldValidate ? data.length : insertedCount;
-    const result = {
-      successful: insertedCount,
-      failed: shouldValidate ? validationErrors.length : 0,
-      total: totalProcessed
-    };
-    if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
-      result.failedRecords = validationErrors.map((ve) => ({
-        record: ve.record,
-        error: `Validation error: ${ve.error}`,
-        index: ve.index
-      }));
-    }
-    return result;
-  }
-  /**
-   * Handles insertion errors based on the specified strategy
-   * @private
-   */
-  async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
-    switch (strategy) {
-      case "fail-fast":
-        throw new Error(
-          `Failed to insert data into table ${tableName}: ${batchError}`
-        );
-      case "discard":
-        throw new Error(
-          `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
-        );
-      case "isolate":
-        return await this.handleIsolateStrategy(
-          batchError,
-          tableName,
-          data,
-          validatedData,
-          validationErrors,
-          isStream,
-          shouldValidate,
-          options
-        );
-      default:
-        throw new Error(`Unknown error strategy: ${strategy}`);
-    }
-  }
-  /**
-   * Handles the isolate strategy for insertion errors
-   * @private
-   */
-  async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
-    if (isStream) {
-      throw new Error(
-        `Isolate strategy is not supported with stream input: ${batchError}`
-      );
-    }
-    try {
-      const { client } = await this.getMemoizedClient();
-      const skipValidationOnRetry = options?.skipValidationOnRetry || false;
-      const retryData = skipValidationOnRetry ? data : validatedData;
-      const { successful, failed } = await this.retryIndividualRecords(
-        client,
-        tableName,
-        retryData
-      );
-      const allFailedRecords = [
-        // Validation errors (if any and not skipping validation on retry)
-        ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
-          record: ve.record,
-          error: `Validation error: ${ve.error}`,
-          index: ve.index
-        })) : [],
-        // Insertion errors
-        ...failed
-      ];
-      this.checkInsertionThresholds(
-        allFailedRecords,
-        data.length,
-        options
-      );
-      return {
-        successful: successful.length,
-        failed: allFailedRecords.length,
-        total: data.length,
-        failedRecords: allFailedRecords
-      };
-    } catch (isolationError) {
-      throw new Error(
-        `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
-      );
-    }
-  }
-  /**
-   * Checks if insertion errors exceed configured thresholds
-   * @private
-   */
-  checkInsertionThresholds(failedRecords, totalRecords, options) {
-    const totalFailed = failedRecords.length;
-    const failedRatio = totalFailed / totalRecords;
-    if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
-      throw new Error(
-        `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
-      );
-    }
-    if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
-      throw new Error(
-        `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
-      );
-    }
-  }
-  /**
-   * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
-   *
-   * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
-   * - For every Nested struct (not array), it recurses into the struct.
-   * - This ensures compatibility with kafka_clickhouse_sync
-   *
-   * @param record The input record to transform (may be deeply nested)
-   * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
-   * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
-   */
-  mapToClickhouseRecord(record, columns = this.columnArray) {
-    const result = { ...record };
-    for (const col of columns) {
-      const value = record[col.name];
-      const dt = col.data_type;
-      if (isArrayNestedType(dt)) {
-        if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
-          result[col.name] = value.map((item) => [
-            this.mapToClickhouseRecord(item, dt.elementType.columns)
-          ]);
-        }
-      } else if (isNestedType(dt)) {
-        if (value && typeof value === "object") {
-          result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
-        }
-      }
-    }
-    return result;
-  }
-  /**
-   * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
-   * This method establishes a direct connection to ClickHouse using the project configuration
-   * and inserts the provided data into the versioned table.
-   *
-   * PERFORMANCE OPTIMIZATIONS:
-   * - Memoized client connections with fast config hashing
-   * - Single-pass validation with pre-allocated arrays
-   * - Batch-optimized retry strategy (batches of 10, then individual)
-   * - Optimized ClickHouse settings for large datasets
-   * - Reduced memory allocations and object creation
-   *
-   * Uses advanced typia validation when available for comprehensive type checking,
-   * with fallback to basic validation for compatibility.
-   *
-   * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
-   * If the configuration changes, a new client will be automatically created.
-   *
-   * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
-   * @param options Optional configuration for error handling, validation, and insertion behavior
-   * @returns Promise resolving to detailed insertion results
-   * @throws {ConfigError} When configuration cannot be read or parsed
-   * @throws {ClickHouseError} When insertion fails based on the error strategy
-   * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
-   *
-   * @example
-   * ```typescript
-   * // Create an OlapTable instance (typia validators auto-injected)
-   * const userTable = new OlapTable<User>('users');
-   *
-   * // Insert with comprehensive typia validation
-   * const result1 = await userTable.insert([
-   *   { id: 1, name: 'John', email: 'john@example.com' },
-   *   { id: 2, name: 'Jane', email: 'jane@example.com' }
-   * ]);
-   *
-   * // Insert data with stream input (validation not available for streams)
-   * const dataStream = new Readable({
-   *   objectMode: true,
-   *   read() { // Stream implementation }
-   * });
-   * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
-   *
-   * // Insert with validation disabled for performance
-   * const result3 = await userTable.insert(data, { validate: false });
-   *
-   * // Insert with error handling strategies
-   * const result4 = await userTable.insert(mixedData, {
-   *   strategy: 'isolate',
-   *   allowErrorsRatio: 0.1,
-   *   validate: true // Use typia validation (default)
-   * });
-   *
-   * // Optional: Clean up connection when completely done
-   * await userTable.closeClient();
-   * ```
-   */
-  async insert(data, options) {
-    const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
-    const emptyResult = this.handleEmptyData(data, isStream);
-    if (emptyResult) {
-      return emptyResult;
-    }
-    let validatedData = [];
-    let validationErrors = [];
-    if (!isStream && shouldValidate) {
-      const validationResult = await this.performPreInsertionValidation(
-        data,
-        shouldValidate,
-        strategy,
-        options
-      );
-      validatedData = validationResult.validatedData;
-      validationErrors = validationResult.validationErrors;
-    } else {
-      validatedData = isStream ? [] : data;
-    }
-    const { client } = await this.getMemoizedClient();
-    const tableName = this.generateTableName();
-    try {
-      const insertOptions = this.prepareInsertOptions(
-        tableName,
-        data,
-        validatedData,
-        isStream,
-        strategy,
-        options
-      );
-      await client.insert(insertOptions);
-      return this.createSuccessResult(
-        data,
-        validatedData,
-        validationErrors,
-        isStream,
-        shouldValidate,
-        strategy
-      );
-    } catch (batchError) {
-      return await this.handleInsertionError(
-        batchError,
-        strategy,
-        tableName,
-        data,
-        validatedData,
-        validationErrors,
-        isStream,
-        shouldValidate,
-        options
-      );
+            successful.push(record);
+          } catch (error) {
+            failed.push({
+              record,
+              error: error instanceof Error ? error.message : String(error),
+              index: i + j
+            });
+          }
         }
       }
-
-
-  // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
-  };
+    }
+    return { successful, failed };
   }
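The retry path above works in two tiers: whole sub-batches of `RETRY_BATCH_SIZE` first (one round trip each), then record-by-record only for a sub-batch that failed. A self-contained sketch of the same pattern, with the ClickHouse client swapped for an injected `insertBatch` callback (all names here are stand-ins, not the library's API):

```typescript
// Sketch of the batch-then-individual retry pattern, assuming an injected inserter.
async function insertWithIsolation<T>(
  records: T[],
  insertBatch: (batch: T[]) => Promise<void>, // stand-in for client.insert
  batchSize = 10 // mirrors RETRY_BATCH_SIZE above
): Promise<{ successful: T[]; failed: { record: T; error: string; index: number }[] }> {
  const successful: T[] = [];
  const failed: { record: T; error: string; index: number }[] = [];
  for (let i = 0; i < records.length; i += batchSize) {
    const batch = records.slice(i, i + batchSize);
    try {
      await insertBatch(batch); // one round trip for the whole sub-batch
      successful.push(...batch);
    } catch {
      // Sub-batch failed: insert record-by-record to isolate the bad rows.
      for (let j = 0; j < batch.length; j++) {
        try {
          await insertBatch([batch[j]]);
          successful.push(batch[j]);
        } catch (error) {
          failed.push({
            record: batch[j],
            error: error instanceof Error ? error.message : String(error),
            index: i + j,
          });
        }
      }
    }
  }
  return { successful, failed };
}

// Demo: a fake inserter that rejects any batch containing a negative number.
(async () => {
  const insertBatch = async (batch: number[]) => {
    if (batch.some((n) => n < 0)) throw new Error("bad row");
  };
  const result = await insertWithIsolation([1, 2, -3, 4], insertBatch, 2);
  console.log(result.successful, result.failed.map((f) => f.index)); // [ 1, 2, 4 ] [ 2 ]
})();
```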
- … (old lines 1697-1737 not rendered in this diff view)
+  /**
+   * Validates input parameters and strategy compatibility
+   * @private
+   */
+  validateInsertParameters(data, options) {
+    const isStream = data instanceof import_node_stream.Readable;
+    const strategy = options?.strategy || "fail-fast";
+    const shouldValidate = options?.validate !== false;
+    if (isStream && strategy === "isolate") {
+      throw new Error(
+        "The 'isolate' error strategy is not supported with stream input. Use 'fail-fast' or 'discard' instead."
+      );
+    }
+    if (isStream && shouldValidate) {
+      console.warn(
+        "Validation is not supported with stream input. Validation will be skipped."
+      );
+    }
+    return { isStream, strategy, shouldValidate };
+  }
+  /**
+   * Handles early return cases for empty data
+   * @private
+   */
+  handleEmptyData(data, isStream) {
+    if (isStream && !data) {
+      return {
+        successful: 0,
+        failed: 0,
+        total: 0
+      };
+    }
+    if (!isStream && (!data || data.length === 0)) {
+      return {
+        successful: 0,
+        failed: 0,
+        total: 0
+      };
+    }
+    return null;
+  }
+  /**
+   * Performs pre-insertion validation for array data
+   * @private
+   */
+  async performPreInsertionValidation(data, shouldValidate, strategy, options) {
+    if (!shouldValidate) {
+      return { validatedData: data, validationErrors: [] };
+    }
+    try {
+      const validationResult = await this.validateRecords(data);
+      const validatedData = validationResult.valid;
+      const validationErrors = validationResult.invalid;
+      if (validationErrors.length > 0) {
+        this.handleValidationErrors(validationErrors, strategy, data, options);
+        switch (strategy) {
+          case "discard":
+            return { validatedData, validationErrors };
+          case "isolate":
+            return { validatedData: data, validationErrors };
+          default:
+            return { validatedData, validationErrors };
         }
-  streams.set(name, this);
-  this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
-  }
-  /**
-   * Internal map storing transformation configurations.
-   * Maps destination stream names to arrays of transformation functions and their configs.
-   *
-   * @internal
-   */
-  _transformations = /* @__PURE__ */ new Map();
-  /**
-   * Internal function for multi-stream transformations.
-   * Allows a single transformation to route messages to multiple destinations.
-   *
-   * @internal
-   */
-  _multipleTransformations;
-  /**
-   * Internal array storing consumer configurations.
-   *
-   * @internal
-   */
-  _consumers = new Array();
-  /**
-   * Builds the full Kafka topic name including optional namespace and version suffix.
-   * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
-   */
-  buildFullTopicName(namespace) {
-    const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
-    const base = `${this.name}${versionSuffix}`;
-    return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
       }
- … (old lines 1771-1774 not rendered in this diff view)
-    const configString = [
-      kafkaConfig.broker,
-      kafkaConfig.messageTimeoutMs,
-      kafkaConfig.saslUsername,
-      kafkaConfig.saslPassword,
-      kafkaConfig.saslMechanism,
-      kafkaConfig.securityProtocol,
-      kafkaConfig.namespace
-    ].join(":");
-    return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
+      return { validatedData, validationErrors };
+    } catch (validationError) {
+      if (strategy === "fail-fast") {
+        throw validationError;
       }
- … (old lines 1786-1799 not rendered in this diff view)
-        await this._memoizedProducer.disconnect();
-      } catch {
-      }
-      this._memoizedProducer = void 0;
-    }
-    const clientId = `moose-sdk-stream-${this.name}`;
-    const logger = {
-      logPrefix: clientId,
-      log: (message) => {
-        console.log(`${clientId}: ${message}`);
-      },
-      error: (message) => {
-        console.error(`${clientId}: ${message}`);
-      },
-      warn: (message) => {
-        console.warn(`${clientId}: ${message}`);
-      }
-    };
-    const producer = await getKafkaProducer2(
-      {
-        clientId,
-        broker: kafkaConfig.broker,
-        securityProtocol: kafkaConfig.securityProtocol,
-        saslUsername: kafkaConfig.saslUsername,
-        saslPassword: kafkaConfig.saslPassword,
-        saslMechanism: kafkaConfig.saslMechanism
-      },
-      logger
+      console.warn("Validation error:", validationError);
+      return { validatedData: data, validationErrors: [] };
+    }
+  }
+  /**
+   * Handles validation errors based on the specified strategy
+   * @private
+   */
+  handleValidationErrors(validationErrors, strategy, data, options) {
+    switch (strategy) {
+      case "fail-fast":
+        const firstError = validationErrors[0];
+        throw new Error(
+          `Validation failed for record at index ${firstError.index}: ${firstError.error}`
         );
-
-    this.
-
+      case "discard":
+        this.checkValidationThresholds(validationErrors, data.length, options);
+        break;
+      case "isolate":
+        break;
+    }
+  }
+  /**
+   * Checks if validation errors exceed configured thresholds
+   * @private
+   */
+  checkValidationThresholds(validationErrors, totalRecords, options) {
+    const validationFailedCount = validationErrors.length;
+    const validationFailedRatio = validationFailedCount / totalRecords;
+    if (options?.allowErrors !== void 0 && validationFailedCount > options.allowErrors) {
+      throw new Error(
+        `Too many validation failures: ${validationFailedCount} > ${options.allowErrors}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
+      );
+    }
+    if (options?.allowErrorsRatio !== void 0 && validationFailedRatio > options.allowErrorsRatio) {
+      throw new Error(
+        `Validation failure ratio too high: ${validationFailedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Errors: ${validationErrors.map((e) => e.error).join(", ")}`
+      );
+    }
+  }
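`checkValidationThresholds` enforces two independent limits, an absolute count (`allowErrors`) and a fraction of the batch (`allowErrorsRatio`), and each applies only when the option is set. Worked numbers for a hypothetical 100-record batch:

```typescript
// Hypothetical batch: both thresholds set; each is checked only because it is defined.
const options = { allowErrors: 5, allowErrorsRatio: 0.1 };
const totalRecords = 100;
const failures = 6;

const countExceeded = failures > options.allowErrors;                     // 6 > 5 -> would throw
const ratioExceeded = failures / totalRecords > options.allowErrorsRatio; // 0.06 > 0.1 -> fine

console.log({ countExceeded, ratioExceeded }); // { countExceeded: true, ratioExceeded: false }
```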
+  /**
+   * Optimized insert options preparation with better memory management
+   * @private
+   */
+  prepareInsertOptions(tableName, data, validatedData, isStream, strategy, options) {
+    const insertOptions = {
+      table: quoteIdentifier(tableName),
+      format: "JSONEachRow",
+      clickhouse_settings: {
+        date_time_input_format: "best_effort",
+        wait_end_of_query: 1,
+        // Ensure at least once delivery for INSERT operations
+        // Performance optimizations
+        max_insert_block_size: isStream ? 1e5 : Math.min(validatedData.length, 1e5),
+        max_block_size: 65536,
+        // Use async inserts for better performance with large datasets
+        async_insert: validatedData.length > 1e3 ? 1 : 0,
+        wait_for_async_insert: 1
+        // For at least once delivery
       }
- … (old lines 1833-1841 not rendered in this diff view)
-      this._memoizedProducer = void 0;
-      this._kafkaConfigHash = void 0;
-    }
-  }
+    };
+    if (isStream) {
+      insertOptions.values = data;
+    } else {
+      insertOptions.values = validatedData;
+    }
+    if (strategy === "discard" && (options?.allowErrors !== void 0 || options?.allowErrorsRatio !== void 0)) {
+      if (options.allowErrors !== void 0) {
+        insertOptions.clickhouse_settings.input_format_allow_errors_num = options.allowErrors;
       }
-
-
-   * Values are JSON-serialized as message values.
-   */
-  async send(values) {
-    const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
-    if (flat.length === 0) return;
-    const { producer, kafkaConfig } = await this.getMemoizedProducer();
-    const topic = this.buildFullTopicName(kafkaConfig.namespace);
-    const sr = this.config.schemaConfig;
-    if (sr && sr.kind === "JSON") {
-      const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
-      if (!schemaRegistryUrl) {
-        throw new Error("Schema Registry URL not configured");
-      }
-      const {
-        default: { SchemaRegistry }
-      } = await import("@kafkajs/confluent-schema-registry");
-      const registry = new SchemaRegistry({ host: schemaRegistryUrl });
-      let schemaId = void 0;
-      if ("id" in sr.reference) {
-        schemaId = sr.reference.id;
-      } else if ("subjectLatest" in sr.reference) {
-        schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
-      } else if ("subject" in sr.reference) {
-        schemaId = await registry.getRegistryId(
-          sr.reference.subject,
-          sr.reference.version
-        );
-      }
-      if (schemaId === void 0) {
-        throw new Error("Malformed schema reference.");
-      }
-      const encoded = await Promise.all(
-        flat.map(
-          (v) => registry.encode(schemaId, v)
-        )
-      );
-      await producer.send({
-        topic,
-        messages: encoded.map((value) => ({ value }))
-      });
-      return;
-    } else if (sr !== void 0) {
-      throw new Error("Currently only JSON Schema is supported.");
-    }
-    await producer.send({
-      topic,
-      messages: flat.map((v) => ({ value: JSON.stringify(v) }))
-    });
+      if (options.allowErrorsRatio !== void 0) {
+        insertOptions.clickhouse_settings.input_format_allow_errors_ratio = options.allowErrorsRatio;
       }
- … (old lines 1898-1927 not rendered in this diff view)
+    }
+    return insertOptions;
+  }
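For an array input, the settings computed by `prepareInsertOptions` scale with the validated batch: the insert block size is capped by the batch length, and async inserts switch on past 1,000 rows. Plugging a hypothetical 5,000-row batch into the same expressions:

```typescript
// Hypothetical 5,000-row array insert; the values below follow the expressions above.
const validatedLength = 5e3;
const isStream = false;

const clickhouse_settings = {
  date_time_input_format: "best_effort",
  wait_end_of_query: 1, // at-least-once delivery for the INSERT
  max_insert_block_size: isStream ? 1e5 : Math.min(validatedLength, 1e5), // 5000
  max_block_size: 65536,
  async_insert: validatedLength > 1e3 ? 1 : 0, // 1: async path for >1,000 rows
  wait_for_async_insert: 1, // still wait, to keep delivery guarantees
};

console.log(clickhouse_settings.max_insert_block_size, clickhouse_settings.async_insert); // 5000 1
```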
+  /**
+   * Creates success result for completed insertions
+   * @private
+   */
+  createSuccessResult(data, validatedData, validationErrors, isStream, shouldValidate, strategy) {
+    if (isStream) {
+      return {
+        successful: -1,
+        // -1 indicates stream mode where count is unknown
+        failed: 0,
+        total: -1
+      };
+    }
+    const insertedCount = validatedData.length;
+    const totalProcessed = shouldValidate ? data.length : insertedCount;
+    const result = {
+      successful: insertedCount,
+      failed: shouldValidate ? validationErrors.length : 0,
+      total: totalProcessed
+    };
+    if (shouldValidate && validationErrors.length > 0 && strategy === "discard") {
+      result.failedRecords = validationErrors.map((ve) => ({
+        record: ve.record,
+        error: `Validation error: ${ve.error}`,
+        index: ve.index
+      }));
+    }
+    return result;
+  }
+  /**
+   * Handles insertion errors based on the specified strategy
+   * @private
+   */
+  async handleInsertionError(batchError, strategy, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
+    switch (strategy) {
+      case "fail-fast":
+        throw new Error(
+          `Failed to insert data into table ${tableName}: ${batchError}`
+        );
+      case "discard":
+        throw new Error(
+          `Too many errors during insert into table ${tableName}. Error threshold exceeded: ${batchError}`
+        );
+      case "isolate":
+        return await this.handleIsolateStrategy(
+          batchError,
+          tableName,
+          data,
+          validatedData,
+          validationErrors,
+          isStream,
+          shouldValidate,
+          options
+        );
+      default:
+        throw new Error(`Unknown error strategy: ${strategy}`);
+    }
+  }
+  /**
+   * Handles the isolate strategy for insertion errors
+   * @private
+   */
+  async handleIsolateStrategy(batchError, tableName, data, validatedData, validationErrors, isStream, shouldValidate, options) {
+    if (isStream) {
+      throw new Error(
+        `Isolate strategy is not supported with stream input: ${batchError}`
+      );
+    }
+    try {
+      const { client } = await this.getMemoizedClient();
+      const skipValidationOnRetry = options?.skipValidationOnRetry || false;
+      const retryData = skipValidationOnRetry ? data : validatedData;
+      const { successful, failed } = await this.retryIndividualRecords(
+        client,
+        tableName,
+        retryData
+      );
+      const allFailedRecords = [
+        // Validation errors (if any and not skipping validation on retry)
+        ...shouldValidate && !skipValidationOnRetry ? validationErrors.map((ve) => ({
+          record: ve.record,
+          error: `Validation error: ${ve.error}`,
+          index: ve.index
+        })) : [],
+        // Insertion errors
+        ...failed
+      ];
+      this.checkInsertionThresholds(
+        allFailedRecords,
+        data.length,
+        options
+      );
+      return {
+        successful: successful.length,
+        failed: allFailedRecords.length,
+        total: data.length,
+        failedRecords: allFailedRecords
+      };
+    } catch (isolationError) {
+      throw new Error(
+        `Failed to insert data into table ${tableName} during record isolation: ${isolationError}`
+      );
+    }
+  }
+  /**
+   * Checks if insertion errors exceed configured thresholds
+   * @private
+   */
+  checkInsertionThresholds(failedRecords, totalRecords, options) {
+    const totalFailed = failedRecords.length;
+    const failedRatio = totalFailed / totalRecords;
+    if (options?.allowErrors !== void 0 && totalFailed > options.allowErrors) {
+      throw new Error(
+        `Too many failed records: ${totalFailed} > ${options.allowErrors}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
+      );
+    }
+    if (options?.allowErrorsRatio !== void 0 && failedRatio > options.allowErrorsRatio) {
+      throw new Error(
+        `Failed record ratio too high: ${failedRatio.toFixed(3)} > ${options.allowErrorsRatio}. Failed records: ${failedRecords.map((f) => f.error).join(", ")}`
+      );
+    }
+  }
+  /**
+   * Recursively transforms a record to match ClickHouse's JSONEachRow requirements
+   *
+   * - For every Array(Nested(...)) field at any depth, each item is wrapped in its own array and recursively processed.
+   * - For every Nested struct (not array), it recurses into the struct.
+   * - This ensures compatibility with kafka_clickhouse_sync
+   *
+   * @param record The input record to transform (may be deeply nested)
+   * @param columns The schema columns for this level (defaults to this.columnArray at the top level)
+   * @returns The transformed record, ready for ClickHouse JSONEachRow insertion
+   */
+  mapToClickhouseRecord(record, columns = this.columnArray) {
+    const result = { ...record };
+    for (const col of columns) {
+      const value = record[col.name];
+      const dt = col.data_type;
+      if (isArrayNestedType(dt)) {
+        if (Array.isArray(value) && (value.length === 0 || typeof value[0] === "object")) {
+          result[col.name] = value.map((item) => [
+            this.mapToClickhouseRecord(item, dt.elementType.columns)
           ]);
         }
-      }
-
-
-   * Multiple consumers can be added if they have distinct `version` identifiers in their config.
-   *
-   * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
-   * @param config Optional configuration for this specific consumer, like a version.
-   */
-  addConsumer(consumer, config) {
-    const sourceFile = getSourceFileFromStack(new Error().stack);
-    const consumerConfig = {
-      ...config ?? {},
-      sourceFile
-    };
-    if (consumerConfig.deadLetterQueue === void 0) {
-      consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
-    }
-    const hasVersion = this._consumers.some(
-      (existing) => existing.config.version === consumerConfig.version
-    );
-    if (!hasVersion) {
-      this._consumers.push({ consumer, config: consumerConfig });
+      } else if (isNestedType(dt)) {
+        if (value && typeof value === "object") {
+          result[col.name] = this.mapToClickhouseRecord(value, dt.columns);
         }
       }
- … (old lines 1954-1977 not rendered in this diff view)
+    }
+    return result;
+  }
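The transform above only rewrites fields whose schema type is nested: items of an `Array(Nested(...))` column are each wrapped in a one-element array. A before/after illustration with a hypothetical `tags` column of that type:

```typescript
// Hypothetical column `tags` typed as Array(Nested(k String)).
const record = { id: 1, tags: [{ k: "a" }, { k: "b" }] };
console.log(JSON.stringify(record.tags)); // [{"k":"a"},{"k":"b"}]

// After mapToClickhouseRecord, each nested item sits in its own one-element array,
// which is the shape the JSONEachRow sync path expects for Array(Nested(...)):
const mapped = { id: 1, tags: [[{ k: "a" }], [{ k: "b" }]] };
console.log(JSON.stringify(mapped.tags)); // [[{"k":"a"}],[{"k":"b"}]]
```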
+  /**
+   * Inserts data directly into the ClickHouse table with enhanced error handling and validation.
+   * This method establishes a direct connection to ClickHouse using the project configuration
+   * and inserts the provided data into the versioned table.
+   *
+   * PERFORMANCE OPTIMIZATIONS:
+   * - Memoized client connections with fast config hashing
+   * - Single-pass validation with pre-allocated arrays
+   * - Batch-optimized retry strategy (batches of 10, then individual)
+   * - Optimized ClickHouse settings for large datasets
+   * - Reduced memory allocations and object creation
+   *
+   * Uses advanced typia validation when available for comprehensive type checking,
+   * with fallback to basic validation for compatibility.
+   *
+   * The ClickHouse client is memoized and reused across multiple insert calls for better performance.
+   * If the configuration changes, a new client will be automatically created.
+   *
+   * @param data Array of objects conforming to the table schema, or a Node.js Readable stream
+   * @param options Optional configuration for error handling, validation, and insertion behavior
+   * @returns Promise resolving to detailed insertion results
+   * @throws {ConfigError} When configuration cannot be read or parsed
+   * @throws {ClickHouseError} When insertion fails based on the error strategy
+   * @throws {ValidationError} When validation fails and strategy is 'fail-fast'
+   *
+   * @example
+   * ```typescript
+   * // Create an OlapTable instance (typia validators auto-injected)
+   * const userTable = new OlapTable<User>('users');
+   *
+   * // Insert with comprehensive typia validation
+   * const result1 = await userTable.insert([
+   *   { id: 1, name: 'John', email: 'john@example.com' },
+   *   { id: 2, name: 'Jane', email: 'jane@example.com' }
+   * ]);
+   *
+   * // Insert data with stream input (validation not available for streams)
+   * const dataStream = new Readable({
+   *   objectMode: true,
+   *   read() { // Stream implementation }
+   * });
+   * const result2 = await userTable.insert(dataStream, { strategy: 'fail-fast' });
+   *
+   * // Insert with validation disabled for performance
+   * const result3 = await userTable.insert(data, { validate: false });
+   *
+   * // Insert with error handling strategies
+   * const result4 = await userTable.insert(mixedData, {
+   *   strategy: 'isolate',
+   *   allowErrorsRatio: 0.1,
+   *   validate: true // Use typia validation (default)
+   * });
+   *
+   * // Optional: Clean up connection when completely done
+   * await userTable.closeClient();
+   * ```
+   */
+  async insert(data, options) {
+    const { isStream, strategy, shouldValidate } = this.validateInsertParameters(data, options);
+    const emptyResult = this.handleEmptyData(data, isStream);
+    if (emptyResult) {
+      return emptyResult;
+    }
+    let validatedData = [];
+    let validationErrors = [];
+    if (!isStream && shouldValidate) {
+      const validationResult = await this.performPreInsertionValidation(
+        data,
+        shouldValidate,
+        strategy,
+        options
+      );
+      validatedData = validationResult.validatedData;
+      validationErrors = validationResult.validationErrors;
+    } else {
+      validatedData = isStream ? [] : data;
+    }
+    const { client } = await this.getMemoizedClient();
+    const tableName = this.generateTableName();
+    try {
+      const insertOptions = this.prepareInsertOptions(
+        tableName,
+        data,
+        validatedData,
+        isStream,
+        strategy,
+        options
+      );
+      await client.insert(insertOptions);
+      return this.createSuccessResult(
+        data,
+        validatedData,
+        validationErrors,
+        isStream,
+        shouldValidate,
+        strategy
+      );
+    } catch (batchError) {
+      return await this.handleInsertionError(
+        batchError,
+        strategy,
+        tableName,
+        data,
+        validatedData,
+        validationErrors,
+        isStream,
+        shouldValidate,
+        options
+      );
+    }
+  }
+  // Note: Static factory methods (withS3Queue, withReplacingMergeTree, withMergeTree)
+  // were removed in ENG-856. Use direct configuration instead, e.g.:
+  // new OlapTable(name, { engine: ClickHouseEngines.ReplacingMergeTree, orderByFields: ["id"], ver: "updated_at" })
+};
+
+// src/dmv2/sdk/stream.ts
+var import_node_crypto3 = require("crypto");
+var RoutedMessage = class {
+  /** The destination stream for the message */
+  destination;
+  /** The message value(s) to send */
+  values;
+  /**
+   * Creates a new routed message.
+   *
+   * @param destination The target stream
+   * @param values The message(s) to route
+   */
+  constructor(destination, values) {
+    this.destination = destination;
+    this.values = values;
+  }
+};
+var Stream = class extends TypedBase {
+  defaultDeadLetterQueue;
+  /** @internal Memoized KafkaJS producer for reusing connections across sends */
+  _memoizedProducer;
+  /** @internal Hash of the configuration used to create the memoized Kafka producer */
+  _kafkaConfigHash;
+  constructor(name, config, schema, columns, validators, allowExtraFields) {
+    super(name, config ?? {}, schema, columns, void 0, allowExtraFields);
+    const streams = getMooseInternal().streams;
+    if (streams.has(name)) {
+      throw new Error(`Stream with name ${name} already exists`);
+    }
+    streams.set(name, this);
+    this.defaultDeadLetterQueue = this.config.defaultDeadLetterQueue;
+  }
+  /**
+   * Internal map storing transformation configurations.
+   * Maps destination stream names to arrays of transformation functions and their configs.
+   *
+   * @internal
+   */
+  _transformations = /* @__PURE__ */ new Map();
+  /**
+   * Internal function for multi-stream transformations.
+   * Allows a single transformation to route messages to multiple destinations.
+   *
+   * @internal
+   */
+  _multipleTransformations;
+  /**
+   * Internal array storing consumer configurations.
+   *
+   * @internal
+   */
+  _consumers = new Array();
+  /**
+   * Builds the full Kafka topic name including optional namespace and version suffix.
+   * Version suffix is appended as _x_y_z where dots in version are replaced with underscores.
+   */
+  buildFullTopicName(namespace) {
+    const versionSuffix = this.config.version ? `_${this.config.version.replace(/\./g, "_")}` : "";
+    const base = `${this.name}${versionSuffix}`;
+    return namespace !== void 0 && namespace.length > 0 ? `${namespace}.${base}` : base;
+  }
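`buildFullTopicName` derives the Kafka topic from the stream name, an optional `_x_y_z` version suffix, and an optional namespace prefix. The same computation as a standalone function, with sample outputs:

```typescript
// Standalone mirror of buildFullTopicName above (names here are illustrative).
function fullTopicName(name: string, version?: string, namespace?: string): string {
  const versionSuffix = version ? `_${version.replace(/\./g, "_")}` : "";
  const base = `${name}${versionSuffix}`;
  return namespace !== undefined && namespace.length > 0 ? `${namespace}.${base}` : base;
}

console.log(fullTopicName("events", "1.2.0", "prod")); // "prod.events_1_2_0"
console.log(fullTopicName("events"));                  // "events"
```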
+  /**
+   * Creates a fast hash string from relevant Kafka configuration fields.
+   */
+  createConfigHash(kafkaConfig) {
+    const configString = [
+      kafkaConfig.broker,
+      kafkaConfig.messageTimeoutMs,
+      kafkaConfig.saslUsername,
+      kafkaConfig.saslPassword,
+      kafkaConfig.saslMechanism,
+      kafkaConfig.securityProtocol,
+      kafkaConfig.namespace
+    ].join(":");
+    return (0, import_node_crypto3.createHash)("sha256").update(configString).digest("hex").substring(0, 16);
+  }
+  /**
+   * Gets or creates a memoized KafkaJS producer using runtime configuration.
+   */
+  async getMemoizedProducer() {
+    await Promise.resolve().then(() => (init_runtime(), runtime_exports));
+    const configRegistry = globalThis._mooseConfigRegistry;
+    const { getKafkaProducer: getKafkaProducer2 } = await Promise.resolve().then(() => (init_commons(), commons_exports));
+    const kafkaConfig = await configRegistry.getKafkaConfig();
+    const currentHash = this.createConfigHash(kafkaConfig);
+    if (this._memoizedProducer && this._kafkaConfigHash === currentHash) {
+      return { producer: this._memoizedProducer, kafkaConfig };
+    }
+    if (this._memoizedProducer && this._kafkaConfigHash !== currentHash) {
+      try {
+        await this._memoizedProducer.disconnect();
+      } catch {
+      }
+      this._memoizedProducer = void 0;
+    }
+    const clientId = `moose-sdk-stream-${this.name}`;
+    const logger = {
+      logPrefix: clientId,
+      log: (message) => {
+        console.log(`${clientId}: ${message}`);
+      },
+      error: (message) => {
+        console.error(`${clientId}: ${message}`);
+      },
+      warn: (message) => {
+        console.warn(`${clientId}: ${message}`);
       }
     };
- … (old lines 1980-1989 not rendered in this diff view)
+    const producer = await getKafkaProducer2(
+      {
+        clientId,
+        broker: kafkaConfig.broker,
+        securityProtocol: kafkaConfig.securityProtocol,
+        saslUsername: kafkaConfig.saslUsername,
+        saslPassword: kafkaConfig.saslPassword,
+        saslMechanism: kafkaConfig.saslMechanism
+      },
+      logger
+    );
+    this._memoizedProducer = producer;
+    this._kafkaConfigHash = currentHash;
+    return { producer, kafkaConfig };
+  }
+  /**
+   * Closes the memoized Kafka producer if it exists.
+   */
+  async closeProducer() {
+    if (this._memoizedProducer) {
+      try {
+        await this._memoizedProducer.disconnect();
+      } catch {
+      } finally {
+        this._memoizedProducer = void 0;
+        this._kafkaConfigHash = void 0;
       }
- … (old lines 1991-2011 not rendered in this diff view)
+    }
+  }
+  /**
+   * Sends one or more records to this stream's Kafka topic.
+   * Values are JSON-serialized as message values.
+   */
+  async send(values) {
+    const flat = Array.isArray(values) ? values : values !== void 0 && values !== null ? [values] : [];
+    if (flat.length === 0) return;
+    const { producer, kafkaConfig } = await this.getMemoizedProducer();
+    const topic = this.buildFullTopicName(kafkaConfig.namespace);
+    const sr = this.config.schemaConfig;
+    if (sr && sr.kind === "JSON") {
+      const schemaRegistryUrl = kafkaConfig.schemaRegistryUrl;
+      if (!schemaRegistryUrl) {
+        throw new Error("Schema Registry URL not configured");
+      }
+      const {
+        default: { SchemaRegistry }
+      } = await import("@kafkajs/confluent-schema-registry");
+      const registry = new SchemaRegistry({ host: schemaRegistryUrl });
+      let schemaId = void 0;
+      if ("id" in sr.reference) {
+        schemaId = sr.reference.id;
+      } else if ("subjectLatest" in sr.reference) {
+        schemaId = await registry.getLatestSchemaId(sr.reference.subjectLatest);
+      } else if ("subject" in sr.reference) {
+        schemaId = await registry.getRegistryId(
+          sr.reference.subject,
+          sr.reference.version
+        );
       }
-
-
-   * The consumer function receives a DeadLetter<T> with type recovery capabilities.
-   *
-   * @param consumer Function to process dead letter records
-   * @param config Optional consumer configuration
-   */
-  addConsumer(consumer, config) {
-    const withValidate = (deadLetter) => {
-      attachTypeGuard(deadLetter, this.typeGuard);
-      return consumer(deadLetter);
-    };
-    super.addConsumer(withValidate, config);
+      if (schemaId === void 0) {
+        throw new Error("Malformed schema reference.");
       }
- … (old lines 2027-2038 not rendered in this diff view)
+      const encoded = await Promise.all(
+        flat.map(
+          (v) => registry.encode(schemaId, v)
+        )
+      );
+      await producer.send({
+        topic,
+        messages: encoded.map((value) => ({ value }))
+      });
+      return;
+    } else if (sr !== void 0) {
+      throw new Error("Currently only JSON Schema is supported.");
+    }
+    await producer.send({
+      topic,
+      messages: flat.map((v) => ({ value: JSON.stringify(v) }))
+    });
+  }
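`send` accepts a single value, an array, or nothing, flattens the input, and short-circuits on an empty batch before touching the producer. A usage sketch against a stub that mimics only that flattening behaviour (the stream and record type are hypothetical):

```typescript
// Usage sketch for Stream.send; the stub below reproduces only the value flattening.
interface PageView { url: string; ts: number }

const pageViews = {
  async send(values?: PageView | PageView[] | null): Promise<void> {
    const flat = Array.isArray(values) ? values : values != null ? [values] : [];
    if (flat.length === 0) return; // no-op, as in the method above
    console.log(`would produce ${flat.length} JSON message(s)`);
  },
};

(async () => {
  await pageViews.send({ url: "/home", ts: Date.now() });          // 1 message
  await pageViews.send([{ url: "/a", ts: 1 }, { url: "/b", ts: 2 }]); // 2 messages
  await pageViews.send([]);                                        // no-op
})();
```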
+  /**
+   * Adds a transformation step that processes messages from this stream and sends the results to a destination stream.
+   * Multiple transformations to the same destination stream can be added if they have distinct `version` identifiers in their config.
+   *
+   * @template U The data type of the messages in the destination stream.
+   * @param destination The destination stream for the transformed messages.
+   * @param transformation A function that takes a message of type T and returns zero or more messages of type U (or a Promise thereof).
+   * Return `null` or `undefined` or an empty array `[]` to filter out a message. Return an array to emit multiple messages.
+   * @param config Optional configuration for this specific transformation step, like a version.
+   */
+  addTransform(destination, transformation, config) {
+    const sourceFile = getSourceFileFromStack(new Error().stack);
+    const transformConfig = {
+      ...config ?? {},
+      sourceFile
+    };
+    if (transformConfig.deadLetterQueue === void 0) {
+      transformConfig.deadLetterQueue = this.defaultDeadLetterQueue;
+    }
+    if (this._transformations.has(destination.name)) {
+      const existingTransforms = this._transformations.get(destination.name);
+      const hasVersion = existingTransforms.some(
+        ([_, __, cfg]) => cfg.version === transformConfig.version
+      );
+      if (!hasVersion) {
+        existingTransforms.push([destination, transformation, transformConfig]);
       }
+    } else {
+      this._transformations.set(destination.name, [
+        [destination, transformation, transformConfig]
+      ]);
+    }
+  }
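
A sketch of the `addTransform` API documented above (types and stream names are invented): returning `null`, `undefined`, or `[]` drops a record, returning an array fans out, and distinct `version` values let several transforms target the same destination.

```typescript
interface RawEvent { id: string; valid: boolean; }
interface CleanEvent { id: string; cleanedAt: Date; }

declare const raw: Stream<RawEvent>;
declare const clean: Stream<CleanEvent>;

raw.addTransform(
  clean,
  (event) => (event.valid ? { id: event.id, cleanedAt: new Date() } : null),
  { version: "1.0" } // a second transform to `clean` needs a different version
);
```
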
+  /**
+   * Adds a consumer function that processes messages from this stream.
+   * Multiple consumers can be added if they have distinct `version` identifiers in their config.
+   *
+   * @param consumer A function that takes a message of type T and performs an action (e.g., side effect, logging). Should return void or Promise<void>.
+   * @param config Optional configuration for this specific consumer, like a version.
+   */
+  addConsumer(consumer, config) {
+    const sourceFile = getSourceFileFromStack(new Error().stack);
+    const consumerConfig = {
+      ...config ?? {},
+      sourceFile
     };
+    if (consumerConfig.deadLetterQueue === void 0) {
+      consumerConfig.deadLetterQueue = this.defaultDeadLetterQueue;
+    }
+    const hasVersion = this._consumers.some(
+      (existing) => existing.config.version === consumerConfig.version
+    );
+    if (!hasVersion) {
+      this._consumers.push({ consumer, config: consumerConfig });
+    }
   }
-
+  /**
+   * Helper method for `addMultiTransform` to specify the destination and values for a routed message.
+   * @param values The value or values to send to this stream.
+   * @returns A `RoutedMessage` object associating the values with this stream.
+   *
+   * @example
+   * ```typescript
+   * sourceStream.addMultiTransform((record) => [
+   *   destinationStream1.routed(transformedRecord1),
+   *   destinationStream2.routed([record2a, record2b])
+   * ]);
+   * ```
+   */
+  routed = (values) => new RoutedMessage(this, values);
+  /**
+   * Adds a single transformation function that can route messages to multiple destination streams.
+   * This is an alternative to adding multiple individual `addTransform` calls.
+   * Only one multi-transform function can be added per stream.
+   *
+   * @param transformation A function that takes a message of type T and returns an array of `RoutedMessage` objects,
+   * each specifying a destination stream and the message(s) to send to it.
+   */
+  addMultiTransform(transformation) {
+    this._multipleTransformations = transformation;
+  }
+};
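
And a consumer sketch (invented names): consumers are side-effect-only, and as the dedup check above shows, a second consumer registered with the same `version` is silently ignored rather than raising an error.

```typescript
interface Click { userId: string; url: string; }
declare const clicks: Stream<Click>;

clicks.addConsumer(
  async (click) => {
    // Side effect only; consumers return void or Promise<void>.
    console.log(`user ${click.userId} visited ${click.url}`);
  },
  { version: "1.0" }
);
```
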
+function attachTypeGuard(dl, typeGuard) {
+  dl.asTyped = () => typeGuard(dl.originalRecord);
+}
+var DeadLetterQueue = class extends Stream {
+  constructor(name, config, typeGuard) {
+    if (typeGuard === void 0) {
+      throw new Error(
+        "Supply the type param T so that the schema is inserted by the compiler plugin."
+      );
+    }
+    super(name, config ?? {}, dlqSchema, dlqColumns, void 0, false);
+    this.typeGuard = typeGuard;
+    getMooseInternal().streams.set(name, this);
+  }
+  /**
+   * Internal type guard function for validating and casting original records.
+   *
+   * @internal
+   */
+  typeGuard;
+  /**
+   * Adds a transformation step for dead letter records.
+   * The transformation function receives a DeadLetter<T> with type recovery capabilities.
+   *
+   * @template U The output type for the transformation
+   * @param destination The destination stream for transformed messages
+   * @param transformation Function to transform dead letter records
+   * @param config Optional transformation configuration
+   */
+  addTransform(destination, transformation, config) {
+    const withValidate = (deadLetter) => {
+      attachTypeGuard(deadLetter, this.typeGuard);
+      return transformation(deadLetter);
+    };
+    super.addTransform(destination, withValidate, config);
+  }
+  /**
+   * Adds a consumer for dead letter records.
+   * The consumer function receives a DeadLetter<T> with type recovery capabilities.
+   *
+   * @param consumer Function to process dead letter records
+   * @param config Optional consumer configuration
+   */
+  addConsumer(consumer, config) {
+    const withValidate = (deadLetter) => {
+      attachTypeGuard(deadLetter, this.typeGuard);
+      return consumer(deadLetter);
+    };
+    super.addConsumer(withValidate, config);
+  }
+  /**
+   * Adds a multi-stream transformation for dead letter records.
+   * The transformation function receives a DeadLetter<T> with type recovery capabilities.
+   *
+   * @param transformation Function to route dead letter records to multiple destinations
+   */
+  addMultiTransform(transformation) {
+    const withValidate = (deadLetter) => {
+      attachTypeGuard(deadLetter, this.typeGuard);
+      return transformation(deadLetter);
+    };
+    super.addMultiTransform(withValidate);
+  }
+};
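
A sketch of dead-letter recovery (the `Order` type and queue name are invented): `attachTypeGuard` above gives every delivered record an `asTyped()` method that re-validates `originalRecord` against the schema of the original payload type.

```typescript
interface Order { id: string; total: number; }

// In source TypeScript the type parameter drives the compiler-inserted type guard.
const orderDlq = new DeadLetterQueue<Order>("OrdersDeadLetterQueue");

orderDlq.addConsumer((dead) => {
  // asTyped() validates dead.originalRecord and returns it typed as Order,
  // throwing if the payload no longer conforms.
  const order = dead.asTyped();
  console.log("replaying failed order", order.id, order.total);
});
```
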
 
   // src/dmv2/sdk/workflow.ts
- var Task
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+var Task = class {
+  /**
+   * Creates a new Task instance.
+   *
+   * @param name - Unique identifier for the task
+   * @param config - Configuration object defining the task behavior
+   *
+   * @example
+   * ```typescript
+   * // No input, no output
+   * const task1 = new Task<null, void>("task1", {
+   *   run: async () => {
+   *     console.log("No input/output");
+   *   }
+   * });
+   *
+   * // No input, but has output
+   * const task2 = new Task<null, OutputType>("task2", {
+   *   run: async () => {
+   *     return someOutput;
+   *   }
+   * });
+   *
+   * // Has input, no output
+   * const task3 = new Task<InputType, void>("task3", {
+   *   run: async (input: InputType) => {
+   *     // process input but return nothing
+   *   }
+   * });
+   *
+   * // Has both input and output
+   * const task4 = new Task<InputType, OutputType>("task4", {
+   *   run: async (input: InputType) => {
+   *     return process(input);
+   *   }
+   * });
+   * ```
+   */
+  constructor(name, config) {
+    this.name = name;
+    this.config = config;
+  }
+};
+var Workflow = class {
+  /**
+   * Creates a new Workflow instance and registers it with the Moose system.
+   *
+   * @param name - Unique identifier for the workflow
+   * @param config - Configuration object defining the workflow behavior and task orchestration
+   * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
+   */
+  constructor(name, config) {
+    this.name = name;
+    this.config = config;
+    const workflows = getMooseInternal().workflows;
+    if (workflows.has(name)) {
+      throw new Error(`Workflow with name ${name} already exists`);
+    }
+    this.validateTaskGraph(config.startingTask, name);
+    workflows.set(name, this);
+  }
+  /**
+   * Validates the task graph to ensure there are no null tasks or infinite loops.
+   *
+   * @private
+   * @param startingTask - The starting task to begin validation from
+   * @param workflowName - The name of the workflow being validated (for error messages)
+   * @throws {Error} When null/undefined tasks are found or infinite loops are detected
+   */
+  validateTaskGraph(startingTask, workflowName) {
+    if (startingTask === null || startingTask === void 0) {
+      throw new Error(
+        `Workflow "${workflowName}" has a null or undefined starting task`
+      );
+    }
+    const visited = /* @__PURE__ */ new Set();
+    const recursionStack = /* @__PURE__ */ new Set();
+    const validateTask = (task, currentPath) => {
+      if (task === null || task === void 0) {
+        const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
+        throw new Error(
+          `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
+        );
       }
-
-
-
-
-
-
-
-   * @throws {Error} When the workflow contains null/undefined tasks or infinite loops
-   */
-  constructor(name, config) {
-    this.name = name;
-    this.config = config;
-    const workflows = getMooseInternal().workflows;
-    if (workflows.has(name)) {
-      throw new Error(`Workflow with name ${name} already exists`);
-    }
-    this.validateTaskGraph(config.startingTask, name);
-    workflows.set(name, this);
+      const taskName = task.name;
+      if (recursionStack.has(taskName)) {
+        const cycleStartIndex = currentPath.indexOf(taskName);
+        const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
+        throw new Error(
+          `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
+        );
       }
-
-
-
-
-
-
-
-
-  validateTaskGraph(startingTask, workflowName) {
-    if (startingTask === null || startingTask === void 0) {
-      throw new Error(
-        `Workflow "${workflowName}" has a null or undefined starting task`
-      );
+      if (visited.has(taskName)) {
+        return;
+      }
+      visited.add(taskName);
+      recursionStack.add(taskName);
+      if (task.config.onComplete) {
+        for (const nextTask of task.config.onComplete) {
+          validateTask(nextTask, [...currentPath, taskName]);
         }
-    const visited = /* @__PURE__ */ new Set();
-    const recursionStack = /* @__PURE__ */ new Set();
-    const validateTask = (task, currentPath) => {
-      if (task === null || task === void 0) {
-        const pathStr = currentPath.length > 0 ? currentPath.join(" -> ") + " -> " : "";
-        throw new Error(
-          `Workflow "${workflowName}" contains a null or undefined task in the task chain: ${pathStr}null`
-        );
-      }
-      const taskName = task.name;
-      if (recursionStack.has(taskName)) {
-        const cycleStartIndex = currentPath.indexOf(taskName);
-        const cyclePath = cycleStartIndex >= 0 ? currentPath.slice(cycleStartIndex).concat(taskName) : currentPath.concat(taskName);
-        throw new Error(
-          `Workflow "${workflowName}" contains an infinite loop in task chain: ${cyclePath.join(" -> ")}`
-        );
-      }
-      if (visited.has(taskName)) {
-        return;
-      }
-      visited.add(taskName);
-      recursionStack.add(taskName);
-      if (task.config.onComplete) {
-        for (const nextTask of task.config.onComplete) {
-          validateTask(nextTask, [...currentPath, taskName]);
-        }
-      }
-      recursionStack.delete(taskName);
-    };
-    validateTask(startingTask, []);
       }
+      recursionStack.delete(taskName);
     };
+    validateTask(startingTask, []);
   }
- }
+};
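
A workflow sketch tying the two classes together (task names and payloads are invented): `onComplete` is the edge list that `validateTaskGraph` walks, so a cycle or a null entry fails at construction time.

```typescript
const fetchNames = new Task<null, string[]>("fetchNames", {
  run: async () => ["alice", "bob"],
});

const greet = new Task<string[], void>("greet", {
  run: async (names) => {
    names.forEach((name) => console.log(`hello, ${name}`));
  },
});

// Chain the tasks; this mirrors how ETLPipeline wires its own tasks below.
fetchNames.config.onComplete = [greet];

// Registers under getMooseInternal().workflows; duplicate names throw.
new Workflow("greetEveryone", {
  startingTask: fetchNames,
  retries: 1,
  timeout: "30m",
});
```
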
 
   // src/dmv2/sdk/ingestApi.ts
- var IngestApi
-
-
-
-
-
-
-
-    super(name, config, schema, columns, void 0, allowExtraFields);
-    const ingestApis = getMooseInternal().ingestApis;
-    if (ingestApis.has(name)) {
-      throw new Error(`Ingest API with name ${name} already exists`);
-    }
-    ingestApis.set(name, this);
-  }
-};
+var IngestApi = class extends TypedBase {
+  constructor(name, config, schema, columns, validators, allowExtraFields) {
+    super(name, config, schema, columns, void 0, allowExtraFields);
+    const ingestApis = getMooseInternal().ingestApis;
+    if (ingestApis.has(name)) {
+      throw new Error(`Ingest API with name ${name} already exists`);
+    }
+    ingestApis.set(name, this);
   }
- }
+};
 
   // src/dmv2/sdk/consumptionApi.ts
- var Api
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          if (pathEndsWithVersion) {
-            if (apis.has(config.path)) {
-              const existing = apis.get(config.path);
-              throw new Error(
-                `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
-              );
-            }
-            apis.set(config.path, this);
-          } else {
-            const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
-            if (apis.has(versionedPath)) {
-              const existing = apis.get(versionedPath);
-              throw new Error(
-                `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
-              );
-            }
-            apis.set(versionedPath, this);
-            if (!apis.has(config.path)) {
-              apis.set(config.path, this);
-            }
-          }
-        } else {
-          if (apis.has(config.path)) {
-            const existing = apis.get(config.path);
-            throw new Error(
-              `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
-            );
-          }
-          apis.set(config.path, this);
-        }
-      }
-    }
-    /**
-     * Retrieves the handler function associated with this Consumption API.
-     * @returns The handler function.
-     */
-    getHandler = () => {
-      return this._handler;
-    };
-    async call(baseUrl, queryParams) {
-      let path2;
-      if (this.config?.path) {
-        if (this.config.version) {
-          const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
-          if (pathEndsWithVersion) {
-            path2 = this.config.path;
-          } else {
-            path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
-          }
-        } else {
-          path2 = this.config.path;
+var Api = class extends TypedBase {
+  /** @internal The handler function that processes requests and generates responses. */
+  _handler;
+  /** @internal The JSON schema definition for the response type R. */
+  responseSchema;
+  constructor(name, handler, config, schema, columns, responseSchema) {
+    super(name, config ?? {}, schema, columns);
+    this._handler = handler;
+    this.responseSchema = responseSchema ?? {
+      version: "3.1",
+      schemas: [{ type: "array", items: { type: "object" } }],
+      components: { schemas: {} }
+    };
+    const apis = getMooseInternal().apis;
+    const key = `${name}${config?.version ? `:${config.version}` : ""}`;
+    if (apis.has(key)) {
+      throw new Error(
+        `Consumption API with name ${name} and version ${config?.version} already exists`
+      );
+    }
+    apis.set(key, this);
+    if (config?.path) {
+      if (config.version) {
+        const pathEndsWithVersion = config.path.endsWith(`/${config.version}`) || config.path === config.version || config.path.endsWith(config.version) && config.path.length > config.version.length && config.path[config.path.length - config.version.length - 1] === "/";
+        if (pathEndsWithVersion) {
+          if (apis.has(config.path)) {
+            const existing = apis.get(config.path);
+            throw new Error(
+              `Cannot register API "${name}" with path "${config.path}" - this path is already used by API "${existing.name}"`
+            );
           }
+          apis.set(config.path, this);
         } else {
-
-
-
-
-
-
-          for (const item of value) {
-            if (item !== null && item !== void 0) {
-              searchParams.append(key, String(item));
-            }
-          }
-        } else if (value !== null && value !== void 0) {
-          searchParams.append(key, String(value));
-        }
-      }
-      const response = await fetch(url, {
-        method: "GET",
-        headers: {
-          Accept: "application/json"
+          const versionedPath = `${config.path.replace(/\/$/, "")}/${config.version}`;
+          if (apis.has(versionedPath)) {
+            const existing = apis.get(versionedPath);
+            throw new Error(
+              `Cannot register API "${name}" with path "${versionedPath}" - this path is already used by API "${existing.name}"`
+            );
           }
-
-
-
-      }
-      const data = await response.json();
-      return data;
-    }
-  };
-  ConsumptionApi = Api;
-  }
-});
-
-// src/dmv2/sdk/ingestPipeline.ts
-var IngestPipeline;
-var init_ingestPipeline = __esm({
-  "src/dmv2/sdk/ingestPipeline.ts"() {
-    "use strict";
-    init_typedBase();
-    init_stream();
-    init_olapTable();
-    init_ingestApi();
-    init_helpers();
-    IngestPipeline = class extends TypedBase {
-      /**
-       * The OLAP table component of the pipeline, if configured.
-       * Provides analytical query capabilities for the ingested data.
-       * Only present when `config.table` is not `false`.
-       */
-      table;
-      /**
-       * The stream component of the pipeline, if configured.
-       * Handles real-time data flow and processing between components.
-       * Only present when `config.stream` is not `false`.
-       */
-      stream;
-      /**
-       * The ingest API component of the pipeline, if configured.
-       * Provides HTTP endpoints for data ingestion.
-       * Only present when `config.ingestApi` is not `false`.
-       */
-      ingestApi;
-      /** The dead letter queue of the pipeline, if configured. */
-      deadLetterQueue;
-      constructor(name, config, schema, columns, validators, allowExtraFields) {
-        super(name, config, schema, columns, validators, allowExtraFields);
-        if (config.ingest !== void 0) {
-          console.warn(
-            "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
-          );
-          if (config.ingestApi === void 0) {
-            config.ingestApi = config.ingest;
+          apis.set(versionedPath, this);
+          if (!apis.has(config.path)) {
+            apis.set(config.path, this);
           }
         }
-
-
-
-
-
-        } : {
-          lifeCycle: config.lifeCycle,
-          engine: "MergeTree" /* MergeTree */,
-          ...config.version && { version: config.version }
-        };
-        this.table = new OlapTable(
-          name,
-          tableConfig,
-          this.schema,
-          this.columnArray,
-          this.validators
-        );
-      }
-      if (config.deadLetterQueue) {
-        const streamConfig = {
-          destination: void 0,
-          ...typeof config.deadLetterQueue === "object" ? {
-            ...config.deadLetterQueue,
-            lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
-          } : { lifeCycle: config.lifeCycle },
-          ...config.version && { version: config.version }
-        };
-        this.deadLetterQueue = new DeadLetterQueue(
-          `${name}DeadLetterQueue`,
-          streamConfig,
-          validators.assert
+      } else {
+        if (apis.has(config.path)) {
+          const existing = apis.get(config.path);
+          throw new Error(
+            `Cannot register API "${name}" with custom path "${config.path}" - this path is already used by API "${existing.name}"`
         );
       }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        apis.set(config.path, this);
+      }
+    }
+  }
+  /**
+   * Retrieves the handler function associated with this Consumption API.
+   * @returns The handler function.
+   */
+  getHandler = () => {
+    return this._handler;
+  };
+  async call(baseUrl, queryParams) {
+    let path2;
+    if (this.config?.path) {
+      if (this.config.version) {
+        const pathEndsWithVersion = this.config.path.endsWith(`/${this.config.version}`) || this.config.path === this.config.version || this.config.path.endsWith(this.config.version) && this.config.path.length > this.config.version.length && this.config.path[this.config.path.length - this.config.version.length - 1] === "/";
+        if (pathEndsWithVersion) {
+          path2 = this.config.path;
+        } else {
+          path2 = `${this.config.path.replace(/\/$/, "")}/${this.config.version}`;
         }
-
-
-
-
+      } else {
+        path2 = this.config.path;
+      }
+    } else {
+      path2 = this.config?.version ? `${this.name}/${this.config.version}` : this.name;
+    }
+    const url = new URL(`${baseUrl.replace(/\/$/, "")}/api/${path2}`);
+    const searchParams = url.searchParams;
+    for (const [key, value] of Object.entries(queryParams)) {
+      if (Array.isArray(value)) {
+        for (const item of value) {
+          if (item !== null && item !== void 0) {
+            searchParams.append(key, String(item));
           }
-      const ingestConfig = {
-        destination: this.stream,
-        deadLetterQueue: this.deadLetterQueue,
-        ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
-        ...config.version && { version: config.version },
-        ...config.path && { path: config.path }
-      };
-      this.ingestApi = new IngestApi(
-        name,
-        ingestConfig,
-        this.schema,
-        this.columnArray,
-        void 0,
-        this.allowExtraFields
-      );
-      this.ingestApi.pipelineParent = this;
         }
+      } else if (value !== null && value !== void 0) {
+        searchParams.append(key, String(value));
       }
-  }
+    }
+    const response = await fetch(url, {
+      method: "GET",
+      headers: {
+        Accept: "application/json"
+      }
+    });
+    if (!response.ok) {
+      throw new Error(`HTTP error! status: ${response.status}`);
+    }
+    const data = await response.json();
+    return data;
   }
-  }
+};
+var ConsumptionApi = Api;
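
A consumption-API sketch (names, types, and the trivial handler body are invented; in source TypeScript the schema and column arguments are injected by the compiler plugin, so callers pass only name, handler, and config). Note the registration rules above: the API is keyed as `name:version`, and a custom `path` is additionally registered, suffixed with the version when it does not already end with it.

```typescript
interface DailyParams { day: string; }
interface DailyRow { day: string; count: number; }

const dailyCounts = new Api<DailyParams, DailyRow[]>(
  "daily-counts",
  async ({ day }) => [{ day, count: 42 }], // stand-in for a real query
  { version: "1.0", path: "analytics/daily" } // registered as analytics/daily/1.0
);

// call() builds `${baseUrl}/api/<path>` (appending the version as above),
// serializes query params (arrays become repeated keys), and GETs JSON.
const rows = await dailyCounts.call("http://localhost:4000", { day: "2024-06-01" });
```
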
 
- // src/dmv2/sdk/
- var
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+// src/dmv2/sdk/ingestPipeline.ts
+var IngestPipeline = class extends TypedBase {
+  /**
+   * The OLAP table component of the pipeline, if configured.
+   * Provides analytical query capabilities for the ingested data.
+   * Only present when `config.table` is not `false`.
+   */
+  table;
+  /**
+   * The stream component of the pipeline, if configured.
+   * Handles real-time data flow and processing between components.
+   * Only present when `config.stream` is not `false`.
+   */
+  stream;
+  /**
+   * The ingest API component of the pipeline, if configured.
+   * Provides HTTP endpoints for data ingestion.
+   * Only present when `config.ingestApi` is not `false`.
+   */
+  ingestApi;
+  /** The dead letter queue of the pipeline, if configured. */
+  deadLetterQueue;
+  constructor(name, config, schema, columns, validators, allowExtraFields) {
+    super(name, config, schema, columns, validators, allowExtraFields);
+    if (config.ingest !== void 0) {
+      console.warn(
+        "\u26A0\uFE0F DEPRECATION WARNING: The 'ingest' parameter is deprecated and will be removed in a future version. Please use 'ingestApi' instead."
+      );
+      if (config.ingestApi === void 0) {
+        config.ingestApi = config.ingest;
       }
+    }
+    if (config.table) {
+      const tableConfig = typeof config.table === "object" ? {
+        ...config.table,
+        lifeCycle: config.table.lifeCycle ?? config.lifeCycle,
+        ...config.version && { version: config.version }
+      } : {
+        lifeCycle: config.lifeCycle,
+        engine: "MergeTree" /* MergeTree */,
+        ...config.version && { version: config.version }
+      };
+      this.table = new OlapTable(
+        name,
+        tableConfig,
+        this.schema,
+        this.columnArray,
+        this.validators
+      );
+    }
+    if (config.deadLetterQueue) {
+      const streamConfig = {
+        destination: void 0,
+        ...typeof config.deadLetterQueue === "object" ? {
+          ...config.deadLetterQueue,
+          lifeCycle: config.deadLetterQueue.lifeCycle ?? config.lifeCycle
+        } : { lifeCycle: config.lifeCycle },
+        ...config.version && { version: config.version }
+      };
+      this.deadLetterQueue = new DeadLetterQueue(
+        `${name}DeadLetterQueue`,
+        streamConfig,
+        validators.assert
+      );
+    }
+    if (config.stream) {
+      const streamConfig = {
+        destination: this.table,
+        defaultDeadLetterQueue: this.deadLetterQueue,
+        ...typeof config.stream === "object" ? {
+          ...config.stream,
+          lifeCycle: config.stream.lifeCycle ?? config.lifeCycle
+        } : { lifeCycle: config.lifeCycle },
+        ...config.version && { version: config.version }
+      };
+      this.stream = new Stream(
+        name,
+        streamConfig,
+        this.schema,
+        this.columnArray,
+        void 0,
+        this.allowExtraFields
+      );
+      this.stream.pipelineParent = this;
+    }
+    const effectiveIngestAPI = config.ingestApi !== void 0 ? config.ingestApi : config.ingest;
+    if (effectiveIngestAPI) {
+      if (!this.stream) {
+        throw new Error("Ingest API needs a stream to write to.");
+      }
+      const ingestConfig = {
+        destination: this.stream,
+        deadLetterQueue: this.deadLetterQueue,
+        ...typeof effectiveIngestAPI === "object" ? effectiveIngestAPI : {},
+        ...config.version && { version: config.version },
+        ...config.path && { path: config.path }
+      };
+      this.ingestApi = new IngestApi(
+        name,
+        ingestConfig,
+        this.schema,
+        this.columnArray,
+        void 0,
+        this.allowExtraFields
+      );
+      this.ingestApi.pipelineParent = this;
+    }
+  }
+};
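
An `IngestPipeline` sketch (the record type is invented; the option keys are the ones the constructor above switches on). Passing `true` accepts defaults; passing an object forwards component-specific config, with `lifeCycle` and `version` threaded through as shown:

```typescript
interface PageView { url: string; viewedAt: Date; }

const pageViews = new IngestPipeline<PageView>("page_views", {
  table: true,           // OlapTable, MergeTree engine by default
  stream: true,          // Stream wired as the table's source
  ingestApi: true,       // HTTP ingest endpoint writing into the stream
  deadLetterQueue: true, // creates "page_viewsDeadLetterQueue"
});
```
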
+
+// src/dmv2/sdk/etlPipeline.ts
+var InternalBatcher = class {
+  iterator;
+  batchSize;
+  constructor(asyncIterable, batchSize = 20) {
+    this.iterator = asyncIterable[Symbol.asyncIterator]();
+    this.batchSize = batchSize;
+  }
+  async getNextBatch() {
+    const items = [];
+    for (let i = 0; i < this.batchSize; i++) {
+      const { value, done } = await this.iterator.next();
+      if (done) {
+        return { items, hasMore: false };
+      }
+      items.push(value);
+    }
+    return { items, hasMore: true };
+  }
+};
+var ETLPipeline = class {
+  constructor(name, config) {
+    this.name = name;
+    this.config = config;
+    this.setupPipeline();
+  }
+  batcher;
+  setupPipeline() {
+    this.batcher = this.createBatcher();
+    const tasks = this.createAllTasks();
+    tasks.extract.config.onComplete = [tasks.transform];
+    tasks.transform.config.onComplete = [tasks.load];
+    new Workflow(this.name, {
+      startingTask: tasks.extract,
+      retries: 1,
+      timeout: "30m"
+    });
+  }
+  createBatcher() {
+    const iterable = typeof this.config.extract === "function" ? this.config.extract() : this.config.extract;
+    return new InternalBatcher(iterable);
+  }
+  getDefaultTaskConfig() {
+    return {
+      retries: 1,
+      timeout: "30m"
     };
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-  }
-
-
-
-
-    extract: this.createExtractTask(taskConfig),
-    transform: this.createTransformTask(taskConfig),
-    load: this.createLoadTask(taskConfig)
-  };
-  }
-  createExtractTask(taskConfig) {
-    return new Task(`${this.name}_extract`, {
-      run: async ({}) => {
-        console.log(`Running extract task for ${this.name}...`);
-        const batch = await this.batcher.getNextBatch();
-        console.log(`Extract task completed with ${batch.items.length} items`);
-        return batch;
-      },
-      retries: taskConfig.retries,
-      timeout: taskConfig.timeout
-    });
-  }
-  createTransformTask(taskConfig) {
-    return new Task(
-      `${this.name}_transform`,
-      {
-        // Use new single-parameter context API for handlers
-        run: async ({ input }) => {
-          const batch = input;
-          console.log(
-            `Running transform task for ${this.name} with ${batch.items.length} items...`
-          );
-          const transformedItems = [];
-          for (const item of batch.items) {
-            const transformed = await this.config.transform(item);
-            transformedItems.push(transformed);
-          }
-          console.log(
-            `Transform task completed with ${transformedItems.length} items`
-          );
-          return { items: transformedItems };
-        },
-        retries: taskConfig.retries,
-        timeout: taskConfig.timeout
-      }
-    );
-  }
-  createLoadTask(taskConfig) {
-    return new Task(`${this.name}_load`, {
-      run: async ({ input: transformedItems }) => {
-        console.log(
-          `Running load task for ${this.name} with ${transformedItems.items.length} items...`
-        );
-        if ("insert" in this.config.load) {
-          await this.config.load.insert(transformedItems.items);
-        } else {
-          await this.config.load(transformedItems.items);
-        }
-        console.log(`Load task completed`);
-      },
-      retries: taskConfig.retries,
-      timeout: taskConfig.timeout
-    });
-  }
-  // Execute the entire ETL pipeline
-  async run() {
-    console.log(`Starting ETL Pipeline: ${this.name}`);
-    let batchNumber = 1;
-    do {
-      console.log(`Processing batch ${batchNumber}...`);
-      const batch = await this.batcher.getNextBatch();
-      if (batch.items.length === 0) {
-        break;
-      }
+  }
+  createAllTasks() {
+    const taskConfig = this.getDefaultTaskConfig();
+    return {
+      extract: this.createExtractTask(taskConfig),
+      transform: this.createTransformTask(taskConfig),
+      load: this.createLoadTask(taskConfig)
+    };
+  }
+  createExtractTask(taskConfig) {
+    return new Task(`${this.name}_extract`, {
+      run: async ({}) => {
+        console.log(`Running extract task for ${this.name}...`);
+        const batch = await this.batcher.getNextBatch();
+        console.log(`Extract task completed with ${batch.items.length} items`);
+        return batch;
+      },
+      retries: taskConfig.retries,
+      timeout: taskConfig.timeout
+    });
+  }
+  createTransformTask(taskConfig) {
+    return new Task(
+      `${this.name}_transform`,
+      {
+        // Use new single-parameter context API for handlers
+        run: async ({ input }) => {
+          const batch = input;
+          console.log(
+            `Running transform task for ${this.name} with ${batch.items.length} items...`
+          );
           const transformedItems = [];
-          for (const
-          const
-          transformedItems.push(
-          }
-          if ("insert" in this.config.load) {
-            await this.config.load.insert(transformedItems);
-          } else {
-            await this.config.load(transformedItems);
+          for (const item of batch.items) {
+            const transformed = await this.config.transform(item);
+            transformedItems.push(transformed);
           }
           console.log(
- `
+            `Transform task completed with ${transformedItems.length} items`
           );
-
-
-
-
-    } while (true);
-    console.log(`Completed ETL Pipeline: ${this.name}`);
+          return { items: transformedItems };
+        },
+        retries: taskConfig.retries,
+        timeout: taskConfig.timeout
       }
-
+    );
   }
-
-
-
-
-
-  "src/dmv2/sdk/sqlResource.ts"() {
-    "use strict";
-    init_internal();
-    init_sqlHelpers();
-    init_stackTrace();
-    SqlResource = class {
-      /** @internal */
-      kind = "SqlResource";
-      /** Array of SQL statements to execute for setting up the resource. */
-      setup;
-      /** Array of SQL statements to execute for tearing down the resource. */
-      teardown;
-      /** The name of the SQL resource (e.g., view name, materialized view name). */
-      name;
-      /** List of OlapTables or Views that this resource reads data from. */
-      pullsDataFrom;
-      /** List of OlapTables or Views that this resource writes data to. */
-      pushesDataTo;
-      /** @internal Source file path where this resource was defined */
-      sourceFile;
-      /** @internal Source line number where this resource was defined */
-      sourceLine;
-      /** @internal Source column number where this resource was defined */
-      sourceColumn;
-      /**
-       * Creates a new SqlResource instance.
-       * @param name The name of the resource.
-       * @param setup An array of SQL DDL statements to create the resource.
-       * @param teardown An array of SQL DDL statements to drop the resource.
-       * @param options Optional configuration for specifying data dependencies.
-       * @param options.pullsDataFrom Tables/Views this resource reads from.
-       * @param options.pushesDataTo Tables/Views this resource writes to.
-       */
-      constructor(name, setup, teardown, options) {
-        const sqlResources = getMooseInternal().sqlResources;
-        if (!isClientOnlyMode() && sqlResources.has(name)) {
-          throw new Error(`SqlResource with name ${name} already exists`);
-        }
-        sqlResources.set(name, this);
-        this.name = name;
-        this.setup = setup.map(
-          (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
-        );
-        this.teardown = teardown.map(
-          (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
+  createLoadTask(taskConfig) {
+    return new Task(`${this.name}_load`, {
+      run: async ({ input: transformedItems }) => {
+        console.log(
+          `Running load task for ${this.name} with ${transformedItems.items.length} items...`
         );
-
-
-
-
-        if (location) {
-          this.sourceFile = location.file;
-          this.sourceLine = location.line;
-          this.sourceColumn = location.column;
+        if ("insert" in this.config.load) {
+          await this.config.load.insert(transformedItems.items);
+        } else {
+          await this.config.load(transformedItems.items);
         }
-
-
+        console.log(`Load task completed`);
+      },
+      retries: taskConfig.retries,
+      timeout: taskConfig.timeout
+    });
   }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  // Execute the entire ETL pipeline
+  async run() {
+    console.log(`Starting ETL Pipeline: ${this.name}`);
+    let batchNumber = 1;
+    do {
+      console.log(`Processing batch ${batchNumber}...`);
+      const batch = await this.batcher.getNextBatch();
+      if (batch.items.length === 0) {
+        break;
+      }
+      const transformedItems = [];
+      for (const extractedData of batch.items) {
+        const transformedData = await this.config.transform(extractedData);
+        transformedItems.push(transformedData);
+      }
+      if ("insert" in this.config.load) {
+        await this.config.load.insert(transformedItems);
       } else {
-
+        await this.config.load(transformedItems);
       }
-
-
-
-
-
-
-      if (typeof selectStatement !== "string") {
-        selectStatement = toStaticQuery(selectStatement);
-      }
-      if (targetSchema === void 0 || targetColumns === void 0) {
-        throw new Error(
-          "Supply the type param T so that the schema is inserted by the compiler plugin."
-        );
-      }
-      const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
-        requireTargetTableName(
-          options.targetTable?.name ?? options.tableName
-        ),
-        {
-          orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
-          engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
-        },
-        targetSchema,
-        targetColumns
-      );
-      if (targetTable.name === options.materializedViewName) {
-        throw new Error(
-          "Materialized view name cannot be the same as the target table name."
-        );
-      }
-      super(
-        options.materializedViewName,
-        [
-          createMaterializedView({
-            name: options.materializedViewName,
-            destinationTable: targetTable.name,
-            select: selectStatement
-          })
-          // Population is now handled automatically by Rust infrastructure
-          // based on table engine type and whether this is a new or updated view
-        ],
-        [dropView(options.materializedViewName)],
-        {
-          pullsDataFrom: options.selectTables,
-          pushesDataTo: [targetTable]
-        }
-      );
-      this.targetTable = targetTable;
+      console.log(
+        `Completed batch ${batchNumber} with ${batch.items.length} items`
+      );
+      batchNumber++;
+      if (!batch.hasMore) {
+        break;
       }
-  };
+    } while (true);
+    console.log(`Completed ETL Pipeline: ${this.name}`);
   }
- }
+};
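
An end-to-end `ETLPipeline` sketch (all names invented): `extract` may be an async iterable or a zero-argument function returning one, `transform` runs per item, and `load` is either a plain function or any object exposing `insert()` (the `"insert" in ...` check above). Construction alone registers an extract, transform, load `Workflow`; `run()` additionally executes the whole loop in-process in batches of 20.

```typescript
async function* readRows(): AsyncGenerator<{ id: number }> {
  for (let i = 0; i < 50; i++) yield { id: i };
}

const pipeline = new ETLPipeline("double-ids", {
  extract: readRows,
  transform: async (row: { id: number }) => ({ id: row.id, doubled: row.id * 2 }),
  load: async (rows: { id: number; doubled: number }[]) => {
    console.log(`loading ${rows.length} rows`);
  },
});

await pipeline.run();
```
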
+
+// src/dmv2/sdk/materializedView.ts
+var requireTargetTableName = (tableName) => {
+  if (typeof tableName === "string") {
+    return tableName;
+  } else {
+    throw new Error("Name of targetTable is not specified.");
+  }
+};
+var MaterializedView = class {
+  /** @internal */
+  kind = "MaterializedView";
+  /** The name of the materialized view */
+  name;
+  /** The target OlapTable instance where the materialized data is stored. */
+  targetTable;
+  /** The SELECT SQL statement */
+  selectSql;
+  /** Names of source tables that the SELECT reads from */
+  sourceTables;
+  /** @internal Source file path where this MV was defined */
+  sourceFile;
+  constructor(options, targetSchema, targetColumns) {
+    let selectStatement = options.selectStatement;
+    if (typeof selectStatement !== "string") {
+      selectStatement = toStaticQuery(selectStatement);
+    }
+    if (targetSchema === void 0 || targetColumns === void 0) {
+      throw new Error(
+        "Supply the type param T so that the schema is inserted by the compiler plugin."
+      );
+    }
+    const targetTable = options.targetTable instanceof OlapTable ? options.targetTable : new OlapTable(
+      requireTargetTableName(
+        options.targetTable?.name ?? options.tableName
+      ),
+      {
+        orderByFields: options.targetTable?.orderByFields ?? options.orderByFields,
+        engine: options.targetTable?.engine ?? options.engine ?? "MergeTree" /* MergeTree */
+      },
+      targetSchema,
+      targetColumns
+    );
+    if (targetTable.name === options.materializedViewName) {
+      throw new Error(
+        "Materialized view name cannot be the same as the target table name."
+      );
+    }
+    this.name = options.materializedViewName;
+    this.targetTable = targetTable;
+    this.selectSql = selectStatement;
+    this.sourceTables = options.selectTables.map((t) => t.name);
+    const stack = new Error().stack;
+    this.sourceFile = getSourceFileFromStack(stack);
+    const materializedViews = getMooseInternal().materializedViews;
+    if (!isClientOnlyMode() && materializedViews.has(this.name)) {
+      throw new Error(`MaterializedView with name ${this.name} already exists`);
+    }
+    materializedViews.set(this.name, this);
+  }
+};
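
A `MaterializedView` sketch (types and names invented; the option keys match the constructor above). The target table may be passed as an existing `OlapTable` or described inline, and the view name must differ from the target table name:

```typescript
interface DailyStat { day: string; views: number; }

// Hypothetical source table for dependency tracking.
declare const pageViewsTable: OlapTable<{ url: string; viewedAt: Date }>;

const dailyStats = new MaterializedView<DailyStat>({
  materializedViewName: "daily_stats_mv",
  tableName: "daily_stats",        // implicit target OlapTable
  orderByFields: ["day"],          // engine defaults to MergeTree
  selectStatement:
    "SELECT toDate(viewedAt) AS day, count(*) AS views FROM page_views GROUP BY day",
  selectTables: [pageViewsTable],
});
```
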
+
+// src/dmv2/sdk/sqlResource.ts
+var SqlResource = class {
+  /** @internal */
+  kind = "SqlResource";
+  /** Array of SQL statements to execute for setting up the resource. */
+  setup;
+  /** Array of SQL statements to execute for tearing down the resource. */
+  teardown;
+  /** The name of the SQL resource (e.g., view name, materialized view name). */
+  name;
+  /** List of OlapTables or Views that this resource reads data from. */
+  pullsDataFrom;
+  /** List of OlapTables or Views that this resource writes data to. */
+  pushesDataTo;
+  /** @internal Source file path where this resource was defined */
+  sourceFile;
+  /** @internal Source line number where this resource was defined */
+  sourceLine;
+  /** @internal Source column number where this resource was defined */
+  sourceColumn;
+  /**
+   * Creates a new SqlResource instance.
+   * @param name The name of the resource.
+   * @param setup An array of SQL DDL statements to create the resource.
+   * @param teardown An array of SQL DDL statements to drop the resource.
+   * @param options Optional configuration for specifying data dependencies.
+   * @param options.pullsDataFrom Tables/Views this resource reads from.
+   * @param options.pushesDataTo Tables/Views this resource writes to.
+   */
+  constructor(name, setup, teardown, options) {
+    const sqlResources = getMooseInternal().sqlResources;
+    if (!isClientOnlyMode() && sqlResources.has(name)) {
+      throw new Error(`SqlResource with name ${name} already exists`);
+    }
+    sqlResources.set(name, this);
+    this.name = name;
+    this.setup = setup.map(
+      (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
+    );
+    this.teardown = teardown.map(
+      (sql3) => typeof sql3 === "string" ? sql3 : toStaticQuery(sql3)
+    );
+    this.pullsDataFrom = options?.pullsDataFrom ?? [];
+    this.pushesDataTo = options?.pushesDataTo ?? [];
+    const stack = new Error().stack;
+    const location = getSourceLocationFromStack(stack);
+    if (location) {
+      this.sourceFile = location.file;
+      this.sourceLine = location.line;
+      this.sourceColumn = location.column;
+    }
+  }
+};
 
   // src/dmv2/sdk/view.ts
- var View
-
- "
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+var View = class {
+  /** @internal */
+  kind = "CustomView";
+  /** The name of the view */
+  name;
+  /** The SELECT SQL statement that defines the view */
+  selectSql;
+  /** Names of source tables/views that the SELECT reads from */
+  sourceTables;
+  /** @internal Source file path where this view was defined */
+  sourceFile;
+  /**
+   * Creates a new View instance.
+   * @param name The name of the view to be created.
+   * @param selectStatement The SQL SELECT statement that defines the view's logic.
+   * @param baseTables An array of OlapTable or View objects that the `selectStatement` reads from. Used for dependency tracking.
+   */
+  constructor(name, selectStatement, baseTables) {
+    if (typeof selectStatement !== "string") {
+      selectStatement = toStaticQuery(selectStatement);
+    }
+    this.name = name;
+    this.selectSql = selectStatement;
+    this.sourceTables = baseTables.map((t) => t.name);
+    const stack = new Error().stack;
+    this.sourceFile = getSourceFileFromStack(stack);
+    const customViews = getMooseInternal().customViews;
+    if (!isClientOnlyMode() && customViews.has(this.name)) {
+      throw new Error(`View with name ${this.name} already exists`);
+    }
+    customViews.set(this.name, this);
   }
- }
+};
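
A plain `View` sketch (names invented; the `(name, selectStatement, baseTables)` signature is the one documented above):

```typescript
declare const orders: OlapTable<{ id: string; total: number }>;

const bigOrders = new View(
  "big_orders",
  "SELECT id, total FROM orders WHERE total > 100",
  [orders] // baseTables, used only for dependency tracking
);
```
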
 
   // src/dmv2/sdk/lifeCycle.ts
- var LifeCycle
-
- "
-
-
-
-   LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
-   LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
-   return LifeCycle2;
- })(LifeCycle || {});
- }
- });
+var LifeCycle = /* @__PURE__ */ ((LifeCycle2) => {
+  LifeCycle2["FULLY_MANAGED"] = "FULLY_MANAGED";
+  LifeCycle2["DELETION_PROTECTED"] = "DELETION_PROTECTED";
+  LifeCycle2["EXTERNALLY_MANAGED"] = "EXTERNALLY_MANAGED";
+  return LifeCycle2;
+})(LifeCycle || {});
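
The `LifeCycle` enum above is the value threaded through the `lifeCycle` config keys in `IngestPipeline` earlier in this diff; a hypothetical table-level use (the table name and config shape are illustrative):

```typescript
const auditLog = new OlapTable<{ id: string; at: Date }>("audit_log", {
  orderByFields: ["id"],
  lifeCycle: LifeCycle.DELETION_PROTECTED, // or FULLY_MANAGED / EXTERNALLY_MANAGED
});
```
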
|
|
2745
2555
|
|
|
 // src/dmv2/sdk/webApp.ts
-var RESERVED_MOUNT_PATHS
+var RESERVED_MOUNT_PATHS = [
+  "/admin",
+  "/api",
+  "/consumption",
+  "/health",
+  "/ingest",
+  "/moose",
+  // reserved for future use
+  "/ready",
+  "/workflows"
+];
+var WebApp = class {
+  name;
+  handler;
+  config;
+  _rawApp;
+  constructor(name, appOrHandler, config) {
+    this.name = name;
+    this.config = config;
+    if (!this.config.mountPath) {
+      throw new Error(
+        `mountPath is required. Please specify a mount path for your WebApp (e.g., "/myapi").`
+      );
+    }
+    const mountPath = this.config.mountPath;
+    if (mountPath === "/") {
+      throw new Error(
+        `mountPath cannot be "/" as it would allow routes to overlap with reserved paths: ${RESERVED_MOUNT_PATHS.join(", ")}`
+      );
+    }
+    if (mountPath.endsWith("/")) {
+      throw new Error(
+        `mountPath cannot end with a trailing slash. Remove the '/' from: "${mountPath}"`
+      );
+    }
+    for (const reserved of RESERVED_MOUNT_PATHS) {
+      if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
+        throw new Error(
+          `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
+        );
+      }
+    }
+    this.handler = this.toHandler(appOrHandler);
+    this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
+    const webApps = getMooseInternal().webApps;
+    if (webApps.has(name)) {
+      throw new Error(`WebApp with name ${name} already exists`);
+    }
+    if (this.config.mountPath) {
+      for (const [existingName, existingApp] of webApps) {
+        if (existingApp.config.mountPath === this.config.mountPath) {
           throw new Error(
-            `
+            `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
           );
         }
-      for (const reserved of RESERVED_MOUNT_PATHS) {
-        if (mountPath === reserved || mountPath.startsWith(`${reserved}/`)) {
-          throw new Error(
-            `mountPath cannot begin with a reserved path: ${RESERVED_MOUNT_PATHS.join(", ")}. Got: "${mountPath}"`
-          );
-        }
-      }
-      this.handler = this.toHandler(appOrHandler);
-      this._rawApp = typeof appOrHandler === "function" ? void 0 : appOrHandler;
-      const webApps = getMooseInternal().webApps;
-      if (webApps.has(name)) {
-        throw new Error(`WebApp with name ${name} already exists`);
-      }
-      if (this.config.mountPath) {
-        for (const [existingName, existingApp] of webApps) {
-          if (existingApp.config.mountPath === this.config.mountPath) {
-            throw new Error(
-              `WebApp with mountPath "${this.config.mountPath}" already exists (used by WebApp "${existingName}")`
-            );
-          }
-        }
-      }
-      webApps.set(name, this);
       }
-  }
-  };
-}
-    if (typeof app.callback === "function") {
-      return app.callback();
-    }
-    if (typeof app.routing === "function") {
-      const routing = app.routing;
-      const appWithReady = app;
-      let readyPromise = null;
-      return async (req, res) => {
-        if (readyPromise === null) {
-          readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
+    }
+    webApps.set(name, this);
+  }
+  toHandler(appOrHandler) {
+    if (typeof appOrHandler === "function") {
+      return appOrHandler;
+    }
+    const app = appOrHandler;
+    if (typeof app.handle === "function") {
+      return (req, res) => {
+        app.handle(req, res, (err) => {
+          if (err) {
+            console.error("WebApp handler error:", err);
+            if (!res.headersSent) {
+              res.writeHead(500, { "Content-Type": "application/json" });
+              res.end(JSON.stringify({ error: "Internal Server Error" }));
             }
+          }
+        });
+      };
+    }
+    if (typeof app.callback === "function") {
+      return app.callback();
+    }
+    if (typeof app.routing === "function") {
+      const routing = app.routing;
+      const appWithReady = app;
+      let readyPromise = null;
+      return async (req, res) => {
+        if (readyPromise === null) {
+          readyPromise = typeof appWithReady.ready === "function" ? appWithReady.ready() : Promise.resolve();
+        }
+        await readyPromise;
+        routing(req, res);
+      };
+    }
+    throw new Error(
+      `Unable to convert app to handler. The provided object must be:
 - A function (raw Node.js handler)
 - An object with .handle() method (Express, Connect)
 - An object with .callback() method (Koa)
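The rewritten webApp.ts validates config.mountPath eagerly in the WebApp constructor: it must be set, must not be "/", must not end with a trailing slash, must not begin with a reserved path, and must not duplicate the name or mountPath of an already-registered app. A sketch of what now passes and fails, assuming WebApp is imported from the package root and using the hypothetical mount path "/myapi" with an Express app:

    import express from "express";
    import { WebApp } from "@514labs/moose-lib";

    const app = express();
    app.get("/hello", (_req, res) => { res.json({ ok: true }); });

    // Passes validation: non-reserved prefix, no trailing slash.
    const web = new WebApp("hello", app, { mountPath: "/myapi" });

    // Each of these would throw in the constructor:
    //   new WebApp("a", app, { mountPath: "/" });        // would overlap reserved paths
    //   new WebApp("b", app, { mountPath: "/myapi/" });  // trailing slash
    //   new WebApp("c", app, { mountPath: "/api/v1" });  // begins with reserved "/api"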
@@ -2854,14 +2659,12 @@ Examples:
   Fastify: new WebApp("name", fastifyApp)
   Raw: new WebApp("name", (req, res) => { ... })
 `
-  }
-  getRawApp() {
-    return this._rawApp;
-  }
-};
+    );
   }
+  getRawApp() {
+    return this._rawApp;
+  }
+};
 
 // src/dmv2/registry.ts
 function getTables() {
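toHandler normalizes every supported shape to a plain Node (req, res) handler: raw functions pass through, objects with a .handle() method (Express, Connect, per the error message above) get an error-trapping wrapper that returns a JSON 500, .callback() (Koa) is invoked once, and .routing() (Fastify) is awaited behind a cached ready() promise. A Koa-flavored sketch; the app name and mount path are hypothetical:

    import Koa from "koa";
    import { WebApp, getWebApp } from "@514labs/moose-lib";

    const koa = new Koa();
    koa.use(async (ctx) => { ctx.body = { ok: true }; });

    // Koa exposes .callback(), so toHandler takes that branch.
    new WebApp("koa-app", koa, { mountPath: "/koaapi" });

    // Registered apps can be looked up by name from the registry:
    const registered = getWebApp("koa-app");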
@@ -2927,84 +2730,18 @@ function getWebApps2() {
 function getWebApp(name) {
   return getMooseInternal().webApps.get(name);
 }
-  }
-    init_stream();
-    init_workflow();
-    init_ingestApi();
-    init_consumptionApi();
-    init_ingestPipeline();
-    init_etlPipeline();
-    init_materializedView();
-    init_sqlResource();
-    init_view();
-    init_lifeCycle();
-    init_webApp();
-    init_registry();
-  }
-});
-
-// src/browserCompatible.ts
-var browserCompatible_exports = {};
-__export(browserCompatible_exports, {
-  Api: () => Api,
-  ConsumptionApi: () => ConsumptionApi,
-  DeadLetterQueue: () => DeadLetterQueue,
-  ETLPipeline: () => ETLPipeline,
-  IngestApi: () => IngestApi,
-  IngestPipeline: () => IngestPipeline,
-  LifeCycle: () => LifeCycle,
-  MaterializedView: () => MaterializedView,
-  OlapTable: () => OlapTable,
-  Sql: () => Sql,
-  SqlResource: () => SqlResource,
-  Stream: () => Stream,
-  Task: () => Task,
-  View: () => View,
-  WebApp: () => WebApp,
-  Workflow: () => Workflow,
-  createClickhouseParameter: () => createClickhouseParameter,
-  getApi: () => getApi,
-  getApis: () => getApis2,
-  getIngestApi: () => getIngestApi,
-  getIngestApis: () => getIngestApis,
-  getSqlResource: () => getSqlResource,
-  getSqlResources: () => getSqlResources,
-  getStream: () => getStream,
-  getStreams: () => getStreams,
-  getTable: () => getTable,
-  getTables: () => getTables,
-  getValueFromParameter: () => getValueFromParameter,
-  getWebApp: () => getWebApp,
-  getWebApps: () => getWebApps2,
-  getWorkflow: () => getWorkflow,
-  getWorkflows: () => getWorkflows2,
-  mapToClickHouseType: () => mapToClickHouseType,
-  quoteIdentifier: () => quoteIdentifier,
-  sql: () => sql,
-  toQuery: () => toQuery,
-  toQueryPreview: () => toQueryPreview,
-  toStaticQuery: () => toStaticQuery
-});
-module.exports = __toCommonJS(browserCompatible_exports);
-var init_browserCompatible = __esm({
-  "src/browserCompatible.ts"() {
-    init_dmv2();
-    init_types();
-    init_sqlHelpers();
-  }
-});
-init_browserCompatible();
+function getMaterializedViews() {
+  return getMooseInternal().materializedViews;
+}
+function getMaterializedView(name) {
+  return getMooseInternal().materializedViews.get(name);
+}
+function getCustomViews() {
+  return getMooseInternal().customViews;
+}
+function getCustomView(name) {
+  return getMooseInternal().customViews.get(name);
+}
 // Annotate the CommonJS export names for ESM import in node:
 0 && (module.exports = {
   Api,
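The registry gains accessors for materialized views and custom views, following the same pattern as the existing table, stream, and web-app getters (a name-keyed map on getMooseInternal()). A usage sketch; the view names are hypothetical, and the import path assumes these functions are re-exported from the package entry as the export list below indicates:

    import {
      getMaterializedViews,
      getMaterializedView,
      getCustomViews,
      getCustomView,
    } from "@514labs/moose-lib";

    const allMvs = getMaterializedViews();               // map of registered materialized views
    const rollup = getMaterializedView("daily_rollup");  // undefined if not registered
    const allCustom = getCustomViews();
    const one = getCustomView("my_view");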
@@ -3026,8 +2763,12 @@ init_browserCompatible();
   createClickhouseParameter,
   getApi,
   getApis,
+  getCustomView,
+  getCustomViews,
   getIngestApi,
   getIngestApis,
+  getMaterializedView,
+  getMaterializedViews,
   getSqlResource,
   getSqlResources,
   getStream,