@514labs/moose-lib 0.6.295-ci-17-gc22400d0 → 0.6.295
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{browserCompatible-CMEunMFq.d.ts → browserCompatible-B8CAYjv5.d.ts} +1 -1
- package/dist/{browserCompatible-FzU17dxm.d.mts → browserCompatible-ChWHzgtb.d.mts} +1 -1
- package/dist/browserCompatible.d.mts +2 -2
- package/dist/browserCompatible.d.ts +2 -2
- package/dist/browserCompatible.js +2161 -2444
- package/dist/browserCompatible.js.map +1 -1
- package/dist/browserCompatible.mjs +2165 -2446
- package/dist/browserCompatible.mjs.map +1 -1
- package/dist/dmv2/index.d.mts +1 -1
- package/dist/dmv2/index.d.ts +1 -1
- package/dist/dmv2/index.js +2058 -2341
- package/dist/dmv2/index.js.map +1 -1
- package/dist/dmv2/index.mjs +2020 -2301
- package/dist/dmv2/index.mjs.map +1 -1
- package/dist/{index-CcHF2cVT.d.mts → index-rQOQo9sv.d.mts} +5 -16
- package/dist/{index-CcHF2cVT.d.ts → index-rQOQo9sv.d.ts} +5 -16
- package/dist/index.d.mts +6 -76
- package/dist/index.d.ts +6 -76
- package/dist/index.js +2737 -3081
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2630 -2973
- package/dist/index.mjs.map +1 -1
- package/dist/moose-runner.js +1136 -1715
- package/dist/moose-runner.js.map +1 -1
- package/dist/moose-runner.mjs +1127 -1704
- package/dist/moose-runner.mjs.map +1 -1
- package/package.json +1 -1
package/dist/moose-runner.js
CHANGED
|
@@ -9,10 +9,6 @@ var __hasOwnProp = Object.prototype.hasOwnProperty;
|
|
|
9
9
|
var __esm = (fn, res) => function __init() {
|
|
10
10
|
return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
|
|
11
11
|
};
|
|
12
|
-
var __export = (target, all) => {
|
|
13
|
-
for (var name in all)
|
|
14
|
-
__defProp(target, name, { get: all[name], enumerable: true });
|
|
15
|
-
};
|
|
16
12
|
var __copyProps = (to, from, except, desc) => {
|
|
17
13
|
if (from && typeof from === "object" || typeof from === "function") {
|
|
18
14
|
for (let key of __getOwnPropNames(from))
|
|
@@ -30,344 +26,6 @@ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__ge
|
|
|
30
26
|
mod
|
|
31
27
|
));
|
|
32
28
|
|
|
33
|
-
// src/dmv2/utils/stackTrace.ts
|
|
34
|
-
var init_stackTrace = __esm({
|
|
35
|
-
"src/dmv2/utils/stackTrace.ts"() {
|
|
36
|
-
"use strict";
|
|
37
|
-
}
|
|
38
|
-
});
|
|
39
|
-
|
|
40
|
-
// src/dmv2/typedBase.ts
|
|
41
|
-
var init_typedBase = __esm({
|
|
42
|
-
"src/dmv2/typedBase.ts"() {
|
|
43
|
-
"use strict";
|
|
44
|
-
init_stackTrace();
|
|
45
|
-
}
|
|
46
|
-
});
|
|
47
|
-
|
|
48
|
-
// src/dataModels/dataModelTypes.ts
|
|
49
|
-
var init_dataModelTypes = __esm({
|
|
50
|
-
"src/dataModels/dataModelTypes.ts"() {
|
|
51
|
-
"use strict";
|
|
52
|
-
}
|
|
53
|
-
});
|
|
54
|
-
|
|
55
|
-
// src/sqlHelpers.ts
|
|
56
|
-
function sql(strings, ...values) {
|
|
57
|
-
return new Sql(strings, values);
|
|
58
|
-
}
|
|
59
|
-
function createClickhouseParameter(parameterIndex, value) {
|
|
60
|
-
return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
|
|
61
|
-
}
|
|
62
|
-
function emptyIfUndefined(value) {
|
|
63
|
-
return value === void 0 ? "" : value;
|
|
64
|
-
}
|
|
65
|
-
var isTable, isColumn, instanceofSql, Sql, toQuery, toQueryPreview, getValueFromParameter, mapToClickHouseType;
|
|
66
|
-
var init_sqlHelpers = __esm({
|
|
67
|
-
"src/sqlHelpers.ts"() {
|
|
68
|
-
"use strict";
|
|
69
|
-
isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
|
|
70
|
-
isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
|
|
71
|
-
instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
|
|
72
|
-
Sql = class {
|
|
73
|
-
values;
|
|
74
|
-
strings;
|
|
75
|
-
constructor(rawStrings, rawValues) {
|
|
76
|
-
if (rawStrings.length - 1 !== rawValues.length) {
|
|
77
|
-
if (rawStrings.length === 0) {
|
|
78
|
-
throw new TypeError("Expected at least 1 string");
|
|
79
|
-
}
|
|
80
|
-
throw new TypeError(
|
|
81
|
-
`Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
|
|
82
|
-
);
|
|
83
|
-
}
|
|
84
|
-
const valuesLength = rawValues.reduce(
|
|
85
|
-
(len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
|
|
86
|
-
0
|
|
87
|
-
);
|
|
88
|
-
this.values = new Array(valuesLength);
|
|
89
|
-
this.strings = new Array(valuesLength + 1);
|
|
90
|
-
this.strings[0] = rawStrings[0];
|
|
91
|
-
let i = 0, pos = 0;
|
|
92
|
-
while (i < rawValues.length) {
|
|
93
|
-
const child = rawValues[i++];
|
|
94
|
-
const rawString = rawStrings[i];
|
|
95
|
-
if (instanceofSql(child)) {
|
|
96
|
-
this.strings[pos] += child.strings[0];
|
|
97
|
-
let childIndex = 0;
|
|
98
|
-
while (childIndex < child.values.length) {
|
|
99
|
-
this.values[pos++] = child.values[childIndex++];
|
|
100
|
-
this.strings[pos] = child.strings[childIndex];
|
|
101
|
-
}
|
|
102
|
-
this.strings[pos] += rawString;
|
|
103
|
-
} else if (isColumn(child)) {
|
|
104
|
-
const aggregationFunction = child.annotations.find(
|
|
105
|
-
([k, _]) => k === "aggregationFunction"
|
|
106
|
-
);
|
|
107
|
-
if (aggregationFunction !== void 0) {
|
|
108
|
-
this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
|
|
109
|
-
} else {
|
|
110
|
-
this.strings[pos] += `\`${child.name}\``;
|
|
111
|
-
}
|
|
112
|
-
this.strings[pos] += rawString;
|
|
113
|
-
} else if (isTable(child)) {
|
|
114
|
-
if (child.config.database) {
|
|
115
|
-
this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
|
|
116
|
-
} else {
|
|
117
|
-
this.strings[pos] += `\`${child.name}\``;
|
|
118
|
-
}
|
|
119
|
-
this.strings[pos] += rawString;
|
|
120
|
-
} else {
|
|
121
|
-
this.values[pos++] = child;
|
|
122
|
-
this.strings[pos] = rawString;
|
|
123
|
-
}
|
|
124
|
-
}
|
|
125
|
-
}
|
|
126
|
-
};
|
|
127
|
-
toQuery = (sql3) => {
|
|
128
|
-
const parameterizedStubs = sql3.values.map(
|
|
129
|
-
(v, i) => createClickhouseParameter(i, v)
|
|
130
|
-
);
|
|
131
|
-
const query = sql3.strings.map(
|
|
132
|
-
(s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
|
|
133
|
-
).join("");
|
|
134
|
-
const query_params = sql3.values.reduce(
|
|
135
|
-
(acc, v, i) => ({
|
|
136
|
-
...acc,
|
|
137
|
-
[`p${i}`]: getValueFromParameter(v)
|
|
138
|
-
}),
|
|
139
|
-
{}
|
|
140
|
-
);
|
|
141
|
-
return [query, query_params];
|
|
142
|
-
};
|
|
143
|
-
toQueryPreview = (sql3) => {
|
|
144
|
-
try {
|
|
145
|
-
const formatValue = (v) => {
|
|
146
|
-
if (Array.isArray(v)) {
|
|
147
|
-
const [type, val] = v;
|
|
148
|
-
if (type === "Identifier") {
|
|
149
|
-
return `\`${String(val)}\``;
|
|
150
|
-
}
|
|
151
|
-
return `[${v.map((x) => formatValue(x)).join(", ")}]`;
|
|
152
|
-
}
|
|
153
|
-
if (v === null || v === void 0) return "NULL";
|
|
154
|
-
if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
|
|
155
|
-
if (typeof v === "number") return String(v);
|
|
156
|
-
if (typeof v === "boolean") return v ? "true" : "false";
|
|
157
|
-
if (v instanceof Date)
|
|
158
|
-
return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
|
|
159
|
-
try {
|
|
160
|
-
return JSON.stringify(v);
|
|
161
|
-
} catch {
|
|
162
|
-
return String(v);
|
|
163
|
-
}
|
|
164
|
-
};
|
|
165
|
-
let out = sql3.strings[0] ?? "";
|
|
166
|
-
for (let i = 0; i < sql3.values.length; i++) {
|
|
167
|
-
const val = getValueFromParameter(sql3.values[i]);
|
|
168
|
-
out += formatValue(val);
|
|
169
|
-
out += sql3.strings[i + 1] ?? "";
|
|
170
|
-
}
|
|
171
|
-
return out.replace(/\s+/g, " ").trim();
|
|
172
|
-
} catch (error) {
|
|
173
|
-
console.log(`toQueryPreview error: ${error}`);
|
|
174
|
-
return "/* query preview unavailable */";
|
|
175
|
-
}
|
|
176
|
-
};
|
|
177
|
-
getValueFromParameter = (value) => {
|
|
178
|
-
if (Array.isArray(value)) {
|
|
179
|
-
const [type, val] = value;
|
|
180
|
-
if (type === "Identifier") return val;
|
|
181
|
-
}
|
|
182
|
-
return value;
|
|
183
|
-
};
|
|
184
|
-
mapToClickHouseType = (value) => {
|
|
185
|
-
if (typeof value === "number") {
|
|
186
|
-
return Number.isInteger(value) ? "Int" : "Float";
|
|
187
|
-
}
|
|
188
|
-
if (typeof value === "boolean") return "Bool";
|
|
189
|
-
if (value instanceof Date) return "DateTime";
|
|
190
|
-
if (Array.isArray(value)) {
|
|
191
|
-
const [type, _] = value;
|
|
192
|
-
return type;
|
|
193
|
-
}
|
|
194
|
-
return "String";
|
|
195
|
-
};
|
|
196
|
-
}
|
|
197
|
-
});
|
|
198
|
-
|
|
199
|
-
// src/blocks/helpers.ts
|
|
200
|
-
var init_helpers = __esm({
|
|
201
|
-
"src/blocks/helpers.ts"() {
|
|
202
|
-
"use strict";
|
|
203
|
-
init_sqlHelpers();
|
|
204
|
-
}
|
|
205
|
-
});
|
|
206
|
-
|
|
207
|
-
// src/dmv2/sdk/olapTable.ts
|
|
208
|
-
var import_node_stream, import_node_crypto;
|
|
209
|
-
var init_olapTable = __esm({
|
|
210
|
-
"src/dmv2/sdk/olapTable.ts"() {
|
|
211
|
-
"use strict";
|
|
212
|
-
init_typedBase();
|
|
213
|
-
init_dataModelTypes();
|
|
214
|
-
init_helpers();
|
|
215
|
-
init_internal();
|
|
216
|
-
import_node_stream = require("stream");
|
|
217
|
-
import_node_crypto = require("crypto");
|
|
218
|
-
init_sqlHelpers();
|
|
219
|
-
}
|
|
220
|
-
});
|
|
221
|
-
|
|
222
|
-
// src/dmv2/sdk/stream.ts
|
|
223
|
-
var import_node_crypto2;
|
|
224
|
-
var init_stream = __esm({
|
|
225
|
-
"src/dmv2/sdk/stream.ts"() {
|
|
226
|
-
"use strict";
|
|
227
|
-
init_typedBase();
|
|
228
|
-
init_internal();
|
|
229
|
-
import_node_crypto2 = require("crypto");
|
|
230
|
-
init_stackTrace();
|
|
231
|
-
}
|
|
232
|
-
});
|
|
233
|
-
|
|
234
|
-
// src/dmv2/sdk/workflow.ts
|
|
235
|
-
var init_workflow = __esm({
|
|
236
|
-
"src/dmv2/sdk/workflow.ts"() {
|
|
237
|
-
"use strict";
|
|
238
|
-
init_internal();
|
|
239
|
-
}
|
|
240
|
-
});
|
|
241
|
-
|
|
242
|
-
// src/dmv2/sdk/ingestApi.ts
|
|
243
|
-
var init_ingestApi = __esm({
|
|
244
|
-
"src/dmv2/sdk/ingestApi.ts"() {
|
|
245
|
-
"use strict";
|
|
246
|
-
init_typedBase();
|
|
247
|
-
init_internal();
|
|
248
|
-
}
|
|
249
|
-
});
|
|
250
|
-
|
|
251
|
-
// src/dmv2/sdk/consumptionApi.ts
|
|
252
|
-
var init_consumptionApi = __esm({
|
|
253
|
-
"src/dmv2/sdk/consumptionApi.ts"() {
|
|
254
|
-
"use strict";
|
|
255
|
-
init_typedBase();
|
|
256
|
-
init_internal();
|
|
257
|
-
}
|
|
258
|
-
});
|
|
259
|
-
|
|
260
|
-
// src/dmv2/sdk/ingestPipeline.ts
|
|
261
|
-
var init_ingestPipeline = __esm({
|
|
262
|
-
"src/dmv2/sdk/ingestPipeline.ts"() {
|
|
263
|
-
"use strict";
|
|
264
|
-
init_typedBase();
|
|
265
|
-
init_stream();
|
|
266
|
-
init_olapTable();
|
|
267
|
-
init_ingestApi();
|
|
268
|
-
init_helpers();
|
|
269
|
-
}
|
|
270
|
-
});
|
|
271
|
-
|
|
272
|
-
// src/dmv2/sdk/etlPipeline.ts
|
|
273
|
-
var init_etlPipeline = __esm({
|
|
274
|
-
"src/dmv2/sdk/etlPipeline.ts"() {
|
|
275
|
-
"use strict";
|
|
276
|
-
init_workflow();
|
|
277
|
-
}
|
|
278
|
-
});
|
|
279
|
-
|
|
280
|
-
// src/dmv2/sdk/sqlResource.ts
|
|
281
|
-
var init_sqlResource = __esm({
|
|
282
|
-
"src/dmv2/sdk/sqlResource.ts"() {
|
|
283
|
-
"use strict";
|
|
284
|
-
init_internal();
|
|
285
|
-
init_sqlHelpers();
|
|
286
|
-
init_stackTrace();
|
|
287
|
-
}
|
|
288
|
-
});
|
|
289
|
-
|
|
290
|
-
// src/dmv2/sdk/materializedView.ts
|
|
291
|
-
var init_materializedView = __esm({
|
|
292
|
-
"src/dmv2/sdk/materializedView.ts"() {
|
|
293
|
-
"use strict";
|
|
294
|
-
init_helpers();
|
|
295
|
-
init_sqlHelpers();
|
|
296
|
-
init_olapTable();
|
|
297
|
-
init_sqlResource();
|
|
298
|
-
}
|
|
299
|
-
});
|
|
300
|
-
|
|
301
|
-
// src/dmv2/sdk/view.ts
|
|
302
|
-
var init_view = __esm({
|
|
303
|
-
"src/dmv2/sdk/view.ts"() {
|
|
304
|
-
"use strict";
|
|
305
|
-
init_helpers();
|
|
306
|
-
init_sqlHelpers();
|
|
307
|
-
init_sqlResource();
|
|
308
|
-
}
|
|
309
|
-
});
|
|
310
|
-
|
|
311
|
-
// src/dmv2/sdk/lifeCycle.ts
|
|
312
|
-
var init_lifeCycle = __esm({
|
|
313
|
-
"src/dmv2/sdk/lifeCycle.ts"() {
|
|
314
|
-
"use strict";
|
|
315
|
-
}
|
|
316
|
-
});
|
|
317
|
-
|
|
318
|
-
// src/dmv2/sdk/webApp.ts
|
|
319
|
-
var init_webApp = __esm({
|
|
320
|
-
"src/dmv2/sdk/webApp.ts"() {
|
|
321
|
-
"use strict";
|
|
322
|
-
init_internal();
|
|
323
|
-
}
|
|
324
|
-
});
|
|
325
|
-
|
|
326
|
-
// src/dmv2/registry.ts
|
|
327
|
-
var init_registry = __esm({
|
|
328
|
-
"src/dmv2/registry.ts"() {
|
|
329
|
-
"use strict";
|
|
330
|
-
init_internal();
|
|
331
|
-
}
|
|
332
|
-
});
|
|
333
|
-
|
|
334
|
-
// src/dmv2/index.ts
|
|
335
|
-
var init_dmv2 = __esm({
|
|
336
|
-
"src/dmv2/index.ts"() {
|
|
337
|
-
"use strict";
|
|
338
|
-
init_olapTable();
|
|
339
|
-
init_stream();
|
|
340
|
-
init_workflow();
|
|
341
|
-
init_ingestApi();
|
|
342
|
-
init_consumptionApi();
|
|
343
|
-
init_ingestPipeline();
|
|
344
|
-
init_etlPipeline();
|
|
345
|
-
init_materializedView();
|
|
346
|
-
init_sqlResource();
|
|
347
|
-
init_view();
|
|
348
|
-
init_lifeCycle();
|
|
349
|
-
init_webApp();
|
|
350
|
-
init_registry();
|
|
351
|
-
}
|
|
352
|
-
});
|
|
353
|
-
|
|
354
|
-
// src/dataModels/types.ts
|
|
355
|
-
var init_types = __esm({
|
|
356
|
-
"src/dataModels/types.ts"() {
|
|
357
|
-
"use strict";
|
|
358
|
-
}
|
|
359
|
-
});
|
|
360
|
-
|
|
361
|
-
// src/browserCompatible.ts
|
|
362
|
-
var init_browserCompatible = __esm({
|
|
363
|
-
"src/browserCompatible.ts"() {
|
|
364
|
-
"use strict";
|
|
365
|
-
init_dmv2();
|
|
366
|
-
init_types();
|
|
367
|
-
init_sqlHelpers();
|
|
368
|
-
}
|
|
369
|
-
});
|
|
370
|
-
|
|
371
29
|
// src/commons.ts
|
|
372
30
|
function isTruthy(value) {
|
|
373
31
|
if (!value) return false;
|
|
@@ -497,14 +155,165 @@ var init_commons = __esm({
|
|
|
497
155
|
}
|
|
498
156
|
});
|
|
499
157
|
|
|
500
|
-
// src/
|
|
501
|
-
var
|
|
502
|
-
|
|
503
|
-
|
|
158
|
+
// src/moose-runner.ts
|
|
159
|
+
var import_ts_node = require("ts-node");
|
|
160
|
+
|
|
161
|
+
// src/dmv2/internal.ts
|
|
162
|
+
var import_process = __toESM(require("process"));
|
|
163
|
+
|
|
164
|
+
// src/sqlHelpers.ts
|
|
165
|
+
var isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
|
|
166
|
+
var isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
|
|
167
|
+
function sql(strings, ...values) {
|
|
168
|
+
return new Sql(strings, values);
|
|
169
|
+
}
|
|
170
|
+
var instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
|
|
171
|
+
var Sql = class {
|
|
172
|
+
values;
|
|
173
|
+
strings;
|
|
174
|
+
constructor(rawStrings, rawValues) {
|
|
175
|
+
if (rawStrings.length - 1 !== rawValues.length) {
|
|
176
|
+
if (rawStrings.length === 0) {
|
|
177
|
+
throw new TypeError("Expected at least 1 string");
|
|
178
|
+
}
|
|
179
|
+
throw new TypeError(
|
|
180
|
+
`Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
|
|
181
|
+
);
|
|
182
|
+
}
|
|
183
|
+
const valuesLength = rawValues.reduce(
|
|
184
|
+
(len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
|
|
185
|
+
0
|
|
186
|
+
);
|
|
187
|
+
this.values = new Array(valuesLength);
|
|
188
|
+
this.strings = new Array(valuesLength + 1);
|
|
189
|
+
this.strings[0] = rawStrings[0];
|
|
190
|
+
let i = 0, pos = 0;
|
|
191
|
+
while (i < rawValues.length) {
|
|
192
|
+
const child = rawValues[i++];
|
|
193
|
+
const rawString = rawStrings[i];
|
|
194
|
+
if (instanceofSql(child)) {
|
|
195
|
+
this.strings[pos] += child.strings[0];
|
|
196
|
+
let childIndex = 0;
|
|
197
|
+
while (childIndex < child.values.length) {
|
|
198
|
+
this.values[pos++] = child.values[childIndex++];
|
|
199
|
+
this.strings[pos] = child.strings[childIndex];
|
|
200
|
+
}
|
|
201
|
+
this.strings[pos] += rawString;
|
|
202
|
+
} else if (isColumn(child)) {
|
|
203
|
+
const aggregationFunction = child.annotations.find(
|
|
204
|
+
([k, _]) => k === "aggregationFunction"
|
|
205
|
+
);
|
|
206
|
+
if (aggregationFunction !== void 0) {
|
|
207
|
+
this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
|
|
208
|
+
} else {
|
|
209
|
+
this.strings[pos] += `\`${child.name}\``;
|
|
210
|
+
}
|
|
211
|
+
this.strings[pos] += rawString;
|
|
212
|
+
} else if (isTable(child)) {
|
|
213
|
+
if (child.config.database) {
|
|
214
|
+
this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
|
|
215
|
+
} else {
|
|
216
|
+
this.strings[pos] += `\`${child.name}\``;
|
|
217
|
+
}
|
|
218
|
+
this.strings[pos] += rawString;
|
|
219
|
+
} else {
|
|
220
|
+
this.values[pos++] = child;
|
|
221
|
+
this.strings[pos] = rawString;
|
|
222
|
+
}
|
|
223
|
+
}
|
|
504
224
|
}
|
|
505
|
-
}
|
|
225
|
+
};
|
|
226
|
+
var toQuery = (sql3) => {
|
|
227
|
+
const parameterizedStubs = sql3.values.map(
|
|
228
|
+
(v, i) => createClickhouseParameter(i, v)
|
|
229
|
+
);
|
|
230
|
+
const query = sql3.strings.map(
|
|
231
|
+
(s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
|
|
232
|
+
).join("");
|
|
233
|
+
const query_params = sql3.values.reduce(
|
|
234
|
+
(acc, v, i) => ({
|
|
235
|
+
...acc,
|
|
236
|
+
[`p${i}`]: getValueFromParameter(v)
|
|
237
|
+
}),
|
|
238
|
+
{}
|
|
239
|
+
);
|
|
240
|
+
return [query, query_params];
|
|
241
|
+
};
|
|
242
|
+
var toQueryPreview = (sql3) => {
|
|
243
|
+
try {
|
|
244
|
+
const formatValue = (v) => {
|
|
245
|
+
if (Array.isArray(v)) {
|
|
246
|
+
const [type, val] = v;
|
|
247
|
+
if (type === "Identifier") {
|
|
248
|
+
return `\`${String(val)}\``;
|
|
249
|
+
}
|
|
250
|
+
return `[${v.map((x) => formatValue(x)).join(", ")}]`;
|
|
251
|
+
}
|
|
252
|
+
if (v === null || v === void 0) return "NULL";
|
|
253
|
+
if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
|
|
254
|
+
if (typeof v === "number") return String(v);
|
|
255
|
+
if (typeof v === "boolean") return v ? "true" : "false";
|
|
256
|
+
if (v instanceof Date)
|
|
257
|
+
return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
|
|
258
|
+
try {
|
|
259
|
+
return JSON.stringify(v);
|
|
260
|
+
} catch {
|
|
261
|
+
return String(v);
|
|
262
|
+
}
|
|
263
|
+
};
|
|
264
|
+
let out = sql3.strings[0] ?? "";
|
|
265
|
+
for (let i = 0; i < sql3.values.length; i++) {
|
|
266
|
+
const val = getValueFromParameter(sql3.values[i]);
|
|
267
|
+
out += formatValue(val);
|
|
268
|
+
out += sql3.strings[i + 1] ?? "";
|
|
269
|
+
}
|
|
270
|
+
return out.replace(/\s+/g, " ").trim();
|
|
271
|
+
} catch (error) {
|
|
272
|
+
console.log(`toQueryPreview error: ${error}`);
|
|
273
|
+
return "/* query preview unavailable */";
|
|
274
|
+
}
|
|
275
|
+
};
|
|
276
|
+
var getValueFromParameter = (value) => {
|
|
277
|
+
if (Array.isArray(value)) {
|
|
278
|
+
const [type, val] = value;
|
|
279
|
+
if (type === "Identifier") return val;
|
|
280
|
+
}
|
|
281
|
+
return value;
|
|
282
|
+
};
|
|
283
|
+
function createClickhouseParameter(parameterIndex, value) {
|
|
284
|
+
return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
|
|
285
|
+
}
|
|
286
|
+
var mapToClickHouseType = (value) => {
|
|
287
|
+
if (typeof value === "number") {
|
|
288
|
+
return Number.isInteger(value) ? "Int" : "Float";
|
|
289
|
+
}
|
|
290
|
+
if (typeof value === "boolean") return "Bool";
|
|
291
|
+
if (value instanceof Date) return "DateTime";
|
|
292
|
+
if (Array.isArray(value)) {
|
|
293
|
+
const [type, _] = value;
|
|
294
|
+
return type;
|
|
295
|
+
}
|
|
296
|
+
return "String";
|
|
297
|
+
};
|
|
298
|
+
function emptyIfUndefined(value) {
|
|
299
|
+
return value === void 0 ? "" : value;
|
|
300
|
+
}
|
|
301
|
+
|
|
302
|
+
// src/dmv2/sdk/olapTable.ts
|
|
303
|
+
var import_node_stream = require("stream");
|
|
304
|
+
var import_node_crypto = require("crypto");
|
|
305
|
+
|
|
306
|
+
// src/dmv2/sdk/stream.ts
|
|
307
|
+
var import_node_crypto2 = require("crypto");
|
|
308
|
+
|
|
309
|
+
// src/index.ts
|
|
310
|
+
init_commons();
|
|
506
311
|
|
|
507
312
|
// src/consumption-apis/helpers.ts
|
|
313
|
+
var import_client2 = require("@temporalio/client");
|
|
314
|
+
var import_node_crypto3 = require("crypto");
|
|
315
|
+
var import_perf_hooks = require("perf_hooks");
|
|
316
|
+
var fs = __toESM(require("fs"));
|
|
508
317
|
function formatElapsedTime(ms) {
|
|
509
318
|
if (ms < 1e3) {
|
|
510
319
|
return `${Math.round(ms)} ms`;
|
|
@@ -517,6 +326,142 @@ function formatElapsedTime(ms) {
|
|
|
517
326
|
const remainingSeconds = seconds % 60;
|
|
518
327
|
return `${minutes} minutes and ${remainingSeconds.toFixed(2)} seconds`;
|
|
519
328
|
}
|
|
329
|
+
var MooseClient = class {
|
|
330
|
+
query;
|
|
331
|
+
workflow;
|
|
332
|
+
constructor(queryClient, temporalClient) {
|
|
333
|
+
this.query = queryClient;
|
|
334
|
+
this.workflow = new WorkflowClient(temporalClient);
|
|
335
|
+
}
|
|
336
|
+
};
|
|
337
|
+
var QueryClient = class {
|
|
338
|
+
client;
|
|
339
|
+
query_id_prefix;
|
|
340
|
+
constructor(client, query_id_prefix) {
|
|
341
|
+
this.client = client;
|
|
342
|
+
this.query_id_prefix = query_id_prefix;
|
|
343
|
+
}
|
|
344
|
+
async execute(sql3) {
|
|
345
|
+
const [query, query_params] = toQuery(sql3);
|
|
346
|
+
console.log(`[QueryClient] | Query: ${toQueryPreview(sql3)}`);
|
|
347
|
+
const start = import_perf_hooks.performance.now();
|
|
348
|
+
const result = await this.client.query({
|
|
349
|
+
query,
|
|
350
|
+
query_params,
|
|
351
|
+
format: "JSONEachRow",
|
|
352
|
+
query_id: this.query_id_prefix + (0, import_node_crypto3.randomUUID)()
|
|
353
|
+
// Note: wait_end_of_query deliberately NOT set here as this is used for SELECT queries
|
|
354
|
+
// where response buffering would harm streaming performance and concurrency
|
|
355
|
+
});
|
|
356
|
+
const elapsedMs = import_perf_hooks.performance.now() - start;
|
|
357
|
+
console.log(
|
|
358
|
+
`[QueryClient] | Query completed: ${formatElapsedTime(elapsedMs)}`
|
|
359
|
+
);
|
|
360
|
+
return result;
|
|
361
|
+
}
|
|
362
|
+
async command(sql3) {
|
|
363
|
+
const [query, query_params] = toQuery(sql3);
|
|
364
|
+
console.log(`[QueryClient] | Command: ${toQueryPreview(sql3)}`);
|
|
365
|
+
const start = import_perf_hooks.performance.now();
|
|
366
|
+
const result = await this.client.command({
|
|
367
|
+
query,
|
|
368
|
+
query_params,
|
|
369
|
+
query_id: this.query_id_prefix + (0, import_node_crypto3.randomUUID)()
|
|
370
|
+
});
|
|
371
|
+
const elapsedMs = import_perf_hooks.performance.now() - start;
|
|
372
|
+
console.log(
|
|
373
|
+
`[QueryClient] | Command completed: ${formatElapsedTime(elapsedMs)}`
|
|
374
|
+
);
|
|
375
|
+
return result;
|
|
376
|
+
}
|
|
377
|
+
};
|
|
378
|
+
var WorkflowClient = class {
|
|
379
|
+
client;
|
|
380
|
+
constructor(temporalClient) {
|
|
381
|
+
this.client = temporalClient;
|
|
382
|
+
}
|
|
383
|
+
async execute(name, input_data) {
|
|
384
|
+
try {
|
|
385
|
+
if (!this.client) {
|
|
386
|
+
return {
|
|
387
|
+
status: 404,
|
|
388
|
+
body: `Temporal client not found. Is the feature flag enabled?`
|
|
389
|
+
};
|
|
390
|
+
}
|
|
391
|
+
const config = await this.getWorkflowConfig(name);
|
|
392
|
+
const [processedInput, workflowId] = this.processInputData(
|
|
393
|
+
name,
|
|
394
|
+
input_data
|
|
395
|
+
);
|
|
396
|
+
console.log(
|
|
397
|
+
`WorkflowClient - starting workflow: ${name} with config ${JSON.stringify(config)} and input_data ${JSON.stringify(processedInput)}`
|
|
398
|
+
);
|
|
399
|
+
const handle = await this.client.workflow.start("ScriptWorkflow", {
|
|
400
|
+
args: [
|
|
401
|
+
{ workflow_name: name, execution_mode: "start" },
|
|
402
|
+
processedInput
|
|
403
|
+
],
|
|
404
|
+
taskQueue: "typescript-script-queue",
|
|
405
|
+
workflowId,
|
|
406
|
+
workflowIdConflictPolicy: "FAIL",
|
|
407
|
+
workflowIdReusePolicy: "ALLOW_DUPLICATE",
|
|
408
|
+
retry: {
|
|
409
|
+
maximumAttempts: config.retries
|
|
410
|
+
},
|
|
411
|
+
workflowRunTimeout: config.timeout
|
|
412
|
+
});
|
|
413
|
+
return {
|
|
414
|
+
status: 200,
|
|
415
|
+
body: `Workflow started: ${name}. View it in the Temporal dashboard: http://localhost:8080/namespaces/default/workflows/${workflowId}/${handle.firstExecutionRunId}/history`
|
|
416
|
+
};
|
|
417
|
+
} catch (error) {
|
|
418
|
+
return {
|
|
419
|
+
status: 400,
|
|
420
|
+
body: `Error starting workflow: ${error}`
|
|
421
|
+
};
|
|
422
|
+
}
|
|
423
|
+
}
|
|
424
|
+
async terminate(workflowId) {
|
|
425
|
+
try {
|
|
426
|
+
if (!this.client) {
|
|
427
|
+
return {
|
|
428
|
+
status: 404,
|
|
429
|
+
body: `Temporal client not found. Is the feature flag enabled?`
|
|
430
|
+
};
|
|
431
|
+
}
|
|
432
|
+
const handle = this.client.workflow.getHandle(workflowId);
|
|
433
|
+
await handle.terminate();
|
|
434
|
+
return {
|
|
435
|
+
status: 200,
|
|
436
|
+
body: `Workflow terminated: ${workflowId}`
|
|
437
|
+
};
|
|
438
|
+
} catch (error) {
|
|
439
|
+
return {
|
|
440
|
+
status: 400,
|
|
441
|
+
body: `Error terminating workflow: ${error}`
|
|
442
|
+
};
|
|
443
|
+
}
|
|
444
|
+
}
|
|
445
|
+
async getWorkflowConfig(name) {
|
|
446
|
+
const workflows = await getWorkflows2();
|
|
447
|
+
const dmv2Workflow = workflows.get(name);
|
|
448
|
+
if (dmv2Workflow) {
|
|
449
|
+
return {
|
|
450
|
+
retries: dmv2Workflow.config.retries || 3,
|
|
451
|
+
timeout: dmv2Workflow.config.timeout || "1h"
|
|
452
|
+
};
|
|
453
|
+
}
|
|
454
|
+
throw new Error(`Workflow config not found for ${name}`);
|
|
455
|
+
}
|
|
456
|
+
processInputData(name, input_data) {
|
|
457
|
+
let workflowId = name;
|
|
458
|
+
if (input_data) {
|
|
459
|
+
const hash = (0, import_node_crypto3.createHash)("sha256").update(JSON.stringify(input_data)).digest("hex").slice(0, 16);
|
|
460
|
+
workflowId = `${name}-${hash}`;
|
|
461
|
+
}
|
|
462
|
+
return [input_data, workflowId];
|
|
463
|
+
}
|
|
464
|
+
};
|
|
520
465
|
async function getTemporalClient(temporalUrl, namespace, clientCert, clientKey, apiKey) {
|
|
521
466
|
try {
|
|
522
467
|
console.info(
|
|
@@ -553,989 +498,538 @@ async function getTemporalClient(temporalUrl, namespace, clientCert, clientKey,
|
|
|
553
498
|
return void 0;
|
|
554
499
|
}
|
|
555
500
|
}
|
|
556
|
-
var import_client2, import_node_crypto3, import_perf_hooks, fs, MooseClient, QueryClient, WorkflowClient;
|
|
557
|
-
var init_helpers2 = __esm({
|
|
558
|
-
"src/consumption-apis/helpers.ts"() {
|
|
559
|
-
"use strict";
|
|
560
|
-
import_client2 = require("@temporalio/client");
|
|
561
|
-
import_node_crypto3 = require("crypto");
|
|
562
|
-
import_perf_hooks = require("perf_hooks");
|
|
563
|
-
fs = __toESM(require("fs"));
|
|
564
|
-
init_internal();
|
|
565
|
-
init_sqlHelpers();
|
|
566
|
-
MooseClient = class {
|
|
567
|
-
query;
|
|
568
|
-
workflow;
|
|
569
|
-
constructor(queryClient, temporalClient) {
|
|
570
|
-
this.query = queryClient;
|
|
571
|
-
this.workflow = new WorkflowClient(temporalClient);
|
|
572
|
-
}
|
|
573
|
-
};
|
|
574
|
-
QueryClient = class {
|
|
575
|
-
client;
|
|
576
|
-
query_id_prefix;
|
|
577
|
-
constructor(client, query_id_prefix) {
|
|
578
|
-
this.client = client;
|
|
579
|
-
this.query_id_prefix = query_id_prefix;
|
|
580
|
-
}
|
|
581
|
-
async execute(sql3) {
|
|
582
|
-
const [query, query_params] = toQuery(sql3);
|
|
583
|
-
console.log(`[QueryClient] | Query: ${toQueryPreview(sql3)}`);
|
|
584
|
-
const start = import_perf_hooks.performance.now();
|
|
585
|
-
const result = await this.client.query({
|
|
586
|
-
query,
|
|
587
|
-
query_params,
|
|
588
|
-
format: "JSONEachRow",
|
|
589
|
-
query_id: this.query_id_prefix + (0, import_node_crypto3.randomUUID)()
|
|
590
|
-
// Note: wait_end_of_query deliberately NOT set here as this is used for SELECT queries
|
|
591
|
-
// where response buffering would harm streaming performance and concurrency
|
|
592
|
-
});
|
|
593
|
-
const elapsedMs = import_perf_hooks.performance.now() - start;
|
|
594
|
-
console.log(
|
|
595
|
-
`[QueryClient] | Query completed: ${formatElapsedTime(elapsedMs)}`
|
|
596
|
-
);
|
|
597
|
-
return result;
|
|
598
|
-
}
|
|
599
|
-
async command(sql3) {
|
|
600
|
-
const [query, query_params] = toQuery(sql3);
|
|
601
|
-
console.log(`[QueryClient] | Command: ${toQueryPreview(sql3)}`);
|
|
602
|
-
const start = import_perf_hooks.performance.now();
|
|
603
|
-
const result = await this.client.command({
|
|
604
|
-
query,
|
|
605
|
-
query_params,
|
|
606
|
-
query_id: this.query_id_prefix + (0, import_node_crypto3.randomUUID)()
|
|
607
|
-
});
|
|
608
|
-
const elapsedMs = import_perf_hooks.performance.now() - start;
|
|
609
|
-
console.log(
|
|
610
|
-
`[QueryClient] | Command completed: ${formatElapsedTime(elapsedMs)}`
|
|
611
|
-
);
|
|
612
|
-
return result;
|
|
613
|
-
}
|
|
614
|
-
};
|
|
615
|
-
WorkflowClient = class {
|
|
616
|
-
client;
|
|
617
|
-
constructor(temporalClient) {
|
|
618
|
-
this.client = temporalClient;
|
|
619
|
-
}
|
|
620
|
-
async execute(name, input_data) {
|
|
621
|
-
try {
|
|
622
|
-
if (!this.client) {
|
|
623
|
-
return {
|
|
624
|
-
status: 404,
|
|
625
|
-
body: `Temporal client not found. Is the feature flag enabled?`
|
|
626
|
-
};
|
|
627
|
-
}
|
|
628
|
-
const config = await this.getWorkflowConfig(name);
|
|
629
|
-
const [processedInput, workflowId] = this.processInputData(
|
|
630
|
-
name,
|
|
631
|
-
input_data
|
|
632
|
-
);
|
|
633
|
-
console.log(
|
|
634
|
-
`WorkflowClient - starting workflow: ${name} with config ${JSON.stringify(config)} and input_data ${JSON.stringify(processedInput)}`
|
|
635
|
-
);
|
|
636
|
-
const handle = await this.client.workflow.start("ScriptWorkflow", {
|
|
637
|
-
args: [
|
|
638
|
-
{ workflow_name: name, execution_mode: "start" },
|
|
639
|
-
processedInput
|
|
640
|
-
],
|
|
641
|
-
taskQueue: "typescript-script-queue",
|
|
642
|
-
workflowId,
|
|
643
|
-
workflowIdConflictPolicy: "FAIL",
|
|
644
|
-
workflowIdReusePolicy: "ALLOW_DUPLICATE",
|
|
645
|
-
retry: {
|
|
646
|
-
maximumAttempts: config.retries
|
|
647
|
-
},
|
|
648
|
-
workflowRunTimeout: config.timeout
|
|
649
|
-
});
|
|
650
|
-
return {
|
|
651
|
-
status: 200,
|
|
652
|
-
body: `Workflow started: ${name}. View it in the Temporal dashboard: http://localhost:8080/namespaces/default/workflows/${workflowId}/${handle.firstExecutionRunId}/history`
|
|
653
|
-
};
|
|
654
|
-
} catch (error) {
|
|
655
|
-
return {
|
|
656
|
-
status: 400,
|
|
657
|
-
body: `Error starting workflow: ${error}`
|
|
658
|
-
};
|
|
659
|
-
}
|
|
660
|
-
}
|
|
661
|
-
async terminate(workflowId) {
|
|
662
|
-
try {
|
|
663
|
-
if (!this.client) {
|
|
664
|
-
return {
|
|
665
|
-
status: 404,
|
|
666
|
-
body: `Temporal client not found. Is the feature flag enabled?`
|
|
667
|
-
};
|
|
668
|
-
}
|
|
669
|
-
const handle = this.client.workflow.getHandle(workflowId);
|
|
670
|
-
await handle.terminate();
|
|
671
|
-
return {
|
|
672
|
-
status: 200,
|
|
673
|
-
body: `Workflow terminated: ${workflowId}`
|
|
674
|
-
};
|
|
675
|
-
} catch (error) {
|
|
676
|
-
return {
|
|
677
|
-
status: 400,
|
|
678
|
-
body: `Error terminating workflow: ${error}`
|
|
679
|
-
};
|
|
680
|
-
}
|
|
681
|
-
}
|
|
682
|
-
async getWorkflowConfig(name) {
|
|
683
|
-
const workflows = await getWorkflows2();
|
|
684
|
-
const dmv2Workflow = workflows.get(name);
|
|
685
|
-
if (dmv2Workflow) {
|
|
686
|
-
return {
|
|
687
|
-
retries: dmv2Workflow.config.retries || 3,
|
|
688
|
-
timeout: dmv2Workflow.config.timeout || "1h"
|
|
689
|
-
};
|
|
690
|
-
}
|
|
691
|
-
throw new Error(`Workflow config not found for ${name}`);
|
|
692
|
-
}
|
|
693
|
-
processInputData(name, input_data) {
|
|
694
|
-
let workflowId = name;
|
|
695
|
-
if (input_data) {
|
|
696
|
-
const hash = (0, import_node_crypto3.createHash)("sha256").update(JSON.stringify(input_data)).digest("hex").slice(0, 16);
|
|
697
|
-
workflowId = `${name}-${hash}`;
|
|
698
|
-
}
|
|
699
|
-
return [input_data, workflowId];
|
|
700
|
-
}
|
|
701
|
-
};
|
|
702
|
-
}
|
|
703
|
-
});
|
|
704
|
-
|
|
705
|
-
// src/consumption-apis/webAppHelpers.ts
|
|
706
|
-
var init_webAppHelpers = __esm({
|
|
707
|
-
"src/consumption-apis/webAppHelpers.ts"() {
|
|
708
|
-
"use strict";
|
|
709
|
-
}
|
|
710
|
-
});
|
|
711
501
|
|
|
712
|
-
// src/
|
|
713
|
-
var
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
}
|
|
717
|
-
});
|
|
502
|
+
// src/consumption-apis/runner.ts
|
|
503
|
+
var import_http2 = __toESM(require("http"));
|
|
504
|
+
init_commons();
|
|
505
|
+
var jose = __toESM(require("jose"));
|
|
718
506
|
|
|
719
507
|
// src/cluster-utils.ts
|
|
720
|
-
var import_node_cluster
|
|
721
|
-
var
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
this.workerStart = options.workerStart;
|
|
758
|
-
this.workerStop = options.workerStop;
|
|
759
|
-
if (options.maxCpuUsageRatio && (options.maxCpuUsageRatio > 1 || options.maxCpuUsageRatio < 0)) {
|
|
760
|
-
throw new Error("maxCpuUsageRatio must be between 0 and 1");
|
|
761
|
-
}
|
|
762
|
-
this.maxCpuUsageRatio = options.maxCpuUsageRatio || DEFAULT_MAX_CPU_USAGE_RATIO;
|
|
763
|
-
this.usedCpuCount = this.computeCPUUsageCount(
|
|
764
|
-
this.maxCpuUsageRatio,
|
|
765
|
-
options.maxWorkerCount
|
|
766
|
-
);
|
|
767
|
-
}
|
|
768
|
-
/**
|
|
769
|
-
* Calculates the number of CPU cores to utilize based on available parallelism and constraints.
|
|
770
|
-
*
|
|
771
|
-
* @param cpuUsageRatio - Ratio of CPU cores to use (0-1)
|
|
772
|
-
* @param maxWorkerCount - Optional maximum number of workers
|
|
773
|
-
* @returns The number of CPU cores to utilize
|
|
774
|
-
*/
|
|
775
|
-
computeCPUUsageCount(cpuUsageRatio, maxWorkerCount) {
|
|
776
|
-
const cpuCount = (0, import_node_os.availableParallelism)();
|
|
777
|
-
const maxWorkers = maxWorkerCount || cpuCount;
|
|
778
|
-
return Math.min(
|
|
779
|
-
maxWorkers,
|
|
780
|
-
Math.max(1, Math.floor(cpuCount * cpuUsageRatio))
|
|
781
|
-
);
|
|
782
|
-
}
|
|
783
|
-
/**
|
|
784
|
-
* Initializes the cluster by spawning worker processes and setting up signal handlers.
|
|
785
|
-
* For the primary process, spawns workers and monitors parent process.
|
|
786
|
-
* For worker processes, executes the worker startup function.
|
|
787
|
-
*
|
|
788
|
-
* @throws {Error} If worker is undefined in worker process
|
|
789
|
-
*/
|
|
790
|
-
async start() {
|
|
791
|
-
process.on(SIGTERM, this.gracefulClusterShutdown(SIGTERM));
|
|
792
|
-
process.on(SIGINT, this.gracefulClusterShutdown(SIGINT));
|
|
793
|
-
if (import_node_cluster.default.isPrimary) {
|
|
794
|
-
const parentPid = process.ppid;
|
|
795
|
-
setInterval(() => {
|
|
796
|
-
try {
|
|
797
|
-
process.kill(parentPid, 0);
|
|
798
|
-
} catch (e) {
|
|
799
|
-
console.log("Parent process has exited.");
|
|
800
|
-
this.gracefulClusterShutdown(SIGTERM)();
|
|
801
|
-
}
|
|
802
|
-
}, 1e3);
|
|
803
|
-
await this.bootWorkers(this.usedCpuCount);
|
|
804
|
-
} else {
|
|
805
|
-
if (!import_node_cluster.default.worker) {
|
|
806
|
-
throw new Error(
|
|
807
|
-
"Worker is not defined, it should be defined in worker process"
|
|
808
|
-
);
|
|
809
|
-
}
|
|
810
|
-
this.startOutput = await this.workerStart(
|
|
811
|
-
import_node_cluster.default.worker,
|
|
812
|
-
this.usedCpuCount
|
|
813
|
-
);
|
|
814
|
-
}
|
|
815
|
-
}
|
|
816
|
-
/**
|
|
817
|
-
* Spawns worker processes and configures their lifecycle event handlers.
|
|
818
|
-
* Handles worker online, exit and disconnect events.
|
|
819
|
-
* Automatically restarts failed workers during normal operation.
|
|
820
|
-
*
|
|
821
|
-
* @param numWorkers - Number of worker processes to spawn
|
|
822
|
-
*/
|
|
823
|
-
bootWorkers = async (numWorkers) => {
|
|
824
|
-
console.info(`Setting ${numWorkers} workers...`);
|
|
825
|
-
for (let i = 0; i < numWorkers; i++) {
|
|
826
|
-
import_node_cluster.default.fork();
|
|
827
|
-
}
|
|
828
|
-
import_node_cluster.default.on("online", (worker) => {
|
|
829
|
-
console.info(`worker process ${worker.process.pid} is online`);
|
|
830
|
-
});
|
|
831
|
-
import_node_cluster.default.on("exit", (worker, code, signal) => {
|
|
832
|
-
console.info(
|
|
833
|
-
`worker ${worker.process.pid} exited with code ${code} and signal ${signal}`
|
|
834
|
-
);
|
|
835
|
-
if (!this.shutdownInProgress) {
|
|
836
|
-
setTimeout(() => import_node_cluster.default.fork(), RESTART_TIME_MS);
|
|
837
|
-
}
|
|
838
|
-
if (this.shutdownInProgress && code != 0) {
|
|
839
|
-
this.hasCleanWorkerExit = false;
|
|
840
|
-
}
|
|
841
|
-
});
|
|
842
|
-
import_node_cluster.default.on("disconnect", (worker) => {
|
|
843
|
-
console.info(`worker process ${worker.process.pid} has disconnected`);
|
|
844
|
-
});
|
|
845
|
-
};
|
|
846
|
-
/**
|
|
847
|
-
* Creates a handler function for graceful shutdown on receipt of a signal.
|
|
848
|
-
* Ensures only one shutdown can occur at a time.
|
|
849
|
-
* Handles shutdown differently for primary and worker processes.
|
|
850
|
-
*
|
|
851
|
-
* @param signal - The signal triggering the shutdown (e.g. SIGTERM)
|
|
852
|
-
* @returns An async function that performs the shutdown
|
|
853
|
-
*/
|
|
854
|
-
gracefulClusterShutdown = (signal) => async () => {
|
|
855
|
-
if (this.shutdownInProgress) {
|
|
856
|
-
return;
|
|
857
|
-
}
|
|
858
|
-
this.shutdownInProgress = true;
|
|
859
|
-
this.hasCleanWorkerExit = true;
|
|
860
|
-
console.info(
|
|
861
|
-
`Got ${signal} on ${this.processStr}. Graceful shutdown start at ${(/* @__PURE__ */ new Date()).toISOString()}`
|
|
862
|
-
);
|
|
863
|
-
try {
|
|
864
|
-
if (import_node_cluster.default.isPrimary) {
|
|
865
|
-
await this.shutdownWorkers(signal);
|
|
866
|
-
console.info(`${this.processStr} - worker shutdown successful`);
|
|
867
|
-
(0, import_node_process.exit)(0);
|
|
868
|
-
} else {
|
|
869
|
-
if (this.startOutput) {
|
|
870
|
-
await this.workerStop(this.startOutput);
|
|
871
|
-
} else {
|
|
872
|
-
console.info(
|
|
873
|
-
`${this.processStr} - shutdown before worker fully started`
|
|
874
|
-
);
|
|
875
|
-
}
|
|
876
|
-
console.info(`${this.processStr} shutdown successful`);
|
|
877
|
-
this.hasCleanWorkerExit ? (0, import_node_process.exit)(0) : (0, import_node_process.exit)(1);
|
|
878
|
-
}
|
|
879
|
-
} catch (e) {
|
|
880
|
-
console.error(`${this.processStr} - shutdown failed`, e);
|
|
881
|
-
(0, import_node_process.exit)(1);
|
|
882
|
-
}
|
|
883
|
-
};
|
|
884
|
-
/**
|
|
885
|
-
* Gracefully terminates all worker processes.
|
|
886
|
-
* Monitors workers until they all exit or timeout occurs.
|
|
887
|
-
* Only relevant for the primary process.
|
|
888
|
-
*
|
|
889
|
-
* @param signal - The signal to send to worker processes
|
|
890
|
-
* @returns A promise that resolves when all workers have terminated
|
|
891
|
-
*/
|
|
892
|
-
shutdownWorkers = (signal) => {
|
|
893
|
-
return new Promise((resolve2, reject) => {
|
|
894
|
-
if (!import_node_cluster.default.isPrimary) {
|
|
895
|
-
return resolve2();
|
|
896
|
-
}
|
|
897
|
-
if (!import_node_cluster.default.workers) {
|
|
898
|
-
return resolve2();
|
|
899
|
-
}
|
|
900
|
-
const workerIds = Object.keys(import_node_cluster.default.workers);
|
|
901
|
-
if (workerIds.length == 0) {
|
|
902
|
-
return resolve2();
|
|
903
|
-
}
|
|
904
|
-
let workersAlive = 0;
|
|
905
|
-
let funcRun = 0;
|
|
906
|
-
const cleanWorkers = () => {
|
|
907
|
-
++funcRun;
|
|
908
|
-
workersAlive = 0;
|
|
909
|
-
Object.values(import_node_cluster.default.workers || {}).filter((worker) => !!worker).forEach((worker) => {
|
|
910
|
-
if (worker && !worker.isDead()) {
|
|
911
|
-
++workersAlive;
|
|
912
|
-
if (funcRun == 1) {
|
|
913
|
-
worker.kill(signal);
|
|
914
|
-
}
|
|
915
|
-
}
|
|
916
|
-
});
|
|
917
|
-
console.info(workersAlive + " workers alive");
|
|
918
|
-
if (workersAlive == 0) {
|
|
919
|
-
clearInterval(interval);
|
|
920
|
-
return resolve2();
|
|
921
|
-
}
|
|
922
|
-
};
|
|
923
|
-
const interval = setInterval(cleanWorkers, SHUTDOWN_WORKERS_INTERVAL);
|
|
924
|
-
});
|
|
925
|
-
};
|
|
926
|
-
};
|
|
927
|
-
}
|
|
928
|
-
});
|
|
929
|
-
|
|
930
|
-
// src/config/configFile.ts
|
|
931
|
-
async function findConfigFile(startDir = process.cwd()) {
|
|
932
|
-
const fs4 = await import("fs");
|
|
933
|
-
let currentDir = import_node_path.default.resolve(startDir);
|
|
934
|
-
while (true) {
|
|
935
|
-
const configPath = import_node_path.default.join(currentDir, "moose.config.toml");
|
|
936
|
-
if (fs4.existsSync(configPath)) {
|
|
937
|
-
return configPath;
|
|
938
|
-
}
|
|
939
|
-
const parentDir = import_node_path.default.dirname(currentDir);
|
|
940
|
-
if (parentDir === currentDir) {
|
|
941
|
-
break;
|
|
508
|
+
var import_node_cluster = __toESM(require("cluster"));
|
|
509
|
+
var import_node_os = require("os");
|
|
510
|
+
var import_node_process = require("process");
|
|
511
|
+
var DEFAULT_MAX_CPU_USAGE_RATIO = 0.7;
|
|
512
|
+
var RESTART_TIME_MS = 1e4;
|
|
513
|
+
var SIGTERM = "SIGTERM";
|
|
514
|
+
var SIGINT = "SIGINT";
|
|
515
|
+
var SHUTDOWN_WORKERS_INTERVAL = 500;
|
|
516
|
+
var Cluster = class {
|
|
517
|
+
// Tracks if shutdown is currently in progress
|
|
518
|
+
shutdownInProgress = false;
|
|
519
|
+
// Tracks if workers exited cleanly during shutdown
|
|
520
|
+
hasCleanWorkerExit = true;
|
|
521
|
+
// String identifying if this is primary or worker process
|
|
522
|
+
processStr = `${import_node_cluster.default.isPrimary ? "primary" : "worker"} process ${process.pid}`;
|
|
523
|
+
// Functions for starting and stopping workers
|
|
524
|
+
workerStart;
|
|
525
|
+
workerStop;
|
|
526
|
+
// Result from starting worker, needed for cleanup
|
|
527
|
+
startOutput;
|
|
528
|
+
maxCpuUsageRatio;
|
|
529
|
+
usedCpuCount;
|
|
530
|
+
/**
|
|
531
|
+
* Creates a new cluster manager instance.
|
|
532
|
+
*
|
|
533
|
+
* @param options - Configuration options for the cluster
|
|
534
|
+
* @param options.workerStart - Async function to execute when starting a worker
|
|
535
|
+
* @param options.workerStop - Async function to execute when stopping a worker
|
|
536
|
+
* @param options.maxCpuUsageRatio - Maximum ratio of CPU cores to utilize (0-1)
|
|
537
|
+
* @param options.maxWorkerCount - Maximum number of workers to spawn
|
|
538
|
+
* @throws {Error} If maxCpuUsageRatio is not between 0 and 1
|
|
539
|
+
*/
|
|
540
|
+
constructor(options) {
|
|
541
|
+
this.workerStart = options.workerStart;
|
|
542
|
+
this.workerStop = options.workerStop;
|
|
543
|
+
if (options.maxCpuUsageRatio && (options.maxCpuUsageRatio > 1 || options.maxCpuUsageRatio < 0)) {
|
|
544
|
+
throw new Error("maxCpuUsageRatio must be between 0 and 1");
|
|
942
545
|
}
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
|
|
947
|
-
async function readProjectConfig() {
|
|
948
|
-
const fs4 = await import("fs");
|
|
949
|
-
const configPath = await findConfigFile();
|
|
950
|
-
if (!configPath) {
|
|
951
|
-
throw new ConfigError(
|
|
952
|
-
"moose.config.toml not found in current directory or any parent directory"
|
|
546
|
+
this.maxCpuUsageRatio = options.maxCpuUsageRatio || DEFAULT_MAX_CPU_USAGE_RATIO;
|
|
547
|
+
this.usedCpuCount = this.computeCPUUsageCount(
|
|
548
|
+
this.maxCpuUsageRatio,
|
|
549
|
+
options.maxWorkerCount
|
|
953
550
|
);
|
|
954
551
|
}
|
|
955
|
-
|
|
956
|
-
|
|
957
|
-
|
|
958
|
-
|
|
959
|
-
|
|
960
|
-
|
|
961
|
-
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
ConfigError = class extends Error {
|
|
970
|
-
constructor(message) {
|
|
971
|
-
super(message);
|
|
972
|
-
this.name = "ConfigError";
|
|
973
|
-
}
|
|
974
|
-
};
|
|
552
|
+
/**
|
|
553
|
+
* Calculates the number of CPU cores to utilize based on available parallelism and constraints.
|
|
554
|
+
*
|
|
555
|
+
* @param cpuUsageRatio - Ratio of CPU cores to use (0-1)
|
|
556
|
+
* @param maxWorkerCount - Optional maximum number of workers
|
|
557
|
+
* @returns The number of CPU cores to utilize
|
|
558
|
+
*/
|
|
559
|
+
computeCPUUsageCount(cpuUsageRatio, maxWorkerCount) {
|
|
560
|
+
const cpuCount = (0, import_node_os.availableParallelism)();
|
|
561
|
+
const maxWorkers = maxWorkerCount || cpuCount;
|
|
562
|
+
return Math.min(
|
|
563
|
+
maxWorkers,
|
|
564
|
+
Math.max(1, Math.floor(cpuCount * cpuUsageRatio))
|
|
565
|
+
);
|
|
975
566
|
}
|
|
976
|
-
|
|
977
|
-
|
|
978
|
-
|
|
979
|
-
|
|
980
|
-
|
|
981
|
-
|
|
982
|
-
|
|
983
|
-
|
|
984
|
-
|
|
985
|
-
|
|
986
|
-
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
static getInstance() {
|
|
990
|
-
if (!_ConfigurationRegistry.instance) {
|
|
991
|
-
_ConfigurationRegistry.instance = new _ConfigurationRegistry();
|
|
992
|
-
}
|
|
993
|
-
return _ConfigurationRegistry.instance;
|
|
994
|
-
}
|
|
995
|
-
setClickHouseConfig(config) {
|
|
996
|
-
this.clickhouseConfig = config;
|
|
997
|
-
}
|
|
998
|
-
setKafkaConfig(config) {
|
|
999
|
-
this.kafkaConfig = config;
|
|
1000
|
-
}
|
|
1001
|
-
_env(name) {
|
|
1002
|
-
const value = process.env[name];
|
|
1003
|
-
if (value === void 0) return void 0;
|
|
1004
|
-
const trimmed = value.trim();
|
|
1005
|
-
return trimmed.length > 0 ? trimmed : void 0;
|
|
1006
|
-
}
|
|
1007
|
-
_parseBool(value) {
|
|
1008
|
-
if (value === void 0) return void 0;
|
|
1009
|
-
switch (value.trim().toLowerCase()) {
|
|
1010
|
-
case "1":
|
|
1011
|
-
case "true":
|
|
1012
|
-
case "yes":
|
|
1013
|
-
case "on":
|
|
1014
|
-
return true;
|
|
1015
|
-
case "0":
|
|
1016
|
-
case "false":
|
|
1017
|
-
case "no":
|
|
1018
|
-
case "off":
|
|
1019
|
-
return false;
|
|
1020
|
-
default:
|
|
1021
|
-
return void 0;
|
|
1022
|
-
}
|
|
1023
|
-
}
|
|
1024
|
-
async getClickHouseConfig() {
|
|
1025
|
-
if (this.clickhouseConfig) {
|
|
1026
|
-
return this.clickhouseConfig;
|
|
1027
|
-
}
|
|
1028
|
-
const projectConfig = await readProjectConfig();
|
|
1029
|
-
const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
|
|
1030
|
-
const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
|
|
1031
|
-
const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
|
|
1032
|
-
const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
|
|
1033
|
-
const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
|
|
1034
|
-
const envUseSSL = this._parseBool(
|
|
1035
|
-
this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
|
|
1036
|
-
);
|
|
1037
|
-
return {
|
|
1038
|
-
host: envHost ?? projectConfig.clickhouse_config.host,
|
|
1039
|
-
port: envPort ?? projectConfig.clickhouse_config.host_port.toString(),
|
|
1040
|
-
username: envUser ?? projectConfig.clickhouse_config.user,
|
|
1041
|
-
password: envPassword ?? projectConfig.clickhouse_config.password,
|
|
1042
|
-
database: envDb ?? projectConfig.clickhouse_config.db_name,
|
|
1043
|
-
useSSL: envUseSSL !== void 0 ? envUseSSL : projectConfig.clickhouse_config.use_ssl || false
|
|
1044
|
-
};
|
|
1045
|
-
}
|
|
1046
|
-
async getStandaloneClickhouseConfig(overrides) {
|
|
1047
|
-
if (this.clickhouseConfig) {
|
|
1048
|
-
return { ...this.clickhouseConfig, ...overrides };
|
|
1049
|
-
}
|
|
1050
|
-
const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
|
|
1051
|
-
const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
|
|
1052
|
-
const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
|
|
1053
|
-
const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
|
|
1054
|
-
const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
|
|
1055
|
-
const envUseSSL = this._parseBool(
|
|
1056
|
-
this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
|
|
1057
|
-
);
|
|
1058
|
-
let projectConfig;
|
|
567
|
+
/**
|
|
568
|
+
* Initializes the cluster by spawning worker processes and setting up signal handlers.
|
|
569
|
+
* For the primary process, spawns workers and monitors parent process.
|
|
570
|
+
* For worker processes, executes the worker startup function.
|
|
571
|
+
*
|
|
572
|
+
* @throws {Error} If worker is undefined in worker process
|
|
573
|
+
*/
|
|
574
|
+
async start() {
|
|
575
|
+
process.on(SIGTERM, this.gracefulClusterShutdown(SIGTERM));
|
|
576
|
+
process.on(SIGINT, this.gracefulClusterShutdown(SIGINT));
|
|
577
|
+
if (import_node_cluster.default.isPrimary) {
|
|
578
|
+
const parentPid = process.ppid;
|
|
579
|
+
setInterval(() => {
|
|
1059
580
|
try {
|
|
1060
|
-
|
|
1061
|
-
} catch (
|
|
1062
|
-
|
|
1063
|
-
|
|
1064
|
-
const defaults = {
|
|
1065
|
-
host: "localhost",
|
|
1066
|
-
port: "18123",
|
|
1067
|
-
username: "default",
|
|
1068
|
-
password: "",
|
|
1069
|
-
database: "local",
|
|
1070
|
-
useSSL: false
|
|
1071
|
-
};
|
|
1072
|
-
return {
|
|
1073
|
-
host: overrides?.host ?? envHost ?? projectConfig?.clickhouse_config.host ?? defaults.host,
|
|
1074
|
-
port: overrides?.port ?? envPort ?? projectConfig?.clickhouse_config.host_port.toString() ?? defaults.port,
|
|
1075
|
-
username: overrides?.username ?? envUser ?? projectConfig?.clickhouse_config.user ?? defaults.username,
|
|
1076
|
-
password: overrides?.password ?? envPassword ?? projectConfig?.clickhouse_config.password ?? defaults.password,
|
|
1077
|
-
database: overrides?.database ?? envDb ?? projectConfig?.clickhouse_config.db_name ?? defaults.database,
|
|
1078
|
-
useSSL: overrides?.useSSL ?? envUseSSL ?? projectConfig?.clickhouse_config.use_ssl ?? defaults.useSSL
|
|
1079
|
-
};
|
|
1080
|
-
}
|
|
1081
|
-
async getKafkaConfig() {
|
|
1082
|
-
if (this.kafkaConfig) {
|
|
1083
|
-
return this.kafkaConfig;
|
|
581
|
+
process.kill(parentPid, 0);
|
|
582
|
+
} catch (e) {
|
|
583
|
+
console.log("Parent process has exited.");
|
|
584
|
+
this.gracefulClusterShutdown(SIGTERM)();
|
|
1084
585
|
}
|
|
1085
|
-
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1089
|
-
|
|
1090
|
-
|
|
1091
|
-
|
|
1092
|
-
const envNamespace = this._env("MOOSE_REDPANDA_CONFIG__NAMESPACE") ?? this._env("MOOSE_KAFKA_CONFIG__NAMESPACE");
|
|
1093
|
-
const envSchemaRegistryUrl = this._env("MOOSE_REDPANDA_CONFIG__SCHEMA_REGISTRY_URL") ?? this._env("MOOSE_KAFKA_CONFIG__SCHEMA_REGISTRY_URL");
|
|
1094
|
-
const fileKafka = projectConfig.kafka_config ?? projectConfig.redpanda_config;
|
|
1095
|
-
return {
|
|
1096
|
-
broker: envBroker ?? fileKafka?.broker ?? "localhost:19092",
|
|
1097
|
-
messageTimeoutMs: envMsgTimeout ? parseInt(envMsgTimeout, 10) : fileKafka?.message_timeout_ms ?? 1e3,
|
|
1098
|
-
saslUsername: envSaslUsername ?? fileKafka?.sasl_username,
|
|
1099
|
-
saslPassword: envSaslPassword ?? fileKafka?.sasl_password,
|
|
1100
|
-
saslMechanism: envSaslMechanism ?? fileKafka?.sasl_mechanism,
|
|
1101
|
-
securityProtocol: envSecurityProtocol ?? fileKafka?.security_protocol,
|
|
1102
|
-
namespace: envNamespace ?? fileKafka?.namespace,
|
|
1103
|
-
schemaRegistryUrl: envSchemaRegistryUrl ?? fileKafka?.schema_registry_url
|
|
1104
|
-
};
|
|
1105
|
-
}
|
|
1106
|
-
hasRuntimeConfig() {
|
|
1107
|
-
return !!this.clickhouseConfig || !!this.kafkaConfig;
|
|
586
|
+
}, 1e3);
|
|
587
|
+
await this.bootWorkers(this.usedCpuCount);
|
|
588
|
+
} else {
|
|
589
|
+
if (!import_node_cluster.default.worker) {
|
|
590
|
+
throw new Error(
|
|
591
|
+
"Worker is not defined, it should be defined in worker process"
|
|
592
|
+
);
|
|
1108
593
|
}
|
|
1109
|
-
|
|
1110
|
-
|
|
1111
|
-
|
|
1112
|
-
});
|
|
1113
|
-
|
|
1114
|
-
// src/consumption-apis/standalone.ts
|
|
1115
|
-
var standalone_exports = {};
|
|
1116
|
-
__export(standalone_exports, {
|
|
1117
|
-
getMooseClients: () => getMooseClients,
|
|
1118
|
-
getMooseUtils: () => getMooseUtils
|
|
1119
|
-
});
|
|
1120
|
-
async function getMooseUtils(req) {
|
|
1121
|
-
if (req !== void 0) {
|
|
1122
|
-
console.warn(
|
|
1123
|
-
"[DEPRECATED] getMooseUtils(req) no longer requires a request parameter. Use getMooseUtils() instead."
|
|
1124
|
-
);
|
|
1125
|
-
}
|
|
1126
|
-
const runtimeContext = globalThis._mooseRuntimeContext;
|
|
1127
|
-
if (runtimeContext) {
|
|
1128
|
-
return {
|
|
1129
|
-
client: runtimeContext.client,
|
|
1130
|
-
sql,
|
|
1131
|
-
jwt: runtimeContext.jwt
|
|
1132
|
-
};
|
|
1133
|
-
}
|
|
1134
|
-
if (standaloneUtils) {
|
|
1135
|
-
return standaloneUtils;
|
|
1136
|
-
}
|
|
1137
|
-
if (initPromise) {
|
|
1138
|
-
return initPromise;
|
|
1139
|
-
}
|
|
1140
|
-
initPromise = (async () => {
|
|
1141
|
-
await Promise.resolve().then(() => (init_runtime(), runtime_exports));
|
|
1142
|
-
const configRegistry = globalThis._mooseConfigRegistry;
|
|
1143
|
-
if (!configRegistry) {
|
|
1144
|
-
throw new Error(
|
|
1145
|
-
"Moose not initialized. Ensure you're running within a Moose app or have proper configuration set up."
|
|
594
|
+
this.startOutput = await this.workerStart(
|
|
595
|
+
import_node_cluster.default.worker,
|
|
596
|
+
this.usedCpuCount
|
|
1146
597
|
);
|
|
1147
598
|
}
|
|
1148
|
-
const clickhouseConfig = await configRegistry.getStandaloneClickhouseConfig();
|
|
1149
|
-
const clickhouseClient = getClickhouseClient(
|
|
1150
|
-
toClientConfig(clickhouseConfig)
|
|
1151
|
-
);
|
|
1152
|
-
const queryClient = new QueryClient(clickhouseClient, "standalone");
|
|
1153
|
-
const mooseClient = new MooseClient(queryClient);
|
|
1154
|
-
standaloneUtils = {
|
|
1155
|
-
client: mooseClient,
|
|
1156
|
-
sql,
|
|
1157
|
-
jwt: void 0
|
|
1158
|
-
};
|
|
1159
|
-
return standaloneUtils;
|
|
1160
|
-
})();
|
|
1161
|
-
try {
|
|
1162
|
-
return await initPromise;
|
|
1163
|
-
} finally {
|
|
1164
|
-
initPromise = null;
|
|
1165
599
|
}
|
|
1166
|
-
|
|
1167
|
-
|
|
1168
|
-
|
|
1169
|
-
|
|
1170
|
-
|
|
1171
|
-
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
600
|
+
/**
|
|
601
|
+
* Spawns worker processes and configures their lifecycle event handlers.
|
|
602
|
+
* Handles worker online, exit and disconnect events.
|
|
603
|
+
* Automatically restarts failed workers during normal operation.
|
|
604
|
+
*
|
|
605
|
+
* @param numWorkers - Number of worker processes to spawn
|
|
606
|
+
*/
|
|
607
|
+
bootWorkers = async (numWorkers) => {
|
|
608
|
+
console.info(`Setting ${numWorkers} workers...`);
|
|
609
|
+
for (let i = 0; i < numWorkers; i++) {
|
|
610
|
+
import_node_cluster.default.fork();
|
|
611
|
+
}
|
|
612
|
+
import_node_cluster.default.on("online", (worker) => {
|
|
613
|
+
console.info(`worker process ${worker.process.pid} is online`);
|
|
614
|
+
});
|
|
615
|
+
import_node_cluster.default.on("exit", (worker, code, signal) => {
|
|
616
|
+
console.info(
|
|
617
|
+
`worker ${worker.process.pid} exited with code ${code} and signal ${signal}`
|
|
1177
618
|
);
|
|
619
|
+
if (!this.shutdownInProgress) {
|
|
620
|
+
setTimeout(() => import_node_cluster.default.fork(), RESTART_TIME_MS);
|
|
621
|
+
}
|
|
622
|
+
if (this.shutdownInProgress && code != 0) {
|
|
623
|
+
this.hasCleanWorkerExit = false;
|
|
624
|
+
}
|
|
625
|
+
});
|
|
626
|
+
import_node_cluster.default.on("disconnect", (worker) => {
|
|
627
|
+
console.info(`worker process ${worker.process.pid} has disconnected`);
|
|
628
|
+
});
|
|
629
|
+
};
|
|
630
|
+
/**
|
|
631
|
+
* Creates a handler function for graceful shutdown on receipt of a signal.
|
|
632
|
+
* Ensures only one shutdown can occur at a time.
|
|
633
|
+
* Handles shutdown differently for primary and worker processes.
|
|
634
|
+
*
|
|
635
|
+
* @param signal - The signal triggering the shutdown (e.g. SIGTERM)
|
|
636
|
+
* @returns An async function that performs the shutdown
|
|
637
|
+
*/
|
|
638
|
+
gracefulClusterShutdown = (signal) => async () => {
|
|
639
|
+
if (this.shutdownInProgress) {
|
|
640
|
+
return;
|
|
1178
641
|
}
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
642
|
+
this.shutdownInProgress = true;
|
|
643
|
+
this.hasCleanWorkerExit = true;
|
|
644
|
+
console.info(
|
|
645
|
+
`Got ${signal} on ${this.processStr}. Graceful shutdown start at ${(/* @__PURE__ */ new Date()).toISOString()}`
|
|
1182
646
|
);
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
1187
|
-
|
|
1188
|
-
|
|
1189
|
-
|
|
1190
|
-
|
|
1191
|
-
|
|
1192
|
-
|
|
1193
|
-
|
|
1194
|
-
|
|
1195
|
-
|
|
1196
|
-
|
|
1197
|
-
|
|
1198
|
-
|
|
1199
|
-
|
|
1200
|
-
|
|
1201
|
-
|
|
647
|
+
try {
|
|
648
|
+
if (import_node_cluster.default.isPrimary) {
|
|
649
|
+
await this.shutdownWorkers(signal);
|
|
650
|
+
console.info(`${this.processStr} - worker shutdown successful`);
|
|
651
|
+
(0, import_node_process.exit)(0);
|
|
652
|
+
} else {
|
|
653
|
+
if (this.startOutput) {
|
|
654
|
+
await this.workerStop(this.startOutput);
|
|
655
|
+
} else {
|
|
656
|
+
console.info(
|
|
657
|
+
`${this.processStr} - shutdown before worker fully started`
|
|
658
|
+
);
|
|
659
|
+
}
|
|
660
|
+
console.info(`${this.processStr} shutdown successful`);
|
|
661
|
+
this.hasCleanWorkerExit ? (0, import_node_process.exit)(0) : (0, import_node_process.exit)(1);
|
|
662
|
+
}
|
|
663
|
+
} catch (e) {
|
|
664
|
+
console.error(`${this.processStr} - shutdown failed`, e);
|
|
665
|
+
(0, import_node_process.exit)(1);
|
|
666
|
+
}
|
|
667
|
+
};
|
|
668
|
+
/**
|
|
669
|
+
* Gracefully terminates all worker processes.
|
|
670
|
+
* Monitors workers until they all exit or timeout occurs.
|
|
671
|
+
* Only relevant for the primary process.
|
|
672
|
+
*
|
|
673
|
+
* @param signal - The signal to send to worker processes
|
|
674
|
+
* @returns A promise that resolves when all workers have terminated
|
|
675
|
+
*/
|
|
676
|
+
shutdownWorkers = (signal) => {
|
|
677
|
+
return new Promise((resolve2, reject) => {
|
|
678
|
+
if (!import_node_cluster.default.isPrimary) {
|
|
679
|
+
return resolve2();
|
|
680
|
+
}
|
|
681
|
+
if (!import_node_cluster.default.workers) {
|
|
682
|
+
return resolve2();
|
|
683
|
+
}
|
|
684
|
+
const workerIds = Object.keys(import_node_cluster.default.workers);
|
|
685
|
+
if (workerIds.length == 0) {
|
|
686
|
+
return resolve2();
|
|
687
|
+
}
|
|
688
|
+
let workersAlive = 0;
|
|
689
|
+
let funcRun = 0;
|
|
690
|
+
const cleanWorkers = () => {
|
|
691
|
+
++funcRun;
|
|
692
|
+
workersAlive = 0;
|
|
693
|
+
Object.values(import_node_cluster.default.workers || {}).filter((worker) => !!worker).forEach((worker) => {
|
|
694
|
+
if (worker && !worker.isDead()) {
|
|
695
|
+
++workersAlive;
|
|
696
|
+
if (funcRun == 1) {
|
|
697
|
+
worker.kill(signal);
|
|
698
|
+
}
|
|
699
|
+
}
|
|
700
|
+
});
|
|
701
|
+
console.info(workersAlive + " workers alive");
|
|
702
|
+
if (workersAlive == 0) {
|
|
703
|
+
clearInterval(interval);
|
|
704
|
+
return resolve2();
|
|
705
|
+
}
|
|
706
|
+
};
|
|
707
|
+
const interval = setInterval(cleanWorkers, SHUTDOWN_WORKERS_INTERVAL);
|
|
1202
708
|
});
|
|
1203
|
-
}
|
|
1204
|
-
}
|
|
709
|
+
};
|
|
710
|
+
};
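The added block above documents the Cluster helper's worker lifecycle: bootWorkers forks N workers and auto-restarts any that die outside a shutdown, gracefulClusterShutdown serializes shutdown per signal, and shutdownWorkers signals workers once and then polls until none are alive. The sketch below reproduces that pattern directly against node:cluster; it is not the package's API, and the RESTART_TIME_MS / SHUTDOWN_WORKERS_INTERVAL values are assumptions since this hunk does not show them.

```ts
// Minimal sketch of the worker-lifecycle pattern shown above (not the package's API).
import cluster from "node:cluster";
import { exit } from "node:process";

const RESTART_TIME_MS = 1_000;         // assumed value
const SHUTDOWN_WORKERS_INTERVAL = 500; // assumed value

let shutdownInProgress = false;
let hasCleanWorkerExit = true;

function bootWorkers(numWorkers: number): void {
  for (let i = 0; i < numWorkers; i++) cluster.fork();

  cluster.on("online", (w) => console.info(`worker ${w.process.pid} online`));
  cluster.on("exit", (w, code) => {
    // Restart crashed workers during normal operation only.
    if (!shutdownInProgress) setTimeout(() => cluster.fork(), RESTART_TIME_MS);
    if (shutdownInProgress && code !== 0) hasCleanWorkerExit = false;
  });
}

function shutdownWorkers(signal: NodeJS.Signals): Promise<void> {
  return new Promise((resolve) => {
    if (!cluster.isPrimary || !cluster.workers) return resolve();
    let firstPass = true;
    const interval = setInterval(() => {
      let alive = 0;
      for (const w of Object.values(cluster.workers ?? {})) {
        if (w && !w.isDead()) {
          alive++;
          if (firstPass) w.kill(signal); // send the signal once, then just poll
        }
      }
      firstPass = false;
      if (alive === 0) {
        clearInterval(interval);
        resolve();
      }
    }, SHUTDOWN_WORKERS_INTERVAL);
  });
}

const gracefulShutdown = (signal: NodeJS.Signals) => async () => {
  if (shutdownInProgress) return; // only one shutdown at a time
  shutdownInProgress = true;
  if (cluster.isPrimary) {
    await shutdownWorkers(signal);
    exit(0);
  } else {
    exit(hasCleanWorkerExit ? 0 : 1);
  }
};

if (cluster.isPrimary) {
  bootWorkers(2);
  process.on("SIGTERM", gracefulShutdown("SIGTERM"));
  process.on("SIGINT", gracefulShutdown("SIGINT"));
} else {
  // Worker: stay alive doing placeholder work until signalled.
  setInterval(() => {}, 1 << 30);
  process.on("SIGTERM", () => exit(0));
}
```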
|
|
1205
711
|
|
|
1206
712
|
// src/consumption-apis/runner.ts
|
|
1207
|
-
var
|
|
1208
|
-
|
|
1209
|
-
|
|
1210
|
-
|
|
1211
|
-
|
|
1212
|
-
|
|
1213
|
-
|
|
1214
|
-
|
|
1215
|
-
|
|
1216
|
-
|
|
1217
|
-
|
|
1218
|
-
|
|
1219
|
-
|
|
1220
|
-
|
|
1221
|
-
|
|
1222
|
-
|
|
1223
|
-
|
|
1224
|
-
|
|
1225
|
-
|
|
1226
|
-
)
|
|
1227
|
-
|
|
1228
|
-
|
|
1229
|
-
|
|
1230
|
-
|
|
1231
|
-
|
|
1232
|
-
|
|
1233
|
-
|
|
1234
|
-
|
|
1235
|
-
|
|
1236
|
-
|
|
1237
|
-
|
|
1238
|
-
const jwt = req.headers.authorization?.split(" ")[1];
|
|
1239
|
-
if (jwt) {
|
|
1240
|
-
try {
|
|
1241
|
-
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
1242
|
-
issuer: jwtConfig.issuer,
|
|
1243
|
-
audience: jwtConfig.audience
|
|
1244
|
-
});
|
|
1245
|
-
jwtPayload = payload;
|
|
1246
|
-
} catch (error) {
|
|
1247
|
-
console.log("JWT verification failed");
|
|
1248
|
-
if (enforceAuth) {
|
|
1249
|
-
res.writeHead(401, { "Content-Type": "application/json" });
|
|
1250
|
-
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
1251
|
-
httpLogger(req, res, start);
|
|
1252
|
-
return;
|
|
1253
|
-
}
|
|
1254
|
-
}
|
|
1255
|
-
} else if (enforceAuth) {
|
|
713
|
+
var toClientConfig = (config) => ({
|
|
714
|
+
...config,
|
|
715
|
+
useSSL: config.useSSL ? "true" : "false"
|
|
716
|
+
});
|
|
717
|
+
var createPath = (apisDir, path3) => `${apisDir}${path3}.ts`;
|
|
718
|
+
var httpLogger = (req, res, startMs) => {
|
|
719
|
+
console.log(
|
|
720
|
+
`${req.method} ${req.url} ${res.statusCode} ${Date.now() - startMs}ms`
|
|
721
|
+
);
|
|
722
|
+
};
|
|
723
|
+
var modulesCache = /* @__PURE__ */ new Map();
|
|
724
|
+
var apiHandler = async (publicKey, clickhouseClient, temporalClient, apisDir, enforceAuth, isDmv2, jwtConfig) => {
|
|
725
|
+
const apis = isDmv2 ? await getApis2() : /* @__PURE__ */ new Map();
|
|
726
|
+
return async (req, res) => {
|
|
727
|
+
const start = Date.now();
|
|
728
|
+
try {
|
|
729
|
+
const url = new URL(req.url || "", "http://localhost");
|
|
730
|
+
const fileName = url.pathname;
|
|
731
|
+
let jwtPayload;
|
|
732
|
+
if (publicKey && jwtConfig) {
|
|
733
|
+
const jwt = req.headers.authorization?.split(" ")[1];
|
|
734
|
+
if (jwt) {
|
|
735
|
+
try {
|
|
736
|
+
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
737
|
+
issuer: jwtConfig.issuer,
|
|
738
|
+
audience: jwtConfig.audience
|
|
739
|
+
});
|
|
740
|
+
jwtPayload = payload;
|
|
741
|
+
} catch (error) {
|
|
742
|
+
console.log("JWT verification failed");
|
|
743
|
+
if (enforceAuth) {
|
|
1256
744
|
res.writeHead(401, { "Content-Type": "application/json" });
|
|
1257
745
|
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
1258
746
|
httpLogger(req, res, start);
|
|
1259
747
|
return;
|
|
1260
748
|
}
|
|
1261
|
-
} else if (enforceAuth) {
|
|
1262
|
-
res.writeHead(401, { "Content-Type": "application/json" });
|
|
1263
|
-
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
1264
|
-
httpLogger(req, res, start);
|
|
1265
|
-
return;
|
|
1266
749
|
}
|
|
1267
|
-
|
|
1268
|
-
|
|
1269
|
-
|
|
1270
|
-
|
|
1271
|
-
|
|
1272
|
-
|
|
1273
|
-
|
|
1274
|
-
|
|
1275
|
-
|
|
1276
|
-
|
|
1277
|
-
|
|
1278
|
-
|
|
1279
|
-
|
|
1280
|
-
|
|
1281
|
-
|
|
1282
|
-
|
|
1283
|
-
)
|
|
1284
|
-
|
|
1285
|
-
|
|
1286
|
-
|
|
1287
|
-
|
|
1288
|
-
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
|
|
1292
|
-
|
|
1293
|
-
|
|
1294
|
-
|
|
1295
|
-
|
|
1296
|
-
|
|
1297
|
-
|
|
1298
|
-
|
|
1299
|
-
|
|
1300
|
-
|
|
1301
|
-
|
|
750
|
+
} else if (enforceAuth) {
|
|
751
|
+
res.writeHead(401, { "Content-Type": "application/json" });
|
|
752
|
+
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
753
|
+
httpLogger(req, res, start);
|
|
754
|
+
return;
|
|
755
|
+
}
|
|
756
|
+
} else if (enforceAuth) {
|
|
757
|
+
res.writeHead(401, { "Content-Type": "application/json" });
|
|
758
|
+
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
759
|
+
httpLogger(req, res, start);
|
|
760
|
+
return;
|
|
761
|
+
}
|
|
762
|
+
const pathName = createPath(apisDir, fileName);
|
|
763
|
+
const paramsObject = Array.from(url.searchParams.entries()).reduce(
|
|
764
|
+
(obj, [key, value]) => {
|
|
765
|
+
const existingValue = obj[key];
|
|
766
|
+
if (existingValue) {
|
|
767
|
+
if (Array.isArray(existingValue)) {
|
|
768
|
+
existingValue.push(value);
|
|
769
|
+
} else {
|
|
770
|
+
obj[key] = [existingValue, value];
|
|
771
|
+
}
|
|
772
|
+
} else {
|
|
773
|
+
obj[key] = value;
|
|
774
|
+
}
|
|
775
|
+
return obj;
|
|
776
|
+
},
|
|
777
|
+
{}
|
|
778
|
+
);
|
|
779
|
+
let userFuncModule = modulesCache.get(pathName);
|
|
780
|
+
if (userFuncModule === void 0) {
|
|
781
|
+
if (isDmv2) {
|
|
782
|
+
let apiName = fileName.replace(/^\/+|\/+$/g, "");
|
|
783
|
+
let version = null;
|
|
784
|
+
userFuncModule = apis.get(apiName);
|
|
785
|
+
if (!userFuncModule) {
|
|
786
|
+
version = url.searchParams.get("version");
|
|
787
|
+
if (!version && apiName.includes("/")) {
|
|
788
|
+
const pathParts = apiName.split("/");
|
|
789
|
+
if (pathParts.length >= 2) {
|
|
790
|
+
userFuncModule = apis.get(apiName);
|
|
1302
791
|
if (!userFuncModule) {
|
|
1303
|
-
|
|
1304
|
-
|
|
1305
|
-
userFuncModule = apis.get(versionedKey);
|
|
1306
|
-
} else {
|
|
1307
|
-
userFuncModule = apis.get(apiName);
|
|
1308
|
-
}
|
|
792
|
+
apiName = pathParts[0];
|
|
793
|
+
version = pathParts.slice(1).join("/");
|
|
1309
794
|
}
|
|
1310
795
|
}
|
|
1311
|
-
if (!userFuncModule) {
|
|
1312
|
-
const availableApis = Array.from(apis.keys()).map(
|
|
1313
|
-
(key) => key.replace(":", "/")
|
|
1314
|
-
);
|
|
1315
|
-
const errorMessage = version ? `API ${apiName} with version ${version} not found. Available APIs: ${availableApis.join(", ")}` : `API ${apiName} not found. Available APIs: ${availableApis.join(", ")}`;
|
|
1316
|
-
throw new Error(errorMessage);
|
|
1317
|
-
}
|
|
1318
|
-
modulesCache.set(pathName, userFuncModule);
|
|
1319
|
-
console.log(`[API] | Executing API: ${apiName}`);
|
|
1320
|
-
} else {
|
|
1321
|
-
userFuncModule = require(pathName);
|
|
1322
|
-
modulesCache.set(pathName, userFuncModule);
|
|
1323
796
|
}
|
|
1324
|
-
|
|
1325
|
-
|
|
1326
|
-
|
|
1327
|
-
|
|
1328
|
-
|
|
1329
|
-
|
|
1330
|
-
|
|
1331
|
-
client: new MooseClient(queryClient, temporalClient),
|
|
1332
|
-
sql,
|
|
1333
|
-
jwt: jwtPayload
|
|
1334
|
-
});
|
|
1335
|
-
let body;
|
|
1336
|
-
let status;
|
|
1337
|
-
if (Object.getPrototypeOf(result).constructor.name === "ResultSet") {
|
|
1338
|
-
body = JSON.stringify(await result.json());
|
|
1339
|
-
} else {
|
|
1340
|
-
if ("body" in result && "status" in result) {
|
|
1341
|
-
body = JSON.stringify(result.body);
|
|
1342
|
-
status = result.status;
|
|
1343
|
-
} else {
|
|
1344
|
-
body = JSON.stringify(result);
|
|
797
|
+
if (!userFuncModule) {
|
|
798
|
+
if (version) {
|
|
799
|
+
const versionedKey = `${apiName}:${version}`;
|
|
800
|
+
userFuncModule = apis.get(versionedKey);
|
|
801
|
+
} else {
|
|
802
|
+
userFuncModule = apis.get(apiName);
|
|
803
|
+
}
|
|
1345
804
|
}
|
|
1346
805
|
}
|
|
1347
|
-
if (
|
|
1348
|
-
|
|
1349
|
-
|
|
1350
|
-
|
|
1351
|
-
|
|
1352
|
-
|
|
1353
|
-
}
|
|
1354
|
-
res.end(body);
|
|
1355
|
-
} catch (error) {
|
|
1356
|
-
console.log("error in path ", req.url, error);
|
|
1357
|
-
if (Object.getPrototypeOf(error).constructor.name === "TypeGuardError") {
|
|
1358
|
-
res.writeHead(400, { "Content-Type": "application/json" });
|
|
1359
|
-
res.end(JSON.stringify({ error: error.message }));
|
|
1360
|
-
httpLogger(req, res, start);
|
|
1361
|
-
}
|
|
1362
|
-
if (error instanceof Error) {
|
|
1363
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
1364
|
-
res.end(JSON.stringify({ error: error.message }));
|
|
1365
|
-
httpLogger(req, res, start);
|
|
1366
|
-
} else {
|
|
1367
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
1368
|
-
res.end();
|
|
1369
|
-
httpLogger(req, res, start);
|
|
806
|
+
if (!userFuncModule) {
|
|
807
|
+
const availableApis = Array.from(apis.keys()).map(
|
|
808
|
+
(key) => key.replace(":", "/")
|
|
809
|
+
);
|
|
810
|
+
const errorMessage = version ? `API ${apiName} with version ${version} not found. Available APIs: ${availableApis.join(", ")}` : `API ${apiName} not found. Available APIs: ${availableApis.join(", ")}`;
|
|
811
|
+
throw new Error(errorMessage);
|
|
1370
812
|
}
|
|
813
|
+
modulesCache.set(pathName, userFuncModule);
|
|
814
|
+
console.log(`[API] | Executing API: ${apiName}`);
|
|
815
|
+
} else {
|
|
816
|
+
userFuncModule = require(pathName);
|
|
817
|
+
modulesCache.set(pathName, userFuncModule);
|
|
1371
818
|
}
|
|
1372
|
-
}
|
|
1373
|
-
|
|
1374
|
-
|
|
1375
|
-
|
|
1376
|
-
|
|
1377
|
-
|
|
1378
|
-
|
|
1379
|
-
|
|
1380
|
-
|
|
1381
|
-
|
|
1382
|
-
|
|
819
|
+
}
|
|
820
|
+
const queryClient = new QueryClient(clickhouseClient, fileName);
|
|
821
|
+
let result = isDmv2 ? await userFuncModule(paramsObject, {
|
|
822
|
+
client: new MooseClient(queryClient, temporalClient),
|
|
823
|
+
sql,
|
|
824
|
+
jwt: jwtPayload
|
|
825
|
+
}) : await userFuncModule.default(paramsObject, {
|
|
826
|
+
client: new MooseClient(queryClient, temporalClient),
|
|
827
|
+
sql,
|
|
828
|
+
jwt: jwtPayload
|
|
829
|
+
});
|
|
830
|
+
let body;
|
|
831
|
+
let status;
|
|
832
|
+
if (Object.getPrototypeOf(result).constructor.name === "ResultSet") {
|
|
833
|
+
body = JSON.stringify(await result.json());
|
|
834
|
+
} else {
|
|
835
|
+
if ("body" in result && "status" in result) {
|
|
836
|
+
body = JSON.stringify(result.body);
|
|
837
|
+
status = result.status;
|
|
838
|
+
} else {
|
|
839
|
+
body = JSON.stringify(result);
|
|
840
|
+
}
|
|
841
|
+
}
|
|
842
|
+
if (status) {
|
|
843
|
+
res.writeHead(status, { "Content-Type": "application/json" });
|
|
844
|
+
httpLogger(req, res, start);
|
|
845
|
+
} else {
|
|
846
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
847
|
+
httpLogger(req, res, start);
|
|
848
|
+
}
|
|
849
|
+
res.end(body);
|
|
850
|
+
} catch (error) {
|
|
851
|
+
console.log("error in path ", req.url, error);
|
|
852
|
+
if (Object.getPrototypeOf(error).constructor.name === "TypeGuardError") {
|
|
853
|
+
res.writeHead(400, { "Content-Type": "application/json" });
|
|
854
|
+
res.end(JSON.stringify({ error: error.message }));
|
|
855
|
+
httpLogger(req, res, start);
|
|
856
|
+
}
|
|
857
|
+
if (error instanceof Error) {
|
|
858
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
859
|
+
res.end(JSON.stringify({ error: error.message }));
|
|
860
|
+
httpLogger(req, res, start);
|
|
861
|
+
} else {
|
|
862
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
863
|
+
res.end();
|
|
864
|
+
httpLogger(req, res, start);
|
|
865
|
+
}
|
|
866
|
+
}
|
|
867
|
+
};
|
|
868
|
+
};
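apiHandler above performs two preparation steps before dispatching to a user function: it optionally verifies a Bearer token with jose, and it folds repeated query-string keys into arrays. A minimal sketch of both steps in isolation, assuming an RS256 SPKI public key; verifyRequestJwt and foldSearchParams are illustrative names, not exports of the package.

```ts
// Standalone sketch of the request preparation in apiHandler above.
import type { IncomingMessage } from "node:http";
import * as jose from "jose";

async function verifyRequestJwt(
  req: IncomingMessage,
  publicKeyPem: string,
  issuer: string,
  audience: string,
): Promise<jose.JWTPayload | undefined> {
  const token = req.headers.authorization?.split(" ")[1];
  if (!token) return undefined;
  try {
    const key = await jose.importSPKI(publicKeyPem, "RS256");
    const { payload } = await jose.jwtVerify(token, key, { issuer, audience });
    return payload;
  } catch {
    console.log("JWT verification failed");
    return undefined; // the bundled handler also answers 401 when enforceAuth is set
  }
}

type Params = Record<string, string | string[]>;

function foldSearchParams(url: URL): Params {
  // Repeated keys become arrays, mirroring the reduce in the bundled handler.
  return Array.from(url.searchParams.entries()).reduce<Params>((obj, [key, value]) => {
    const existing = obj[key];
    if (existing === undefined) obj[key] = value;
    else if (Array.isArray(existing)) existing.push(value);
    else obj[key] = [existing, value];
    return obj;
  }, {});
}

// Example: /bar?limit=10&tag=a&tag=b -> { limit: "10", tag: ["a", "b"] }
console.log(foldSearchParams(new URL("http://localhost/bar?limit=10&tag=a&tag=b")));
```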
|
|
869
|
+
var createMainRouter = async (publicKey, clickhouseClient, temporalClient, apisDir, enforceAuth, isDmv2, jwtConfig) => {
|
|
870
|
+
const apiRequestHandler = await apiHandler(
|
|
871
|
+
publicKey,
|
|
872
|
+
clickhouseClient,
|
|
873
|
+
temporalClient,
|
|
874
|
+
apisDir,
|
|
875
|
+
enforceAuth,
|
|
876
|
+
isDmv2,
|
|
877
|
+
jwtConfig
|
|
878
|
+
);
|
|
879
|
+
const webApps = isDmv2 ? await getWebApps2() : /* @__PURE__ */ new Map();
|
|
880
|
+
const sortedWebApps = Array.from(webApps.values()).sort((a, b) => {
|
|
881
|
+
const pathA = a.config.mountPath || "/";
|
|
882
|
+
const pathB = b.config.mountPath || "/";
|
|
883
|
+
return pathB.length - pathA.length;
|
|
884
|
+
});
|
|
885
|
+
return async (req, res) => {
|
|
886
|
+
const start = Date.now();
|
|
887
|
+
const url = new URL(req.url || "", "http://localhost");
|
|
888
|
+
const pathname = url.pathname;
|
|
889
|
+
if (pathname === "/_moose_internal/health") {
|
|
890
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
891
|
+
res.end(
|
|
892
|
+
JSON.stringify({
|
|
893
|
+
status: "healthy",
|
|
894
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
895
|
+
})
|
|
1383
896
|
);
|
|
1384
|
-
|
|
1385
|
-
|
|
1386
|
-
|
|
1387
|
-
|
|
1388
|
-
|
|
1389
|
-
|
|
1390
|
-
|
|
1391
|
-
|
|
1392
|
-
|
|
1393
|
-
|
|
1394
|
-
|
|
1395
|
-
|
|
1396
|
-
|
|
1397
|
-
|
|
1398
|
-
status: "healthy",
|
|
1399
|
-
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
1400
|
-
})
|
|
1401
|
-
);
|
|
1402
|
-
return;
|
|
1403
|
-
}
|
|
1404
|
-
let jwtPayload;
|
|
1405
|
-
if (publicKey && jwtConfig) {
|
|
1406
|
-
const jwt = req.headers.authorization?.split(" ")[1];
|
|
1407
|
-
if (jwt) {
|
|
1408
|
-
try {
|
|
1409
|
-
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
1410
|
-
issuer: jwtConfig.issuer,
|
|
1411
|
-
audience: jwtConfig.audience
|
|
1412
|
-
});
|
|
1413
|
-
jwtPayload = payload;
|
|
1414
|
-
} catch (error) {
|
|
1415
|
-
console.log("JWT verification failed for WebApp route");
|
|
1416
|
-
}
|
|
1417
|
-
}
|
|
897
|
+
return;
|
|
898
|
+
}
|
|
899
|
+
let jwtPayload;
|
|
900
|
+
if (publicKey && jwtConfig) {
|
|
901
|
+
const jwt = req.headers.authorization?.split(" ")[1];
|
|
902
|
+
if (jwt) {
|
|
903
|
+
try {
|
|
904
|
+
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
905
|
+
issuer: jwtConfig.issuer,
|
|
906
|
+
audience: jwtConfig.audience
|
|
907
|
+
});
|
|
908
|
+
jwtPayload = payload;
|
|
909
|
+
} catch (error) {
|
|
910
|
+
console.log("JWT verification failed for WebApp route");
|
|
1418
911
|
}
|
|
1419
|
-
|
|
1420
|
-
|
|
1421
|
-
|
|
1422
|
-
|
|
1423
|
-
|
|
1424
|
-
|
|
1425
|
-
|
|
1426
|
-
|
|
1427
|
-
|
|
1428
|
-
|
|
1429
|
-
|
|
1430
|
-
|
|
1431
|
-
|
|
1432
|
-
|
|
1433
|
-
try {
|
|
1434
|
-
const modifiedReq = Object.assign(
|
|
1435
|
-
Object.create(Object.getPrototypeOf(req)),
|
|
1436
|
-
req,
|
|
1437
|
-
{
|
|
1438
|
-
url: proxiedUrl
|
|
1439
|
-
}
|
|
1440
|
-
);
|
|
1441
|
-
await webApp.handler(modifiedReq, res);
|
|
1442
|
-
return;
|
|
1443
|
-
} catch (error) {
|
|
1444
|
-
console.error(`Error in WebApp ${webApp.name}:`, error);
|
|
1445
|
-
if (!res.headersSent) {
|
|
1446
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
1447
|
-
res.end(JSON.stringify({ error: "Internal Server Error" }));
|
|
1448
|
-
}
|
|
1449
|
-
return;
|
|
1450
|
-
}
|
|
1451
|
-
}
|
|
912
|
+
}
|
|
913
|
+
}
|
|
914
|
+
for (const webApp of sortedWebApps) {
|
|
915
|
+
const mountPath = webApp.config.mountPath || "/";
|
|
916
|
+
const normalizedMount = mountPath.endsWith("/") && mountPath !== "/" ? mountPath.slice(0, -1) : mountPath;
|
|
917
|
+
const matches = pathname === normalizedMount || pathname.startsWith(normalizedMount + "/");
|
|
918
|
+
if (matches) {
|
|
919
|
+
if (webApp.config.injectMooseUtils !== false) {
|
|
920
|
+
const queryClient = new QueryClient(clickhouseClient, pathname);
|
|
921
|
+
req.moose = {
|
|
922
|
+
client: new MooseClient(queryClient, temporalClient),
|
|
923
|
+
sql,
|
|
924
|
+
jwt: jwtPayload
|
|
925
|
+
};
|
|
1452
926
|
}
|
|
1453
|
-
let
|
|
1454
|
-
if (
|
|
1455
|
-
|
|
1456
|
-
|
|
1457
|
-
apiPath = pathname.substring(13);
|
|
927
|
+
let proxiedUrl = req.url;
|
|
928
|
+
if (normalizedMount !== "/") {
|
|
929
|
+
const pathWithoutMount = pathname.substring(normalizedMount.length) || "/";
|
|
930
|
+
proxiedUrl = pathWithoutMount + url.search;
|
|
1458
931
|
}
|
|
1459
|
-
|
|
932
|
+
try {
|
|
1460
933
|
const modifiedReq = Object.assign(
|
|
1461
934
|
Object.create(Object.getPrototypeOf(req)),
|
|
1462
935
|
req,
|
|
1463
936
|
{
|
|
1464
|
-
url:
|
|
937
|
+
url: proxiedUrl
|
|
1465
938
|
}
|
|
1466
939
|
);
|
|
1467
|
-
await
|
|
940
|
+
await webApp.handler(modifiedReq, res);
|
|
1468
941
|
return;
|
|
1469
|
-
}
|
|
1470
|
-
|
|
1471
|
-
|
|
1472
|
-
|
|
1473
|
-
|
|
1474
|
-
};
|
|
1475
|
-
runApis = async (config) => {
|
|
1476
|
-
const apisCluster = new Cluster({
|
|
1477
|
-
maxWorkerCount: (config.workerCount ?? 0) > 0 ? config.workerCount : void 0,
|
|
1478
|
-
workerStart: async () => {
|
|
1479
|
-
let temporalClient;
|
|
1480
|
-
if (config.temporalConfig) {
|
|
1481
|
-
temporalClient = await getTemporalClient(
|
|
1482
|
-
config.temporalConfig.url,
|
|
1483
|
-
config.temporalConfig.namespace,
|
|
1484
|
-
config.temporalConfig.clientCert,
|
|
1485
|
-
config.temporalConfig.clientKey,
|
|
1486
|
-
config.temporalConfig.apiKey
|
|
1487
|
-
);
|
|
1488
|
-
}
|
|
1489
|
-
const clickhouseClient = getClickhouseClient(
|
|
1490
|
-
toClientConfig2(config.clickhouseConfig)
|
|
1491
|
-
);
|
|
1492
|
-
let publicKey;
|
|
1493
|
-
if (config.jwtConfig?.secret) {
|
|
1494
|
-
console.log("Importing JWT public key...");
|
|
1495
|
-
publicKey = await jose.importSPKI(config.jwtConfig.secret, "RS256");
|
|
942
|
+
} catch (error) {
|
|
943
|
+
console.error(`Error in WebApp ${webApp.name}:`, error);
|
|
944
|
+
if (!res.headersSent) {
|
|
945
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
946
|
+
res.end(JSON.stringify({ error: "Internal Server Error" }));
|
|
1496
947
|
}
|
|
1497
|
-
|
|
1498
|
-
|
|
1499
|
-
|
|
1500
|
-
|
|
1501
|
-
|
|
1502
|
-
|
|
1503
|
-
|
|
1504
|
-
|
|
1505
|
-
|
|
1506
|
-
|
|
1507
|
-
|
|
1508
|
-
|
|
1509
|
-
|
|
1510
|
-
|
|
1511
|
-
|
|
1512
|
-
|
|
1513
|
-
server.listen(port, "localhost", () => {
|
|
1514
|
-
console.log(`Server running on port ${port}`);
|
|
1515
|
-
});
|
|
1516
|
-
return server;
|
|
1517
|
-
},
|
|
1518
|
-
workerStop: async (server) => {
|
|
1519
|
-
return new Promise((resolve2) => {
|
|
1520
|
-
server.close(() => resolve2());
|
|
1521
|
-
});
|
|
948
|
+
return;
|
|
949
|
+
}
|
|
950
|
+
}
|
|
951
|
+
}
|
|
952
|
+
let apiPath = pathname;
|
|
953
|
+
if (pathname.startsWith("/api/")) {
|
|
954
|
+
apiPath = pathname.substring(4);
|
|
955
|
+
} else if (pathname.startsWith("/consumption/")) {
|
|
956
|
+
apiPath = pathname.substring(13);
|
|
957
|
+
}
|
|
958
|
+
if (apiPath !== pathname) {
|
|
959
|
+
const modifiedReq = Object.assign(
|
|
960
|
+
Object.create(Object.getPrototypeOf(req)),
|
|
961
|
+
req,
|
|
962
|
+
{
|
|
963
|
+
url: apiPath + url.search
|
|
1522
964
|
}
|
|
965
|
+
);
|
|
966
|
+
await apiRequestHandler(modifiedReq, res);
|
|
967
|
+
return;
|
|
968
|
+
}
|
|
969
|
+
res.writeHead(404, { "Content-Type": "application/json" });
|
|
970
|
+
res.end(JSON.stringify({ error: "Not Found" }));
|
|
971
|
+
httpLogger(req, res, start);
|
|
972
|
+
};
|
|
973
|
+
};
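createMainRouter above tries WebApp mounts longest-prefix-first, strips the matched mount from the URL it proxies, and otherwise removes an optional /api or /consumption prefix before handing the request to the API handler. A sketch of just that routing decision, using hypothetical mounts.

```ts
// Sketch of the routing rules in createMainRouter above; sample mounts are hypothetical.
interface MountedApp {
  name: string;
  mountPath: string;
}

function resolveRoute(pathname: string, search: string, apps: MountedApp[]) {
  // Longest mount path wins, as in the sortedWebApps comparison above.
  const sorted = [...apps].sort((a, b) => b.mountPath.length - a.mountPath.length);
  for (const app of sorted) {
    const mount =
      app.mountPath.endsWith("/") && app.mountPath !== "/"
        ? app.mountPath.slice(0, -1)
        : app.mountPath;
    if (pathname === mount || pathname.startsWith(mount + "/")) {
      const rest = mount === "/" ? pathname : pathname.substring(mount.length) || "/";
      return { kind: "webapp" as const, app: app.name, url: rest + search };
    }
  }
  // No WebApp matched: strip a leading /api/ or /consumption/ before the API handler.
  let apiPath = pathname;
  if (pathname.startsWith("/api/")) apiPath = pathname.substring(4);
  else if (pathname.startsWith("/consumption/")) apiPath = pathname.substring(13);
  return { kind: "api" as const, url: apiPath + search };
}

console.log(resolveRoute("/admin/users", "?page=2", [{ name: "admin", mountPath: "/admin" }]));
// -> { kind: "webapp", app: "admin", url: "/users?page=2" }
console.log(resolveRoute("/api/bar", "", []));
// -> { kind: "api", url: "/bar" }
```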
|
|
974
|
+
var runApis = async (config) => {
|
|
975
|
+
const apisCluster = new Cluster({
|
|
976
|
+
maxWorkerCount: (config.workerCount ?? 0) > 0 ? config.workerCount : void 0,
|
|
977
|
+
workerStart: async () => {
|
|
978
|
+
let temporalClient;
|
|
979
|
+
if (config.temporalConfig) {
|
|
980
|
+
temporalClient = await getTemporalClient(
|
|
981
|
+
config.temporalConfig.url,
|
|
982
|
+
config.temporalConfig.namespace,
|
|
983
|
+
config.temporalConfig.clientCert,
|
|
984
|
+
config.temporalConfig.clientKey,
|
|
985
|
+
config.temporalConfig.apiKey
|
|
986
|
+
);
|
|
987
|
+
}
|
|
988
|
+
const clickhouseClient = getClickhouseClient(
|
|
989
|
+
toClientConfig(config.clickhouseConfig)
|
|
990
|
+
);
|
|
991
|
+
let publicKey;
|
|
992
|
+
if (config.jwtConfig?.secret) {
|
|
993
|
+
console.log("Importing JWT public key...");
|
|
994
|
+
publicKey = await jose.importSPKI(config.jwtConfig.secret, "RS256");
|
|
995
|
+
}
|
|
996
|
+
const server = import_http2.default.createServer(
|
|
997
|
+
await createMainRouter(
|
|
998
|
+
publicKey,
|
|
999
|
+
clickhouseClient,
|
|
1000
|
+
temporalClient,
|
|
1001
|
+
config.apisDir,
|
|
1002
|
+
config.enforceAuth,
|
|
1003
|
+
config.isDmv2,
|
|
1004
|
+
config.jwtConfig
|
|
1005
|
+
)
|
|
1006
|
+
);
|
|
1007
|
+
const port = config.proxyPort !== void 0 ? config.proxyPort : 4001;
|
|
1008
|
+
server.listen(port, "localhost", () => {
|
|
1009
|
+
console.log(`Server running on port ${port}`);
|
|
1523
1010
|
});
|
|
1524
|
-
|
|
1525
|
-
}
|
|
1526
|
-
|
|
1527
|
-
|
|
1011
|
+
return server;
|
|
1012
|
+
},
|
|
1013
|
+
workerStop: async (server) => {
|
|
1014
|
+
return new Promise((resolve2) => {
|
|
1015
|
+
server.close(() => resolve2());
|
|
1016
|
+
});
|
|
1017
|
+
}
|
|
1018
|
+
});
|
|
1019
|
+
apisCluster.start();
|
|
1020
|
+
};
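runApis wires the router into the Cluster helper through a workerStart that builds the per-worker clients and HTTP server and a workerStop that closes that server. The sketch below shows the same start/stop contract with a local stand-in class so it runs on its own; only the option names (maxWorkerCount, workerStart, workerStop) are taken from the diff.

```ts
// Sketch of the workerStart/workerStop contract used by runApis above.
// MiniCluster is a stand-in: the real helper forks workers instead of running inline.
import http from "node:http";

interface MiniClusterOptions<T> {
  maxWorkerCount?: number;
  workerStart: () => Promise<T>;
  workerStop: (started: T) => Promise<void>;
}

class MiniCluster<T> {
  constructor(private opts: MiniClusterOptions<T>) {}
  async start(): Promise<void> {
    const started = await this.opts.workerStart();
    // On SIGTERM, give the worker a chance to close cleanly.
    process.once("SIGTERM", async () => {
      await this.opts.workerStop(started);
      process.exit(0);
    });
  }
}

const apisCluster = new MiniCluster<http.Server>({
  maxWorkerCount: 2, // assumed; runApis derives this from config.workerCount
  workerStart: async () => {
    const server = http.createServer((_req, res) => {
      res.writeHead(200, { "Content-Type": "application/json" });
      res.end(JSON.stringify({ ok: true }));
    });
    const port = 4001; // runApis defaults proxyPort to 4001 when unset
    server.listen(port, "localhost", () => console.log(`Server running on port ${port}`));
    return server;
  },
  workerStop: (server) => new Promise<void>((resolve) => server.close(() => resolve())),
});

void apisCluster.start();
```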
|
|
1528
1021
|
|
|
1529
1022
|
// src/clients/redisClient.ts
|
|
1530
|
-
var import_redis;
|
|
1531
|
-
|
|
1532
|
-
|
|
1533
|
-
|
|
1534
|
-
|
|
1535
|
-
|
|
1536
|
-
|
|
1023
|
+
var import_redis = require("redis");
|
|
1024
|
+
|
|
1025
|
+
// src/consumption-apis/standalone.ts
|
|
1026
|
+
init_commons();
|
|
1027
|
+
|
|
1028
|
+
// src/utilities/dataParser.ts
|
|
1029
|
+
var import_csv_parse = require("csv-parse");
|
|
1537
1030
|
|
|
1538
1031
|
// src/utilities/json.ts
|
|
1032
|
+
var STRING_DATE_ANNOTATION = "stringDate";
|
|
1539
1033
|
function isNullableType(dt) {
|
|
1540
1034
|
return typeof dt === "object" && dt !== null && "nullable" in dt && typeof dt.nullable !== "undefined";
|
|
1541
1035
|
}
|
|
@@ -1654,77 +1148,36 @@ function mutateParsedJson(data, fieldMutations) {
|
|
|
1654
1148
|
}
|
|
1655
1149
|
applyFieldMutations(data, fieldMutations);
|
|
1656
1150
|
}
|
|
1657
|
-
var STRING_DATE_ANNOTATION;
|
|
1658
|
-
var init_json = __esm({
|
|
1659
|
-
"src/utilities/json.ts"() {
|
|
1660
|
-
"use strict";
|
|
1661
|
-
STRING_DATE_ANNOTATION = "stringDate";
|
|
1662
|
-
}
|
|
1663
|
-
});
|
|
1664
1151
|
|
|
1665
1152
|
// src/utilities/dataParser.ts
|
|
1666
|
-
var
|
|
1667
|
-
|
|
1668
|
-
"
|
|
1669
|
-
|
|
1670
|
-
|
|
1671
|
-
|
|
1672
|
-
|
|
1673
|
-
|
|
1674
|
-
|
|
1675
|
-
|
|
1676
|
-
|
|
1677
|
-
|
|
1678
|
-
DEFAULT_CSV_CONFIG = {
|
|
1679
|
-
delimiter: CSV_DELIMITERS.COMMA,
|
|
1680
|
-
columns: true,
|
|
1681
|
-
skipEmptyLines: true,
|
|
1682
|
-
trim: true
|
|
1683
|
-
};
|
|
1684
|
-
}
|
|
1685
|
-
});
|
|
1686
|
-
|
|
1687
|
-
// src/utilities/index.ts
|
|
1688
|
-
var init_utilities = __esm({
|
|
1689
|
-
"src/utilities/index.ts"() {
|
|
1690
|
-
"use strict";
|
|
1691
|
-
init_dataParser();
|
|
1692
|
-
}
|
|
1693
|
-
});
|
|
1694
|
-
|
|
1695
|
-
// src/connectors/dataSource.ts
|
|
1696
|
-
var init_dataSource = __esm({
|
|
1697
|
-
"src/connectors/dataSource.ts"() {
|
|
1698
|
-
"use strict";
|
|
1699
|
-
}
|
|
1700
|
-
});
|
|
1701
|
-
|
|
1702
|
-
// src/index.ts
|
|
1703
|
-
var init_index = __esm({
|
|
1704
|
-
"src/index.ts"() {
|
|
1705
|
-
"use strict";
|
|
1706
|
-
init_browserCompatible();
|
|
1707
|
-
init_helpers();
|
|
1708
|
-
init_commons();
|
|
1709
|
-
init_secrets();
|
|
1710
|
-
init_helpers2();
|
|
1711
|
-
init_webAppHelpers();
|
|
1712
|
-
init_task();
|
|
1713
|
-
init_runner();
|
|
1714
|
-
init_redisClient();
|
|
1715
|
-
init_helpers2();
|
|
1716
|
-
init_standalone();
|
|
1717
|
-
init_sqlHelpers();
|
|
1718
|
-
init_utilities();
|
|
1719
|
-
init_dataSource();
|
|
1720
|
-
init_types();
|
|
1721
|
-
}
|
|
1722
|
-
});
|
|
1153
|
+
var CSV_DELIMITERS = {
|
|
1154
|
+
COMMA: ",",
|
|
1155
|
+
TAB: "\t",
|
|
1156
|
+
SEMICOLON: ";",
|
|
1157
|
+
PIPE: "|"
|
|
1158
|
+
};
|
|
1159
|
+
var DEFAULT_CSV_CONFIG = {
|
|
1160
|
+
delimiter: CSV_DELIMITERS.COMMA,
|
|
1161
|
+
columns: true,
|
|
1162
|
+
skipEmptyLines: true,
|
|
1163
|
+
trim: true
|
|
1164
|
+
};
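The restored constants above configure CSV parsing: a delimiter table plus a default config (header row as columns, skip empty lines, trim values). A sketch of feeding an equivalent config into csv-parse's sync API; the snake_case option names are csv-parse's documented form, and the bundle's camelCase keys are assumed to map onto the same options.

```ts
// Sketch of DEFAULT_CSV_CONFIG applied through csv-parse (sync API for brevity).
import { parse } from "csv-parse/sync";

const CSV_DELIMITERS = { COMMA: ",", TAB: "\t", SEMICOLON: ";", PIPE: "|" } as const;

const records = parse("id,name\n1,alpha\n\n2,beta\n", {
  delimiter: CSV_DELIMITERS.COMMA,
  columns: true,          // first row becomes the object keys
  skip_empty_lines: true, // drop the blank line
  trim: true,
});

console.log(records); // [ { id: "1", name: "alpha" }, { id: "2", name: "beta" } ]
```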
|
|
1723
1165
|
|
|
1724
1166
|
// src/dmv2/internal.ts
|
|
1167
|
+
init_commons();
|
|
1725
1168
|
function getSourceDir() {
|
|
1726
1169
|
return import_process.default.env.MOOSE_SOURCE_DIR || "app";
|
|
1727
1170
|
}
|
|
1171
|
+
var moose_internal = {
|
|
1172
|
+
tables: /* @__PURE__ */ new Map(),
|
|
1173
|
+
streams: /* @__PURE__ */ new Map(),
|
|
1174
|
+
ingestApis: /* @__PURE__ */ new Map(),
|
|
1175
|
+
apis: /* @__PURE__ */ new Map(),
|
|
1176
|
+
sqlResources: /* @__PURE__ */ new Map(),
|
|
1177
|
+
workflows: /* @__PURE__ */ new Map(),
|
|
1178
|
+
webApps: /* @__PURE__ */ new Map()
|
|
1179
|
+
};
|
|
1180
|
+
var defaultRetentionPeriod = 60 * 60 * 24 * 7;
|
|
1728
1181
|
function isS3QueueConfig(config) {
|
|
1729
1182
|
return "engine" in config && config.engine === "S3Queue" /* S3Queue */;
|
|
1730
1183
|
}
|
|
@@ -1961,387 +1414,364 @@ function convertTableConfigToEngineConfig(config) {
|
|
|
1961
1414
|
}
|
|
1962
1415
|
return void 0;
|
|
1963
1416
|
}
|
|
1964
|
-
|
|
1965
|
-
|
|
1966
|
-
|
|
1967
|
-
}
|
|
1968
|
-
|
|
1969
|
-
|
|
1970
|
-
|
|
1971
|
-
|
|
1972
|
-
|
|
1417
|
+
var toInfraMap = (registry) => {
|
|
1418
|
+
const tables = {};
|
|
1419
|
+
const topics = {};
|
|
1420
|
+
const ingestApis = {};
|
|
1421
|
+
const apis = {};
|
|
1422
|
+
const sqlResources = {};
|
|
1423
|
+
const workflows = {};
|
|
1424
|
+
const webApps = {};
|
|
1425
|
+
registry.tables.forEach((table) => {
|
|
1426
|
+
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
1427
|
+
let metadata = table.metadata;
|
|
1428
|
+
if (!metadata && table.config && table.pipelineParent) {
|
|
1429
|
+
metadata = table.pipelineParent.metadata;
|
|
1430
|
+
}
|
|
1431
|
+
const engineConfig = convertTableConfigToEngineConfig(table.config);
|
|
1432
|
+
let tableSettings = void 0;
|
|
1433
|
+
if (table.config.settings) {
|
|
1434
|
+
tableSettings = Object.entries(table.config.settings).reduce(
|
|
1435
|
+
(acc, [key, value]) => {
|
|
1436
|
+
if (value !== void 0) {
|
|
1437
|
+
acc[key] = String(value);
|
|
1438
|
+
}
|
|
1439
|
+
return acc;
|
|
1440
|
+
},
|
|
1441
|
+
{}
|
|
1442
|
+
);
|
|
1443
|
+
}
|
|
1444
|
+
if (engineConfig?.engine === "S3Queue") {
|
|
1445
|
+
if (!tableSettings) {
|
|
1446
|
+
tableSettings = {};
|
|
1447
|
+
}
|
|
1448
|
+
if (!tableSettings.mode) {
|
|
1449
|
+
tableSettings.mode = "unordered";
|
|
1973
1450
|
}
|
|
1974
1451
|
}
|
|
1975
|
-
|
|
1976
|
-
|
|
1977
|
-
|
|
1978
|
-
|
|
1979
|
-
|
|
1980
|
-
|
|
1981
|
-
|
|
1982
|
-
|
|
1983
|
-
|
|
1984
|
-
|
|
1985
|
-
|
|
1986
|
-
|
|
1987
|
-
|
|
1988
|
-
|
|
1989
|
-
|
|
1990
|
-
|
|
1991
|
-
|
|
1992
|
-
|
|
1452
|
+
const hasOrderByFields = "orderByFields" in table.config && Array.isArray(table.config.orderByFields) && table.config.orderByFields.length > 0;
|
|
1453
|
+
const hasOrderByExpression = "orderByExpression" in table.config && typeof table.config.orderByExpression === "string" && table.config.orderByExpression.length > 0;
|
|
1454
|
+
if (hasOrderByFields && hasOrderByExpression) {
|
|
1455
|
+
throw new Error(
|
|
1456
|
+
`Table ${table.name}: Provide either orderByFields or orderByExpression, not both.`
|
|
1457
|
+
);
|
|
1458
|
+
}
|
|
1459
|
+
const orderBy = hasOrderByExpression && "orderByExpression" in table.config ? table.config.orderByExpression ?? "" : "orderByFields" in table.config ? table.config.orderByFields ?? [] : [];
|
|
1460
|
+
tables[id] = {
|
|
1461
|
+
name: table.name,
|
|
1462
|
+
columns: table.columnArray,
|
|
1463
|
+
orderBy,
|
|
1464
|
+
partitionBy: "partitionBy" in table.config ? table.config.partitionBy : void 0,
|
|
1465
|
+
sampleByExpression: "sampleByExpression" in table.config ? table.config.sampleByExpression : void 0,
|
|
1466
|
+
primaryKeyExpression: "primaryKeyExpression" in table.config ? table.config.primaryKeyExpression : void 0,
|
|
1467
|
+
engineConfig,
|
|
1468
|
+
version: table.config.version,
|
|
1469
|
+
metadata,
|
|
1470
|
+
lifeCycle: table.config.lifeCycle,
|
|
1471
|
+
// Map 'settings' to 'tableSettings' for internal use
|
|
1472
|
+
tableSettings: tableSettings && Object.keys(tableSettings).length > 0 ? tableSettings : void 0,
|
|
1473
|
+
indexes: table.config.indexes?.map((i) => ({
|
|
1474
|
+
...i,
|
|
1475
|
+
granularity: i.granularity === void 0 ? 1 : i.granularity,
|
|
1476
|
+
arguments: i.arguments === void 0 ? [] : i.arguments
|
|
1477
|
+
})) || [],
|
|
1478
|
+
ttl: table.config.ttl,
|
|
1479
|
+
database: table.config.database,
|
|
1480
|
+
cluster: table.config.cluster
|
|
1993
1481
|
};
|
|
1994
|
-
|
|
1995
|
-
|
|
1996
|
-
|
|
1997
|
-
|
|
1998
|
-
|
|
1999
|
-
|
|
2000
|
-
|
|
2001
|
-
|
|
2002
|
-
|
|
2003
|
-
|
|
2004
|
-
|
|
2005
|
-
|
|
2006
|
-
|
|
2007
|
-
|
|
2008
|
-
|
|
2009
|
-
|
|
2010
|
-
let tableSettings = void 0;
|
|
2011
|
-
if (table.config.settings) {
|
|
2012
|
-
tableSettings = Object.entries(table.config.settings).reduce(
|
|
2013
|
-
(acc, [key, value]) => {
|
|
2014
|
-
if (value !== void 0) {
|
|
2015
|
-
acc[key] = String(value);
|
|
2016
|
-
}
|
|
2017
|
-
return acc;
|
|
2018
|
-
},
|
|
2019
|
-
{}
|
|
2020
|
-
);
|
|
2021
|
-
}
|
|
2022
|
-
if (engineConfig?.engine === "S3Queue") {
|
|
2023
|
-
if (!tableSettings) {
|
|
2024
|
-
tableSettings = {};
|
|
2025
|
-
}
|
|
2026
|
-
if (!tableSettings.mode) {
|
|
2027
|
-
tableSettings.mode = "unordered";
|
|
2028
|
-
}
|
|
2029
|
-
}
|
|
2030
|
-
const hasOrderByFields = "orderByFields" in table.config && Array.isArray(table.config.orderByFields) && table.config.orderByFields.length > 0;
|
|
2031
|
-
const hasOrderByExpression = "orderByExpression" in table.config && typeof table.config.orderByExpression === "string" && table.config.orderByExpression.length > 0;
|
|
2032
|
-
if (hasOrderByFields && hasOrderByExpression) {
|
|
2033
|
-
throw new Error(
|
|
2034
|
-
`Table ${table.name}: Provide either orderByFields or orderByExpression, not both.`
|
|
2035
|
-
);
|
|
2036
|
-
}
|
|
2037
|
-
const orderBy = hasOrderByExpression && "orderByExpression" in table.config ? table.config.orderByExpression ?? "" : "orderByFields" in table.config ? table.config.orderByFields ?? [] : [];
|
|
2038
|
-
tables[id] = {
|
|
2039
|
-
name: table.name,
|
|
2040
|
-
columns: table.columnArray,
|
|
2041
|
-
orderBy,
|
|
2042
|
-
partitionBy: "partitionBy" in table.config ? table.config.partitionBy : void 0,
|
|
2043
|
-
sampleByExpression: "sampleByExpression" in table.config ? table.config.sampleByExpression : void 0,
|
|
2044
|
-
primaryKeyExpression: "primaryKeyExpression" in table.config ? table.config.primaryKeyExpression : void 0,
|
|
2045
|
-
engineConfig,
|
|
2046
|
-
version: table.config.version,
|
|
2047
|
-
metadata,
|
|
2048
|
-
lifeCycle: table.config.lifeCycle,
|
|
2049
|
-
// Map 'settings' to 'tableSettings' for internal use
|
|
2050
|
-
tableSettings: tableSettings && Object.keys(tableSettings).length > 0 ? tableSettings : void 0,
|
|
2051
|
-
indexes: table.config.indexes?.map((i) => ({
|
|
2052
|
-
...i,
|
|
2053
|
-
granularity: i.granularity === void 0 ? 1 : i.granularity,
|
|
2054
|
-
arguments: i.arguments === void 0 ? [] : i.arguments
|
|
2055
|
-
})) || [],
|
|
2056
|
-
ttl: table.config.ttl,
|
|
2057
|
-
database: table.config.database,
|
|
2058
|
-
cluster: table.config.cluster
|
|
2059
|
-
};
|
|
2060
|
-
});
|
|
2061
|
-
registry.streams.forEach((stream) => {
|
|
2062
|
-
let metadata = stream.metadata;
|
|
2063
|
-
if (!metadata && stream.config && stream.pipelineParent) {
|
|
2064
|
-
metadata = stream.pipelineParent.metadata;
|
|
2065
|
-
}
|
|
2066
|
-
const transformationTargets = [];
|
|
2067
|
-
const consumers = [];
|
|
2068
|
-
stream._transformations.forEach((transforms, destinationName) => {
|
|
2069
|
-
transforms.forEach(([destination, _, config]) => {
|
|
2070
|
-
transformationTargets.push({
|
|
2071
|
-
kind: "stream",
|
|
2072
|
-
name: destinationName,
|
|
2073
|
-
version: config.version,
|
|
2074
|
-
metadata: config.metadata,
|
|
2075
|
-
sourceFile: config.sourceFile
|
|
2076
|
-
});
|
|
2077
|
-
});
|
|
2078
|
-
});
|
|
2079
|
-
stream._consumers.forEach((consumer) => {
|
|
2080
|
-
consumers.push({
|
|
2081
|
-
version: consumer.config.version,
|
|
2082
|
-
sourceFile: consumer.config.sourceFile
|
|
2083
|
-
});
|
|
1482
|
+
});
|
|
1483
|
+
registry.streams.forEach((stream) => {
|
|
1484
|
+
let metadata = stream.metadata;
|
|
1485
|
+
if (!metadata && stream.config && stream.pipelineParent) {
|
|
1486
|
+
metadata = stream.pipelineParent.metadata;
|
|
1487
|
+
}
|
|
1488
|
+
const transformationTargets = [];
|
|
1489
|
+
const consumers = [];
|
|
1490
|
+
stream._transformations.forEach((transforms, destinationName) => {
|
|
1491
|
+
transforms.forEach(([destination, _, config]) => {
|
|
1492
|
+
transformationTargets.push({
|
|
1493
|
+
kind: "stream",
|
|
1494
|
+
name: destinationName,
|
|
1495
|
+
version: config.version,
|
|
1496
|
+
metadata: config.metadata,
|
|
1497
|
+
sourceFile: config.sourceFile
|
|
2084
1498
|
});
|
|
2085
|
-
topics[stream.name] = {
|
|
2086
|
-
name: stream.name,
|
|
2087
|
-
columns: stream.columnArray,
|
|
2088
|
-
targetTable: stream.config.destination?.name,
|
|
2089
|
-
targetTableVersion: stream.config.destination?.config.version,
|
|
2090
|
-
retentionPeriod: stream.config.retentionPeriod ?? defaultRetentionPeriod,
|
|
2091
|
-
partitionCount: stream.config.parallelism ?? 1,
|
|
2092
|
-
version: stream.config.version,
|
|
2093
|
-
transformationTargets,
|
|
2094
|
-
hasMultiTransform: stream._multipleTransformations === void 0,
|
|
2095
|
-
consumers,
|
|
2096
|
-
metadata,
|
|
2097
|
-
lifeCycle: stream.config.lifeCycle,
|
|
2098
|
-
schemaConfig: stream.config.schemaConfig
|
|
2099
|
-
};
|
|
2100
1499
|
});
|
|
2101
|
-
|
|
2102
|
-
|
|
2103
|
-
|
|
2104
|
-
|
|
2105
|
-
|
|
2106
|
-
ingestApis[api.name] = {
|
|
2107
|
-
name: api.name,
|
|
2108
|
-
columns: api.columnArray,
|
|
2109
|
-
version: api.config.version,
|
|
2110
|
-
path: api.config.path,
|
|
2111
|
-
writeTo: {
|
|
2112
|
-
kind: "stream",
|
|
2113
|
-
name: api.config.destination.name
|
|
2114
|
-
},
|
|
2115
|
-
deadLetterQueue: api.config.deadLetterQueue?.name,
|
|
2116
|
-
metadata,
|
|
2117
|
-
schema: api.schema,
|
|
2118
|
-
allowExtraFields: api.allowExtraFields
|
|
2119
|
-
};
|
|
2120
|
-
});
|
|
2121
|
-
registry.apis.forEach((api, key) => {
|
|
2122
|
-
const rustKey = api.config.version ? `${api.name}:${api.config.version}` : api.name;
|
|
2123
|
-
apis[rustKey] = {
|
|
2124
|
-
name: api.name,
|
|
2125
|
-
queryParams: api.columnArray,
|
|
2126
|
-
responseSchema: api.responseSchema,
|
|
2127
|
-
version: api.config.version,
|
|
2128
|
-
path: api.config.path,
|
|
2129
|
-
metadata: api.metadata
|
|
2130
|
-
};
|
|
2131
|
-
});
|
|
2132
|
-
registry.sqlResources.forEach((sqlResource) => {
|
|
2133
|
-
sqlResources[sqlResource.name] = {
|
|
2134
|
-
name: sqlResource.name,
|
|
2135
|
-
setup: sqlResource.setup,
|
|
2136
|
-
teardown: sqlResource.teardown,
|
|
2137
|
-
sourceFile: sqlResource.sourceFile,
|
|
2138
|
-
sourceLine: sqlResource.sourceLine,
|
|
2139
|
-
sourceColumn: sqlResource.sourceColumn,
|
|
2140
|
-
pullsDataFrom: sqlResource.pullsDataFrom.map((r) => {
|
|
2141
|
-
if (r.kind === "OlapTable") {
|
|
2142
|
-
const table = r;
|
|
2143
|
-
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
2144
|
-
return {
|
|
2145
|
-
id,
|
|
2146
|
-
kind: "Table"
|
|
2147
|
-
};
|
|
2148
|
-
} else if (r.kind === "SqlResource") {
|
|
2149
|
-
const resource = r;
|
|
2150
|
-
return {
|
|
2151
|
-
id: resource.name,
|
|
2152
|
-
kind: "SqlResource"
|
|
2153
|
-
};
|
|
2154
|
-
} else {
|
|
2155
|
-
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2156
|
-
}
|
|
2157
|
-
}),
|
|
2158
|
-
pushesDataTo: sqlResource.pushesDataTo.map((r) => {
|
|
2159
|
-
if (r.kind === "OlapTable") {
|
|
2160
|
-
const table = r;
|
|
2161
|
-
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
2162
|
-
return {
|
|
2163
|
-
id,
|
|
2164
|
-
kind: "Table"
|
|
2165
|
-
};
|
|
2166
|
-
} else if (r.kind === "SqlResource") {
|
|
2167
|
-
const resource = r;
|
|
2168
|
-
return {
|
|
2169
|
-
id: resource.name,
|
|
2170
|
-
kind: "SqlResource"
|
|
2171
|
-
};
|
|
2172
|
-
} else {
|
|
2173
|
-
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2174
|
-
}
|
|
2175
|
-
})
|
|
2176
|
-
};
|
|
2177
|
-
});
|
|
2178
|
-
registry.workflows.forEach((workflow) => {
|
|
2179
|
-
workflows[workflow.name] = {
|
|
2180
|
-
name: workflow.name,
|
|
2181
|
-
retries: workflow.config.retries,
|
|
2182
|
-
timeout: workflow.config.timeout,
|
|
2183
|
-
schedule: workflow.config.schedule
|
|
2184
|
-
};
|
|
2185
|
-
});
|
|
2186
|
-
registry.webApps.forEach((webApp) => {
|
|
2187
|
-
webApps[webApp.name] = {
|
|
2188
|
-
name: webApp.name,
|
|
2189
|
-
mountPath: webApp.config.mountPath || "/",
|
|
2190
|
-
metadata: webApp.config.metadata
|
|
2191
|
-
};
|
|
1500
|
+
});
|
|
1501
|
+
stream._consumers.forEach((consumer) => {
|
|
1502
|
+
consumers.push({
|
|
1503
|
+
version: consumer.config.version,
|
|
1504
|
+
sourceFile: consumer.config.sourceFile
|
|
2192
1505
|
});
|
|
2193
|
-
|
|
2194
|
-
|
|
2195
|
-
|
|
2196
|
-
|
|
2197
|
-
|
|
2198
|
-
|
|
2199
|
-
|
|
2200
|
-
|
|
2201
|
-
|
|
1506
|
+
});
|
|
1507
|
+
topics[stream.name] = {
|
|
1508
|
+
name: stream.name,
|
|
1509
|
+
columns: stream.columnArray,
|
|
1510
|
+
targetTable: stream.config.destination?.name,
|
|
1511
|
+
targetTableVersion: stream.config.destination?.config.version,
|
|
1512
|
+
retentionPeriod: stream.config.retentionPeriod ?? defaultRetentionPeriod,
|
|
1513
|
+
partitionCount: stream.config.parallelism ?? 1,
|
|
1514
|
+
version: stream.config.version,
|
|
1515
|
+
transformationTargets,
|
|
1516
|
+
hasMultiTransform: stream._multipleTransformations === void 0,
|
|
1517
|
+
consumers,
|
|
1518
|
+
metadata,
|
|
1519
|
+
lifeCycle: stream.config.lifeCycle,
|
|
1520
|
+
schemaConfig: stream.config.schemaConfig
|
|
2202
1521
|
};
|
|
2203
|
-
|
|
2204
|
-
|
|
2205
|
-
|
|
1522
|
+
});
|
|
1523
|
+
registry.ingestApis.forEach((api) => {
|
|
1524
|
+
let metadata = api.metadata;
|
|
1525
|
+
if (!metadata && api.config && api.pipelineParent) {
|
|
1526
|
+
metadata = api.pipelineParent.metadata;
|
|
2206
1527
|
}
|
|
2207
|
-
|
|
2208
|
-
|
|
2209
|
-
|
|
2210
|
-
|
|
2211
|
-
|
|
2212
|
-
|
|
2213
|
-
|
|
2214
|
-
|
|
2215
|
-
|
|
2216
|
-
|
|
2217
|
-
|
|
2218
|
-
|
|
2219
|
-
|
|
2220
|
-
registry.apis.clear();
|
|
2221
|
-
registry.sqlResources.clear();
|
|
2222
|
-
registry.workflows.clear();
|
|
2223
|
-
registry.webApps.clear();
|
|
2224
|
-
const appDir = `${import_process.default.cwd()}/${getSourceDir()}`;
|
|
2225
|
-
Object.keys(require.cache).forEach((key) => {
|
|
2226
|
-
if (key.startsWith(appDir)) {
|
|
2227
|
-
delete require.cache[key];
|
|
2228
|
-
}
|
|
2229
|
-
});
|
|
2230
|
-
try {
|
|
2231
|
-
require(`${import_process.default.cwd()}/${getSourceDir()}/index.ts`);
|
|
2232
|
-
} catch (error) {
|
|
2233
|
-
let hint;
|
|
2234
|
-
const details = error instanceof Error ? error.message : String(error);
|
|
2235
|
-
if (details.includes("ERR_REQUIRE_ESM") || details.includes("ES Module")) {
|
|
2236
|
-
hint = "The file or its dependencies are ESM-only. Switch to packages that dual-support CJS & ESM, or upgrade to Node 22.12+. If you must use Node 20, you may try Node 20.19\n\n";
|
|
2237
|
-
}
|
|
2238
|
-
const errorMsg = `${hint ?? ""}${details}`;
|
|
2239
|
-
const cause = error instanceof Error ? error : void 0;
|
|
2240
|
-
throw new Error(errorMsg, { cause });
|
|
2241
|
-
}
|
|
1528
|
+
ingestApis[api.name] = {
|
|
1529
|
+
name: api.name,
|
|
1530
|
+
columns: api.columnArray,
|
|
1531
|
+
version: api.config.version,
|
|
1532
|
+
path: api.config.path,
|
|
1533
|
+
writeTo: {
|
|
1534
|
+
kind: "stream",
|
|
1535
|
+
name: api.config.destination.name
|
|
1536
|
+
},
|
|
1537
|
+
deadLetterQueue: api.config.deadLetterQueue?.name,
|
|
1538
|
+
metadata,
|
|
1539
|
+
schema: api.schema,
|
|
1540
|
+
allowExtraFields: api.allowExtraFields
|
|
2242
1541
|
};
|
|
2243
|
-
|
|
2244
|
-
|
|
2245
|
-
|
|
2246
|
-
|
|
2247
|
-
|
|
2248
|
-
|
|
2249
|
-
|
|
2250
|
-
|
|
2251
|
-
|
|
2252
|
-
|
|
2253
|
-
transform,
|
|
2254
|
-
config,
|
|
2255
|
-
stream.columnArray
|
|
2256
|
-
]);
|
|
2257
|
-
});
|
|
2258
|
-
});
|
|
2259
|
-
stream._consumers.forEach((consumer) => {
|
|
2260
|
-
const consumerFunctionKey = `${stream.name}_<no-target>${consumer.config.version ? `_${consumer.config.version}` : ""}`;
|
|
2261
|
-
transformFunctions.set(consumerFunctionKey, [
|
|
2262
|
-
consumer.consumer,
|
|
2263
|
-
consumer.config,
|
|
2264
|
-
stream.columnArray
|
|
2265
|
-
]);
|
|
2266
|
-
});
|
|
2267
|
-
});
|
|
2268
|
-
return transformFunctions;
|
|
1542
|
+
});
|
|
1543
|
+
registry.apis.forEach((api, key) => {
|
|
1544
|
+
const rustKey = api.config.version ? `${api.name}:${api.config.version}` : api.name;
|
|
1545
|
+
apis[rustKey] = {
|
|
1546
|
+
name: api.name,
|
|
1547
|
+
queryParams: api.columnArray,
|
|
1548
|
+
responseSchema: api.responseSchema,
|
|
1549
|
+
version: api.config.version,
|
|
1550
|
+
path: api.config.path,
|
|
1551
|
+
metadata: api.metadata
|
|
2269
1552
|
};
|
|
2270
|
-
|
|
2271
|
-
|
|
2272
|
-
|
|
2273
|
-
|
|
2274
|
-
|
|
2275
|
-
|
|
2276
|
-
|
|
2277
|
-
|
|
2278
|
-
|
|
2279
|
-
|
|
2280
|
-
|
|
2281
|
-
|
|
2282
|
-
}
|
|
2283
|
-
|
|
2284
|
-
|
|
2285
|
-
|
|
2286
|
-
|
|
2287
|
-
|
|
2288
|
-
|
|
2289
|
-
|
|
2290
|
-
|
|
2291
|
-
|
|
2292
|
-
}
|
|
1553
|
+
});
|
|
1554
|
+
registry.sqlResources.forEach((sqlResource) => {
|
|
1555
|
+
sqlResources[sqlResource.name] = {
|
|
1556
|
+
name: sqlResource.name,
|
|
1557
|
+
setup: sqlResource.setup,
|
|
1558
|
+
teardown: sqlResource.teardown,
|
|
1559
|
+
sourceFile: sqlResource.sourceFile,
|
|
1560
|
+
sourceLine: sqlResource.sourceLine,
|
|
1561
|
+
sourceColumn: sqlResource.sourceColumn,
|
|
1562
|
+
pullsDataFrom: sqlResource.pullsDataFrom.map((r) => {
|
|
1563
|
+
if (r.kind === "OlapTable") {
|
|
1564
|
+
const table = r;
|
|
1565
|
+
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
1566
|
+
return {
|
|
1567
|
+
id,
|
|
1568
|
+
kind: "Table"
|
|
1569
|
+
};
|
|
1570
|
+
} else if (r.kind === "SqlResource") {
|
|
1571
|
+
const resource = r;
|
|
1572
|
+
return {
|
|
1573
|
+
id: resource.name,
|
|
1574
|
+
kind: "SqlResource"
|
|
1575
|
+
};
|
|
1576
|
+
} else {
|
|
1577
|
+
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2293
1578
|
}
|
|
2294
|
-
})
|
|
2295
|
-
|
|
2296
|
-
if (
|
|
2297
|
-
|
|
1579
|
+
}),
|
|
1580
|
+
pushesDataTo: sqlResource.pushesDataTo.map((r) => {
|
|
1581
|
+
if (r.kind === "OlapTable") {
|
|
1582
|
+
const table = r;
|
|
1583
|
+
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
1584
|
+
return {
|
|
1585
|
+
id,
|
|
1586
|
+
kind: "Table"
|
|
1587
|
+
};
|
|
1588
|
+
} else if (r.kind === "SqlResource") {
|
|
1589
|
+
const resource = r;
|
|
1590
|
+
return {
|
|
1591
|
+
id: resource.name,
|
|
1592
|
+
kind: "SqlResource"
|
|
1593
|
+
};
|
|
1594
|
+
} else {
|
|
1595
|
+
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2298
1596
|
}
|
|
2299
|
-
})
|
|
2300
|
-
return apiFunctions;
|
|
1597
|
+
})
|
|
2301
1598
|
};
|
|
2302
|
-
|
|
2303
|
-
|
|
2304
|
-
|
|
2305
|
-
|
|
1599
|
+
});
|
|
1600
|
+
registry.workflows.forEach((workflow) => {
|
|
1601
|
+
workflows[workflow.name] = {
|
|
1602
|
+
name: workflow.name,
|
|
1603
|
+
retries: workflow.config.retries,
|
|
1604
|
+
timeout: workflow.config.timeout,
|
|
1605
|
+
schedule: workflow.config.schedule
|
|
2306
1606
|
};
|
|
2307
|
-
|
|
2308
|
-
|
|
2309
|
-
|
|
2310
|
-
|
|
2311
|
-
|
|
1607
|
+
});
|
|
1608
|
+
registry.webApps.forEach((webApp) => {
|
|
1609
|
+
webApps[webApp.name] = {
|
|
1610
|
+
name: webApp.name,
|
|
1611
|
+
mountPath: webApp.config.mountPath || "/",
|
|
1612
|
+
metadata: webApp.config.metadata
|
|
1613
|
+
};
|
|
1614
|
+
});
|
|
1615
|
+
return {
|
|
1616
|
+
topics,
|
|
1617
|
+
tables,
|
|
1618
|
+
ingestApis,
|
|
1619
|
+
apis,
|
|
1620
|
+
sqlResources,
|
|
1621
|
+
workflows,
|
|
1622
|
+
webApps
|
|
1623
|
+
};
|
|
1624
|
+
};
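Two conventions in toInfraMap above are worth calling out: versioned tables are keyed as `${name}_${version}`, and orderByFields / orderByExpression are mutually exclusive. A reduced sketch of just those rules; the config type is trimmed to the fields involved.

```ts
// Sketch of the table-id and orderBy rules from toInfraMap above.
interface TableLikeConfig {
  version?: string;
  orderByFields?: string[];
  orderByExpression?: string;
}

function tableId(name: string, config: TableLikeConfig): string {
  return config.version ? `${name}_${config.version}` : name;
}

function resolveOrderBy(name: string, config: TableLikeConfig): string | string[] {
  const hasFields = Array.isArray(config.orderByFields) && config.orderByFields.length > 0;
  const hasExpression =
    typeof config.orderByExpression === "string" && config.orderByExpression.length > 0;
  if (hasFields && hasExpression) {
    throw new Error(
      `Table ${name}: Provide either orderByFields or orderByExpression, not both.`,
    );
  }
  return hasExpression ? config.orderByExpression! : config.orderByFields ?? [];
}

console.log(tableId("events", { version: "1.2" }));              // "events_1.2"
console.log(resolveOrderBy("events", { orderByFields: ["id"] })); // ["id"]
```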
|
|
1625
|
+
var getMooseInternal = () => globalThis.moose_internal;
|
|
1626
|
+
if (getMooseInternal() === void 0) {
|
|
1627
|
+
globalThis.moose_internal = moose_internal;
|
|
1628
|
+
}
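getMooseInternal above guards a process-wide registry on globalThis, so re-evaluating the module (for example after loadIndex purges the require cache) keeps reusing the same maps. A tiny sketch of that singleton pattern; `__demo_registry` is an illustrative key, not the one the bundle uses.

```ts
// Sketch of the globalThis-singleton guard shown above.
interface DemoRegistry {
  tables: Map<string, unknown>;
}

function getRegistry(): DemoRegistry {
  const g = globalThis as typeof globalThis & { __demo_registry?: DemoRegistry };
  if (g.__demo_registry === undefined) {
    g.__demo_registry = { tables: new Map() }; // created once per process
  }
  return g.__demo_registry;
}

// Both calls see the same Map instance, even across module re-evaluation:
getRegistry().tables.set("events", {});
console.log(getRegistry().tables.size); // 1
```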
|
|
1629
|
+
var dumpMooseInternal = async () => {
|
|
1630
|
+
loadIndex();
|
|
1631
|
+
console.log(
|
|
1632
|
+
"___MOOSE_STUFF___start",
|
|
1633
|
+
JSON.stringify(toInfraMap(getMooseInternal())),
|
|
1634
|
+
"end___MOOSE_STUFF___"
|
|
1635
|
+
);
|
|
1636
|
+
};
|
|
1637
|
+
var loadIndex = () => {
|
|
1638
|
+
const registry = getMooseInternal();
|
|
1639
|
+
registry.tables.clear();
|
|
1640
|
+
registry.streams.clear();
|
|
1641
|
+
registry.ingestApis.clear();
|
|
1642
|
+
registry.apis.clear();
|
|
1643
|
+
registry.sqlResources.clear();
|
|
1644
|
+
registry.workflows.clear();
|
|
1645
|
+
registry.webApps.clear();
|
|
1646
|
+
const appDir = `${import_process.default.cwd()}/${getSourceDir()}`;
|
|
1647
|
+
Object.keys(require.cache).forEach((key) => {
|
|
1648
|
+
if (key.startsWith(appDir)) {
|
|
1649
|
+
delete require.cache[key];
|
|
1650
|
+
}
|
|
1651
|
+
});
|
|
1652
|
+
try {
|
|
1653
|
+
require(`${import_process.default.cwd()}/${getSourceDir()}/index.ts`);
|
|
1654
|
+
} catch (error) {
|
|
1655
|
+
let hint;
|
|
1656
|
+
const details = error instanceof Error ? error.message : String(error);
|
|
1657
|
+
if (details.includes("ERR_REQUIRE_ESM") || details.includes("ES Module")) {
|
|
1658
|
+
hint = "The file or its dependencies are ESM-only. Switch to packages that dual-support CJS & ESM, or upgrade to Node 22.12+. If you must use Node 20, you may try Node 20.19\n\n";
|
|
1659
|
+
}
|
|
1660
|
+
const errorMsg = `${hint ?? ""}${details}`;
|
|
1661
|
+
const cause = error instanceof Error ? error : void 0;
|
|
1662
|
+
throw new Error(errorMsg, { cause });
|
|
1663
|
+
}
|
|
1664
|
+
};
|
|
1665
|
+
var getStreamingFunctions = async () => {
|
|
1666
|
+
loadIndex();
|
|
1667
|
+
const registry = getMooseInternal();
|
|
1668
|
+
const transformFunctions = /* @__PURE__ */ new Map();
|
|
1669
|
+
registry.streams.forEach((stream) => {
|
|
1670
|
+
stream._transformations.forEach((transforms, destinationName) => {
|
|
1671
|
+
transforms.forEach(([_, transform, config]) => {
|
|
1672
|
+
const transformFunctionKey = `${stream.name}_${destinationName}${config.version ? `_${config.version}` : ""}`;
|
|
1673
|
+
compilerLog(`getStreamingFunctions: ${transformFunctionKey}`);
|
|
1674
|
+
transformFunctions.set(transformFunctionKey, [
|
|
1675
|
+
transform,
|
|
1676
|
+
config,
|
|
1677
|
+
stream.columnArray
|
|
1678
|
+
]);
|
|
1679
|
+
});
|
|
1680
|
+
});
|
|
1681
|
+
stream._consumers.forEach((consumer) => {
|
|
1682
|
+
const consumerFunctionKey = `${stream.name}_<no-target>${consumer.config.version ? `_${consumer.config.version}` : ""}`;
|
|
1683
|
+
transformFunctions.set(consumerFunctionKey, [
|
|
1684
|
+
consumer.consumer,
|
|
1685
|
+
consumer.config,
|
|
1686
|
+
stream.columnArray
|
|
1687
|
+
]);
|
|
1688
|
+
});
|
|
1689
|
+
});
|
|
1690
|
+
return transformFunctions;
|
|
1691
|
+
};
|
|
1692
|
+
var getApis2 = async () => {
|
|
1693
|
+
loadIndex();
|
|
1694
|
+
const apiFunctions = /* @__PURE__ */ new Map();
|
|
1695
|
+
const registry = getMooseInternal();
|
|
1696
|
+
const versionCountByName = /* @__PURE__ */ new Map();
|
|
1697
|
+
const nameToSoleVersionHandler = /* @__PURE__ */ new Map();
|
|
1698
|
+
registry.apis.forEach((api, key) => {
|
|
1699
|
+
const handler = api.getHandler();
|
|
1700
|
+
apiFunctions.set(key, handler);
|
|
1701
|
+
if (!api.config.version) {
|
|
1702
|
+
if (!apiFunctions.has(api.name)) {
|
|
1703
|
+
apiFunctions.set(api.name, handler);
|
|
2312
1704
|
}
|
|
2313
|
-
|
|
2314
|
-
|
|
2315
|
-
|
|
2316
|
-
);
|
|
2317
|
-
|
|
2318
|
-
|
|
1705
|
+
nameToSoleVersionHandler.delete(api.name);
|
|
1706
|
+
versionCountByName.delete(api.name);
|
|
1707
|
+
} else if (!apiFunctions.has(api.name)) {
|
|
1708
|
+
const count = (versionCountByName.get(api.name) ?? 0) + 1;
|
|
1709
|
+
versionCountByName.set(api.name, count);
|
|
1710
|
+
if (count === 1) {
|
|
1711
|
+
nameToSoleVersionHandler.set(api.name, handler);
|
|
1712
|
+
} else {
|
|
1713
|
+
nameToSoleVersionHandler.delete(api.name);
|
|
2319
1714
|
}
|
|
2320
|
-
|
|
2321
|
-
|
|
2322
|
-
|
|
2323
|
-
|
|
2324
|
-
|
|
2325
|
-
}
|
|
1715
|
+
}
|
|
1716
|
+
});
|
|
1717
|
+
nameToSoleVersionHandler.forEach((handler, name) => {
|
|
1718
|
+
if (!apiFunctions.has(name)) {
|
|
1719
|
+
apiFunctions.set(name, handler);
|
|
1720
|
+
}
|
|
1721
|
+
});
|
|
1722
|
+
return apiFunctions;
|
|
1723
|
+
};
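getApis2 above registers every API under its registry key, lets an explicitly unversioned API claim the bare name, and only falls back to a versioned handler for the bare name when exactly one version exists. The sketch below replays that bookkeeping with string handlers for illustration.

```ts
// Sketch of the lookup-key rules in getApis2 above (handlers reduced to strings).
function buildApiIndex(apis: { name: string; version?: string; handler: string }[]) {
  const index = new Map<string, string>();
  const versionCount = new Map<string, number>();
  const soleVersionHandler = new Map<string, string>();

  for (const api of apis) {
    const key = api.version ? `${api.name}:${api.version}` : api.name;
    index.set(key, api.handler);
    if (!api.version) {
      // An explicit unversioned API wins the bare name outright.
      index.set(api.name, api.handler);
      soleVersionHandler.delete(api.name);
      versionCount.delete(api.name);
    } else if (!index.has(api.name)) {
      const count = (versionCount.get(api.name) ?? 0) + 1;
      versionCount.set(api.name, count);
      if (count === 1) soleVersionHandler.set(api.name, api.handler);
      else soleVersionHandler.delete(api.name);
    }
  }
  // Bare-name fallback only when a single version exists.
  soleVersionHandler.forEach((handler, name) => {
    if (!index.has(name)) index.set(name, handler);
  });
  return index;
}

console.log(buildApiIndex([{ name: "bar", version: "1", handler: "bar@1" }]).get("bar")); // "bar@1"
console.log(
  buildApiIndex([
    { name: "bar", version: "1", handler: "bar@1" },
    { name: "bar", version: "2", handler: "bar@2" },
  ]).get("bar"),
); // undefined – ambiguous, the caller must pass a version
```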
|
|
1724
|
+
var getWorkflows2 = async () => {
|
|
1725
|
+
loadIndex();
|
|
1726
|
+
const registry = getMooseInternal();
|
|
1727
|
+
return registry.workflows;
|
|
1728
|
+
};
|
|
1729
|
+
function findTaskInTree(task, targetName) {
|
|
1730
|
+
if (task.name === targetName) {
|
|
1731
|
+
return task;
|
|
2326
1732
|
}
|
|
2327
|
-
|
|
2328
|
-
|
|
2329
|
-
|
|
2330
|
-
|
|
2331
|
-
|
|
1733
|
+
if (task.config.onComplete?.length) {
|
|
1734
|
+
for (const childTask of task.config.onComplete) {
|
|
1735
|
+
const found = findTaskInTree(childTask, targetName);
|
|
1736
|
+
if (found) {
|
|
1737
|
+
return found;
|
|
1738
|
+
}
|
|
1739
|
+
}
|
|
1740
|
+
}
|
|
1741
|
+
return void 0;
|
|
1742
|
+
}
|
|
1743
|
+
var getTaskForWorkflow = async (workflowName, taskName) => {
|
|
1744
|
+
const workflows = await getWorkflows2();
|
|
1745
|
+
const workflow = workflows.get(workflowName);
|
|
1746
|
+
if (!workflow) {
|
|
1747
|
+
throw new Error(`Workflow ${workflowName} not found`);
|
|
1748
|
+
}
|
|
1749
|
+
const task = findTaskInTree(
|
|
1750
|
+
workflow.config.startingTask,
|
|
1751
|
+
taskName
|
|
1752
|
+
);
|
|
1753
|
+
if (!task) {
|
|
1754
|
+
throw new Error(`Task ${taskName} not found in workflow ${workflowName}`);
|
|
1755
|
+
}
|
|
1756
|
+
return task;
|
|
1757
|
+
};
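getTaskForWorkflow above resolves a task by walking the workflow's task tree, where children hang off config.onComplete. A standalone sketch of that depth-first search with a hypothetical three-task workflow.

```ts
// Sketch of the task-tree walk used by getTaskForWorkflow above.
interface TaskNode {
  name: string;
  config: { onComplete?: TaskNode[] };
}

function findTaskInTree(task: TaskNode, targetName: string): TaskNode | undefined {
  if (task.name === targetName) return task;
  for (const child of task.config.onComplete ?? []) {
    const found = findTaskInTree(child, targetName); // depth-first
    if (found) return found;
  }
  return undefined;
}

// Hypothetical workflow: extract -> transform -> load
const startingTask: TaskNode = {
  name: "extract",
  config: {
    onComplete: [
      { name: "transform", config: { onComplete: [{ name: "load", config: {} }] } },
    ],
  },
};

console.log(findTaskInTree(startingTask, "load")?.name);    // "load"
console.log(findTaskInTree(startingTask, "missing")?.name); // undefined
```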
|
|
1758
|
+
var getWebApps2 = async () => {
|
|
1759
|
+
loadIndex();
|
|
1760
|
+
return getMooseInternal().webApps;
|
|
1761
|
+
};
|
|
2332
1762
|
|
|
2333
1763
|
// src/blocks/runner.ts
|
|
2334
1764
|
var import_fastq = __toESM(require("fastq"));
|
|
2335
1765
|
init_commons();
|
|
2336
1766
|
var import_node_fs = __toESM(require("fs"));
|
|
2337
|
-
var
|
|
1767
|
+
var import_node_path = __toESM(require("path"));
|
|
2338
1768
|
var walkDir = (dir, fileExtension, fileList) => {
|
|
2339
1769
|
const files = import_node_fs.default.readdirSync(dir);
|
|
2340
1770
|
files.forEach((file) => {
|
|
2341
|
-
if (import_node_fs.default.statSync(
|
|
2342
|
-
fileList = walkDir(
|
|
1771
|
+
if (import_node_fs.default.statSync(import_node_path.default.join(dir, file)).isDirectory()) {
|
|
1772
|
+
fileList = walkDir(import_node_path.default.join(dir, file), fileExtension, fileList);
|
|
2343
1773
|
} else if (file.endsWith(fileExtension)) {
|
|
2344
|
-
fileList.push(
|
|
1774
|
+
fileList.push(import_node_path.default.join(dir, file));
|
|
2345
1775
|
}
|
|
2346
1776
|
});
|
|
2347
1777
|
return fileList;
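walkDir above recursively collects files with a given extension under the blocks directory. An equivalent standalone sketch against node:fs and node:path; the example call path is hypothetical.

```ts
// Standalone sketch of the walkDir helper above: recurse and collect matching files.
import fs from "node:fs";
import path from "node:path";

function walkDir(dir: string, fileExtension: string, fileList: string[] = []): string[] {
  for (const file of fs.readdirSync(dir)) {
    const full = path.join(dir, file);
    if (fs.statSync(full).isDirectory()) {
      walkDir(full, fileExtension, fileList); // recurse into subdirectories
    } else if (file.endsWith(fileExtension)) {
      fileList.push(full);
    }
  }
  return fileList;
}

// e.g. collect every .ts blocks file under a hypothetical ./blocks directory:
// const blocksFiles = walkDir("./blocks", ".ts");
```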
|
|
@@ -2352,7 +1782,7 @@ var DependencyError = class extends Error {
|
|
|
2352
1782
|
this.name = "DependencyError";
|
|
2353
1783
|
}
|
|
2354
1784
|
};
|
|
2355
|
-
var
|
|
1785
|
+
var toClientConfig2 = (config) => ({
|
|
2356
1786
|
...config,
|
|
2357
1787
|
useSSL: config.useSSL ? "true" : "false"
|
|
2358
1788
|
});
|
|
@@ -2404,7 +1834,7 @@ var asyncWorker = async (task) => {
|
|
|
2404
1834
|
await createBlocks(task.chClient, task.blocks);
|
|
2405
1835
|
};
|
|
2406
1836
|
var runBlocks = async (config) => {
|
|
2407
|
-
const chClient = getClickhouseClient(
|
|
1837
|
+
const chClient = getClickhouseClient(toClientConfig2(config.clickhouseConfig));
|
|
2408
1838
|
console.log(`Connected`);
|
|
2409
1839
|
const blocksFiles = walkDir(config.blocksDir, ".ts", []);
|
|
2410
1840
|
const numOfBlockFiles = blocksFiles.length;
|
|
@@ -2417,10 +1847,10 @@ var runBlocks = async (config) => {
|
|
|
2417
1847
|
}
|
|
2418
1848
|
}
|
|
2419
1849
|
});
|
|
2420
|
-
for (const
|
|
2421
|
-
console.log(`Adding to queue: ${
|
|
1850
|
+
for (const path3 of blocksFiles) {
|
|
1851
|
+
console.log(`Adding to queue: ${path3}`);
|
|
2422
1852
|
try {
|
|
2423
|
-
const blocks = require(
|
|
1853
|
+
const blocks = require(path3).default;
|
|
2424
1854
|
queue.push({
|
|
2425
1855
|
chClient,
|
|
2426
1856
|
blocks,
|
|
@@ -2429,7 +1859,7 @@ var runBlocks = async (config) => {
|
|
|
2429
1859
|
} catch (err) {
|
|
2430
1860
|
cliLog({
|
|
2431
1861
|
action: "Blocks",
|
|
2432
|
-
message: `Failed to import blocks from ${
|
|
1862
|
+
message: `Failed to import blocks from ${path3}: ${err}`,
|
|
2433
1863
|
message_type: "Error"
|
|
2434
1864
|
});
|
|
2435
1865
|
}
|
|
@@ -2439,9 +1869,6 @@ var runBlocks = async (config) => {
|
|
|
2439
1869
|
}
|
|
2440
1870
|
};
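runBlocks above pushes each imported blocks file onto a single-concurrency fastq queue whose worker applies the setup statements. A hedged sketch of that queue pattern with a stubbed worker; the BlocksTask shape and the sample statement are assumptions for the example.

```ts
// Sketch of the fastq pattern used by runBlocks above (worker body is a stub).
import fastq from "fastq";

interface BlocksTask {
  blocks: { setup: string[] };
}

async function asyncWorker(task: BlocksTask): Promise<void> {
  // In the bundle this runs task.blocks.setup against the ClickHouse client.
  for (const statement of task.blocks.setup) {
    console.log(`would run: ${statement}`);
  }
}

async function runBlocksSketch(): Promise<void> {
  const queue = fastq.promise(asyncWorker, 1); // one blocks file at a time
  const tasks: BlocksTask[] = [
    { blocks: { setup: ["CREATE TABLE IF NOT EXISTS t (x Int32) ENGINE = MergeTree ORDER BY x"] } },
  ];
  await Promise.all(tasks.map((t) => queue.push(t)));
}

void runBlocksSketch();
```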
|
|
2441
1871
|
|
|
2442
|
-
// src/moose-runner.ts
|
|
2443
|
-
init_runner();
|
|
2444
|
-
|
|
2445
1872
|
// src/streaming-functions/runner.ts
|
|
2446
1873
|
var import_node_stream2 = require("stream");
|
|
2447
1874
|
var import_kafka_javascript2 = require("@514labs/kafka-javascript");
|
|
@@ -2449,9 +1876,6 @@ var import_node_buffer = require("buffer");
|
|
|
2449
1876
|
var process3 = __toESM(require("process"));
|
|
2450
1877
|
var http3 = __toESM(require("http"));
|
|
2451
1878
|
init_commons();
|
|
2452
|
-
init_cluster_utils();
|
|
2453
|
-
init_internal();
|
|
2454
|
-
init_json();
|
|
2455
1879
|
var { Kafka: Kafka2 } = import_kafka_javascript2.KafkaJS;
|
|
2456
1880
|
var HOSTNAME = process3.env.HOSTNAME;
|
|
2457
1881
|
var AUTO_COMMIT_INTERVAL_MS = 5e3;
|
|
@@ -3010,15 +2434,12 @@ async function runApiTypeSerializer(targetModel) {
|
|
|
3010
2434
|
|
|
3011
2435
|
// src/scripts/runner.ts
|
|
3012
2436
|
var import_worker2 = require("@temporalio/worker");
|
|
3013
|
-
var
|
|
2437
|
+
var path2 = __toESM(require("path"));
|
|
3014
2438
|
var fs3 = __toESM(require("fs"));
|
|
3015
|
-
init_internal();
|
|
3016
2439
|
|
|
3017
2440
|
// src/scripts/activity.ts
|
|
3018
2441
|
var import_activity = require("@temporalio/activity");
|
|
3019
2442
|
var import_workflow3 = require("@temporalio/workflow");
|
|
3020
|
-
init_internal();
|
|
3021
|
-
init_json();
|
|
3022
2443
|
var activities = {
|
|
3023
2444
|
async hasDmv2Workflow(name) {
|
|
3024
2445
|
try {
|
|
@@ -3303,7 +2724,7 @@ async function registerWorkflows(logger2, config) {
|
|
|
3303
2724
|
}
|
|
3304
2725
|
};
|
|
3305
2726
|
const workflowBundle = await (0, import_worker2.bundleWorkflowCode)({
|
|
3306
|
-
workflowsPath:
|
|
2727
|
+
workflowsPath: path2.resolve(__dirname, "scripts/workflow.js"),
|
|
3307
2728
|
logger: silentLogger
|
|
3308
2729
|
});
|
|
3309
2730
|
const worker = await import_worker2.Worker.create({
|