@514labs/moose-lib 0.6.295-ci-17-gc22400d0 → 0.6.295
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{browserCompatible-CMEunMFq.d.ts → browserCompatible-B8CAYjv5.d.ts} +1 -1
- package/dist/{browserCompatible-FzU17dxm.d.mts → browserCompatible-ChWHzgtb.d.mts} +1 -1
- package/dist/browserCompatible.d.mts +2 -2
- package/dist/browserCompatible.d.ts +2 -2
- package/dist/browserCompatible.js +2161 -2444
- package/dist/browserCompatible.js.map +1 -1
- package/dist/browserCompatible.mjs +2165 -2446
- package/dist/browserCompatible.mjs.map +1 -1
- package/dist/dmv2/index.d.mts +1 -1
- package/dist/dmv2/index.d.ts +1 -1
- package/dist/dmv2/index.js +2058 -2341
- package/dist/dmv2/index.js.map +1 -1
- package/dist/dmv2/index.mjs +2020 -2301
- package/dist/dmv2/index.mjs.map +1 -1
- package/dist/{index-CcHF2cVT.d.mts → index-rQOQo9sv.d.mts} +5 -16
- package/dist/{index-CcHF2cVT.d.ts → index-rQOQo9sv.d.ts} +5 -16
- package/dist/index.d.mts +6 -76
- package/dist/index.d.ts +6 -76
- package/dist/index.js +2737 -3081
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +2630 -2973
- package/dist/index.mjs.map +1 -1
- package/dist/moose-runner.js +1136 -1715
- package/dist/moose-runner.js.map +1 -1
- package/dist/moose-runner.mjs +1127 -1704
- package/dist/moose-runner.mjs.map +1 -1
- package/package.json +1 -1
package/dist/moose-runner.mjs
CHANGED
@@ -1,5 +1,4 @@
 #!/usr/bin/env node
-var __defProp = Object.defineProperty;
 var __getOwnPropNames = Object.getOwnPropertyNames;
 var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require : typeof Proxy !== "undefined" ? new Proxy(x, {
   get: (a, b) => (typeof require !== "undefined" ? require : a)[b]
@@ -10,346 +9,6 @@ var __require = /* @__PURE__ */ ((x) => typeof require !== "undefined" ? require
 var __esm = (fn, res) => function __init() {
   return fn && (res = (0, fn[__getOwnPropNames(fn)[0]])(fn = 0)), res;
 };
⋮ 340 deleted lines collapsed: the __export helper, the src/sqlHelpers.ts definitions (sql, Sql, toQuery, toQueryPreview, getValueFromParameter, createClickhouseParameter, mapToClickHouseType, emptyIfUndefined), and the lazy __esm init wrappers (init_stackTrace, init_typedBase, init_dataModelTypes, init_sqlHelpers, init_helpers, init_olapTable, init_stream, init_workflow, init_ingestApi, init_consumptionApi, init_ingestPipeline, init_etlPipeline, init_sqlResource, init_materializedView, init_view, init_lifeCycle, init_webApp, init_registry, init_dmv2, init_types, init_browserCompatible); the sqlHelpers code reappears at top level in the next hunk

 // src/commons.ts
 import http from "http";
@@ -480,12 +139,159 @@ var init_commons = __esm({
   }
 });

-// src/
-
-
-
+// src/moose-runner.ts
+import { register } from "ts-node";
+
+// src/dmv2/internal.ts
+import process2 from "process";
+
+// src/sqlHelpers.ts
+var isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
+var isColumn = (value) => typeof value === "object" && "name" in value && "annotations" in value;
+function sql(strings, ...values) {
+  return new Sql(strings, values);
+}
+var instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
+var Sql = class {
+  values;
+  strings;
+  constructor(rawStrings, rawValues) {
+    if (rawStrings.length - 1 !== rawValues.length) {
+      if (rawStrings.length === 0) {
+        throw new TypeError("Expected at least 1 string");
+      }
+      throw new TypeError(
+        `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
+      );
+    }
+    const valuesLength = rawValues.reduce(
+      (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) ? 0 : 1),
+      0
+    );
+    this.values = new Array(valuesLength);
+    this.strings = new Array(valuesLength + 1);
+    this.strings[0] = rawStrings[0];
+    let i = 0, pos = 0;
+    while (i < rawValues.length) {
+      const child = rawValues[i++];
+      const rawString = rawStrings[i];
+      if (instanceofSql(child)) {
+        this.strings[pos] += child.strings[0];
+        let childIndex = 0;
+        while (childIndex < child.values.length) {
+          this.values[pos++] = child.values[childIndex++];
+          this.strings[pos] = child.strings[childIndex];
+        }
+        this.strings[pos] += rawString;
+      } else if (isColumn(child)) {
+        const aggregationFunction = child.annotations.find(
+          ([k, _]) => k === "aggregationFunction"
+        );
+        if (aggregationFunction !== void 0) {
+          this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
+        } else {
+          this.strings[pos] += `\`${child.name}\``;
+        }
+        this.strings[pos] += rawString;
+      } else if (isTable(child)) {
+        if (child.config.database) {
+          this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
+        } else {
+          this.strings[pos] += `\`${child.name}\``;
+        }
+        this.strings[pos] += rawString;
+      } else {
+        this.values[pos++] = child;
+        this.strings[pos] = rawString;
+      }
+    }
   }
-  }
+};
+var toQuery = (sql3) => {
+  const parameterizedStubs = sql3.values.map(
+    (v, i) => createClickhouseParameter(i, v)
+  );
+  const query = sql3.strings.map(
+    (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
+  ).join("");
+  const query_params = sql3.values.reduce(
+    (acc, v, i) => ({
+      ...acc,
+      [`p${i}`]: getValueFromParameter(v)
+    }),
+    {}
+  );
+  return [query, query_params];
+};
+var toQueryPreview = (sql3) => {
+  try {
+    const formatValue = (v) => {
+      if (Array.isArray(v)) {
+        const [type, val] = v;
+        if (type === "Identifier") {
+          return `\`${String(val)}\``;
+        }
+        return `[${v.map((x) => formatValue(x)).join(", ")}]`;
+      }
+      if (v === null || v === void 0) return "NULL";
+      if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
+      if (typeof v === "number") return String(v);
+      if (typeof v === "boolean") return v ? "true" : "false";
+      if (v instanceof Date)
+        return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
+      try {
+        return JSON.stringify(v);
+      } catch {
+        return String(v);
+      }
+    };
+    let out = sql3.strings[0] ?? "";
+    for (let i = 0; i < sql3.values.length; i++) {
+      const val = getValueFromParameter(sql3.values[i]);
+      out += formatValue(val);
+      out += sql3.strings[i + 1] ?? "";
+    }
+    return out.replace(/\s+/g, " ").trim();
+  } catch (error) {
+    console.log(`toQueryPreview error: ${error}`);
+    return "/* query preview unavailable */";
+  }
+};
+var getValueFromParameter = (value) => {
+  if (Array.isArray(value)) {
+    const [type, val] = value;
+    if (type === "Identifier") return val;
+  }
+  return value;
+};
+function createClickhouseParameter(parameterIndex, value) {
+  return `{p${parameterIndex}:${mapToClickHouseType(value)}}`;
+}
+var mapToClickHouseType = (value) => {
+  if (typeof value === "number") {
+    return Number.isInteger(value) ? "Int" : "Float";
+  }
+  if (typeof value === "boolean") return "Bool";
+  if (value instanceof Date) return "DateTime";
+  if (Array.isArray(value)) {
+    const [type, _] = value;
+    return type;
+  }
+  return "String";
+};
+function emptyIfUndefined(value) {
+  return value === void 0 ? "" : value;
+}
+
+// src/dmv2/sdk/olapTable.ts
+import { Readable } from "stream";
+import { createHash } from "crypto";
+
+// src/dmv2/sdk/stream.ts
+import { createHash as createHash2 } from "crypto";
+
+// src/index.ts
+init_commons();

 // src/consumption-apis/helpers.ts
 import {
@@ -507,6 +313,142 @@ function formatElapsedTime(ms) {
   const remainingSeconds = seconds % 60;
   return `${minutes} minutes and ${remainingSeconds.toFixed(2)} seconds`;
 }
+var MooseClient = class {
+  query;
+  workflow;
+  constructor(queryClient, temporalClient) {
+    this.query = queryClient;
+    this.workflow = new WorkflowClient(temporalClient);
+  }
+};
+var QueryClient = class {
+  client;
+  query_id_prefix;
+  constructor(client, query_id_prefix) {
+    this.client = client;
+    this.query_id_prefix = query_id_prefix;
+  }
+  async execute(sql3) {
+    const [query, query_params] = toQuery(sql3);
+    console.log(`[QueryClient] | Query: ${toQueryPreview(sql3)}`);
+    const start = performance.now();
+    const result = await this.client.query({
+      query,
+      query_params,
+      format: "JSONEachRow",
+      query_id: this.query_id_prefix + randomUUID()
+      // Note: wait_end_of_query deliberately NOT set here as this is used for SELECT queries
+      // where response buffering would harm streaming performance and concurrency
+    });
+    const elapsedMs = performance.now() - start;
+    console.log(
+      `[QueryClient] | Query completed: ${formatElapsedTime(elapsedMs)}`
+    );
+    return result;
+  }
+  async command(sql3) {
+    const [query, query_params] = toQuery(sql3);
+    console.log(`[QueryClient] | Command: ${toQueryPreview(sql3)}`);
+    const start = performance.now();
+    const result = await this.client.command({
+      query,
+      query_params,
+      query_id: this.query_id_prefix + randomUUID()
+    });
+    const elapsedMs = performance.now() - start;
+    console.log(
+      `[QueryClient] | Command completed: ${formatElapsedTime(elapsedMs)}`
+    );
+    return result;
+  }
+};
+var WorkflowClient = class {
+  client;
+  constructor(temporalClient) {
+    this.client = temporalClient;
+  }
+  async execute(name, input_data) {
+    try {
+      if (!this.client) {
+        return {
+          status: 404,
+          body: `Temporal client not found. Is the feature flag enabled?`
+        };
+      }
+      const config = await this.getWorkflowConfig(name);
+      const [processedInput, workflowId] = this.processInputData(
+        name,
+        input_data
+      );
+      console.log(
+        `WorkflowClient - starting workflow: ${name} with config ${JSON.stringify(config)} and input_data ${JSON.stringify(processedInput)}`
+      );
+      const handle = await this.client.workflow.start("ScriptWorkflow", {
+        args: [
+          { workflow_name: name, execution_mode: "start" },
+          processedInput
+        ],
+        taskQueue: "typescript-script-queue",
+        workflowId,
+        workflowIdConflictPolicy: "FAIL",
+        workflowIdReusePolicy: "ALLOW_DUPLICATE",
+        retry: {
+          maximumAttempts: config.retries
+        },
+        workflowRunTimeout: config.timeout
+      });
+      return {
+        status: 200,
+        body: `Workflow started: ${name}. View it in the Temporal dashboard: http://localhost:8080/namespaces/default/workflows/${workflowId}/${handle.firstExecutionRunId}/history`
+      };
+    } catch (error) {
+      return {
+        status: 400,
+        body: `Error starting workflow: ${error}`
+      };
+    }
+  }
+  async terminate(workflowId) {
+    try {
+      if (!this.client) {
+        return {
+          status: 404,
+          body: `Temporal client not found. Is the feature flag enabled?`
+        };
+      }
+      const handle = this.client.workflow.getHandle(workflowId);
+      await handle.terminate();
+      return {
+        status: 200,
+        body: `Workflow terminated: ${workflowId}`
+      };
+    } catch (error) {
+      return {
+        status: 400,
+        body: `Error terminating workflow: ${error}`
+      };
+    }
+  }
+  async getWorkflowConfig(name) {
+    const workflows = await getWorkflows2();
+    const dmv2Workflow = workflows.get(name);
+    if (dmv2Workflow) {
+      return {
+        retries: dmv2Workflow.config.retries || 3,
+        timeout: dmv2Workflow.config.timeout || "1h"
+      };
+    }
+    throw new Error(`Workflow config not found for ${name}`);
+  }
+  processInputData(name, input_data) {
+    let workflowId = name;
+    if (input_data) {
+      const hash = createHash3("sha256").update(JSON.stringify(input_data)).digest("hex").slice(0, 16);
+      workflowId = `${name}-${hash}`;
+    }
+    return [input_data, workflowId];
+  }
+};
 async function getTemporalClient(temporalUrl, namespace, clientCert, clientKey, apiKey) {
   try {
     console.info(
@@ -543,984 +485,538 @@ async function getTemporalClient(temporalUrl, namespace, clientCert, clientKey,
|
|
|
543
485
|
return void 0;
|
|
544
486
|
}
|
|
545
487
|
}
|
|
546
|
-
var MooseClient, QueryClient, WorkflowClient;
|
|
547
|
-
var init_helpers2 = __esm({
|
|
548
|
-
"src/consumption-apis/helpers.ts"() {
|
|
549
|
-
"use strict";
|
|
550
|
-
init_internal();
|
|
551
|
-
init_sqlHelpers();
|
|
552
|
-
MooseClient = class {
|
|
553
|
-
query;
|
|
554
|
-
workflow;
|
|
555
|
-
constructor(queryClient, temporalClient) {
|
|
556
|
-
this.query = queryClient;
|
|
557
|
-
this.workflow = new WorkflowClient(temporalClient);
|
|
558
|
-
}
|
|
559
|
-
};
|
|
560
|
-
QueryClient = class {
|
|
561
|
-
client;
|
|
562
|
-
query_id_prefix;
|
|
563
|
-
constructor(client, query_id_prefix) {
|
|
564
|
-
this.client = client;
|
|
565
|
-
this.query_id_prefix = query_id_prefix;
|
|
566
|
-
}
|
|
567
|
-
async execute(sql3) {
|
|
568
|
-
const [query, query_params] = toQuery(sql3);
|
|
569
|
-
console.log(`[QueryClient] | Query: ${toQueryPreview(sql3)}`);
|
|
570
|
-
const start = performance.now();
|
|
571
|
-
const result = await this.client.query({
|
|
572
|
-
query,
|
|
573
|
-
query_params,
|
|
574
|
-
format: "JSONEachRow",
|
|
575
|
-
query_id: this.query_id_prefix + randomUUID()
|
|
576
|
-
// Note: wait_end_of_query deliberately NOT set here as this is used for SELECT queries
|
|
577
|
-
// where response buffering would harm streaming performance and concurrency
|
|
578
|
-
});
|
|
579
|
-
const elapsedMs = performance.now() - start;
|
|
580
|
-
console.log(
|
|
581
|
-
`[QueryClient] | Query completed: ${formatElapsedTime(elapsedMs)}`
|
|
582
|
-
);
|
|
583
|
-
return result;
|
|
584
|
-
}
|
|
585
|
-
async command(sql3) {
|
|
586
|
-
const [query, query_params] = toQuery(sql3);
|
|
587
|
-
console.log(`[QueryClient] | Command: ${toQueryPreview(sql3)}`);
|
|
588
|
-
const start = performance.now();
|
|
589
|
-
const result = await this.client.command({
|
|
590
|
-
query,
|
|
591
|
-
query_params,
|
|
592
|
-
query_id: this.query_id_prefix + randomUUID()
|
|
593
|
-
});
|
|
594
|
-
const elapsedMs = performance.now() - start;
|
|
595
|
-
console.log(
|
|
596
|
-
`[QueryClient] | Command completed: ${formatElapsedTime(elapsedMs)}`
|
|
597
|
-
);
|
|
598
|
-
return result;
|
|
599
|
-
}
|
|
600
|
-
};
|
|
601
|
-
WorkflowClient = class {
|
|
602
|
-
client;
|
|
603
|
-
constructor(temporalClient) {
|
|
604
|
-
this.client = temporalClient;
|
|
605
|
-
}
|
|
606
|
-
async execute(name, input_data) {
|
|
607
|
-
try {
|
|
608
|
-
if (!this.client) {
|
|
609
|
-
return {
|
|
610
|
-
status: 404,
|
|
611
|
-
body: `Temporal client not found. Is the feature flag enabled?`
|
|
612
|
-
};
|
|
613
|
-
}
|
|
614
|
-
const config = await this.getWorkflowConfig(name);
|
|
615
|
-
const [processedInput, workflowId] = this.processInputData(
|
|
616
|
-
name,
|
|
617
|
-
input_data
|
|
618
|
-
);
|
|
619
|
-
console.log(
|
|
620
|
-
`WorkflowClient - starting workflow: ${name} with config ${JSON.stringify(config)} and input_data ${JSON.stringify(processedInput)}`
|
|
621
|
-
);
|
|
622
|
-
const handle = await this.client.workflow.start("ScriptWorkflow", {
|
|
623
|
-
args: [
|
|
624
|
-
{ workflow_name: name, execution_mode: "start" },
|
|
625
|
-
processedInput
|
|
626
|
-
],
|
|
627
|
-
taskQueue: "typescript-script-queue",
|
|
628
|
-
workflowId,
|
|
629
|
-
workflowIdConflictPolicy: "FAIL",
|
|
630
|
-
workflowIdReusePolicy: "ALLOW_DUPLICATE",
|
|
631
|
-
retry: {
|
|
632
|
-
maximumAttempts: config.retries
|
|
633
|
-
},
|
|
634
|
-
workflowRunTimeout: config.timeout
|
|
635
|
-
});
|
|
636
|
-
return {
|
|
637
|
-
status: 200,
|
|
638
|
-
body: `Workflow started: ${name}. View it in the Temporal dashboard: http://localhost:8080/namespaces/default/workflows/${workflowId}/${handle.firstExecutionRunId}/history`
|
|
639
|
-
};
|
|
640
|
-
} catch (error) {
|
|
641
|
-
return {
|
|
642
|
-
status: 400,
|
|
643
|
-
body: `Error starting workflow: ${error}`
|
|
644
|
-
};
|
|
645
|
-
}
|
|
646
|
-
}
|
|
647
|
-
async terminate(workflowId) {
|
|
648
|
-
try {
|
|
649
|
-
if (!this.client) {
|
|
650
|
-
return {
|
|
651
|
-
status: 404,
|
|
652
|
-
body: `Temporal client not found. Is the feature flag enabled?`
|
|
653
|
-
};
|
|
654
|
-
}
|
|
655
|
-
const handle = this.client.workflow.getHandle(workflowId);
|
|
656
|
-
await handle.terminate();
|
|
657
|
-
return {
|
|
658
|
-
status: 200,
|
|
659
|
-
body: `Workflow terminated: ${workflowId}`
|
|
660
|
-
};
|
|
661
|
-
} catch (error) {
|
|
662
|
-
return {
|
|
663
|
-
status: 400,
|
|
664
|
-
body: `Error terminating workflow: ${error}`
|
|
665
|
-
};
|
|
666
|
-
}
|
|
667
|
-
}
|
|
668
|
-
async getWorkflowConfig(name) {
|
|
669
|
-
const workflows = await getWorkflows2();
|
|
670
|
-
const dmv2Workflow = workflows.get(name);
|
|
671
|
-
if (dmv2Workflow) {
|
|
672
|
-
return {
|
|
673
|
-
retries: dmv2Workflow.config.retries || 3,
|
|
674
|
-
timeout: dmv2Workflow.config.timeout || "1h"
|
|
675
|
-
};
|
|
676
|
-
}
|
|
677
|
-
throw new Error(`Workflow config not found for ${name}`);
|
|
678
|
-
}
|
|
679
|
-
processInputData(name, input_data) {
|
|
680
|
-
let workflowId = name;
|
|
681
|
-
if (input_data) {
|
|
682
|
-
const hash = createHash3("sha256").update(JSON.stringify(input_data)).digest("hex").slice(0, 16);
|
|
683
|
-
workflowId = `${name}-${hash}`;
|
|
684
|
-
}
|
|
685
|
-
return [input_data, workflowId];
|
|
686
|
-
}
|
|
687
|
-
};
|
|
688
|
-
}
|
|
689
|
-
});
|
|
690
488
|
|
|
691
|
-
// src/consumption-apis/
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
}
|
|
696
|
-
});
|
|
697
|
-
|
|
698
|
-
// src/scripts/task.ts
|
|
699
|
-
var init_task = __esm({
|
|
700
|
-
"src/scripts/task.ts"() {
|
|
701
|
-
"use strict";
|
|
702
|
-
}
|
|
703
|
-
});
|
|
489
|
+
// src/consumption-apis/runner.ts
|
|
490
|
+
init_commons();
|
|
491
|
+
import http2 from "http";
|
|
492
|
+
import * as jose from "jose";
|
|
704
493
|
|
|
705
494
|
// src/cluster-utils.ts
|
|
706
495
|
import cluster from "cluster";
|
|
707
496
|
import { availableParallelism } from "os";
|
|
708
497
|
import { exit } from "process";
|
|
709
|
-
var DEFAULT_MAX_CPU_USAGE_RATIO
|
|
710
|
-
var
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
this.workerStart = options.workerStart;
|
|
744
|
-
this.workerStop = options.workerStop;
|
|
745
|
-
if (options.maxCpuUsageRatio && (options.maxCpuUsageRatio > 1 || options.maxCpuUsageRatio < 0)) {
|
|
746
|
-
throw new Error("maxCpuUsageRatio must be between 0 and 1");
|
|
747
|
-
}
|
|
748
|
-
this.maxCpuUsageRatio = options.maxCpuUsageRatio || DEFAULT_MAX_CPU_USAGE_RATIO;
|
|
749
|
-
this.usedCpuCount = this.computeCPUUsageCount(
|
|
750
|
-
this.maxCpuUsageRatio,
|
|
751
|
-
options.maxWorkerCount
|
|
752
|
-
);
|
|
753
|
-
}
|
|
754
|
-
/**
|
|
755
|
-
* Calculates the number of CPU cores to utilize based on available parallelism and constraints.
|
|
756
|
-
*
|
|
757
|
-
* @param cpuUsageRatio - Ratio of CPU cores to use (0-1)
|
|
758
|
-
* @param maxWorkerCount - Optional maximum number of workers
|
|
759
|
-
* @returns The number of CPU cores to utilize
|
|
760
|
-
*/
|
|
761
|
-
computeCPUUsageCount(cpuUsageRatio, maxWorkerCount) {
|
|
762
|
-
const cpuCount = availableParallelism();
|
|
763
|
-
const maxWorkers = maxWorkerCount || cpuCount;
|
|
764
|
-
return Math.min(
|
|
765
|
-
maxWorkers,
|
|
766
|
-
Math.max(1, Math.floor(cpuCount * cpuUsageRatio))
|
|
767
|
-
);
|
|
768
|
-
}
|
|
769
|
-
/**
|
|
770
|
-
* Initializes the cluster by spawning worker processes and setting up signal handlers.
|
|
771
|
-
* For the primary process, spawns workers and monitors parent process.
|
|
772
|
-
* For worker processes, executes the worker startup function.
|
|
773
|
-
*
|
|
774
|
-
* @throws {Error} If worker is undefined in worker process
|
|
775
|
-
*/
|
|
776
|
-
async start() {
|
|
777
|
-
process.on(SIGTERM, this.gracefulClusterShutdown(SIGTERM));
|
|
778
|
-
process.on(SIGINT, this.gracefulClusterShutdown(SIGINT));
|
|
779
|
-
if (cluster.isPrimary) {
|
|
780
|
-
const parentPid = process.ppid;
|
|
781
|
-
setInterval(() => {
|
|
782
|
-
try {
|
|
783
|
-
process.kill(parentPid, 0);
|
|
784
|
-
} catch (e) {
|
|
785
|
-
console.log("Parent process has exited.");
|
|
786
|
-
this.gracefulClusterShutdown(SIGTERM)();
|
|
787
|
-
}
|
|
788
|
-
}, 1e3);
|
|
789
|
-
await this.bootWorkers(this.usedCpuCount);
|
|
790
|
-
} else {
|
|
791
|
-
if (!cluster.worker) {
|
|
792
|
-
throw new Error(
|
|
793
|
-
"Worker is not defined, it should be defined in worker process"
|
|
794
|
-
);
|
|
795
|
-
}
|
|
796
|
-
this.startOutput = await this.workerStart(
|
|
797
|
-
cluster.worker,
|
|
798
|
-
this.usedCpuCount
|
|
799
|
-
);
|
|
800
|
-
}
|
|
801
|
-
}
|
|
802
|
-
/**
|
|
803
|
-
* Spawns worker processes and configures their lifecycle event handlers.
|
|
804
|
-
* Handles worker online, exit and disconnect events.
|
|
805
|
-
* Automatically restarts failed workers during normal operation.
|
|
806
|
-
*
|
|
807
|
-
* @param numWorkers - Number of worker processes to spawn
|
|
808
|
-
*/
|
|
809
|
-
bootWorkers = async (numWorkers) => {
|
|
810
|
-
console.info(`Setting ${numWorkers} workers...`);
|
|
811
|
-
for (let i = 0; i < numWorkers; i++) {
|
|
812
|
-
cluster.fork();
|
|
813
|
-
}
|
|
814
|
-
cluster.on("online", (worker) => {
|
|
815
|
-
console.info(`worker process ${worker.process.pid} is online`);
|
|
816
|
-
});
|
|
817
|
-
cluster.on("exit", (worker, code, signal) => {
|
|
818
|
-
console.info(
|
|
819
|
-
`worker ${worker.process.pid} exited with code ${code} and signal ${signal}`
|
|
820
|
-
);
|
|
821
|
-
if (!this.shutdownInProgress) {
|
|
822
|
-
setTimeout(() => cluster.fork(), RESTART_TIME_MS);
|
|
823
|
-
}
|
|
824
|
-
if (this.shutdownInProgress && code != 0) {
|
|
825
|
-
this.hasCleanWorkerExit = false;
|
|
826
|
-
}
|
|
827
|
-
});
|
|
828
|
-
cluster.on("disconnect", (worker) => {
|
|
829
|
-
console.info(`worker process ${worker.process.pid} has disconnected`);
|
|
830
|
-
});
|
|
831
|
-
};
|
|
832
|
-
/**
|
|
833
|
-
* Creates a handler function for graceful shutdown on receipt of a signal.
|
|
834
|
-
* Ensures only one shutdown can occur at a time.
|
|
835
|
-
* Handles shutdown differently for primary and worker processes.
|
|
836
|
-
*
|
|
837
|
-
* @param signal - The signal triggering the shutdown (e.g. SIGTERM)
|
|
838
|
-
* @returns An async function that performs the shutdown
|
|
839
|
-
*/
|
|
840
|
-
gracefulClusterShutdown = (signal) => async () => {
|
|
841
|
-
if (this.shutdownInProgress) {
|
|
842
|
-
return;
|
|
843
|
-
}
|
|
844
|
-
this.shutdownInProgress = true;
|
|
845
|
-
this.hasCleanWorkerExit = true;
|
|
846
|
-
console.info(
|
|
847
|
-
`Got ${signal} on ${this.processStr}. Graceful shutdown start at ${(/* @__PURE__ */ new Date()).toISOString()}`
|
|
848
|
-
);
|
|
849
|
-
try {
|
|
850
|
-
if (cluster.isPrimary) {
|
|
851
|
-
await this.shutdownWorkers(signal);
|
|
852
|
-
console.info(`${this.processStr} - worker shutdown successful`);
|
|
853
|
-
exit(0);
|
|
854
|
-
} else {
|
|
855
|
-
if (this.startOutput) {
|
|
856
|
-
await this.workerStop(this.startOutput);
|
|
857
|
-
} else {
|
|
858
|
-
console.info(
|
|
859
|
-
`${this.processStr} - shutdown before worker fully started`
|
|
860
|
-
);
|
|
861
|
-
}
|
|
862
|
-
console.info(`${this.processStr} shutdown successful`);
|
|
863
|
-
this.hasCleanWorkerExit ? exit(0) : exit(1);
|
|
864
|
-
}
|
|
865
|
-
} catch (e) {
|
|
866
|
-
console.error(`${this.processStr} - shutdown failed`, e);
|
|
867
|
-
exit(1);
|
|
868
|
-
}
|
|
869
|
-
};
|
|
870
|
-
/**
|
|
871
|
-
* Gracefully terminates all worker processes.
|
|
872
|
-
* Monitors workers until they all exit or timeout occurs.
|
|
873
|
-
* Only relevant for the primary process.
|
|
874
|
-
*
|
|
875
|
-
* @param signal - The signal to send to worker processes
|
|
876
|
-
* @returns A promise that resolves when all workers have terminated
|
|
877
|
-
*/
|
|
878
|
-
shutdownWorkers = (signal) => {
|
|
879
|
-
return new Promise((resolve2, reject) => {
|
|
880
|
-
if (!cluster.isPrimary) {
|
|
881
|
-
return resolve2();
|
|
882
|
-
}
|
|
883
|
-
if (!cluster.workers) {
|
|
884
|
-
return resolve2();
|
|
885
|
-
}
|
|
886
|
-
const workerIds = Object.keys(cluster.workers);
|
|
887
|
-
if (workerIds.length == 0) {
|
|
888
|
-
return resolve2();
|
|
889
|
-
}
|
|
890
|
-
let workersAlive = 0;
|
|
891
|
-
let funcRun = 0;
|
|
892
|
-
const cleanWorkers = () => {
|
|
893
|
-
++funcRun;
|
|
894
|
-
workersAlive = 0;
|
|
895
|
-
Object.values(cluster.workers || {}).filter((worker) => !!worker).forEach((worker) => {
|
|
896
|
-
if (worker && !worker.isDead()) {
|
|
897
|
-
++workersAlive;
|
|
898
|
-
if (funcRun == 1) {
|
|
899
|
-
worker.kill(signal);
|
|
900
|
-
}
|
|
901
|
-
}
|
|
902
|
-
});
|
|
903
|
-
console.info(workersAlive + " workers alive");
|
|
904
|
-
if (workersAlive == 0) {
|
|
905
|
-
clearInterval(interval);
|
|
906
|
-
return resolve2();
|
|
907
|
-
}
|
|
908
|
-
};
|
|
909
|
-
const interval = setInterval(cleanWorkers, SHUTDOWN_WORKERS_INTERVAL);
|
|
910
|
-
});
|
|
911
|
-
};
|
|
912
|
-
};
|
|
913
|
-
}
|
|
914
|
-
});
|
|
915
|
-
|
|
916
|
-
// src/config/configFile.ts
|
|
917
|
-
import path from "path";
|
|
918
|
-
import * as toml from "toml";
|
|
919
|
-
async function findConfigFile(startDir = process.cwd()) {
|
|
920
|
-
const fs4 = await import("fs");
|
|
921
|
-
let currentDir = path.resolve(startDir);
|
|
922
|
-
while (true) {
|
|
923
|
-
const configPath = path.join(currentDir, "moose.config.toml");
|
|
924
|
-
if (fs4.existsSync(configPath)) {
|
|
925
|
-
return configPath;
|
|
498
|
+
var DEFAULT_MAX_CPU_USAGE_RATIO = 0.7;
|
|
499
|
+
var RESTART_TIME_MS = 1e4;
|
|
500
|
+
var SIGTERM = "SIGTERM";
|
|
501
|
+
var SIGINT = "SIGINT";
|
|
502
|
+
var SHUTDOWN_WORKERS_INTERVAL = 500;
|
|
503
|
+
var Cluster = class {
|
|
504
|
+
// Tracks if shutdown is currently in progress
|
|
505
|
+
shutdownInProgress = false;
|
|
506
|
+
// Tracks if workers exited cleanly during shutdown
|
|
507
|
+
hasCleanWorkerExit = true;
|
|
508
|
+
// String identifying if this is primary or worker process
|
|
509
|
+
processStr = `${cluster.isPrimary ? "primary" : "worker"} process ${process.pid}`;
|
|
510
|
+
// Functions for starting and stopping workers
|
|
511
|
+
workerStart;
|
|
512
|
+
workerStop;
|
|
513
|
+
// Result from starting worker, needed for cleanup
|
|
514
|
+
startOutput;
|
|
515
|
+
maxCpuUsageRatio;
|
|
516
|
+
usedCpuCount;
|
|
517
|
+
/**
|
|
518
|
+
* Creates a new cluster manager instance.
|
|
519
|
+
*
|
|
520
|
+
* @param options - Configuration options for the cluster
|
|
521
|
+
* @param options.workerStart - Async function to execute when starting a worker
|
|
522
|
+
* @param options.workerStop - Async function to execute when stopping a worker
|
|
523
|
+
* @param options.maxCpuUsageRatio - Maximum ratio of CPU cores to utilize (0-1)
|
|
524
|
+
* @param options.maxWorkerCount - Maximum number of workers to spawn
|
|
525
|
+
* @throws {Error} If maxCpuUsageRatio is not between 0 and 1
|
|
526
|
+
*/
|
|
527
|
+
constructor(options) {
|
|
528
|
+
this.workerStart = options.workerStart;
|
|
529
|
+
this.workerStop = options.workerStop;
|
|
530
|
+
if (options.maxCpuUsageRatio && (options.maxCpuUsageRatio > 1 || options.maxCpuUsageRatio < 0)) {
|
|
531
|
+
throw new Error("maxCpuUsageRatio must be between 0 and 1");
|
|
926
532
|
}
|
|
927
|
-
|
|
928
|
-
|
|
929
|
-
|
|
930
|
-
|
|
931
|
-
currentDir = parentDir;
|
|
932
|
-
}
|
|
933
|
-
return null;
|
|
934
|
-
}
|
|
935
|
-
async function readProjectConfig() {
|
|
936
|
-
const fs4 = await import("fs");
|
|
937
|
-
const configPath = await findConfigFile();
|
|
938
|
-
if (!configPath) {
|
|
939
|
-
throw new ConfigError(
|
|
940
|
-
"moose.config.toml not found in current directory or any parent directory"
|
|
533
|
+
this.maxCpuUsageRatio = options.maxCpuUsageRatio || DEFAULT_MAX_CPU_USAGE_RATIO;
|
|
534
|
+
this.usedCpuCount = this.computeCPUUsageCount(
|
|
535
|
+
this.maxCpuUsageRatio,
|
|
536
|
+
options.maxWorkerCount
|
|
941
537
|
);
|
|
942
538
|
}
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
|
|
947
|
-
|
|
948
|
-
|
|
949
|
-
|
|
950
|
-
|
|
951
|
-
|
|
952
|
-
|
|
953
|
-
|
|
954
|
-
|
|
955
|
-
|
|
956
|
-
|
|
957
|
-
super(message);
|
|
958
|
-
this.name = "ConfigError";
|
|
959
|
-
}
|
|
960
|
-
};
|
|
539
|
+
/**
|
|
540
|
+
* Calculates the number of CPU cores to utilize based on available parallelism and constraints.
|
|
541
|
+
*
|
|
542
|
+
* @param cpuUsageRatio - Ratio of CPU cores to use (0-1)
|
|
543
|
+
* @param maxWorkerCount - Optional maximum number of workers
|
|
544
|
+
* @returns The number of CPU cores to utilize
|
|
545
|
+
*/
|
|
546
|
+
computeCPUUsageCount(cpuUsageRatio, maxWorkerCount) {
|
|
547
|
+
const cpuCount = availableParallelism();
|
|
548
|
+
const maxWorkers = maxWorkerCount || cpuCount;
|
|
549
|
+
return Math.min(
|
|
550
|
+
maxWorkers,
|
|
551
|
+
Math.max(1, Math.floor(cpuCount * cpuUsageRatio))
|
|
552
|
+
);
|
|
961
553
|
}
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
967
|
-
|
|
968
|
-
|
|
969
|
-
|
|
970
|
-
|
|
971
|
-
|
|
972
|
-
|
|
973
|
-
|
|
974
|
-
|
|
975
|
-
static getInstance() {
|
|
976
|
-
if (!_ConfigurationRegistry.instance) {
|
|
977
|
-
_ConfigurationRegistry.instance = new _ConfigurationRegistry();
|
|
978
|
-
}
|
|
979
|
-
return _ConfigurationRegistry.instance;
|
|
980
|
-
}
|
|
981
|
-
setClickHouseConfig(config) {
|
|
982
|
-
this.clickhouseConfig = config;
|
|
983
|
-
}
|
|
984
|
-
setKafkaConfig(config) {
|
|
985
|
-
this.kafkaConfig = config;
|
|
986
|
-
}
|
|
987
|
-
_env(name) {
|
|
988
|
-
const value = process.env[name];
|
|
989
|
-
if (value === void 0) return void 0;
|
|
990
|
-
const trimmed = value.trim();
|
|
991
|
-
return trimmed.length > 0 ? trimmed : void 0;
|
|
992
|
-
}
|
|
993
|
-
_parseBool(value) {
|
|
994
|
-
if (value === void 0) return void 0;
|
|
995
|
-
switch (value.trim().toLowerCase()) {
|
|
996
|
-
case "1":
|
|
997
|
-
case "true":
|
|
998
|
-
case "yes":
|
|
999
|
-
case "on":
|
|
1000
|
-
return true;
|
|
1001
|
-
case "0":
|
|
1002
|
-
case "false":
|
|
1003
|
-
case "no":
|
|
1004
|
-
case "off":
|
|
1005
|
-
return false;
|
|
1006
|
-
default:
|
|
1007
|
-
return void 0;
|
|
1008
|
-
}
|
|
1009
|
-
}
|
|
1010
|
-
async getClickHouseConfig() {
|
|
1011
|
-
if (this.clickhouseConfig) {
|
|
1012
|
-
return this.clickhouseConfig;
|
|
1013
|
-
}
|
|
1014
|
-
const projectConfig = await readProjectConfig();
|
|
1015
|
-
const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
|
|
1016
|
-
const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
|
|
1017
|
-
const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
|
|
1018
|
-
const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
|
|
1019
|
-
const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
|
|
1020
|
-
const envUseSSL = this._parseBool(
|
|
1021
|
-
this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
|
|
1022
|
-
);
|
|
1023
|
-
return {
|
|
1024
|
-
host: envHost ?? projectConfig.clickhouse_config.host,
|
|
1025
|
-
port: envPort ?? projectConfig.clickhouse_config.host_port.toString(),
|
|
1026
|
-
username: envUser ?? projectConfig.clickhouse_config.user,
|
|
1027
|
-
password: envPassword ?? projectConfig.clickhouse_config.password,
|
|
1028
|
-
database: envDb ?? projectConfig.clickhouse_config.db_name,
|
|
1029
|
-
useSSL: envUseSSL !== void 0 ? envUseSSL : projectConfig.clickhouse_config.use_ssl || false
|
|
1030
|
-
};
|
|
1031
|
-
}
|
|
1032
|
-
async getStandaloneClickhouseConfig(overrides) {
|
|
1033
|
-
if (this.clickhouseConfig) {
|
|
1034
|
-
return { ...this.clickhouseConfig, ...overrides };
|
|
1035
|
-
}
|
|
1036
|
-
const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
|
|
1037
|
-
const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
|
|
1038
|
-
const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
|
|
1039
|
-
const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
|
|
1040
|
-
const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
|
|
1041
|
-
const envUseSSL = this._parseBool(
|
|
1042
|
-
this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
|
|
1043
|
-
);
|
|
1044
|
-
let projectConfig;
|
|
554
|
+
/**
|
|
555
|
+
* Initializes the cluster by spawning worker processes and setting up signal handlers.
|
|
556
|
+
* For the primary process, spawns workers and monitors parent process.
|
|
557
|
+
* For worker processes, executes the worker startup function.
|
|
558
|
+
*
|
|
559
|
+
* @throws {Error} If worker is undefined in worker process
|
|
560
|
+
*/
|
|
561
|
+
async start() {
|
|
562
|
+
process.on(SIGTERM, this.gracefulClusterShutdown(SIGTERM));
|
|
563
|
+
process.on(SIGINT, this.gracefulClusterShutdown(SIGINT));
|
|
564
|
+
if (cluster.isPrimary) {
|
|
565
|
+
const parentPid = process.ppid;
|
|
566
|
+
setInterval(() => {
|
|
1045
567
|
try {
|
|
1046
|
-
|
|
1047
|
-
} catch (
|
|
1048
|
-
|
|
1049
|
-
|
|
1050
|
-
const defaults = {
|
|
1051
|
-
host: "localhost",
|
|
1052
|
-
port: "18123",
|
|
1053
|
-
username: "default",
|
|
1054
|
-
password: "",
|
|
1055
|
-
database: "local",
|
|
1056
|
-
useSSL: false
|
|
1057
|
-
};
|
|
1058
|
-
return {
|
|
1059
|
-
host: overrides?.host ?? envHost ?? projectConfig?.clickhouse_config.host ?? defaults.host,
|
|
1060
|
-
port: overrides?.port ?? envPort ?? projectConfig?.clickhouse_config.host_port.toString() ?? defaults.port,
|
|
1061
|
-
username: overrides?.username ?? envUser ?? projectConfig?.clickhouse_config.user ?? defaults.username,
|
|
1062
|
-
password: overrides?.password ?? envPassword ?? projectConfig?.clickhouse_config.password ?? defaults.password,
|
|
1063
|
-
database: overrides?.database ?? envDb ?? projectConfig?.clickhouse_config.db_name ?? defaults.database,
|
|
1064
|
-
useSSL: overrides?.useSSL ?? envUseSSL ?? projectConfig?.clickhouse_config.use_ssl ?? defaults.useSSL
|
|
1065
|
-
};
|
|
1066
|
-
}
|
|
1067
|
-
async getKafkaConfig() {
|
|
1068
|
-
if (this.kafkaConfig) {
|
|
1069
|
-
return this.kafkaConfig;
|
|
568
|
+
process.kill(parentPid, 0);
|
|
569
|
+
} catch (e) {
|
|
570
|
+
console.log("Parent process has exited.");
|
|
571
|
+
this.gracefulClusterShutdown(SIGTERM)();
|
|
1070
572
|
}
|
|
1071
|
-
|
|
1072
|
-
|
|
1073
|
-
|
|
1074
|
-
|
|
1075
|
-
|
|
1076
|
-
|
|
1077
|
-
|
|
1078
|
-
const envNamespace = this._env("MOOSE_REDPANDA_CONFIG__NAMESPACE") ?? this._env("MOOSE_KAFKA_CONFIG__NAMESPACE");
|
|
1079
|
-
const envSchemaRegistryUrl = this._env("MOOSE_REDPANDA_CONFIG__SCHEMA_REGISTRY_URL") ?? this._env("MOOSE_KAFKA_CONFIG__SCHEMA_REGISTRY_URL");
|
|
1080
|
-
const fileKafka = projectConfig.kafka_config ?? projectConfig.redpanda_config;
|
|
1081
|
-
return {
|
|
1082
|
-
broker: envBroker ?? fileKafka?.broker ?? "localhost:19092",
|
|
1083
|
-
messageTimeoutMs: envMsgTimeout ? parseInt(envMsgTimeout, 10) : fileKafka?.message_timeout_ms ?? 1e3,
|
|
1084
|
-
saslUsername: envSaslUsername ?? fileKafka?.sasl_username,
|
|
1085
|
-
saslPassword: envSaslPassword ?? fileKafka?.sasl_password,
|
|
1086
|
-
saslMechanism: envSaslMechanism ?? fileKafka?.sasl_mechanism,
|
|
1087
|
-
securityProtocol: envSecurityProtocol ?? fileKafka?.security_protocol,
|
|
1088
|
-
namespace: envNamespace ?? fileKafka?.namespace,
|
|
1089
|
-
schemaRegistryUrl: envSchemaRegistryUrl ?? fileKafka?.schema_registry_url
|
|
1090
|
-
};
|
|
1091
|
-
}
|
|
1092
|
-
hasRuntimeConfig() {
|
|
1093
|
-
return !!this.clickhouseConfig || !!this.kafkaConfig;
|
|
573
|
+
}, 1e3);
|
|
574
|
+
await this.bootWorkers(this.usedCpuCount);
|
|
575
|
+
} else {
|
|
576
|
+
if (!cluster.worker) {
|
|
577
|
+
throw new Error(
|
|
578
|
+
"Worker is not defined, it should be defined in worker process"
|
|
579
|
+
);
|
|
1094
580
|
}
|
|
1095
|
-
|
|
1096
|
-
|
|
1097
|
-
|
|
1098
|
-
});
|
|
1099
|
-
|
|
1100
|
-
// src/consumption-apis/standalone.ts
|
|
1101
|
-
var standalone_exports = {};
|
|
1102
|
-
__export(standalone_exports, {
|
|
1103
|
-
getMooseClients: () => getMooseClients,
|
|
1104
|
-
getMooseUtils: () => getMooseUtils
|
|
1105
|
-
});
|
|
1106
|
-
async function getMooseUtils(req) {
|
|
1107
|
-
if (req !== void 0) {
|
|
1108
|
-
console.warn(
|
|
1109
|
-
"[DEPRECATED] getMooseUtils(req) no longer requires a request parameter. Use getMooseUtils() instead."
|
|
1110
|
-
);
|
|
1111
|
-
}
|
|
1112
|
-
const runtimeContext = globalThis._mooseRuntimeContext;
|
|
1113
|
-
if (runtimeContext) {
|
|
1114
|
-
return {
|
|
1115
|
-
client: runtimeContext.client,
|
|
1116
|
-
sql,
|
|
1117
|
-
jwt: runtimeContext.jwt
|
|
1118
|
-
};
|
|
1119
|
-
}
|
|
1120
|
-
if (standaloneUtils) {
|
|
1121
|
-
return standaloneUtils;
|
|
1122
|
-
}
|
|
1123
|
-
if (initPromise) {
|
|
1124
|
-
return initPromise;
|
|
1125
|
-
}
|
|
1126
|
-
initPromise = (async () => {
|
|
1127
|
-
await Promise.resolve().then(() => (init_runtime(), runtime_exports));
|
|
1128
|
-
const configRegistry = globalThis._mooseConfigRegistry;
|
|
1129
|
-
if (!configRegistry) {
|
|
1130
|
-
throw new Error(
|
|
1131
|
-
"Moose not initialized. Ensure you're running within a Moose app or have proper configuration set up."
|
|
581
|
+
this.startOutput = await this.workerStart(
|
|
582
|
+
cluster.worker,
|
|
583
|
+
this.usedCpuCount
|
|
1132
584
|
);
|
|
1133
585
|
}
|
|
1134
|
-
const clickhouseConfig = await configRegistry.getStandaloneClickhouseConfig();
|
|
1135
|
-
const clickhouseClient = getClickhouseClient(
|
|
1136
|
-
toClientConfig(clickhouseConfig)
|
|
1137
|
-
);
|
|
1138
|
-
const queryClient = new QueryClient(clickhouseClient, "standalone");
|
|
1139
|
-
const mooseClient = new MooseClient(queryClient);
|
|
1140
|
-
standaloneUtils = {
|
|
1141
|
-
client: mooseClient,
|
|
1142
|
-
sql,
|
|
1143
|
-
jwt: void 0
|
|
1144
|
-
};
|
|
1145
|
-
return standaloneUtils;
|
|
1146
|
-
})();
|
|
1147
|
-
try {
|
|
1148
|
-
return await initPromise;
|
|
1149
|
-
} finally {
|
|
1150
|
-
initPromise = null;
|
|
1151
586
|
}
|
|
1152
|
-
|
|
1153
|
-
|
|
1154
|
-
|
|
1155
|
-
|
|
1156
|
-
|
|
1157
|
-
|
|
1158
|
-
|
|
1159
|
-
|
|
1160
|
-
|
|
1161
|
-
|
|
1162
|
-
|
|
587
|
+
/**
|
|
588
|
+
* Spawns worker processes and configures their lifecycle event handlers.
|
|
589
|
+
* Handles worker online, exit and disconnect events.
|
|
590
|
+
* Automatically restarts failed workers during normal operation.
|
|
591
|
+
*
|
|
592
|
+
* @param numWorkers - Number of worker processes to spawn
|
|
593
|
+
*/
|
|
594
|
+
bootWorkers = async (numWorkers) => {
|
|
595
|
+
console.info(`Setting ${numWorkers} workers...`);
|
|
596
|
+
for (let i = 0; i < numWorkers; i++) {
|
|
597
|
+
cluster.fork();
|
|
598
|
+
}
|
|
599
|
+
cluster.on("online", (worker) => {
|
|
600
|
+
console.info(`worker process ${worker.process.pid} is online`);
|
|
601
|
+
});
|
|
602
|
+
cluster.on("exit", (worker, code, signal) => {
|
|
603
|
+
console.info(
|
|
604
|
+
`worker ${worker.process.pid} exited with code ${code} and signal ${signal}`
|
|
1163
605
|
);
|
|
606
|
+
if (!this.shutdownInProgress) {
|
|
607
|
+
setTimeout(() => cluster.fork(), RESTART_TIME_MS);
|
|
608
|
+
}
|
|
609
|
+
if (this.shutdownInProgress && code != 0) {
|
|
610
|
+
this.hasCleanWorkerExit = false;
|
|
611
|
+
}
|
|
612
|
+
});
|
|
613
|
+
cluster.on("disconnect", (worker) => {
|
|
614
|
+
console.info(`worker process ${worker.process.pid} has disconnected`);
|
|
615
|
+
});
|
|
616
|
+
};
|
|
617
|
+
/**
|
|
618
|
+
* Creates a handler function for graceful shutdown on receipt of a signal.
|
|
619
|
+
* Ensures only one shutdown can occur at a time.
|
|
620
|
+
* Handles shutdown differently for primary and worker processes.
|
|
621
|
+
*
|
|
622
|
+
* @param signal - The signal triggering the shutdown (e.g. SIGTERM)
|
|
623
|
+
* @returns An async function that performs the shutdown
|
|
624
|
+
*/
|
|
625
|
+
gracefulClusterShutdown = (signal) => async () => {
|
|
626
|
+
if (this.shutdownInProgress) {
|
|
627
|
+
return;
|
|
1164
628
|
}
|
|
1165
|
-
|
|
1166
|
-
|
|
1167
|
-
|
|
629
|
+
this.shutdownInProgress = true;
|
|
630
|
+
this.hasCleanWorkerExit = true;
|
|
631
|
+
console.info(
|
|
632
|
+
`Got ${signal} on ${this.processStr}. Graceful shutdown start at ${(/* @__PURE__ */ new Date()).toISOString()}`
|
|
1168
633
|
);
|
|
1169
|
-
|
|
1170
|
-
|
|
1171
|
-
|
|
1172
|
-
|
|
1173
|
-
|
|
1174
|
-
|
|
1175
|
-
|
|
1176
|
-
|
|
1177
|
-
|
|
1178
|
-
|
|
1179
|
-
|
|
1180
|
-
|
|
1181
|
-
|
|
1182
|
-
|
|
1183
|
-
|
|
1184
|
-
|
|
1185
|
-
|
|
1186
|
-
|
|
1187
|
-
|
|
634
|
+
try {
|
|
635
|
+
if (cluster.isPrimary) {
|
|
636
|
+
await this.shutdownWorkers(signal);
|
|
637
|
+
console.info(`${this.processStr} - worker shutdown successful`);
|
|
638
|
+
exit(0);
|
|
639
|
+
} else {
|
|
640
|
+
if (this.startOutput) {
|
|
641
|
+
await this.workerStop(this.startOutput);
|
|
642
|
+
} else {
|
|
643
|
+
console.info(
|
|
644
|
+
`${this.processStr} - shutdown before worker fully started`
|
|
645
|
+
);
|
|
646
|
+
}
|
|
647
|
+
console.info(`${this.processStr} shutdown successful`);
|
|
648
|
+
this.hasCleanWorkerExit ? exit(0) : exit(1);
|
|
649
|
+
}
|
|
650
|
+
} catch (e) {
|
|
651
|
+
console.error(`${this.processStr} - shutdown failed`, e);
|
|
652
|
+
exit(1);
|
|
653
|
+
}
|
|
654
|
+
};
|
|
655
|
+
/**
|
|
656
|
+
* Gracefully terminates all worker processes.
|
|
657
|
+
* Monitors workers until they all exit or timeout occurs.
|
|
658
|
+
* Only relevant for the primary process.
|
|
659
|
+
*
|
|
660
|
+
* @param signal - The signal to send to worker processes
|
|
661
|
+
* @returns A promise that resolves when all workers have terminated
|
|
662
|
+
*/
|
|
663
|
+
shutdownWorkers = (signal) => {
|
|
664
|
+
return new Promise((resolve2, reject) => {
|
|
665
|
+
if (!cluster.isPrimary) {
|
|
666
|
+
return resolve2();
|
|
667
|
+
}
|
|
668
|
+
if (!cluster.workers) {
|
|
669
|
+
return resolve2();
|
|
670
|
+
}
|
|
671
|
+
const workerIds = Object.keys(cluster.workers);
|
|
672
|
+
if (workerIds.length == 0) {
|
|
673
|
+
return resolve2();
|
|
674
|
+
}
|
|
675
|
+
let workersAlive = 0;
|
|
676
|
+
let funcRun = 0;
|
|
677
|
+
const cleanWorkers = () => {
|
|
678
|
+
++funcRun;
|
|
679
|
+
workersAlive = 0;
|
|
680
|
+
Object.values(cluster.workers || {}).filter((worker) => !!worker).forEach((worker) => {
|
|
681
|
+
if (worker && !worker.isDead()) {
|
|
682
|
+
++workersAlive;
|
|
683
|
+
if (funcRun == 1) {
|
|
684
|
+
worker.kill(signal);
|
|
685
|
+
}
|
|
686
|
+
}
|
|
687
|
+
});
|
|
688
|
+
console.info(workersAlive + " workers alive");
|
|
689
|
+
if (workersAlive == 0) {
|
|
690
|
+
clearInterval(interval);
|
|
691
|
+
return resolve2();
|
|
692
|
+
}
|
|
693
|
+
};
|
|
694
|
+
const interval = setInterval(cleanWorkers, SHUTDOWN_WORKERS_INTERVAL);
|
|
1188
695
|
});
|
|
1189
|
-
}
|
|
1190
|
-
}
|
|
696
|
+
};
|
|
697
|
+
};
|
|
1191
698
|
|
|
1192
699
|
// src/consumption-apis/runner.ts
|
|
1193-1211
|
-
|
|
1212
|
-
)
|
|
1213-1223
|
-
|
|
1224
|
-
const jwt = req.headers.authorization?.split(" ")[1];
|
|
1225
|
-
if (jwt) {
|
|
1226
|
-
try {
|
|
1227
|
-
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
1228
|
-
issuer: jwtConfig.issuer,
|
|
1229
|
-
audience: jwtConfig.audience
|
|
1230
|
-
});
|
|
1231
|
-
jwtPayload = payload;
|
|
1232
|
-
} catch (error) {
|
|
1233
|
-
console.log("JWT verification failed");
|
|
1234
|
-
if (enforceAuth) {
|
|
1235
|
-
res.writeHead(401, { "Content-Type": "application/json" });
|
|
1236
|
-
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
1237
|
-
httpLogger(req, res, start);
|
|
1238
|
-
return;
|
|
1239
|
-
}
|
|
1240
|
-
}
|
|
1241
|
-
} else if (enforceAuth) {
|
|
700
|
+
var toClientConfig = (config) => ({
|
|
701
|
+
...config,
|
|
702
|
+
useSSL: config.useSSL ? "true" : "false"
|
|
703
|
+
});
|
|
704
|
+
var createPath = (apisDir, path3) => `${apisDir}${path3}.ts`;
|
|
705
|
+
var httpLogger = (req, res, startMs) => {
|
|
706
|
+
console.log(
|
|
707
|
+
`${req.method} ${req.url} ${res.statusCode} ${Date.now() - startMs}ms`
|
|
708
|
+
);
|
|
709
|
+
};
|
|
710
|
+
var modulesCache = /* @__PURE__ */ new Map();
|
|
711
|
+
var apiHandler = async (publicKey, clickhouseClient, temporalClient, apisDir, enforceAuth, isDmv2, jwtConfig) => {
|
|
712
|
+
const apis = isDmv2 ? await getApis2() : /* @__PURE__ */ new Map();
|
|
713
|
+
return async (req, res) => {
|
|
714
|
+
const start = Date.now();
|
|
715
|
+
try {
|
|
716
|
+
const url = new URL(req.url || "", "http://localhost");
|
|
717
|
+
const fileName = url.pathname;
|
|
718
|
+
let jwtPayload;
|
|
719
|
+
if (publicKey && jwtConfig) {
|
|
720
|
+
const jwt = req.headers.authorization?.split(" ")[1];
|
|
721
|
+
if (jwt) {
|
|
722
|
+
try {
|
|
723
|
+
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
724
|
+
issuer: jwtConfig.issuer,
|
|
725
|
+
audience: jwtConfig.audience
|
|
726
|
+
});
|
|
727
|
+
jwtPayload = payload;
|
|
728
|
+
} catch (error) {
|
|
729
|
+
console.log("JWT verification failed");
|
|
730
|
+
if (enforceAuth) {
|
|
1242
731
|
res.writeHead(401, { "Content-Type": "application/json" });
|
|
1243
732
|
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
1244
733
|
httpLogger(req, res, start);
|
|
1245
734
|
return;
|
|
1246
735
|
}
|
|
1247
|
-
} else if (enforceAuth) {
|
|
1248
|
-
res.writeHead(401, { "Content-Type": "application/json" });
|
|
1249
|
-
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
1250
|
-
httpLogger(req, res, start);
|
|
1251
|
-
return;
|
|
1252
736
|
}
|
|
1253-1268
|
-
|
|
1269
|
-
)
|
|
1270-1287
|
-
|
|
737
|
+
} else if (enforceAuth) {
|
|
738
|
+
res.writeHead(401, { "Content-Type": "application/json" });
|
|
739
|
+
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
740
|
+
httpLogger(req, res, start);
|
|
741
|
+
return;
|
|
742
|
+
}
|
|
743
|
+
} else if (enforceAuth) {
|
|
744
|
+
res.writeHead(401, { "Content-Type": "application/json" });
|
|
745
|
+
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
746
|
+
httpLogger(req, res, start);
|
|
747
|
+
return;
|
|
748
|
+
}
|
|
749
|
+
const pathName = createPath(apisDir, fileName);
|
|
750
|
+
const paramsObject = Array.from(url.searchParams.entries()).reduce(
|
|
751
|
+
(obj, [key, value]) => {
|
|
752
|
+
const existingValue = obj[key];
|
|
753
|
+
if (existingValue) {
|
|
754
|
+
if (Array.isArray(existingValue)) {
|
|
755
|
+
existingValue.push(value);
|
|
756
|
+
} else {
|
|
757
|
+
obj[key] = [existingValue, value];
|
|
758
|
+
}
|
|
759
|
+
} else {
|
|
760
|
+
obj[key] = value;
|
|
761
|
+
}
|
|
762
|
+
return obj;
|
|
763
|
+
},
|
|
764
|
+
{}
|
|
765
|
+
);
|
|
766
|
+
let userFuncModule = modulesCache.get(pathName);
|
|
767
|
+
if (userFuncModule === void 0) {
|
|
768
|
+
if (isDmv2) {
|
|
769
|
+
let apiName = fileName.replace(/^\/+|\/+$/g, "");
|
|
770
|
+
let version = null;
|
|
771
|
+
userFuncModule = apis.get(apiName);
|
|
772
|
+
if (!userFuncModule) {
|
|
773
|
+
version = url.searchParams.get("version");
|
|
774
|
+
if (!version && apiName.includes("/")) {
|
|
775
|
+
const pathParts = apiName.split("/");
|
|
776
|
+
if (pathParts.length >= 2) {
|
|
777
|
+
userFuncModule = apis.get(apiName);
|
|
1288
778
|
if (!userFuncModule) {
|
|
1289
|
-
|
|
1290
|
-
|
|
1291
|
-
userFuncModule = apis.get(versionedKey);
|
|
1292
|
-
} else {
|
|
1293
|
-
userFuncModule = apis.get(apiName);
|
|
1294
|
-
}
|
|
779
|
+
apiName = pathParts[0];
|
|
780
|
+
version = pathParts.slice(1).join("/");
|
|
1295
781
|
}
|
|
1296
782
|
}
|
|
1297
|
-
if (!userFuncModule) {
|
|
1298
|
-
const availableApis = Array.from(apis.keys()).map(
|
|
1299
|
-
(key) => key.replace(":", "/")
|
|
1300
|
-
);
|
|
1301
|
-
const errorMessage = version ? `API ${apiName} with version ${version} not found. Available APIs: ${availableApis.join(", ")}` : `API ${apiName} not found. Available APIs: ${availableApis.join(", ")}`;
|
|
1302
|
-
throw new Error(errorMessage);
|
|
1303
|
-
}
|
|
1304
|
-
modulesCache.set(pathName, userFuncModule);
|
|
1305
|
-
console.log(`[API] | Executing API: ${apiName}`);
|
|
1306
|
-
} else {
|
|
1307
|
-
userFuncModule = __require(pathName);
|
|
1308
|
-
modulesCache.set(pathName, userFuncModule);
|
|
1309
783
|
}
|
|
1310-1316
|
-
|
|
1317
|
-
client: new MooseClient(queryClient, temporalClient),
|
|
1318
|
-
sql,
|
|
1319
|
-
jwt: jwtPayload
|
|
1320
|
-
});
|
|
1321
|
-
let body;
|
|
1322
|
-
let status;
|
|
1323
|
-
if (Object.getPrototypeOf(result).constructor.name === "ResultSet") {
|
|
1324
|
-
body = JSON.stringify(await result.json());
|
|
1325
|
-
} else {
|
|
1326
|
-
if ("body" in result && "status" in result) {
|
|
1327
|
-
body = JSON.stringify(result.body);
|
|
1328
|
-
status = result.status;
|
|
1329
|
-
} else {
|
|
1330
|
-
body = JSON.stringify(result);
|
|
784
|
+
if (!userFuncModule) {
|
|
785
|
+
if (version) {
|
|
786
|
+
const versionedKey = `${apiName}:${version}`;
|
|
787
|
+
userFuncModule = apis.get(versionedKey);
|
|
788
|
+
} else {
|
|
789
|
+
userFuncModule = apis.get(apiName);
|
|
790
|
+
}
|
|
1331
791
|
}
|
|
1332
792
|
}
|
|
1333
|
-
if (
|
|
1334-1338
|
-
|
|
1339
|
-
}
|
|
1340
|
-
res.end(body);
|
|
1341
|
-
} catch (error) {
|
|
1342
|
-
console.log("error in path ", req.url, error);
|
|
1343
|
-
if (Object.getPrototypeOf(error).constructor.name === "TypeGuardError") {
|
|
1344
|
-
res.writeHead(400, { "Content-Type": "application/json" });
|
|
1345
|
-
res.end(JSON.stringify({ error: error.message }));
|
|
1346
|
-
httpLogger(req, res, start);
|
|
1347
|
-
}
|
|
1348
|
-
if (error instanceof Error) {
|
|
1349
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
1350
|
-
res.end(JSON.stringify({ error: error.message }));
|
|
1351
|
-
httpLogger(req, res, start);
|
|
1352
|
-
} else {
|
|
1353
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
1354
|
-
res.end();
|
|
1355
|
-
httpLogger(req, res, start);
|
|
793
|
+
if (!userFuncModule) {
|
|
794
|
+
const availableApis = Array.from(apis.keys()).map(
|
|
795
|
+
(key) => key.replace(":", "/")
|
|
796
|
+
);
|
|
797
|
+
const errorMessage = version ? `API ${apiName} with version ${version} not found. Available APIs: ${availableApis.join(", ")}` : `API ${apiName} not found. Available APIs: ${availableApis.join(", ")}`;
|
|
798
|
+
throw new Error(errorMessage);
|
|
1356
799
|
}
|
|
800
|
+
modulesCache.set(pathName, userFuncModule);
|
|
801
|
+
console.log(`[API] | Executing API: ${apiName}`);
|
|
802
|
+
} else {
|
|
803
|
+
userFuncModule = __require(pathName);
|
|
804
|
+
modulesCache.set(pathName, userFuncModule);
|
|
1357
805
|
}
|
|
1358
|
-
}
|
|
1359-1367
|
-
|
|
1368
|
-
jwtConfig
|
|
1369
|
-
);
|
|
1370
|
-
const webApps = isDmv2 ? await getWebApps2() : /* @__PURE__ */ new Map();
|
|
1371
|
-
const sortedWebApps = Array.from(webApps.values()).sort((a, b) => {
|
|
1372
|
-
const pathA = a.config.mountPath || "/";
|
|
1373
|
-
const pathB = b.config.mountPath || "/";
|
|
1374
|
-
return pathB.length - pathA.length;
|
|
806
|
+
}
|
|
807
|
+
const queryClient = new QueryClient(clickhouseClient, fileName);
|
|
808
|
+
let result = isDmv2 ? await userFuncModule(paramsObject, {
|
|
809
|
+
client: new MooseClient(queryClient, temporalClient),
|
|
810
|
+
sql,
|
|
811
|
+
jwt: jwtPayload
|
|
812
|
+
}) : await userFuncModule.default(paramsObject, {
|
|
813
|
+
client: new MooseClient(queryClient, temporalClient),
|
|
814
|
+
sql,
|
|
815
|
+
jwt: jwtPayload
|
|
1375
816
|
});
|
|
1376-1385
|
-
|
|
1386
|
-
})
|
|
1387
|
-
);
|
|
1388
|
-
return;
|
|
817
|
+
let body;
|
|
818
|
+
let status;
|
|
819
|
+
if (Object.getPrototypeOf(result).constructor.name === "ResultSet") {
|
|
820
|
+
body = JSON.stringify(await result.json());
|
|
821
|
+
} else {
|
|
822
|
+
if ("body" in result && "status" in result) {
|
|
823
|
+
body = JSON.stringify(result.body);
|
|
824
|
+
status = result.status;
|
|
825
|
+
} else {
|
|
826
|
+
body = JSON.stringify(result);
|
|
1389
827
|
}
|
|
1390-1403
|
-
|
|
828
|
+
}
|
|
829
|
+
if (status) {
|
|
830
|
+
res.writeHead(status, { "Content-Type": "application/json" });
|
|
831
|
+
httpLogger(req, res, start);
|
|
832
|
+
} else {
|
|
833
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
834
|
+
httpLogger(req, res, start);
|
|
835
|
+
}
|
|
836
|
+
res.end(body);
|
|
837
|
+
} catch (error) {
|
|
838
|
+
console.log("error in path ", req.url, error);
|
|
839
|
+
if (Object.getPrototypeOf(error).constructor.name === "TypeGuardError") {
|
|
840
|
+
res.writeHead(400, { "Content-Type": "application/json" });
|
|
841
|
+
res.end(JSON.stringify({ error: error.message }));
|
|
842
|
+
httpLogger(req, res, start);
|
|
843
|
+
}
|
|
844
|
+
if (error instanceof Error) {
|
|
845
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
846
|
+
res.end(JSON.stringify({ error: error.message }));
|
|
847
|
+
httpLogger(req, res, start);
|
|
848
|
+
} else {
|
|
849
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
850
|
+
res.end();
|
|
851
|
+
httpLogger(req, res, start);
|
|
852
|
+
}
|
|
853
|
+
}
|
|
854
|
+
};
|
|
855
|
+
};
|
|
856
|
+
var createMainRouter = async (publicKey, clickhouseClient, temporalClient, apisDir, enforceAuth, isDmv2, jwtConfig) => {
|
|
857
|
+
const apiRequestHandler = await apiHandler(
|
|
858
|
+
publicKey,
|
|
859
|
+
clickhouseClient,
|
|
860
|
+
temporalClient,
|
|
861
|
+
apisDir,
|
|
862
|
+
enforceAuth,
|
|
863
|
+
isDmv2,
|
|
864
|
+
jwtConfig
|
|
865
|
+
);
|
|
866
|
+
const webApps = isDmv2 ? await getWebApps2() : /* @__PURE__ */ new Map();
|
|
867
|
+
const sortedWebApps = Array.from(webApps.values()).sort((a, b) => {
|
|
868
|
+
const pathA = a.config.mountPath || "/";
|
|
869
|
+
const pathB = b.config.mountPath || "/";
|
|
870
|
+
return pathB.length - pathA.length;
|
|
871
|
+
});
|
|
872
|
+
return async (req, res) => {
|
|
873
|
+
const start = Date.now();
|
|
874
|
+
const url = new URL(req.url || "", "http://localhost");
|
|
875
|
+
const pathname = url.pathname;
|
|
876
|
+
if (pathname === "/_moose_internal/health") {
|
|
877
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
878
|
+
res.end(
|
|
879
|
+
JSON.stringify({
|
|
880
|
+
status: "healthy",
|
|
881
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
882
|
+
})
|
|
883
|
+
);
|
|
884
|
+
return;
|
|
885
|
+
}
|
|
886
|
+
let jwtPayload;
|
|
887
|
+
if (publicKey && jwtConfig) {
|
|
888
|
+
const jwt = req.headers.authorization?.split(" ")[1];
|
|
889
|
+
if (jwt) {
|
|
890
|
+
try {
|
|
891
|
+
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
892
|
+
issuer: jwtConfig.issuer,
|
|
893
|
+
audience: jwtConfig.audience
|
|
894
|
+
});
|
|
895
|
+
jwtPayload = payload;
|
|
896
|
+
} catch (error) {
|
|
897
|
+
console.log("JWT verification failed for WebApp route");
|
|
1404
898
|
}
|
|
1405-1418
|
-
|
|
1419
|
-
try {
|
|
1420
|
-
const modifiedReq = Object.assign(
|
|
1421
|
-
Object.create(Object.getPrototypeOf(req)),
|
|
1422
|
-
req,
|
|
1423
|
-
{
|
|
1424
|
-
url: proxiedUrl
|
|
1425
|
-
}
|
|
1426
|
-
);
|
|
1427
|
-
await webApp.handler(modifiedReq, res);
|
|
1428
|
-
return;
|
|
1429
|
-
} catch (error) {
|
|
1430
|
-
console.error(`Error in WebApp ${webApp.name}:`, error);
|
|
1431
|
-
if (!res.headersSent) {
|
|
1432
|
-
res.writeHead(500, { "Content-Type": "application/json" });
|
|
1433
|
-
res.end(JSON.stringify({ error: "Internal Server Error" }));
|
|
1434
|
-
}
|
|
1435
|
-
return;
|
|
1436
|
-
}
|
|
1437
|
-
}
|
|
899
|
+
}
|
|
900
|
+
}
|
|
901
|
+
for (const webApp of sortedWebApps) {
|
|
902
|
+
const mountPath = webApp.config.mountPath || "/";
|
|
903
|
+
const normalizedMount = mountPath.endsWith("/") && mountPath !== "/" ? mountPath.slice(0, -1) : mountPath;
|
|
904
|
+
const matches = pathname === normalizedMount || pathname.startsWith(normalizedMount + "/");
|
|
905
|
+
if (matches) {
|
|
906
|
+
if (webApp.config.injectMooseUtils !== false) {
|
|
907
|
+
const queryClient = new QueryClient(clickhouseClient, pathname);
|
|
908
|
+
req.moose = {
|
|
909
|
+
client: new MooseClient(queryClient, temporalClient),
|
|
910
|
+
sql,
|
|
911
|
+
jwt: jwtPayload
|
|
912
|
+
};
|
|
1438
913
|
}
|
|
1439
|
-
let
|
|
1440
|
-
if (
|
|
1441
|
-
|
|
1442
|
-
|
|
1443
|
-
apiPath = pathname.substring(13);
|
|
914
|
+
let proxiedUrl = req.url;
|
|
915
|
+
if (normalizedMount !== "/") {
|
|
916
|
+
const pathWithoutMount = pathname.substring(normalizedMount.length) || "/";
|
|
917
|
+
proxiedUrl = pathWithoutMount + url.search;
|
|
1444
918
|
}
|
|
1445
|
-
|
|
919
|
+
try {
|
|
1446
920
|
const modifiedReq = Object.assign(
|
|
1447
921
|
Object.create(Object.getPrototypeOf(req)),
|
|
1448
922
|
req,
|
|
1449
923
|
{
|
|
1450
|
-
url:
|
|
924
|
+
url: proxiedUrl
|
|
1451
925
|
}
|
|
1452
926
|
);
|
|
1453
|
-
await
|
|
927
|
+
await webApp.handler(modifiedReq, res);
|
|
1454
928
|
return;
|
|
1455
|
-
}
|
|
1456
|
-
|
|
1457
|
-
|
|
1458
|
-
|
|
1459
|
-
|
|
1460
|
-
};
|
|
1461
|
-
runApis = async (config) => {
|
|
1462
|
-
const apisCluster = new Cluster({
|
|
1463
|
-
maxWorkerCount: (config.workerCount ?? 0) > 0 ? config.workerCount : void 0,
|
|
1464
|
-
workerStart: async () => {
|
|
1465
|
-
let temporalClient;
|
|
1466
|
-
if (config.temporalConfig) {
|
|
1467
|
-
temporalClient = await getTemporalClient(
|
|
1468
|
-
config.temporalConfig.url,
|
|
1469
|
-
config.temporalConfig.namespace,
|
|
1470
|
-
config.temporalConfig.clientCert,
|
|
1471
|
-
config.temporalConfig.clientKey,
|
|
1472
|
-
config.temporalConfig.apiKey
|
|
1473
|
-
);
|
|
1474
|
-
}
|
|
1475
|
-
const clickhouseClient = getClickhouseClient(
|
|
1476
|
-
toClientConfig2(config.clickhouseConfig)
|
|
1477
|
-
);
|
|
1478
|
-
let publicKey;
|
|
1479
|
-
if (config.jwtConfig?.secret) {
|
|
1480
|
-
console.log("Importing JWT public key...");
|
|
1481
|
-
publicKey = await jose.importSPKI(config.jwtConfig.secret, "RS256");
|
|
929
|
+
} catch (error) {
|
|
930
|
+
console.error(`Error in WebApp ${webApp.name}:`, error);
|
|
931
|
+
if (!res.headersSent) {
|
|
932
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
933
|
+
res.end(JSON.stringify({ error: "Internal Server Error" }));
|
|
1482
934
|
}
|
|
1483-1498
|
-
|
|
1499
|
-
server.listen(port, "localhost", () => {
|
|
1500
|
-
console.log(`Server running on port ${port}`);
|
|
1501
|
-
});
|
|
1502
|
-
return server;
|
|
1503
|
-
},
|
|
1504
|
-
workerStop: async (server) => {
|
|
1505
|
-
return new Promise((resolve2) => {
|
|
1506
|
-
server.close(() => resolve2());
|
|
1507
|
-
});
|
|
935
|
+
return;
|
|
936
|
+
}
|
|
937
|
+
}
|
|
938
|
+
}
|
|
939
|
+
let apiPath = pathname;
|
|
940
|
+
if (pathname.startsWith("/api/")) {
|
|
941
|
+
apiPath = pathname.substring(4);
|
|
942
|
+
} else if (pathname.startsWith("/consumption/")) {
|
|
943
|
+
apiPath = pathname.substring(13);
|
|
944
|
+
}
|
|
945
|
+
if (apiPath !== pathname) {
|
|
946
|
+
const modifiedReq = Object.assign(
|
|
947
|
+
Object.create(Object.getPrototypeOf(req)),
|
|
948
|
+
req,
|
|
949
|
+
{
|
|
950
|
+
url: apiPath + url.search
|
|
1508
951
|
}
|
|
952
|
+
);
|
|
953
|
+
await apiRequestHandler(modifiedReq, res);
|
|
954
|
+
return;
|
|
955
|
+
}
|
|
956
|
+
res.writeHead(404, { "Content-Type": "application/json" });
|
|
957
|
+
res.end(JSON.stringify({ error: "Not Found" }));
|
|
958
|
+
httpLogger(req, res, start);
|
|
959
|
+
};
|
|
960
|
+
};
|
|
961
|
+
var runApis = async (config) => {
|
|
962
|
+
const apisCluster = new Cluster({
|
|
963
|
+
maxWorkerCount: (config.workerCount ?? 0) > 0 ? config.workerCount : void 0,
|
|
964
|
+
workerStart: async () => {
|
|
965
|
+
let temporalClient;
|
|
966
|
+
if (config.temporalConfig) {
|
|
967
|
+
temporalClient = await getTemporalClient(
|
|
968
|
+
config.temporalConfig.url,
|
|
969
|
+
config.temporalConfig.namespace,
|
|
970
|
+
config.temporalConfig.clientCert,
|
|
971
|
+
config.temporalConfig.clientKey,
|
|
972
|
+
config.temporalConfig.apiKey
|
|
973
|
+
);
|
|
974
|
+
}
|
|
975
|
+
const clickhouseClient = getClickhouseClient(
|
|
976
|
+
toClientConfig(config.clickhouseConfig)
|
|
977
|
+
);
|
|
978
|
+
let publicKey;
|
|
979
|
+
if (config.jwtConfig?.secret) {
|
|
980
|
+
console.log("Importing JWT public key...");
|
|
981
|
+
publicKey = await jose.importSPKI(config.jwtConfig.secret, "RS256");
|
|
982
|
+
}
|
|
983
|
+
const server = http2.createServer(
|
|
984
|
+
await createMainRouter(
|
|
985
|
+
publicKey,
|
|
986
|
+
clickhouseClient,
|
|
987
|
+
temporalClient,
|
|
988
|
+
config.apisDir,
|
|
989
|
+
config.enforceAuth,
|
|
990
|
+
config.isDmv2,
|
|
991
|
+
config.jwtConfig
|
|
992
|
+
)
|
|
993
|
+
);
|
|
994
|
+
const port = config.proxyPort !== void 0 ? config.proxyPort : 4001;
|
|
995
|
+
server.listen(port, "localhost", () => {
|
|
996
|
+
console.log(`Server running on port ${port}`);
|
|
1509
997
|
});
|
|
1510
|
-
|
|
1511
|
-
}
|
|
1512
|
-
|
|
1513
|
-
|
|
998
|
+
return server;
|
|
999
|
+
},
|
|
1000
|
+
workerStop: async (server) => {
|
|
1001
|
+
return new Promise((resolve2) => {
|
|
1002
|
+
server.close(() => resolve2());
|
|
1003
|
+
});
|
|
1004
|
+
}
|
|
1005
|
+
});
|
|
1006
|
+
apisCluster.start();
|
|
1007
|
+
};
|
|
1514
1008
|
|
|
1515
1009
|
// src/clients/redisClient.ts
|
|
1516
1010
|
import { createClient as createClient2 } from "redis";
|
|
1517-1521
|
-
|
|
1011
|
+
|
|
1012
|
+
// src/consumption-apis/standalone.ts
|
|
1013
|
+
init_commons();
|
|
1014
|
+
|
|
1015
|
+
// src/utilities/dataParser.ts
|
|
1016
|
+
import { parse } from "csv-parse";
|
|
1522
1017
|
|
|
1523
1018
|
// src/utilities/json.ts
|
|
1019
|
+
var STRING_DATE_ANNOTATION = "stringDate";
|
|
1524
1020
|
function isNullableType(dt) {
|
|
1525
1021
|
return typeof dt === "object" && dt !== null && "nullable" in dt && typeof dt.nullable !== "undefined";
|
|
1526
1022
|
}
|
|
@@ -1639,78 +1135,36 @@ function mutateParsedJson(data, fieldMutations) {
|
|
|
1639
1135
|
}
|
|
1640
1136
|
applyFieldMutations(data, fieldMutations);
|
|
1641
1137
|
}
|
|
1642
|
-
var STRING_DATE_ANNOTATION;
|
|
1643
|
-
var init_json = __esm({
|
|
1644
|
-
"src/utilities/json.ts"() {
|
|
1645
|
-
"use strict";
|
|
1646
|
-
STRING_DATE_ANNOTATION = "stringDate";
|
|
1647
|
-
}
|
|
1648
|
-
});
|
|
1649
1138
|
|
|
1650
1139
|
// src/utilities/dataParser.ts
|
|
1651
|
-
|
|
1652
|
-
|
|
1653
|
-
|
|
1654
|
-
"
|
|
1655
|
-
|
|
1656
|
-
|
|
1657
|
-
|
|
1658
|
-
|
|
1659
|
-
|
|
1660
|
-
|
|
1661
|
-
|
|
1662
|
-
|
|
1663
|
-
DEFAULT_CSV_CONFIG = {
|
|
1664
|
-
delimiter: CSV_DELIMITERS.COMMA,
|
|
1665
|
-
columns: true,
|
|
1666
|
-
skipEmptyLines: true,
|
|
1667
|
-
trim: true
|
|
1668
|
-
};
|
|
1669
|
-
}
|
|
1670
|
-
});
|
|
1671
|
-
|
|
1672
|
-
// src/utilities/index.ts
|
|
1673
|
-
var init_utilities = __esm({
|
|
1674
|
-
"src/utilities/index.ts"() {
|
|
1675
|
-
"use strict";
|
|
1676
|
-
init_dataParser();
|
|
1677
|
-
}
|
|
1678
|
-
});
|
|
1679
|
-
|
|
1680
|
-
// src/connectors/dataSource.ts
|
|
1681
|
-
var init_dataSource = __esm({
|
|
1682
|
-
"src/connectors/dataSource.ts"() {
|
|
1683
|
-
"use strict";
|
|
1684
|
-
}
|
|
1685
|
-
});
|
|
1686
|
-
|
|
1687
|
-
// src/index.ts
|
|
1688
|
-
var init_index = __esm({
|
|
1689
|
-
"src/index.ts"() {
|
|
1690
|
-
"use strict";
|
|
1691
|
-
init_browserCompatible();
|
|
1692
|
-
init_helpers();
|
|
1693
|
-
init_commons();
|
|
1694
|
-
init_secrets();
|
|
1695
|
-
init_helpers2();
|
|
1696
|
-
init_webAppHelpers();
|
|
1697
|
-
init_task();
|
|
1698
|
-
init_runner();
|
|
1699
|
-
init_redisClient();
|
|
1700
|
-
init_helpers2();
|
|
1701
|
-
init_standalone();
|
|
1702
|
-
init_sqlHelpers();
|
|
1703
|
-
init_utilities();
|
|
1704
|
-
init_dataSource();
|
|
1705
|
-
init_types();
|
|
1706
|
-
}
|
|
1707
|
-
});
|
|
1140
|
+
var CSV_DELIMITERS = {
|
|
1141
|
+
COMMA: ",",
|
|
1142
|
+
TAB: " ",
|
|
1143
|
+
SEMICOLON: ";",
|
|
1144
|
+
PIPE: "|"
|
|
1145
|
+
};
|
|
1146
|
+
var DEFAULT_CSV_CONFIG = {
|
|
1147
|
+
delimiter: CSV_DELIMITERS.COMMA,
|
|
1148
|
+
columns: true,
|
|
1149
|
+
skipEmptyLines: true,
|
|
1150
|
+
trim: true
|
|
1151
|
+
};
|
|
1708
1152
|
|
|
1709
1153
|
// src/dmv2/internal.ts
|
|
1710
|
-
|
|
1154
|
+
init_commons();
|
|
1711
1155
|
function getSourceDir() {
|
|
1712
1156
|
return process2.env.MOOSE_SOURCE_DIR || "app";
|
|
1713
1157
|
}
|
|
1158
|
+
var moose_internal = {
|
|
1159
|
+
tables: /* @__PURE__ */ new Map(),
|
|
1160
|
+
streams: /* @__PURE__ */ new Map(),
|
|
1161
|
+
ingestApis: /* @__PURE__ */ new Map(),
|
|
1162
|
+
apis: /* @__PURE__ */ new Map(),
|
|
1163
|
+
sqlResources: /* @__PURE__ */ new Map(),
|
|
1164
|
+
workflows: /* @__PURE__ */ new Map(),
|
|
1165
|
+
webApps: /* @__PURE__ */ new Map()
|
|
1166
|
+
};
|
|
1167
|
+
var defaultRetentionPeriod = 60 * 60 * 24 * 7;
|
|
1714
1168
|
function isS3QueueConfig(config) {
|
|
1715
1169
|
return "engine" in config && config.engine === "S3Queue" /* S3Queue */;
|
|
1716
1170
|
}
|
|
@@ -1947,386 +1401,364 @@ function convertTableConfigToEngineConfig(config) {
|
|
|
1947
1401
|
}
|
|
1948
1402
|
return void 0;
|
|
1949
1403
|
}
|
|
1950
|
-
|
|
1951
|
-
|
|
1952
|
-
|
|
1953
|
-
}
|
|
1954
|
-
|
|
1955
|
-
|
|
1956
|
-
|
|
1957
|
-
|
|
1958
|
-
|
|
1404
|
+
var toInfraMap = (registry) => {
|
|
1405
|
+
const tables = {};
|
|
1406
|
+
const topics = {};
|
|
1407
|
+
const ingestApis = {};
|
|
1408
|
+
const apis = {};
|
|
1409
|
+
const sqlResources = {};
|
|
1410
|
+
const workflows = {};
|
|
1411
|
+
const webApps = {};
|
|
1412
|
+
registry.tables.forEach((table) => {
|
|
1413
|
+
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
1414
|
+
let metadata = table.metadata;
|
|
1415
|
+
if (!metadata && table.config && table.pipelineParent) {
|
|
1416
|
+
metadata = table.pipelineParent.metadata;
|
|
1417
|
+
}
|
|
1418
|
+
const engineConfig = convertTableConfigToEngineConfig(table.config);
|
|
1419
|
+
let tableSettings = void 0;
|
|
1420
|
+
if (table.config.settings) {
|
|
1421
|
+
tableSettings = Object.entries(table.config.settings).reduce(
|
|
1422
|
+
(acc, [key, value]) => {
|
|
1423
|
+
if (value !== void 0) {
|
|
1424
|
+
acc[key] = String(value);
|
|
1425
|
+
}
|
|
1426
|
+
return acc;
|
|
1427
|
+
},
|
|
1428
|
+
{}
|
|
1429
|
+
);
|
|
1430
|
+
}
|
|
1431
|
+
if (engineConfig?.engine === "S3Queue") {
|
|
1432
|
+
if (!tableSettings) {
|
|
1433
|
+
tableSettings = {};
|
|
1434
|
+
}
|
|
1435
|
+
if (!tableSettings.mode) {
|
|
1436
|
+
tableSettings.mode = "unordered";
|
|
1959
1437
|
}
|
|
1960
1438
|
}
|
|
1961-1977
|
-
|
|
1439
|
+
const hasOrderByFields = "orderByFields" in table.config && Array.isArray(table.config.orderByFields) && table.config.orderByFields.length > 0;
|
|
1440
|
+
const hasOrderByExpression = "orderByExpression" in table.config && typeof table.config.orderByExpression === "string" && table.config.orderByExpression.length > 0;
|
|
1441
|
+
if (hasOrderByFields && hasOrderByExpression) {
|
|
1442
|
+
throw new Error(
|
|
1443
|
+
`Table ${table.name}: Provide either orderByFields or orderByExpression, not both.`
|
|
1444
|
+
);
|
|
1445
|
+
}
|
|
1446
|
+
const orderBy = hasOrderByExpression && "orderByExpression" in table.config ? table.config.orderByExpression ?? "" : "orderByFields" in table.config ? table.config.orderByFields ?? [] : [];
|
|
1447
|
+
tables[id] = {
|
|
1448
|
+
name: table.name,
|
|
1449
|
+
columns: table.columnArray,
|
|
1450
|
+
orderBy,
|
|
1451
|
+
partitionBy: "partitionBy" in table.config ? table.config.partitionBy : void 0,
|
|
1452
|
+
sampleByExpression: "sampleByExpression" in table.config ? table.config.sampleByExpression : void 0,
|
|
1453
|
+
primaryKeyExpression: "primaryKeyExpression" in table.config ? table.config.primaryKeyExpression : void 0,
|
|
1454
|
+
engineConfig,
|
|
1455
|
+
version: table.config.version,
|
|
1456
|
+
metadata,
|
|
1457
|
+
lifeCycle: table.config.lifeCycle,
|
|
1458
|
+
// Map 'settings' to 'tableSettings' for internal use
|
|
1459
|
+
tableSettings: tableSettings && Object.keys(tableSettings).length > 0 ? tableSettings : void 0,
|
|
1460
|
+
indexes: table.config.indexes?.map((i) => ({
|
|
1461
|
+
...i,
|
|
1462
|
+
granularity: i.granularity === void 0 ? 1 : i.granularity,
|
|
1463
|
+
arguments: i.arguments === void 0 ? [] : i.arguments
|
|
1464
|
+
})) || [],
|
|
1465
|
+
ttl: table.config.ttl,
|
|
1466
|
+
database: table.config.database,
|
|
1467
|
+
cluster: table.config.cluster
|
|
1978
1468
|
};
|
|
1979
|
-
|
|
1980
|
-
|
|
1981
|
-
|
|
1982
|
-
|
|
1983
|
-
|
|
1984
|
-
|
|
1985
|
-
|
|
1986
|
-
|
|
1987
|
-
|
|
1988
|
-
|
|
1989
|
-
|
|
1990
|
-
|
|
1991
|
-
|
|
1992
|
-
|
|
1993
|
-
|
|
1994
|
-
|
|
1995
|
-
let tableSettings = void 0;
|
|
1996
|
-
if (table.config.settings) {
|
|
1997
|
-
tableSettings = Object.entries(table.config.settings).reduce(
|
|
1998
|
-
(acc, [key, value]) => {
|
|
1999
|
-
if (value !== void 0) {
|
|
2000
|
-
acc[key] = String(value);
|
|
2001
|
-
}
|
|
2002
|
-
return acc;
|
|
2003
|
-
},
|
|
2004
|
-
{}
|
|
2005
|
-
);
|
|
2006
|
-
}
|
|
2007
|
-
if (engineConfig?.engine === "S3Queue") {
|
|
2008
|
-
if (!tableSettings) {
|
|
2009
|
-
tableSettings = {};
|
|
2010
|
-
}
|
|
2011
|
-
if (!tableSettings.mode) {
|
|
2012
|
-
tableSettings.mode = "unordered";
|
|
2013
|
-
}
|
|
2014
|
-
}
|
|
2015
|
-
const hasOrderByFields = "orderByFields" in table.config && Array.isArray(table.config.orderByFields) && table.config.orderByFields.length > 0;
|
|
2016
|
-
const hasOrderByExpression = "orderByExpression" in table.config && typeof table.config.orderByExpression === "string" && table.config.orderByExpression.length > 0;
|
|
2017
|
-
if (hasOrderByFields && hasOrderByExpression) {
|
|
2018
|
-
throw new Error(
|
|
2019
|
-
`Table ${table.name}: Provide either orderByFields or orderByExpression, not both.`
|
|
2020
|
-
);
|
|
2021
|
-
}
|
|
2022
|
-
const orderBy = hasOrderByExpression && "orderByExpression" in table.config ? table.config.orderByExpression ?? "" : "orderByFields" in table.config ? table.config.orderByFields ?? [] : [];
|
|
2023
|
-
tables[id] = {
|
|
2024
|
-
name: table.name,
|
|
2025
|
-
columns: table.columnArray,
|
|
2026
|
-
orderBy,
|
|
2027
|
-
partitionBy: "partitionBy" in table.config ? table.config.partitionBy : void 0,
|
|
2028
|
-
sampleByExpression: "sampleByExpression" in table.config ? table.config.sampleByExpression : void 0,
|
|
2029
|
-
primaryKeyExpression: "primaryKeyExpression" in table.config ? table.config.primaryKeyExpression : void 0,
|
|
2030
|
-
engineConfig,
|
|
2031
|
-
version: table.config.version,
|
|
2032
|
-
metadata,
|
|
2033
|
-
lifeCycle: table.config.lifeCycle,
|
|
2034
|
-
// Map 'settings' to 'tableSettings' for internal use
|
|
2035
|
-
tableSettings: tableSettings && Object.keys(tableSettings).length > 0 ? tableSettings : void 0,
|
|
2036
|
-
indexes: table.config.indexes?.map((i) => ({
|
|
2037
|
-
...i,
|
|
2038
|
-
granularity: i.granularity === void 0 ? 1 : i.granularity,
|
|
2039
|
-
arguments: i.arguments === void 0 ? [] : i.arguments
|
|
2040
|
-
})) || [],
|
|
2041
|
-
ttl: table.config.ttl,
|
|
2042
|
-
database: table.config.database,
|
|
2043
|
-
cluster: table.config.cluster
|
|
2044
|
-
};
|
|
2045
|
-
});
|
|
2046
|
-
registry.streams.forEach((stream) => {
|
|
2047
|
-
let metadata = stream.metadata;
|
|
2048
|
-
if (!metadata && stream.config && stream.pipelineParent) {
|
|
2049
|
-
metadata = stream.pipelineParent.metadata;
|
|
2050
|
-
}
|
|
2051
|
-
const transformationTargets = [];
|
|
2052
|
-
const consumers = [];
|
|
2053
|
-
stream._transformations.forEach((transforms, destinationName) => {
|
|
2054
|
-
transforms.forEach(([destination, _, config]) => {
|
|
2055
|
-
transformationTargets.push({
|
|
2056
|
-
kind: "stream",
|
|
2057
|
-
name: destinationName,
|
|
2058
|
-
version: config.version,
|
|
2059
|
-
metadata: config.metadata,
|
|
2060
|
-
sourceFile: config.sourceFile
|
|
2061
|
-
});
|
|
2062
|
-
});
|
|
2063
|
-
});
|
|
2064
|
-
stream._consumers.forEach((consumer) => {
|
|
2065
|
-
consumers.push({
|
|
2066
|
-
version: consumer.config.version,
|
|
2067
|
-
sourceFile: consumer.config.sourceFile
|
|
2068
|
-
});
|
|
1469
|
+
});
|
|
1470
|
+
registry.streams.forEach((stream) => {
|
|
1471
|
+
let metadata = stream.metadata;
|
|
1472
|
+
if (!metadata && stream.config && stream.pipelineParent) {
|
|
1473
|
+
metadata = stream.pipelineParent.metadata;
|
|
1474
|
+
}
|
|
1475
|
+
const transformationTargets = [];
|
|
1476
|
+
const consumers = [];
|
|
1477
|
+
stream._transformations.forEach((transforms, destinationName) => {
|
|
1478
|
+
transforms.forEach(([destination, _, config]) => {
|
|
1479
|
+
transformationTargets.push({
|
|
1480
|
+
kind: "stream",
|
|
1481
|
+
name: destinationName,
|
|
1482
|
+
version: config.version,
|
|
1483
|
+
metadata: config.metadata,
|
|
1484
|
+
sourceFile: config.sourceFile
|
|
2069
1485
|
});
|
|
2070
|
-
topics[stream.name] = {
|
|
2071
|
-
name: stream.name,
|
|
2072
|
-
columns: stream.columnArray,
|
|
2073
|
-
targetTable: stream.config.destination?.name,
|
|
2074
|
-
targetTableVersion: stream.config.destination?.config.version,
|
|
2075
|
-
retentionPeriod: stream.config.retentionPeriod ?? defaultRetentionPeriod,
|
|
2076
|
-
partitionCount: stream.config.parallelism ?? 1,
|
|
2077
|
-
version: stream.config.version,
|
|
2078
|
-
transformationTargets,
|
|
2079
|
-
hasMultiTransform: stream._multipleTransformations === void 0,
|
|
2080
|
-
consumers,
|
|
2081
|
-
metadata,
|
|
2082
|
-
lifeCycle: stream.config.lifeCycle,
|
|
2083
|
-
schemaConfig: stream.config.schemaConfig
|
|
2084
|
-
};
|
|
2085
|
-
});
|
|
2086
|
-
registry.ingestApis.forEach((api) => {
|
|
2087
|
-
let metadata = api.metadata;
|
|
2088
|
-
if (!metadata && api.config && api.pipelineParent) {
|
|
2089
|
-
metadata = api.pipelineParent.metadata;
|
|
2090
|
-
}
|
|
2091
|
-
ingestApis[api.name] = {
|
|
2092
|
-
name: api.name,
|
|
2093
|
-
columns: api.columnArray,
|
|
2094
|
-
version: api.config.version,
|
|
2095
|
-
path: api.config.path,
|
|
2096
|
-
writeTo: {
|
|
2097
|
-
kind: "stream",
|
|
2098
|
-
name: api.config.destination.name
|
|
2099
|
-
},
|
|
2100
|
-
deadLetterQueue: api.config.deadLetterQueue?.name,
|
|
2101
|
-
metadata,
|
|
2102
|
-
schema: api.schema,
|
|
2103
|
-
allowExtraFields: api.allowExtraFields
|
|
2104
|
-
};
|
|
2105
|
-
});
|
|
2106
|
-
registry.apis.forEach((api, key) => {
|
|
2107
|
-
const rustKey = api.config.version ? `${api.name}:${api.config.version}` : api.name;
|
|
2108
|
-
apis[rustKey] = {
|
|
2109
|
-
name: api.name,
|
|
2110
|
-
queryParams: api.columnArray,
|
|
2111
|
-
responseSchema: api.responseSchema,
|
|
2112
|
-
version: api.config.version,
|
|
2113
|
-
path: api.config.path,
|
|
2114
|
-
metadata: api.metadata
|
|
2115
|
-
};
|
|
2116
|
-
});
|
|
2117
|
-
registry.sqlResources.forEach((sqlResource) => {
|
|
2118
|
-
sqlResources[sqlResource.name] = {
|
|
2119
|
-
name: sqlResource.name,
|
|
2120
|
-
setup: sqlResource.setup,
|
|
2121
|
-
teardown: sqlResource.teardown,
|
|
2122
|
-
sourceFile: sqlResource.sourceFile,
|
|
2123
|
-
sourceLine: sqlResource.sourceLine,
|
|
2124
|
-
sourceColumn: sqlResource.sourceColumn,
|
|
2125
|
-
pullsDataFrom: sqlResource.pullsDataFrom.map((r) => {
|
|
2126
|
-
if (r.kind === "OlapTable") {
|
|
2127
|
-
const table = r;
|
|
2128
|
-
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
2129
|
-
return {
|
|
2130
|
-
id,
|
|
2131
|
-
kind: "Table"
|
|
2132
|
-
};
|
|
2133
|
-
} else if (r.kind === "SqlResource") {
|
|
2134
|
-
const resource = r;
|
|
2135
|
-
return {
|
|
2136
|
-
id: resource.name,
|
|
2137
|
-
kind: "SqlResource"
|
|
2138
|
-
};
|
|
2139
|
-
} else {
|
|
2140
|
-
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2141
|
-
}
|
|
2142
|
-
}),
|
|
2143
|
-
pushesDataTo: sqlResource.pushesDataTo.map((r) => {
|
|
2144
|
-
if (r.kind === "OlapTable") {
|
|
2145
|
-
const table = r;
|
|
2146
|
-
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
2147
|
-
return {
|
|
2148
|
-
id,
|
|
2149
|
-
kind: "Table"
|
|
2150
|
-
};
|
|
2151
|
-
} else if (r.kind === "SqlResource") {
|
|
2152
|
-
const resource = r;
|
|
2153
|
-
return {
|
|
2154
|
-
id: resource.name,
|
|
2155
|
-
kind: "SqlResource"
|
|
2156
|
-
};
|
|
2157
|
-
} else {
|
|
2158
|
-
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2159
|
-
}
|
|
2160
|
-
})
|
|
2161
|
-
};
|
|
2162
|
-
});
|
|
2163
|
-
registry.workflows.forEach((workflow) => {
|
|
2164
|
-
workflows[workflow.name] = {
|
|
2165
|
-
name: workflow.name,
|
|
2166
|
-
retries: workflow.config.retries,
|
|
2167
|
-
timeout: workflow.config.timeout,
|
|
2168
|
-
schedule: workflow.config.schedule
|
|
2169
|
-
};
|
|
2170
1486
|
});
|
|
2171
|
-
|
|
2172
|
-
|
|
2173
|
-
|
|
2174
|
-
|
|
2175
|
-
|
|
2176
|
-
};
|
|
1487
|
+
});
|
|
1488
|
+
stream._consumers.forEach((consumer) => {
|
|
1489
|
+
consumers.push({
|
|
1490
|
+
version: consumer.config.version,
|
|
1491
|
+
sourceFile: consumer.config.sourceFile
|
|
2177
1492
|
});
|
|
2178-2186
|
-
|
|
1493
|
+
});
|
|
1494
|
+
topics[stream.name] = {
|
|
1495
|
+
name: stream.name,
|
|
1496
|
+
columns: stream.columnArray,
|
|
1497
|
+
targetTable: stream.config.destination?.name,
|
|
1498
|
+
targetTableVersion: stream.config.destination?.config.version,
|
|
1499
|
+
retentionPeriod: stream.config.retentionPeriod ?? defaultRetentionPeriod,
|
|
1500
|
+
partitionCount: stream.config.parallelism ?? 1,
|
|
1501
|
+
version: stream.config.version,
|
|
1502
|
+
transformationTargets,
|
|
1503
|
+
hasMultiTransform: stream._multipleTransformations === void 0,
|
|
1504
|
+
consumers,
|
|
1505
|
+
metadata,
|
|
1506
|
+
lifeCycle: stream.config.lifeCycle,
|
|
1507
|
+
schemaConfig: stream.config.schemaConfig
|
|
2187
1508
|
};
|
|
2188
|
-
|
|
2189
|
-
|
|
2190
|
-
|
|
1509
|
+
});
|
|
1510
|
+
registry.ingestApis.forEach((api) => {
|
|
1511
|
+
let metadata = api.metadata;
|
|
1512
|
+
if (!metadata && api.config && api.pipelineParent) {
|
|
1513
|
+
metadata = api.pipelineParent.metadata;
|
|
2191
1514
|
}
|
|
2192-2204
|
-
|
|
2205
|
-
registry.apis.clear();
|
|
2206
|
-
registry.sqlResources.clear();
|
|
2207
|
-
registry.workflows.clear();
|
|
2208
|
-
registry.webApps.clear();
|
|
2209
|
-
const appDir = `${process2.cwd()}/${getSourceDir()}`;
|
|
2210
|
-
Object.keys(__require.cache).forEach((key) => {
|
|
2211
|
-
if (key.startsWith(appDir)) {
|
|
2212
|
-
delete __require.cache[key];
|
|
2213
|
-
}
|
|
2214
|
-
});
|
|
2215
|
-
try {
|
|
2216
|
-
__require(`${process2.cwd()}/${getSourceDir()}/index.ts`);
|
|
2217
|
-
} catch (error) {
|
|
2218
|
-
let hint;
|
|
2219
|
-
const details = error instanceof Error ? error.message : String(error);
|
|
2220
|
-
if (details.includes("ERR_REQUIRE_ESM") || details.includes("ES Module")) {
|
|
2221
|
-
hint = "The file or its dependencies are ESM-only. Switch to packages that dual-support CJS & ESM, or upgrade to Node 22.12+. If you must use Node 20, you may try Node 20.19\n\n";
|
|
2222
|
-
}
|
|
2223
|
-
const errorMsg = `${hint ?? ""}${details}`;
|
|
2224
|
-
const cause = error instanceof Error ? error : void 0;
|
|
2225
|
-
throw new Error(errorMsg, { cause });
|
|
2226
|
-
}
|
|
1515
|
+
ingestApis[api.name] = {
|
|
1516
|
+
name: api.name,
|
|
1517
|
+
columns: api.columnArray,
|
|
1518
|
+
version: api.config.version,
|
|
1519
|
+
path: api.config.path,
|
|
1520
|
+
writeTo: {
|
|
1521
|
+
kind: "stream",
|
|
1522
|
+
name: api.config.destination.name
|
|
1523
|
+
},
|
|
1524
|
+
deadLetterQueue: api.config.deadLetterQueue?.name,
|
|
1525
|
+
metadata,
|
|
1526
|
+
schema: api.schema,
|
|
1527
|
+
allowExtraFields: api.allowExtraFields
|
|
2227
1528
|
};
|
|
2228
|
-
|
|
2229
|
-
|
|
2230
|
-
|
|
2231
|
-
|
|
2232
|
-
|
|
2233
|
-
|
|
2234
|
-
|
|
2235
|
-
|
|
2236
|
-
|
|
2237
|
-
|
|
2238
|
-
transform,
|
|
2239
|
-
config,
|
|
2240
|
-
stream.columnArray
|
|
2241
|
-
]);
|
|
2242
|
-
});
|
|
2243
|
-
});
|
|
2244
|
-
stream._consumers.forEach((consumer) => {
|
|
2245
|
-
const consumerFunctionKey = `${stream.name}_<no-target>${consumer.config.version ? `_${consumer.config.version}` : ""}`;
|
|
2246
|
-
transformFunctions.set(consumerFunctionKey, [
|
|
2247
|
-
consumer.consumer,
|
|
2248
|
-
consumer.config,
|
|
2249
|
-
stream.columnArray
|
|
2250
|
-
]);
|
|
2251
|
-
});
|
|
2252
|
-
});
|
|
2253
|
-
return transformFunctions;
|
|
1529
|
+
});
|
|
1530
|
+
registry.apis.forEach((api, key) => {
|
|
1531
|
+
const rustKey = api.config.version ? `${api.name}:${api.config.version}` : api.name;
|
|
1532
|
+
apis[rustKey] = {
|
|
1533
|
+
name: api.name,
|
|
1534
|
+
queryParams: api.columnArray,
|
|
1535
|
+
responseSchema: api.responseSchema,
|
|
1536
|
+
version: api.config.version,
|
|
1537
|
+
path: api.config.path,
|
|
1538
|
+
metadata: api.metadata
|
|
2254
1539
|
};
|
|
2255
|
-
|
|
2256
|
-
|
|
2257
|
-
|
|
2258
|
-
|
|
2259
|
-
|
|
2260
|
-
|
|
2261
|
-
|
|
2262
|
-
|
|
2263
|
-
|
|
2264
|
-
|
|
2265
|
-
|
|
2266
|
-
|
|
2267
|
-
}
|
|
2268
|
-
|
|
2269
|
-
|
|
2270
|
-
|
|
2271
|
-
|
|
2272
|
-
|
|
2273
|
-
|
|
2274
|
-
|
|
2275
|
-
|
|
2276
|
-
|
|
2277
|
-
}
|
|
1540
|
+
});
|
|
1541
|
+
registry.sqlResources.forEach((sqlResource) => {
|
|
1542
|
+
sqlResources[sqlResource.name] = {
|
|
1543
|
+
name: sqlResource.name,
|
|
1544
|
+
setup: sqlResource.setup,
|
|
1545
|
+
teardown: sqlResource.teardown,
|
|
1546
|
+
sourceFile: sqlResource.sourceFile,
|
|
1547
|
+
sourceLine: sqlResource.sourceLine,
|
|
1548
|
+
sourceColumn: sqlResource.sourceColumn,
|
|
1549
|
+
pullsDataFrom: sqlResource.pullsDataFrom.map((r) => {
|
|
1550
|
+
if (r.kind === "OlapTable") {
|
|
1551
|
+
const table = r;
|
|
1552
|
+
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
1553
|
+
return {
|
|
1554
|
+
id,
|
|
1555
|
+
kind: "Table"
|
|
1556
|
+
};
|
|
1557
|
+
} else if (r.kind === "SqlResource") {
|
|
1558
|
+
const resource = r;
|
|
1559
|
+
return {
|
|
1560
|
+
id: resource.name,
|
|
1561
|
+
kind: "SqlResource"
|
|
1562
|
+
};
|
|
1563
|
+
} else {
|
|
1564
|
+
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2278
1565
|
}
|
|
2279
|
-
})
|
|
2280
|
-
|
|
2281
|
-
if (
|
|
2282
|
-
|
|
1566
|
+
}),
|
|
1567
|
+
pushesDataTo: sqlResource.pushesDataTo.map((r) => {
|
|
1568
|
+
if (r.kind === "OlapTable") {
|
|
1569
|
+
const table = r;
|
|
1570
|
+
const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
|
|
1571
|
+
return {
|
|
1572
|
+
id,
|
|
1573
|
+
kind: "Table"
|
|
1574
|
+
};
|
|
1575
|
+
} else if (r.kind === "SqlResource") {
|
|
1576
|
+
const resource = r;
|
|
1577
|
+
return {
|
|
1578
|
+
id: resource.name,
|
|
1579
|
+
kind: "SqlResource"
|
|
1580
|
+
};
|
|
1581
|
+
} else {
|
|
1582
|
+
throw new Error(`Unknown sql resource dependency type: ${r}`);
|
|
2283
1583
|
}
|
|
2284
|
-
})
|
|
2285
|
-
|
|
1584
|
+
})
|
|
1585
|
+
};
|
|
1586
|
+
});
|
|
1587
|
+
registry.workflows.forEach((workflow) => {
|
|
1588
|
+
workflows[workflow.name] = {
|
|
1589
|
+
name: workflow.name,
|
|
1590
|
+
retries: workflow.config.retries,
|
|
1591
|
+
timeout: workflow.config.timeout,
|
|
1592
|
+
schedule: workflow.config.schedule
|
|
2286
1593
|
};
|
|
2287
|
-
|
|
2288
|
-
|
|
2289
|
-
|
|
2290
|
-
|
|
1594
|
+
});
|
|
1595
|
+
registry.webApps.forEach((webApp) => {
|
|
1596
|
+
webApps[webApp.name] = {
|
|
1597
|
+
name: webApp.name,
|
|
1598
|
+
mountPath: webApp.config.mountPath || "/",
|
|
1599
|
+
metadata: webApp.config.metadata
|
|
2291
1600
|
};
|
|
2292
|
-
|
|
2293
|
-
|
|
2294
|
-
|
|
2295
|
-
|
|
2296
|
-
|
|
1601
|
+
});
|
|
1602
|
+
return {
|
|
1603
|
+
topics,
|
|
1604
|
+
tables,
|
|
1605
|
+
ingestApis,
|
|
1606
|
+
apis,
|
|
1607
|
+
sqlResources,
|
|
1608
|
+
workflows,
|
|
1609
|
+
webApps
|
|
1610
|
+
};
|
|
1611
|
+
};
|
|
1612
|
+
var getMooseInternal = () => globalThis.moose_internal;
|
|
1613
|
+
if (getMooseInternal() === void 0) {
|
|
1614
|
+
globalThis.moose_internal = moose_internal;
|
|
1615
|
+
}
|
|
1616
|
+
var dumpMooseInternal = async () => {
|
|
1617
|
+
loadIndex();
|
|
1618
|
+
console.log(
|
|
1619
|
+
"___MOOSE_STUFF___start",
|
|
1620
|
+
JSON.stringify(toInfraMap(getMooseInternal())),
|
|
1621
|
+
"end___MOOSE_STUFF___"
|
|
1622
|
+
);
|
|
1623
|
+
};
|
|
1624
|
+
var loadIndex = () => {
|
|
1625
|
+
const registry = getMooseInternal();
|
|
1626
|
+
registry.tables.clear();
|
|
1627
|
+
registry.streams.clear();
|
|
1628
|
+
registry.ingestApis.clear();
|
|
1629
|
+
registry.apis.clear();
|
|
1630
|
+
registry.sqlResources.clear();
|
|
1631
|
+
registry.workflows.clear();
|
|
1632
|
+
registry.webApps.clear();
|
|
1633
|
+
const appDir = `${process2.cwd()}/${getSourceDir()}`;
|
|
1634
|
+
Object.keys(__require.cache).forEach((key) => {
|
|
1635
|
+
if (key.startsWith(appDir)) {
|
|
1636
|
+
delete __require.cache[key];
|
|
1637
|
+
}
|
|
1638
|
+
});
|
|
1639
|
+
try {
|
|
1640
|
+
__require(`${process2.cwd()}/${getSourceDir()}/index.ts`);
|
|
1641
|
+
} catch (error) {
|
|
1642
|
+
let hint;
|
|
1643
|
+
const details = error instanceof Error ? error.message : String(error);
|
|
1644
|
+
if (details.includes("ERR_REQUIRE_ESM") || details.includes("ES Module")) {
|
|
1645
|
+
hint = "The file or its dependencies are ESM-only. Switch to packages that dual-support CJS & ESM, or upgrade to Node 22.12+. If you must use Node 20, you may try Node 20.19\n\n";
|
|
1646
|
+
}
|
|
1647
|
+
const errorMsg = `${hint ?? ""}${details}`;
|
|
1648
|
+
const cause = error instanceof Error ? error : void 0;
|
|
1649
|
+
throw new Error(errorMsg, { cause });
|
|
1650
|
+
}
|
|
1651
|
+
};
|
|
1652
|
+
var getStreamingFunctions = async () => {
|
|
1653
|
+
loadIndex();
|
|
1654
|
+
const registry = getMooseInternal();
|
|
1655
|
+
const transformFunctions = /* @__PURE__ */ new Map();
|
|
1656
|
+
registry.streams.forEach((stream) => {
|
|
1657
|
+
stream._transformations.forEach((transforms, destinationName) => {
|
|
1658
|
+
transforms.forEach(([_, transform, config]) => {
|
|
1659
|
+
const transformFunctionKey = `${stream.name}_${destinationName}${config.version ? `_${config.version}` : ""}`;
|
|
1660
|
+
compilerLog(`getStreamingFunctions: ${transformFunctionKey}`);
|
|
1661
|
+
transformFunctions.set(transformFunctionKey, [
|
|
1662
|
+
transform,
|
|
1663
|
+
config,
|
|
1664
|
+
stream.columnArray
|
|
1665
|
+
]);
|
|
1666
|
+
});
|
|
1667
|
+
});
|
|
1668
|
+
stream._consumers.forEach((consumer) => {
|
|
1669
|
+
const consumerFunctionKey = `${stream.name}_<no-target>${consumer.config.version ? `_${consumer.config.version}` : ""}`;
|
|
1670
|
+
transformFunctions.set(consumerFunctionKey, [
|
|
1671
|
+
consumer.consumer,
|
|
1672
|
+
consumer.config,
|
|
1673
|
+
stream.columnArray
|
|
1674
|
+
]);
|
|
1675
|
+
});
|
|
1676
|
+
});
|
|
1677
|
+
return transformFunctions;
|
|
1678
|
+
};
|
|
1679
|
+
var getApis2 = async () => {
|
|
1680
|
+
loadIndex();
|
|
1681
|
+
const apiFunctions = /* @__PURE__ */ new Map();
|
|
1682
|
+
const registry = getMooseInternal();
|
|
1683
|
+
const versionCountByName = /* @__PURE__ */ new Map();
|
|
1684
|
+
const nameToSoleVersionHandler = /* @__PURE__ */ new Map();
|
|
1685
|
+
registry.apis.forEach((api, key) => {
|
|
1686
|
+
const handler = api.getHandler();
|
|
1687
|
+
apiFunctions.set(key, handler);
|
|
1688
|
+
if (!api.config.version) {
|
|
1689
|
+
if (!apiFunctions.has(api.name)) {
|
|
1690
|
+
apiFunctions.set(api.name, handler);
|
|
2297
1691
|
}
|
|
2298
|
-
|
|
2299
|
-
|
|
2300
|
-
|
|
2301
|
-
);
|
|
2302
|
-
|
|
2303
|
-
|
|
1692
|
+
nameToSoleVersionHandler.delete(api.name);
|
|
1693
|
+
versionCountByName.delete(api.name);
|
|
1694
|
+
} else if (!apiFunctions.has(api.name)) {
|
|
1695
|
+
const count = (versionCountByName.get(api.name) ?? 0) + 1;
|
|
1696
|
+
versionCountByName.set(api.name, count);
|
|
1697
|
+
if (count === 1) {
|
|
1698
|
+
nameToSoleVersionHandler.set(api.name, handler);
|
|
1699
|
+
} else {
|
|
1700
|
+
nameToSoleVersionHandler.delete(api.name);
|
|
2304
1701
|
}
|
|
2305
|
-
|
|
2306
|
-
|
|
2307
|
-
|
|
2308
|
-
|
|
2309
|
-
|
|
2310
|
-
}
|
|
1702
|
+
}
|
|
1703
|
+
});
|
|
1704
|
+
nameToSoleVersionHandler.forEach((handler, name) => {
|
|
1705
|
+
if (!apiFunctions.has(name)) {
|
|
1706
|
+
apiFunctions.set(name, handler);
|
|
1707
|
+
}
|
|
1708
|
+
});
|
|
1709
|
+
return apiFunctions;
|
|
1710
|
+
};
|
|
1711
|
+
var getWorkflows2 = async () => {
|
|
1712
|
+
loadIndex();
|
|
1713
|
+
const registry = getMooseInternal();
|
|
1714
|
+
return registry.workflows;
|
|
1715
|
+
};
|
|
1716
|
+
function findTaskInTree(task, targetName) {
|
|
1717
|
+
if (task.name === targetName) {
|
|
1718
|
+
return task;
|
|
2311
1719
|
}
|
|
2312
|
-
|
|
2313
|
-
|
|
2314
|
-
|
|
2315
|
-
|
|
2316
|
-
|
|
1720
|
+
if (task.config.onComplete?.length) {
|
|
1721
|
+
for (const childTask of task.config.onComplete) {
|
|
1722
|
+
const found = findTaskInTree(childTask, targetName);
|
|
1723
|
+
if (found) {
|
|
1724
|
+
return found;
|
|
1725
|
+
}
|
|
1726
|
+
}
|
|
1727
|
+
}
|
|
1728
|
+
return void 0;
|
|
1729
|
+
}
|
|
1730
|
+
var getTaskForWorkflow = async (workflowName, taskName) => {
|
|
1731
|
+
const workflows = await getWorkflows2();
|
|
1732
|
+
const workflow = workflows.get(workflowName);
|
|
1733
|
+
if (!workflow) {
|
|
1734
|
+
throw new Error(`Workflow ${workflowName} not found`);
|
|
1735
|
+
}
|
|
1736
|
+
const task = findTaskInTree(
|
|
1737
|
+
workflow.config.startingTask,
|
|
1738
|
+
taskName
|
|
1739
|
+
);
|
|
1740
|
+
if (!task) {
|
|
1741
|
+
throw new Error(`Task ${taskName} not found in workflow ${workflowName}`);
|
|
1742
|
+
}
|
|
1743
|
+
return task;
|
|
1744
|
+
};
|
|
1745
|
+
var getWebApps2 = async () => {
|
|
1746
|
+
loadIndex();
|
|
1747
|
+
return getMooseInternal().webApps;
|
|
1748
|
+
};
|
|
2317
1749
|
|
|
2318
1750
|
// src/blocks/runner.ts
|
|
2319
1751
|
init_commons();
|
|
2320
1752
|
import fastq from "fastq";
|
|
2321
1753
|
import fs2 from "fs";
|
|
2322
|
-
import
|
|
1754
|
+
import path from "path";
|
|
2323
1755
|
var walkDir = (dir, fileExtension, fileList) => {
|
|
2324
1756
|
const files = fs2.readdirSync(dir);
|
|
2325
1757
|
files.forEach((file) => {
|
|
2326
|
-
if (fs2.statSync(
|
|
2327
|
-
fileList = walkDir(
|
|
1758
|
+
if (fs2.statSync(path.join(dir, file)).isDirectory()) {
|
|
1759
|
+
fileList = walkDir(path.join(dir, file), fileExtension, fileList);
|
|
2328
1760
|
} else if (file.endsWith(fileExtension)) {
|
|
2329
|
-
fileList.push(
|
|
1761
|
+
fileList.push(path.join(dir, file));
|
|
2330
1762
|
}
|
|
2331
1763
|
});
|
|
2332
1764
|
return fileList;
|
|
@@ -2337,7 +1769,7 @@ var DependencyError = class extends Error {
|
|
|
2337
1769
|
this.name = "DependencyError";
|
|
2338
1770
|
}
|
|
2339
1771
|
};
|
|
2340
|
-
var
|
|
1772
|
+
var toClientConfig2 = (config) => ({
|
|
2341
1773
|
...config,
|
|
2342
1774
|
useSSL: config.useSSL ? "true" : "false"
|
|
2343
1775
|
});
|
|
@@ -2389,7 +1821,7 @@ var asyncWorker = async (task) => {
|
|
|
2389
1821
|
await createBlocks(task.chClient, task.blocks);
|
|
2390
1822
|
};
|
|
2391
1823
|
var runBlocks = async (config) => {
|
|
2392
|
-
const chClient = getClickhouseClient(
|
|
1824
|
+
const chClient = getClickhouseClient(toClientConfig2(config.clickhouseConfig));
|
|
2393
1825
|
console.log(`Connected`);
|
|
2394
1826
|
const blocksFiles = walkDir(config.blocksDir, ".ts", []);
|
|
2395
1827
|
const numOfBlockFiles = blocksFiles.length;
|
|
@@ -2402,10 +1834,10 @@ var runBlocks = async (config) => {
|
|
|
2402
1834
|
}
|
|
2403
1835
|
}
|
|
2404
1836
|
});
|
|
2405
|
-
for (const
|
|
2406
|
-
console.log(`Adding to queue: ${
|
|
1837
|
+
for (const path3 of blocksFiles) {
|
|
1838
|
+
console.log(`Adding to queue: ${path3}`);
|
|
2407
1839
|
try {
|
|
2408
|
-
const blocks = __require(
|
|
1840
|
+
const blocks = __require(path3).default;
|
|
2409
1841
|
queue.push({
|
|
2410
1842
|
chClient,
|
|
2411
1843
|
blocks,
|
|
@@ -2414,7 +1846,7 @@ var runBlocks = async (config) => {
|
|
|
2414
1846
|
} catch (err) {
|
|
2415
1847
|
cliLog({
|
|
2416
1848
|
action: "Blocks",
|
|
2417
|
-
message: `Failed to import blocks from ${
|
|
1849
|
+
message: `Failed to import blocks from ${path3}: ${err}`,
|
|
2418
1850
|
message_type: "Error"
|
|
2419
1851
|
});
|
|
2420
1852
|
}
|
|
@@ -2424,14 +1856,8 @@ var runBlocks = async (config) => {
|
|
|
2424
1856
|
}
|
|
2425
1857
|
};
|
|
2426
1858
|
|
|
2427
|
-
// src/moose-runner.ts
|
|
2428
|
-
init_runner();
|
|
2429
|
-
|
|
2430
1859
|
// src/streaming-functions/runner.ts
|
|
2431
1860
|
init_commons();
|
|
2432
|
-
init_cluster_utils();
|
|
2433
|
-
init_internal();
|
|
2434
|
-
init_json();
|
|
2435
1861
|
import { Readable as Readable2 } from "stream";
|
|
2436
1862
|
import { KafkaJS as KafkaJS2 } from "@514labs/kafka-javascript";
|
|
2437
1863
|
import { Buffer as Buffer2 } from "buffer";
|
|
@@ -2994,18 +2420,15 @@ async function runApiTypeSerializer(targetModel) {
|
|
|
2994
2420
|
}
|
|
2995
2421
|
|
|
2996
2422
|
// src/scripts/runner.ts
|
|
2997
|
-
init_internal();
|
|
2998
2423
|
import {
|
|
2999
2424
|
NativeConnection,
|
|
3000
2425
|
Worker,
|
|
3001
2426
|
bundleWorkflowCode
|
|
3002
2427
|
} from "@temporalio/worker";
|
|
3003
|
-
import * as
|
|
2428
|
+
import * as path2 from "path";
|
|
3004
2429
|
import * as fs3 from "fs";
|
|
3005
2430
|
|
|
3006
2431
|
// src/scripts/activity.ts
|
|
3007
|
-
init_internal();
|
|
3008
|
-
init_json();
|
|
3009
2432
|
import { log as logger, Context } from "@temporalio/activity";
|
|
3010
2433
|
import { isCancellation } from "@temporalio/workflow";
|
|
3011
2434
|
var activities = {
|
|
@@ -3296,7 +2719,7 @@ async function registerWorkflows(logger2, config) {
|
|
|
3296
2719
|
}
|
|
3297
2720
|
};
|
|
3298
2721
|
const workflowBundle = await bundleWorkflowCode({
|
|
3299
|
-
workflowsPath:
|
|
2722
|
+
workflowsPath: path2.resolve(__dirname, "scripts/workflow.js"),
|
|
3300
2723
|
logger: silentLogger
|
|
3301
2724
|
});
|
|
3302
2725
|
const worker = await Worker.create({
|