@bayoudhi/moose-lib-serverless 0.5.0 → 0.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -2
- package/dist/moose-runner.js +3988 -0
- package/package.json +7 -3
|
@@ -0,0 +1,3988 @@
|
|
|
1
|
+
#!/usr/bin/env node
"use strict";
// esbuild CommonJS-interop prelude (generated): helpers shared by every
// bundled module below. Do not hand-edit the semantics.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Wraps a CJS module factory: runs it at most once (on first require) and
// caches `mod.exports` for subsequent calls.
var __commonJS = (cb, mod) => function __require() {
  return mod || (0, cb[__getOwnPropNames(cb)[0]])((mod = { exports: {} }).exports, mod), mod.exports;
};
// Copies own properties of `from` onto `to` as live getters, skipping any
// key named `except` and keys already present on `to`.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Adapts a CJS export object so ESM-style `default` imports resolve.
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
|
|
28
|
+
|
|
29
|
+
// stub-native modules: native/optional dependencies replaced by inert deep
// proxies so code paths that merely reference them don't crash when the real
// package is absent. The original bundle repeated the identical ~40-line
// stub six times; it is factored into one shared factory here. The six
// exported `require_*` names and their behavior are unchanged.
//
// The proxy is callable and constructible, returns itself for any property
// access, call, or construction; `__esModule` reads true, symbol-keyed
// properties read undefined (keeps Node's inspect/iteration protocols sane).
function __createNativeStubModule(name) {
  return __commonJS({
    [name](exports2, module2) {
      "use strict";
      function createDeepProxy() {
        var handler = {
          get: function(_, prop) {
            if (prop === "__esModule") return true;
            if (prop === "default") return proxy;
            if (typeof prop === "symbol") return void 0;
            return proxy;
          },
          apply: function() {
            return proxy;
          },
          construct: function() {
            return proxy;
          },
          ownKeys: function() {
            // Only the keys a bare function target actually owns, so the
            // invariant checks in getOwnPropertyDescriptor hold.
            return ["length", "name", "prototype"];
          },
          getOwnPropertyDescriptor: function(target, prop) {
            if (prop === "length" || prop === "name" || prop === "prototype") {
              return Object.getOwnPropertyDescriptor(target, prop);
            }
            return void 0;
          },
          getPrototypeOf: function() {
            return proxy;
          }
        };
        var proxy = new Proxy(function() {
        }, handler);
        return proxy;
      }
      module2.exports = createDeepProxy();
    }
  });
}

// stub-native:@514labs/kafka-javascript
var require_kafka_javascript = __createNativeStubModule("stub-native:@514labs/kafka-javascript");

// stub-native:@temporalio/client
var require_client = __createNativeStubModule("stub-native:@temporalio/client");

// stub-native:redis
var require_redis = __createNativeStubModule("stub-native:redis");

// stub-native:@temporalio/worker
var require_worker = __createNativeStubModule("stub-native:@temporalio/worker");

// stub-native:@temporalio/activity
var require_activity = __createNativeStubModule("stub-native:@temporalio/activity");

// stub-native:@temporalio/workflow
var require_workflow = __createNativeStubModule("stub-native:@temporalio/workflow");
|
|
262
|
+
|
|
263
|
+
// src/moose-runner.ts
// Second copy of the esbuild interop prelude (generated for the inner
// bundle), plus the lazy-ESM (`__esm`) and re-export (`__export`) helpers.
var __create2 = Object.create;
var __defProp2 = Object.defineProperty;
var __getOwnPropDesc2 = Object.getOwnPropertyDescriptor;
var __getOwnPropNames2 = Object.getOwnPropertyNames;
var __getProtoOf2 = Object.getPrototypeOf;
var __hasOwnProp2 = Object.prototype.hasOwnProperty;
// Runs an ESM module initializer exactly once; later calls return the cached
// result (`fn` is nulled after the first run as the "already ran" flag).
var __esm = (fn, res) => function __init() {
  return fn && (res = (0, fn[__getOwnPropNames2(fn)[0]])(fn = 0)), res;
};
// Defines every entry of `all` on `target` as an enumerable live getter.
var __export = (target, all) => {
  for (var name in all)
    __defProp2(target, name, { get: all[name], enumerable: true });
};
// Same contract as __copyProps above: getter-based own-property copy.
var __copyProps2 = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames2(from))
      if (!__hasOwnProp2.call(to, key) && key !== except)
        __defProp2(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc2(from, key)) || desc.enumerable });
  }
  return to;
};
// Same contract as __toESM above: CJS→ESM default-import shim.
var __toESM2 = (mod, isNodeMode, target) => (target = mod != null ? __create2(__getProtoOf2(mod)) : {}, __copyProps2(
  // If the importer is in node compatibility mode or this is not an ESM
  // file that has been converted to a CommonJS file using a Babel-
  // compatible transform (i.e. "__esModule" has not been set), then set
  // "default" to the CommonJS "module.exports" for node compatibility.
  isNodeMode || !mod || !mod.__esModule ? __defProp2(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
|
|
293
|
+
/**
 * Resolve the Moose source directory.
 * Honors the MOOSE_SOURCE_DIR environment variable; falls back to "app"
 * when it is unset or empty.
 */
function getSourceDir() {
  const configured = process.env.MOOSE_SOURCE_DIR;
  return configured || "app";
}
|
|
296
|
+
/**
 * Report whether pre-compiled artifacts exist for the current source dir.
 * Checks for `<projectRoot>/.moose/compiled/<sourceDir>/index.js`.
 */
function hasCompiledArtifacts(projectRoot = process.cwd()) {
  const compiledIndexPath = import_path.default.join(
    projectRoot,
    ".moose",
    "compiled",
    getSourceDir(),
    "index.js"
  );
  return (0, import_fs.existsSync)(compiledIndexPath);
}
|
|
307
|
+
/**
 * Decide whether to load pre-compiled output instead of ts-node.
 * Requires MOOSE_USE_COMPILED=true AND the compiled artifacts to actually
 * exist; warns (and falls back) when the flag is set but artifacts are missing.
 */
function shouldUseCompiled(projectRoot = process.cwd()) {
  if (process.env.MOOSE_USE_COMPILED !== "true") {
    return false;
  }
  const artifactsPresent = hasCompiledArtifacts(projectRoot);
  if (!artifactsPresent) {
    console.warn(
      `[moose] MOOSE_USE_COMPILED=true but no compiled artifacts found at .moose/compiled/${getSourceDir()}/index.js. Falling back to ts-node.`
    );
  }
  return artifactsPresent;
}
|
|
320
|
+
/**
 * Detect the project's module system from its package.json.
 * Returns "esm" only when `"type": "module"` is declared; a missing or
 * unparseable package.json defaults to "cjs" (parse failures are logged
 * at debug level, never thrown).
 */
function detectModuleSystem(projectRoot = process.cwd()) {
  const pkgPath = import_path.default.join(projectRoot, "package.json");
  if (!(0, import_fs.existsSync)(pkgPath)) {
    return "cjs";
  }
  try {
    const pkg = JSON.parse((0, import_fs.readFileSync)(pkgPath, "utf-8"));
    if (pkg.type === "module") {
      return "esm";
    }
  } catch (e) {
    console.debug(
      `[moose] Failed to parse package.json at ${pkgPath}, defaulting to CJS:`,
      e
    );
  }
  return "cjs";
}
|
|
338
|
+
/**
 * Load a user module with the loader matching the project's module system:
 * dynamic `import()` (via a file:// URL) for ESM projects, `require` for CJS.
 */
async function loadModule(modulePath, projectRoot = process.cwd()) {
  if (detectModuleSystem(projectRoot) !== "esm") {
    return require(modulePath);
  }
  const { pathToFileURL } = await import("url");
  return await import(pathToFileURL(modulePath).href);
}
|
|
347
|
+
// Module-level bindings populated when init_compiler_config() first runs.
var import_fs;
var import_path;
var MOOSE_COMPILER_PLUGINS;
var COMMANDS_REQUIRING_PLUGINS;
// src/compiler-config.ts — lazy initializer (runs once via __esm).
var init_compiler_config = __esm({
  "src/compiler-config.ts"() {
    "use strict";
    import_fs = require("fs");
    import_path = __toESM2(require("path"));
    // ts-patch transformer plugins injected into the user's TS compilation.
    MOOSE_COMPILER_PLUGINS = [
      {
        transform: "./node_modules/@bayoudhi/moose-lib-serverless/dist/compilerPlugin.js"
        // No longer using transformProgram - direct typia integration eliminates
        // the need for program replacement and the associated incremental compilation issues
      },
      {
        // Keep typia plugin for users who use typia directly (not through Moose resources)
        transform: "typia/lib/transform"
      }
    ];
    // CLI subcommands that require the compiler plugins to be registered.
    COMMANDS_REQUIRING_PLUGINS = [
      "consumption-apis",
      "consumption-type-serializer",
      "dmv2-serializer",
      "streaming-functions",
      "scripts"
    ];
  }
});
|
|
376
|
+
// Lazy initializers for type-only source modules: their runtime bodies are
// empty (or only chain other initializers) because the originals carried
// only TypeScript types.
var init_stackTrace = __esm({
  "src/dmv2/utils/stackTrace.ts"() {
    "use strict";
  }
});
var init_typedBase = __esm({
  "src/dmv2/typedBase.ts"() {
    "use strict";
    init_stackTrace();
  }
});
var init_dataModelTypes = __esm({
  "src/dataModels/dataModelTypes.ts"() {
    "use strict";
  }
});
var init_types = __esm({
  "src/dataModels/types.ts"() {
    "use strict";
  }
});
|
|
397
|
+
/**
 * Tagged-template entry point: wraps the template strings and interpolated
 * values in a Sql instance (see init_sqlHelpers).
 */
function sqlImpl(strings, ...values) {
  return new Sql(strings, values);
}
/**
 * Render the ClickHouse bound-parameter placeholder `{pN:Type}` for one
 * positional value.
 */
function createClickhouseParameter(parameterIndex, value) {
  const chType = mapToClickHouseType(value);
  return `{p${parameterIndex}:${chType}}`;
}
/** Coalesce undefined to "" (used when zipping strings with parameters). */
function emptyIfUndefined(value) {
  if (value === void 0) {
    return "";
  }
  return value;
}
|
|
406
|
+
// SQL template helpers. Bindings are populated by init_sqlHelpers() (runs
// once via __esm). The Sql class flattens nested fragments and inlines
// column/table/view identifiers directly into `strings`, so only plain
// values remain in `values` for parameter binding.
var isTable;
var isView;
var isColumn;
var sql;
var instanceofSql;
var Sql;
var toQuery;
var toQueryPreview;
var getValueFromParameter;
var mapToClickHouseType;
var init_sqlHelpers = __esm({
  "src/sqlHelpers.ts"() {
    "use strict";
    // Structural (duck-typed) checks — these values come from user code, so
    // instanceof is not reliable across module copies.
    isTable = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "OlapTable";
    isView = (value) => typeof value === "object" && value !== null && "kind" in value && value.kind === "View";
    isColumn = (value) => typeof value === "object" && value !== null && !("kind" in value) && "name" in value && "annotations" in value;
    sql = sqlImpl;
    instanceofSql = (value) => typeof value === "object" && "values" in value && "strings" in value;
    Sql = class _Sql {
      values;
      strings;
      constructor(rawStrings, rawValues) {
        // A tagged template always has one more string than values.
        if (rawStrings.length - 1 !== rawValues.length) {
          if (rawStrings.length === 0) {
            throw new TypeError("Expected at least 1 string");
          }
          throw new TypeError(
            `Expected ${rawStrings.length} strings to have ${rawStrings.length - 1} values`
          );
        }
        // Pre-size output arrays: nested Sql fragments contribute their own
        // values; columns/tables/views contribute none (inlined as text).
        const valuesLength = rawValues.reduce(
          (len, value) => len + (instanceofSql(value) ? value.values.length : isColumn(value) || isTable(value) || isView(value) ? 0 : 1),
          0
        );
        this.values = new Array(valuesLength);
        this.strings = new Array(valuesLength + 1);
        this.strings[0] = rawStrings[0];
        let i = 0, pos = 0;
        while (i < rawValues.length) {
          const child = rawValues[i++];
          const rawString = rawStrings[i];
          if (instanceofSql(child)) {
            // Splice the nested fragment's strings/values in place.
            this.strings[pos] += child.strings[0];
            let childIndex = 0;
            while (childIndex < child.values.length) {
              this.values[pos++] = child.values[childIndex++];
              this.strings[pos] = child.strings[childIndex];
            }
            this.strings[pos] += rawString;
          } else if (isColumn(child)) {
            // Columns render as backtick-quoted identifiers; aggregated
            // columns render as `<fn>Merge(\`name\`)` for AggregatingMergeTree.
            const aggregationFunction = child.annotations.find(
              ([k, _]) => k === "aggregationFunction"
            );
            if (aggregationFunction !== void 0) {
              this.strings[pos] += `${aggregationFunction[1].functionName}Merge(\`${child.name}\`)`;
            } else {
              this.strings[pos] += `\`${child.name}\``;
            }
            this.strings[pos] += rawString;
          } else if (isTable(child)) {
            if (child.config.database) {
              this.strings[pos] += `\`${child.config.database}\`.\`${child.name}\``;
            } else {
              this.strings[pos] += `\`${child.name}\``;
            }
            this.strings[pos] += rawString;
          } else if (isView(child)) {
            this.strings[pos] += `\`${child.name}\``;
            this.strings[pos] += rawString;
          } else {
            // Plain value: kept for parameter binding.
            this.values[pos++] = child;
            this.strings[pos] = rawString;
          }
        }
      }
      /**
       * Append another Sql fragment, returning a new Sql instance.
       */
      append(other) {
        return new _Sql([...this.strings, ""], [...this.values, other]);
      }
    };
    // Join fragments with a separator (default ", "); bare separators get
    // padded with spaces so e.g. "AND" becomes " AND ".
    sql.join = function(fragments, separator) {
      if (fragments.length === 0) return new Sql([""], []);
      if (fragments.length === 1) return fragments[0];
      const sep = separator ?? ", ";
      const normalized = sep.includes(" ") ? sep : ` ${sep} `;
      const strings = ["", ...Array(fragments.length - 1).fill(normalized), ""];
      return new Sql(strings, fragments);
    };
    // Raw, unescaped SQL text — caller is responsible for its safety.
    sql.raw = function(text) {
      return new Sql([text], []);
    };
    // Render a Sql into [query-with-{pN:Type}-placeholders, {pN: value}].
    toQuery = (sql3) => {
      const parameterizedStubs = sql3.values.map(
        (v, i) => createClickhouseParameter(i, v)
      );
      const query = sql3.strings.map(
        (s, i) => s != "" ? `${s}${emptyIfUndefined(parameterizedStubs[i])}` : ""
      ).join("");
      const query_params = sql3.values.reduce(
        (acc, v, i) => ({
          ...acc,
          [`p${i}`]: getValueFromParameter(v)
        }),
        {}
      );
      return [query, query_params];
    };
    // Best-effort single-line preview with values inlined (for logs only —
    // never executed). Any failure degrades to a placeholder comment.
    toQueryPreview = (sql3) => {
      try {
        const formatValue = (v) => {
          if (Array.isArray(v)) {
            const [type, val] = v;
            if (type === "Identifier") {
              return `\`${String(val)}\``;
            }
            return `[${v.map((x) => formatValue(x)).join(", ")}]`;
          }
          if (v === null || v === void 0) return "NULL";
          if (typeof v === "string") return `'${v.replace(/'/g, "''")}'`;
          if (typeof v === "number") return String(v);
          if (typeof v === "boolean") return v ? "true" : "false";
          if (v instanceof Date)
            return `'${v.toISOString().replace("T", " ").slice(0, 19)}'`;
          try {
            return JSON.stringify(v);
          } catch {
            return String(v);
          }
        };
        let out = sql3.strings[0] ?? "";
        for (let i = 0; i < sql3.values.length; i++) {
          const val = getValueFromParameter(sql3.values[i]);
          out += formatValue(val);
          out += sql3.strings[i + 1] ?? "";
        }
        return out.replace(/\s+/g, " ").trim();
      } catch (error) {
        console.log(`toQueryPreview error: ${error}`);
        return "/* query preview unavailable */";
      }
    };
    // Unwrap ["Identifier", name] tuples; all other values pass through.
    getValueFromParameter = (value) => {
      if (Array.isArray(value)) {
        const [type, val] = value;
        if (type === "Identifier") return val;
      }
      return value;
    };
    // Map a JS value to its ClickHouse parameter type name. Tuples are
    // assumed to be [TypeName, value]; everything else binds as String.
    mapToClickHouseType = (value) => {
      if (typeof value === "number") {
        return Number.isInteger(value) ? "Int" : "Float";
      }
      if (typeof value === "boolean") return "Bool";
      if (value instanceof Date) return "DateTime";
      if (Array.isArray(value)) {
        const [type, _] = value;
        return type;
      }
      return "String";
    };
  }
});
|
|
570
|
+
// Lazy initializers for the dmv2 SDK modules. Each one chains the
// initializers of the modules its source file imported; bodies are
// otherwise empty because the runtime code was tree-shaken or lives in
// init_internal (defined elsewhere in the bundle).
var import_node_stream;
var import_node_crypto;
var init_olapTable = __esm({
  "src/dmv2/sdk/olapTable.ts"() {
    "use strict";
    init_typedBase();
    init_dataModelTypes();
    init_types();
    init_internal();
    import_node_stream = require("stream");
    import_node_crypto = require("crypto");
    init_sqlHelpers();
  }
});
var import_node_crypto2;
var init_stream = __esm({
  "src/dmv2/sdk/stream.ts"() {
    "use strict";
    init_typedBase();
    init_internal();
    import_node_crypto2 = require("crypto");
    init_stackTrace();
  }
});
var init_workflow = __esm({
  "src/dmv2/sdk/workflow.ts"() {
    "use strict";
    init_internal();
  }
});
var init_ingestApi = __esm({
  "src/dmv2/sdk/ingestApi.ts"() {
    "use strict";
    init_typedBase();
    init_internal();
  }
});
var init_consumptionApi = __esm({
  "src/dmv2/sdk/consumptionApi.ts"() {
    "use strict";
    init_typedBase();
    init_internal();
  }
});
var init_ingestPipeline = __esm({
  "src/dmv2/sdk/ingestPipeline.ts"() {
    "use strict";
    init_typedBase();
    init_stream();
    init_olapTable();
    init_ingestApi();
    init_types();
  }
});
var init_etlPipeline = __esm({
  "src/dmv2/sdk/etlPipeline.ts"() {
    "use strict";
    init_workflow();
  }
});
var init_materializedView = __esm({
  "src/dmv2/sdk/materializedView.ts"() {
    "use strict";
    init_types();
    init_sqlHelpers();
    init_olapTable();
    init_internal();
    init_stackTrace();
  }
});
var init_sqlResource = __esm({
  "src/dmv2/sdk/sqlResource.ts"() {
    "use strict";
    init_internal();
    init_sqlHelpers();
    init_stackTrace();
  }
});
var init_view = __esm({
  "src/dmv2/sdk/view.ts"() {
    "use strict";
    init_sqlHelpers();
    init_olapTable();
    init_internal();
    init_stackTrace();
  }
});
var init_lifeCycle = __esm({
  "src/dmv2/sdk/lifeCycle.ts"() {
    "use strict";
  }
});
var init_webApp = __esm({
  "src/dmv2/sdk/webApp.ts"() {
    "use strict";
    init_internal();
  }
});
var init_registry = __esm({
  "src/dmv2/registry.ts"() {
    "use strict";
    init_internal();
  }
});
// Barrel module: initializes every dmv2 SDK module.
var init_dmv2 = __esm({
  "src/dmv2/index.ts"() {
    "use strict";
    init_olapTable();
    init_types();
    init_stream();
    init_workflow();
    init_ingestApi();
    init_consumptionApi();
    init_ingestPipeline();
    init_etlPipeline();
    init_materializedView();
    init_sqlResource();
    init_view();
    init_lifeCycle();
    init_webApp();
    init_registry();
  }
});
var init_browserCompatible = __esm({
  "src/browserCompatible.ts"() {
    "use strict";
    init_dmv2();
    init_types();
    init_sqlHelpers();
  }
});
|
|
701
|
+
/**
 * Interpret an environment-variable string as a boolean flag.
 * Accepts "1", "true", "yes", "on" (case-insensitive, surrounding
 * whitespace ignored); anything else — including undefined, null, and the
 * empty string — is false.
 */
function isTruthy(value) {
  if (!value) return false;
  const normalized = value.trim().toLowerCase();
  return normalized === "1" || normalized === "true" || normalized === "yes" || normalized === "on";
}
|
|
713
|
+
/**
 * Build the Kafka producer configuration.
 * @param maxMessageBytes optional cap forwarded as "message.max.bytes";
 *        omitted from the config when falsy.
 */
function createProducerConfig(maxMessageBytes) {
  const config = {
    kafkaJS: {
      idempotent: false,
      // Not needed for at-least-once delivery
      acks: ACKs,
      retry: {
        retries: MAX_RETRIES_PRODUCER,
        maxRetryTime: MAX_RETRY_TIME_MS
      }
    },
    "linger.ms": 0
    // This is to make sure at least once delivery with immediate feedback on the send
  };
  if (maxMessageBytes) {
    config["message.max.bytes"] = maxMessageBytes;
  }
  return config;
}
|
|
729
|
+
// Shared runtime utilities (ClickHouse + Kafka clients, structured logging).
// Bindings are populated by init_commons() (runs once via __esm).
var import_client;
var import_kafka_javascript;
var Kafka;
var compilerLog;
var getClickhouseClient;
var cliLog;
var MAX_RETRIES;
var MAX_RETRY_TIME_MS;
var RETRY_INITIAL_TIME_MS;
var MAX_RETRIES_PRODUCER;
var ACKs;
var parseBrokerString;
var logError;
var buildSaslConfig;
var getKafkaClient;
var init_commons = __esm({
  "src/commons.ts"() {
    "use strict";
    import_client = require("@clickhouse/client");
    // NOTE: in this serverless build, require_kafka_javascript() returns the
    // inert deep-proxy stub defined at the top of the bundle.
    import_kafka_javascript = require_kafka_javascript();
    ({ Kafka } = import_kafka_javascript.KafkaJS);
    // Log unless suppressed via MOOSE_DISABLE_COMPILER_LOGS.
    compilerLog = (message) => {
      if (!isTruthy(process.env.MOOSE_DISABLE_COMPILER_LOGS)) {
        console.log(message);
      }
    };
    // Create a ClickHouse client. `useSSL` is a string flag ("1"/"true" →
    // https); port/host are interpolated verbatim into the URL.
    getClickhouseClient = ({
      username,
      password,
      database,
      useSSL,
      host,
      port
    }) => {
      const protocol = useSSL === "1" || useSSL.toLowerCase() === "true" ? "https" : "http";
      console.log(`Connecting to Clickhouse at ${protocol}://${host}:${port}`);
      return (0, import_client.createClient)({
        url: `${protocol}://${host}:${port}`,
        username,
        password,
        database,
        application: "moose"
        // Note: wait_end_of_query is configured per operation type, not globally
        // to preserve SELECT query performance while ensuring INSERT/DDL reliability
      });
    };
    // Emit a structured JSON log line on stderr for the Moose CLI to parse.
    cliLog = (log) => {
      const level = log.message_type === "Error" ? "error" : log.message_type === "Warning" ? "warn" : "info";
      const structuredLog = {
        __moose_structured_log__: true,
        level,
        message: log.message,
        resource_type: "runtime",
        cli_action: log.action,
        cli_message_type: log.message_type ?? "Info",
        timestamp: (/* @__PURE__ */ new Date()).toISOString()
      };
      process.stderr.write(JSON.stringify(structuredLog) + "\n");
    };
    // Kafka retry tuning.
    MAX_RETRIES = 150;
    MAX_RETRY_TIME_MS = 1e3;
    RETRY_INITIAL_TIME_MS = 100;
    MAX_RETRIES_PRODUCER = 150;
    ACKs = -1;
    // Split a comma-separated broker list, dropping blanks.
    parseBrokerString = (brokerString) => brokerString.split(",").map((b) => b.trim()).filter((b) => b.length > 0);
    // Log an Error's message and (when present) its stack.
    logError = (logger2, e) => {
      logger2.error(e.message);
      const stack = e.stack;
      if (stack) {
        logger2.error(stack);
      }
    };
    // Map CLI args to a KafkaJS SASL config; unsupported mechanisms log a
    // warning and yield undefined (no SASL).
    buildSaslConfig = (logger2, args) => {
      const mechanism = args.saslMechanism ? args.saslMechanism.toLowerCase() : "";
      switch (mechanism) {
        case "plain":
        case "scram-sha-256":
        case "scram-sha-512":
          return {
            mechanism,
            username: args.saslUsername || "",
            password: args.saslPassword || ""
          };
        default:
          logger2.warn(`Unsupported SASL mechanism: ${args.saslMechanism}`);
          return void 0;
      }
    };
    // Construct a Kafka client from CLI config; throws when no valid broker
    // address is present.
    getKafkaClient = async (cfg, logger2) => {
      const brokers = parseBrokerString(cfg.broker || "");
      if (brokers.length === 0) {
        throw new Error(`No valid broker addresses found in: "${cfg.broker}"`);
      }
      logger2.log(`Creating Kafka client with brokers: ${brokers.join(", ")}`);
      logger2.log(`Security protocol: ${cfg.securityProtocol || "plaintext"}`);
      logger2.log(`Client ID: ${cfg.clientId}`);
      const saslConfig = buildSaslConfig(logger2, cfg);
      return new Kafka({
        kafkaJS: {
          clientId: cfg.clientId,
          brokers,
          ssl: cfg.securityProtocol === "SASL_SSL",
          ...saslConfig && { sasl: saslConfig },
          retry: {
            initialRetryTime: RETRY_INITIAL_TIME_MS,
            maxRetryTime: MAX_RETRY_TIME_MS,
            retries: MAX_RETRIES
          }
        }
      });
    };
  }
});
|
|
842
|
+
// src/secrets.ts carried only types; its runtime initializer is empty.
var init_secrets = __esm({
  "src/secrets.ts"() {
    "use strict";
  }
});
|
|
847
|
+
/**
 * Format a millisecond duration for human-readable log output.
 * Under one second → "N ms"; under a minute → "S.SS seconds";
 * otherwise → "M minutes and S.SS seconds".
 */
function formatElapsedTime(ms) {
  if (ms < 1e3) {
    return `${Math.round(ms)} ms`;
  }
  const totalSeconds = ms / 1e3;
  if (totalSeconds < 60) {
    return `${totalSeconds.toFixed(2)} seconds`;
  }
  const wholeMinutes = Math.floor(totalSeconds / 60);
  const leftoverSeconds = totalSeconds % 60;
  return `${wholeMinutes} minutes and ${leftoverSeconds.toFixed(2)} seconds`;
}
|
|
859
|
+
async function getTemporalClient(temporalUrl, namespace, clientCert, clientKey, apiKey) {
  // Build a Temporal Client, preferring mTLS (cert + key) over API-key auth.
  // Returns undefined instead of throwing when the connection fails so callers
  // can treat Temporal as an optional, feature-flagged dependency.
  try {
    console.info(
      `<api> Using temporal_url: ${temporalUrl} and namespace: ${namespace}`
    );
    const connectionOptions = {
      address: temporalUrl,
      connectTimeout: "3s"
    };
    if (clientCert && clientKey) {
      console.log("Using TLS for secure Temporal");
      // Fix: fs.readFileSync is synchronous — the original `await` on its
      // return value was a no-op and misleadingly implied async I/O.
      const cert = fs.readFileSync(clientCert);
      const key = fs.readFileSync(clientKey);
      connectionOptions.tls = {
        clientCertPair: { crt: cert, key }
      };
    } else if (apiKey) {
      console.log("Using API key for secure Temporal");
      // NOTE(review): API-key auth hard-codes the Temporal Cloud regional
      // endpoint and ignores `temporalUrl` — confirm this is intentional.
      connectionOptions.address = "us-west1.gcp.api.temporal.io:7233";
      connectionOptions.apiKey = apiKey;
      connectionOptions.tls = {};
      connectionOptions.metadata = {
        "temporal-namespace": namespace
      };
    }
    console.log(`<api> Connecting to Temporal at ${connectionOptions.address}`);
    const connection = await import_client2.Connection.connect(connectionOptions);
    const client = new import_client2.Client({ connection, namespace });
    console.log("<api> Connected to Temporal server");
    return client;
  } catch (error) {
    console.warn(`Failed to connect to Temporal. Is the feature flag enabled?`);
    console.warn(error);
    return void 0;
  }
}
|
|
895
|
+
var import_client2;
|
|
896
|
+
var import_node_crypto3;
|
|
897
|
+
var import_perf_hooks;
|
|
898
|
+
var fs;
|
|
899
|
+
var MooseClient;
|
|
900
|
+
var QueryClient;
|
|
901
|
+
var WorkflowClient;
|
|
902
|
+
// Initializer for src/consumption-apis/helpers.ts: wires ClickHouse query and
// Temporal workflow clients used by consumption APIs.
var init_helpers = __esm({
  "src/consumption-apis/helpers.ts"() {
    "use strict";
    import_client2 = require_client();
    import_node_crypto3 = require("crypto");
    import_perf_hooks = require("perf_hooks");
    fs = __toESM2(require("fs"));
    init_internal();
    init_sqlHelpers();
    // Facade bundling a ClickHouse query client with a Temporal workflow client.
    MooseClient = class {
      query;
      workflow;
      constructor(queryClient, temporalClient) {
        this.query = queryClient;
        // temporalClient may be undefined; WorkflowClient handles that at call time.
        this.workflow = new WorkflowClient(temporalClient);
      }
    };
    // Thin wrapper around a ClickHouse client that tags every query with a
    // prefixed random query_id and logs timing for observability.
    QueryClient = class {
      client;
      query_id_prefix;
      constructor(client, query_id_prefix) {
        this.client = client;
        this.query_id_prefix = query_id_prefix;
      }
      // Run a SELECT-style query; returns the driver's streaming result set.
      async execute(sql3) {
        const [query, query_params] = toQuery(sql3);
        console.log(`[QueryClient] | Query: ${toQueryPreview(sql3)}`);
        const start = import_perf_hooks.performance.now();
        const result = await this.client.query({
          query,
          query_params,
          format: "JSONEachRow",
          query_id: this.query_id_prefix + (0, import_node_crypto3.randomUUID)()
          // Note: wait_end_of_query deliberately NOT set here as this is used for SELECT queries
          // where response buffering would harm streaming performance and concurrency
        });
        const elapsedMs = import_perf_hooks.performance.now() - start;
        console.log(
          `[QueryClient] | Query completed: ${formatElapsedTime(elapsedMs)}`
        );
        return result;
      }
      // Run a DDL/DML command (no result rows expected).
      async command(sql3) {
        const [query, query_params] = toQuery(sql3);
        console.log(`[QueryClient] | Command: ${toQueryPreview(sql3)}`);
        const start = import_perf_hooks.performance.now();
        const result = await this.client.command({
          query,
          query_params,
          query_id: this.query_id_prefix + (0, import_node_crypto3.randomUUID)()
        });
        const elapsedMs = import_perf_hooks.performance.now() - start;
        console.log(
          `[QueryClient] | Command completed: ${formatElapsedTime(elapsedMs)}`
        );
        return result;
      }
    };
    // Starts/terminates Temporal "ScriptWorkflow" executions. All methods return
    // HTTP-style { status, body } objects rather than throwing.
    WorkflowClient = class {
      client;
      constructor(temporalClient) {
        this.client = temporalClient;
      }
      // Start workflow `name` with optional input; 404 when Temporal is absent,
      // 400 on any start failure, 200 with a dashboard link on success.
      async execute(name, input_data) {
        try {
          if (!this.client) {
            return {
              status: 404,
              body: `Temporal client not found. Is the feature flag enabled?`
            };
          }
          const config = await this.getWorkflowConfig(name);
          const [processedInput, workflowId] = this.processInputData(
            name,
            input_data
          );
          console.log(
            `WorkflowClient - starting workflow: ${name} with config ${JSON.stringify(config)} and input_data ${JSON.stringify(processedInput)}`
          );
          const handle = await this.client.workflow.start("ScriptWorkflow", {
            args: [
              { workflow_name: name, execution_mode: "start" },
              processedInput
            ],
            taskQueue: "typescript-script-queue",
            workflowId,
            workflowIdConflictPolicy: "FAIL",
            workflowIdReusePolicy: "ALLOW_DUPLICATE",
            retry: {
              // Temporal's maximumAttempts = total attempts (initial + retries)
              maximumAttempts: config.retries + 1
            },
            workflowRunTimeout: config.timeout
          });
          return {
            status: 200,
            body: `Workflow started: ${name}. View it in the Temporal dashboard: http://localhost:8080/namespaces/default/workflows/${workflowId}/${handle.firstExecutionRunId}/history`
          };
        } catch (error) {
          return {
            status: 400,
            body: `Error starting workflow: ${error}`
          };
        }
      }
      // Terminate a running workflow by id; same status-object convention.
      async terminate(workflowId) {
        try {
          if (!this.client) {
            return {
              status: 404,
              body: `Temporal client not found. Is the feature flag enabled?`
            };
          }
          const handle = this.client.workflow.getHandle(workflowId);
          await handle.terminate();
          return {
            status: 200,
            body: `Workflow terminated: ${workflowId}`
          };
        } catch (error) {
          return {
            status: 400,
            body: `Error terminating workflow: ${error}`
          };
        }
      }
      // Look up retries/timeout for a registered workflow; defaults 3 / "1h".
      // Throws when the workflow is not registered.
      async getWorkflowConfig(name) {
        const workflows = await getWorkflows2();
        const workflow = workflows.get(name);
        if (workflow) {
          return {
            retries: workflow.config.retries || 3,
            timeout: workflow.config.timeout || "1h"
          };
        }
        throw new Error(`Workflow config not found for ${name}`);
      }
      // Derive a deterministic workflow id: `name` alone, or `name-<16-hex>`
      // where the suffix is a SHA-256 prefix of the JSON-encoded input.
      processInputData(name, input_data) {
        let workflowId = name;
        if (input_data) {
          const hash = (0, import_node_crypto3.createHash)("sha256").update(JSON.stringify(input_data)).digest("hex").slice(0, 16);
          workflowId = `${name}-${hash}`;
        }
        return [input_data, workflowId];
      }
    };
  }
});
|
|
1050
|
+
// Lazy ESM-shim initializer for src/consumption-apis/webAppHelpers.ts; no state.
var init_webAppHelpers = __esm({
  "src/consumption-apis/webAppHelpers.ts"() {
    "use strict";
  }
});
|
|
1055
|
+
// Lazy ESM-shim initializer for src/scripts/task.ts; no state to set up.
var init_task = __esm({
  "src/scripts/task.ts"() {
    "use strict";
  }
});
|
|
1060
|
+
var import_redis;
|
|
1061
|
+
// Initializer for src/clients/redisClient.ts: loads the bundled redis module.
var init_redisClient = __esm({
  "src/clients/redisClient.ts"() {
    "use strict";
    import_redis = require_redis();
  }
});
|
|
1067
|
+
async function findConfigFile(startDir = process.cwd()) {
  // Walk upward from startDir until moose.config.toml is found.
  // Returns the absolute config path, or null once the filesystem root
  // is reached without a hit.
  const fs4 = await import("fs");
  let dir = import_node_path.default.resolve(startDir);
  for (;;) {
    const candidate = import_node_path.default.join(dir, "moose.config.toml");
    if (fs4.existsSync(candidate)) {
      return candidate;
    }
    const parent = import_node_path.default.dirname(dir);
    if (parent === dir) {
      // dirname of the root is the root itself — nothing left to search.
      return null;
    }
    dir = parent;
  }
}
|
|
1083
|
+
async function readProjectConfig() {
  // Locate and parse moose.config.toml for the current project.
  // Throws ConfigError when the file is missing or fails to parse.
  const fs4 = await import("fs");
  const configPath = await findConfigFile();
  if (!configPath) {
    throw new ConfigError(
      "moose.config.toml not found in current directory or any parent directory"
    );
  }
  try {
    const configContent = fs4.readFileSync(configPath, "utf-8");
    return toml.parse(configContent);
  } catch (error) {
    throw new ConfigError(`Failed to parse moose.config.toml: ${error}`);
  }
}
|
|
1099
|
+
var import_node_path;
|
|
1100
|
+
var toml;
|
|
1101
|
+
var ConfigError;
|
|
1102
|
+
// Initializer for src/config/configFile.ts: path/toml deps and the error type
// used for all configuration-file failures.
var init_configFile = __esm({
  "src/config/configFile.ts"() {
    "use strict";
    import_node_path = __toESM2(require("path"));
    toml = __toESM2(require("toml"));
    // Domain error for missing/unparseable moose.config.toml.
    ConfigError = class extends Error {
      constructor(message) {
        super(message);
        this.name = "ConfigError";
      }
    };
  }
});
|
|
1115
|
+
var runtime_exports = {};
|
|
1116
|
+
var ConfigurationRegistry;
|
|
1117
|
+
// Initializer for src/config/runtime.ts: a process-wide singleton registry that
// resolves ClickHouse/Kafka configuration with precedence
// explicit-setter > environment variables > moose.config.toml > defaults.
var init_runtime = __esm({
  "src/config/runtime.ts"() {
    "use strict";
    init_configFile();
    ConfigurationRegistry = class _ConfigurationRegistry {
      static instance;
      clickhouseConfig;
      kafkaConfig;
      // Classic lazy singleton accessor.
      static getInstance() {
        if (!_ConfigurationRegistry.instance) {
          _ConfigurationRegistry.instance = new _ConfigurationRegistry();
        }
        return _ConfigurationRegistry.instance;
      }
      // Explicitly injected configs take precedence over env/file lookups.
      setClickHouseConfig(config) {
        this.clickhouseConfig = config;
      }
      setKafkaConfig(config) {
        this.kafkaConfig = config;
      }
      // Read an env var, treating unset and blank/whitespace-only as undefined.
      _env(name) {
        const value = process.env[name];
        if (value === void 0) return void 0;
        const trimmed = value.trim();
        return trimmed.length > 0 ? trimmed : void 0;
      }
      // Parse common boolean spellings; undefined for anything unrecognized.
      _parseBool(value) {
        if (value === void 0) return void 0;
        switch (value.trim().toLowerCase()) {
          case "1":
          case "true":
          case "yes":
          case "on":
            return true;
          case "0":
          case "false":
          case "no":
          case "off":
            return false;
          default:
            return void 0;
        }
      }
      // ClickHouse config: setter override, else env vars, else project file.
      // Unlike the standalone variant below, this requires the project file.
      async getClickHouseConfig() {
        if (this.clickhouseConfig) {
          return this.clickhouseConfig;
        }
        const projectConfig = await readProjectConfig();
        const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
        const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
        const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
        const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
        const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
        const envUseSSL = this._parseBool(
          this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
        );
        return {
          host: envHost ?? projectConfig.clickhouse_config.host,
          port: envPort ?? projectConfig.clickhouse_config.host_port.toString(),
          username: envUser ?? projectConfig.clickhouse_config.user,
          password: envPassword ?? projectConfig.clickhouse_config.password,
          database: envDb ?? projectConfig.clickhouse_config.db_name,
          useSSL: envUseSSL !== void 0 ? envUseSSL : projectConfig.clickhouse_config.use_ssl || false
        };
      }
      // Standalone variant: per-field precedence overrides > env > file > defaults,
      // and a missing/unreadable project file is tolerated (falls to defaults).
      async getStandaloneClickhouseConfig(overrides) {
        if (this.clickhouseConfig) {
          return { ...this.clickhouseConfig, ...overrides };
        }
        const envHost = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST");
        const envPort = this._env("MOOSE_CLICKHOUSE_CONFIG__HOST_PORT");
        const envUser = this._env("MOOSE_CLICKHOUSE_CONFIG__USER");
        const envPassword = this._env("MOOSE_CLICKHOUSE_CONFIG__PASSWORD");
        const envDb = this._env("MOOSE_CLICKHOUSE_CONFIG__DB_NAME");
        const envUseSSL = this._parseBool(
          this._env("MOOSE_CLICKHOUSE_CONFIG__USE_SSL")
        );
        let projectConfig;
        try {
          projectConfig = await readProjectConfig();
        } catch (error) {
          projectConfig = null;
        }
        const defaults = {
          host: "localhost",
          port: "18123",
          username: "default",
          password: "",
          database: "local",
          useSSL: false
        };
        return {
          host: overrides?.host ?? envHost ?? projectConfig?.clickhouse_config.host ?? defaults.host,
          port: overrides?.port ?? envPort ?? projectConfig?.clickhouse_config.host_port.toString() ?? defaults.port,
          username: overrides?.username ?? envUser ?? projectConfig?.clickhouse_config.user ?? defaults.username,
          password: overrides?.password ?? envPassword ?? projectConfig?.clickhouse_config.password ?? defaults.password,
          database: overrides?.database ?? envDb ?? projectConfig?.clickhouse_config.db_name ?? defaults.database,
          useSSL: overrides?.useSSL ?? envUseSSL ?? projectConfig?.clickhouse_config.use_ssl ?? defaults.useSSL
        };
      }
      // Kafka config: REDPANDA-prefixed env vars win over KAFKA-prefixed ones,
      // then file values (kafka_config preferred over redpanda_config), then defaults.
      async getKafkaConfig() {
        if (this.kafkaConfig) {
          return this.kafkaConfig;
        }
        const projectConfig = await readProjectConfig();
        const envBroker = this._env("MOOSE_REDPANDA_CONFIG__BROKER") ?? this._env("MOOSE_KAFKA_CONFIG__BROKER");
        const envMsgTimeout = this._env("MOOSE_REDPANDA_CONFIG__MESSAGE_TIMEOUT_MS") ?? this._env("MOOSE_KAFKA_CONFIG__MESSAGE_TIMEOUT_MS");
        const envSaslUsername = this._env("MOOSE_REDPANDA_CONFIG__SASL_USERNAME") ?? this._env("MOOSE_KAFKA_CONFIG__SASL_USERNAME");
        const envSaslPassword = this._env("MOOSE_REDPANDA_CONFIG__SASL_PASSWORD") ?? this._env("MOOSE_KAFKA_CONFIG__SASL_PASSWORD");
        const envSaslMechanism = this._env("MOOSE_REDPANDA_CONFIG__SASL_MECHANISM") ?? this._env("MOOSE_KAFKA_CONFIG__SASL_MECHANISM");
        const envSecurityProtocol = this._env("MOOSE_REDPANDA_CONFIG__SECURITY_PROTOCOL") ?? this._env("MOOSE_KAFKA_CONFIG__SECURITY_PROTOCOL");
        const envNamespace = this._env("MOOSE_REDPANDA_CONFIG__NAMESPACE") ?? this._env("MOOSE_KAFKA_CONFIG__NAMESPACE");
        const envSchemaRegistryUrl = this._env("MOOSE_REDPANDA_CONFIG__SCHEMA_REGISTRY_URL") ?? this._env("MOOSE_KAFKA_CONFIG__SCHEMA_REGISTRY_URL");
        const fileKafka = projectConfig.kafka_config ?? projectConfig.redpanda_config;
        return {
          broker: envBroker ?? fileKafka?.broker ?? "localhost:19092",
          messageTimeoutMs: envMsgTimeout ? parseInt(envMsgTimeout, 10) : fileKafka?.message_timeout_ms ?? 1e3,
          saslUsername: envSaslUsername ?? fileKafka?.sasl_username,
          saslPassword: envSaslPassword ?? fileKafka?.sasl_password,
          saslMechanism: envSaslMechanism ?? fileKafka?.sasl_mechanism,
          securityProtocol: envSecurityProtocol ?? fileKafka?.security_protocol,
          namespace: envNamespace ?? fileKafka?.namespace,
          schemaRegistryUrl: envSchemaRegistryUrl ?? fileKafka?.schema_registry_url
        };
      }
      // True when any config was injected via setters (i.e. not env/file driven).
      hasRuntimeConfig() {
        return !!this.clickhouseConfig || !!this.kafkaConfig;
      }
    };
    // Expose the singleton globally so other bundles/entry points share it.
    globalThis._mooseConfigRegistry = ConfigurationRegistry.getInstance();
  }
});
|
|
1249
|
+
var standalone_exports = {};
|
|
1250
|
+
__export(standalone_exports, {
|
|
1251
|
+
getMooseClients: () => getMooseClients,
|
|
1252
|
+
getMooseUtils: () => getMooseUtils
|
|
1253
|
+
});
|
|
1254
|
+
// Resolve the Moose utilities ({ client, sql, jwt }) for the current process.
// Resolution order: active runtime context (inside a Moose app) -> cached
// standalone utils -> in-flight init promise -> fresh standalone initialization.
// The `req` parameter is accepted only for backward compatibility.
async function getMooseUtils(req) {
  if (req !== void 0) {
    console.warn(
      "[DEPRECATED] getMooseUtils(req) no longer requires a request parameter. Use getMooseUtils() instead."
    );
  }
  const runtimeContext = globalThis._mooseRuntimeContext;
  if (runtimeContext) {
    return {
      client: runtimeContext.client,
      sql,
      jwt: runtimeContext.jwt
    };
  }
  if (standaloneUtils) {
    return standaloneUtils;
  }
  // Concurrent callers during initialization share the same promise.
  if (initPromise) {
    return initPromise;
  }
  initPromise = (async () => {
    // Lazily load the runtime config module to avoid a hard import cycle.
    await Promise.resolve().then(() => (init_runtime(), runtime_exports));
    const configRegistry = globalThis._mooseConfigRegistry;
    if (!configRegistry) {
      throw new Error(
        "Moose not initialized. Ensure you're running within a Moose app or have proper configuration set up."
      );
    }
    const clickhouseConfig = await configRegistry.getStandaloneClickhouseConfig();
    const clickhouseClient = getClickhouseClient(
      toClientConfig(clickhouseConfig)
    );
    const queryClient = new QueryClient(clickhouseClient, "standalone");
    const mooseClient = new MooseClient(queryClient);
    // Success caches the result; later calls take the standaloneUtils fast path.
    standaloneUtils = {
      client: mooseClient,
      sql,
      jwt: void 0
    };
    return standaloneUtils;
  })();
  try {
    return await initPromise;
  } finally {
    // Clearing the promise lets a failed initialization be retried; a
    // successful one is already memoized in standaloneUtils above.
    initPromise = null;
  }
}
|
|
1301
|
+
async function getMooseClients(config) {
  // DEPRECATED compatibility shim. With explicit overrides it builds a fresh
  // standalone MooseClient; otherwise it delegates to getMooseUtils().
  console.warn(
    "[DEPRECATED] getMooseClients() is deprecated. Use getMooseUtils() instead."
  );
  const hasOverrides = !!config && Object.keys(config).length > 0;
  if (!hasOverrides) {
    const utils = await getMooseUtils();
    return { client: utils.client };
  }
  // Lazily load the runtime config module to avoid a hard import cycle.
  await Promise.resolve().then(() => (init_runtime(), runtime_exports));
  const configRegistry = globalThis._mooseConfigRegistry;
  if (!configRegistry) {
    throw new Error(
      "Configuration registry not initialized. Ensure the Moose framework is properly set up."
    );
  }
  const clickhouseConfig = await configRegistry.getStandaloneClickhouseConfig(config);
  const clickhouseClient = getClickhouseClient(
    toClientConfig(clickhouseConfig)
  );
  const queryClient = new QueryClient(clickhouseClient, "standalone");
  return { client: new MooseClient(queryClient) };
}
|
|
1324
|
+
var standaloneUtils;
|
|
1325
|
+
var initPromise;
|
|
1326
|
+
var toClientConfig;
|
|
1327
|
+
// Initializer for src/consumption-apis/standalone.ts: memoization slots for
// standalone utils and the config adapter that stringifies the useSSL flag
// into the "true"/"false" form the ClickHouse client factory expects.
var init_standalone = __esm({
  "src/consumption-apis/standalone.ts"() {
    "use strict";
    init_helpers();
    init_commons();
    init_sqlHelpers();
    standaloneUtils = null;
    initPromise = null;
    toClientConfig = (config) => ({
      ...config,
      useSSL: config.useSSL ? "true" : "false"
    });
  }
});
|
|
1341
|
+
function isNullableType(dt) {
  // A nullable wrapper is a non-null object carrying a defined `nullable` member.
  if (dt === null || typeof dt !== "object") {
    return false;
  }
  return "nullable" in dt && dt.nullable !== void 0;
}
|
|
1344
|
+
function isNestedType2(dt) {
  // A nested type is a non-null object whose `columns` member is an array.
  if (dt === null || typeof dt !== "object") {
    return false;
  }
  return "columns" in dt && Array.isArray(dt.columns);
}
|
|
1347
|
+
function isArrayType(dt) {
  // An array type is a non-null object carrying a defined `elementType` member.
  if (dt === null || typeof dt !== "object") {
    return false;
  }
  return "elementType" in dt && dt.elementType !== void 0;
}
|
|
1350
|
+
function jsonDateReviver(key, value) {
  // JSON.parse reviver: turn ISO-8601 formatted strings into Date objects,
  // passing every other value through untouched.
  const iso8601Format = /^([\+-]?\d{4}(?!\d{2}\b))((-?)((0[1-9]|1[0-2])(\3([12]\d|0[1-9]|3[01]))?|W([0-4]\d|5[0-2])(-?[1-7])?|(00[1-9]|0[1-9]\d|[12]\d{2}|3([0-5]\d|6[1-6])))([T\s]((([01]\d|2[0-3])((:?)[0-5]\d)?|24\:?00)([\.,]\d+(?!:))?)?(\17[0-5]\d([\.,]\d+)?)?([zZ]|([\+-])([01]\d|2[0-3]):?([0-5]\d)?)?)?)$/;
  if (typeof value !== "string") {
    return value;
  }
  return iso8601Format.test(value) ? new Date(value) : value;
}
|
|
1357
|
+
function isDateType(dataType, annotations) {
  // Decide whether a column should be materialized as a JS Date.
  // Columns explicitly annotated as string-dates are never Date columns.
  const markedAsStringDate = annotations.some(
    ([key, value]) => key === STRING_DATE_ANNOTATION && value === true
  );
  if (markedAsStringDate) {
    return false;
  }
  if (typeof dataType === "string") {
    // Matches both bare "DateTime" and parameterized "DateTime(...)".
    return dataType === "DateTime" || dataType.startsWith("DateTime(");
  }
  // Unwrap a nullable wrapper and re-check the inner type.
  return isNullableType(dataType) ? isDateType(dataType.nullable, annotations) : false;
}
|
|
1371
|
+
function buildFieldMutations(columns) {
  // Derive a mutation spec from column metadata: date-typed fields get a
  // ["parseDate"] operation list; nested objects (directly, behind a nullable
  // wrapper, or as array elements) recurse into a nested spec.
  const mutations = [];
  for (const column of columns) {
    const dataType = column.data_type;
    if (isDateType(dataType, column.annotations)) {
      mutations.push([column.name, ["parseDate"]]);
      continue;
    }
    if (typeof dataType !== "object" || dataType === null) {
      continue;
    }
    // Peel off a nullable wrapper before structural checks.
    const innerType = isNullableType(dataType) ? dataType.nullable : dataType;
    if (isNestedType2(innerType)) {
      const nestedMutations = buildFieldMutations(innerType.columns);
      if (nestedMutations.length > 0) {
        mutations.push([column.name, nestedMutations]);
      }
    } else if (isArrayType(innerType) && isNestedType2(innerType.elementType)) {
      const nestedMutations = buildFieldMutations(innerType.elementType.columns);
      if (nestedMutations.length > 0) {
        mutations.push([column.name, nestedMutations]);
      }
    }
  }
  return mutations;
}
|
|
1405
|
+
function applyMutation(value, mutation) {
  // Apply a single named mutation to a value. Only "parseDate" is supported;
  // any other mutation, non-string value, or unparseable date passes through.
  if (mutation !== "parseDate" || typeof value !== "string") {
    return value;
  }
  try {
    const parsed = new Date(value);
    return Number.isNaN(parsed.getTime()) ? value : parsed;
  } catch {
    return value;
  }
}
|
|
1418
|
+
function applyFieldMutations(obj, mutations) {
  // Walk a mutation spec and rewrite matching fields on `obj` IN PLACE.
  // A spec entry is [fieldName, mutation] where mutation is either a list of
  // operation names (strings) or a nested spec for object/array-valued fields.
  if (!obj || typeof obj !== "object") {
    return;
  }
  for (const [fieldName, mutation] of mutations) {
    if (!(fieldName in obj)) {
      continue;
    }
    if (!Array.isArray(mutation)) {
      continue;
    }
    const isOperationList = mutation.length > 0 && typeof mutation[0] === "string";
    if (isOperationList) {
      for (const operation of mutation) {
        obj[fieldName] = applyMutation(obj[fieldName], operation);
      }
      continue;
    }
    // Nested spec: recurse into an object value, or into each array element.
    const fieldValue = obj[fieldName];
    if (Array.isArray(fieldValue)) {
      for (const element of fieldValue) {
        applyFieldMutations(element, mutation);
      }
    } else if (fieldValue && typeof fieldValue === "object") {
      applyFieldMutations(fieldValue, mutation);
    }
  }
}
|
|
1446
|
+
function buildFieldMutationsFromColumns(columns) {
  // Wrapper around buildFieldMutations that returns undefined (rather than an
  // empty list) when there is nothing to mutate, so callers can cheap-check.
  if (!columns?.length) {
    return void 0;
  }
  const mutations = buildFieldMutations(columns);
  return mutations.length === 0 ? void 0 : mutations;
}
|
|
1453
|
+
function mutateParsedJson(data, fieldMutations) {
  // Apply a prepared mutation spec to freshly parsed JSON, in place.
  // No-op when either the data or the spec is absent.
  if (!fieldMutations || !data) {
    return;
  }
  applyFieldMutations(data, fieldMutations);
}
|
|
1459
|
+
var STRING_DATE_ANNOTATION;
|
|
1460
|
+
// Initializer for src/utilities/json.ts: annotation key marking columns whose
// date-like strings must stay strings (checked by isDateType above).
var init_json = __esm({
  "src/utilities/json.ts"() {
    "use strict";
    STRING_DATE_ANNOTATION = "stringDate";
  }
});
|
|
1466
|
+
var import_csv_parse;
|
|
1467
|
+
var CSV_DELIMITERS;
|
|
1468
|
+
var DEFAULT_CSV_CONFIG;
|
|
1469
|
+
// Initializer for src/utilities/dataParser.ts: CSV parsing constants.
var init_dataParser = __esm({
  "src/utilities/dataParser.ts"() {
    "use strict";
    import_csv_parse = require("csv-parse");
    init_json();
    CSV_DELIMITERS = {
      COMMA: ",",
      // NOTE(review): this value renders as whitespace in this bundle — it is
      // presumably a literal tab character ("\t"); confirm against the source.
      TAB: " ",
      SEMICOLON: ";",
      PIPE: "|"
    };
    // Defaults used when a caller supplies no parser options.
    DEFAULT_CSV_CONFIG = {
      delimiter: CSV_DELIMITERS.COMMA,
      columns: true,
      skipEmptyLines: true,
      trim: true
    };
  }
});
|
|
1487
|
+
});
|
|
1488
|
+
// Barrel initializer for src/utilities/index.ts.
var init_utilities = __esm({
  "src/utilities/index.ts"() {
    "use strict";
    init_dataParser();
  }
});
|
|
1494
|
+
// Lazy ESM-shim initializer for src/connectors/dataSource.ts; no state.
var init_dataSource = __esm({
  "src/connectors/dataSource.ts"() {
    "use strict";
  }
});
|
|
1499
|
+
// Barrel initializer for src/index.ts: runs every submodule initializer.
var init_index = __esm({
  "src/index.ts"() {
    "use strict";
    init_browserCompatible();
    init_commons();
    init_secrets();
    init_helpers();
    init_webAppHelpers();
    init_task();
    init_redisClient();
    // NOTE(review): init_helpers() is invoked a second time here; __esm
    // initializers run once, so this is harmless but looks unintentional.
    init_helpers();
    init_standalone();
    init_sqlHelpers();
    init_utilities();
    init_dataSource();
    init_types();
  }
});
|
|
1517
|
+
function findSourceFiles(dir, extensions = [".ts", ".tsx", ".js", ".jsx", ".mts", ".cts"]) {
  // Recursively collect source files under `dir`, skipping node_modules,
  // hidden directories, and TypeScript declaration files (.d.ts/.d.mts/.d.cts).
  // Unreadable directories are logged and skipped rather than failing the scan.
  const collected = [];
  try {
    for (const entry of fs2.readdirSync(dir, { withFileTypes: true })) {
      const fullPath = path3.join(dir, entry.name);
      if (entry.isDirectory()) {
        const skipDir = entry.name === "node_modules" || entry.name.startsWith(".");
        if (!skipDir) {
          collected.push(...findSourceFiles(fullPath, extensions));
        }
        continue;
      }
      if (!entry.isFile()) {
        continue;
      }
      const isDeclaration = entry.name.endsWith(".d.ts") || entry.name.endsWith(".d.mts") || entry.name.endsWith(".d.cts");
      if (isDeclaration) {
        continue;
      }
      if (extensions.includes(path3.extname(entry.name))) {
        collected.push(fullPath);
      }
    }
  } catch (error) {
    compilerLog(`Warning: Could not read directory ${dir}: ${error}`);
  }
  return collected;
}
|
|
1542
|
+
function findUnloadedFiles() {
  // Compare source files on disk against require.cache to report app files
  // that were never loaded, as paths relative to the current working directory.
  const appDir = path3.resolve(import_process.default.cwd(), getSourceDir());
  const allSourceFiles = findSourceFiles(appDir);
  const loadedFiles = new Set(
    Object.keys(require.cache)
      .filter((cachedPath) => cachedPath.startsWith(appDir))
      .map((cachedPath) => path3.resolve(cachedPath))
  );
  return allSourceFiles
    .map((file) => path3.resolve(file))
    .filter((file) => !loadedFiles.has(file))
    .map((file) => path3.relative(import_process.default.cwd(), file));
}
|
|
1551
|
+
function isS3QueueConfig(config) {
  // An S3Queue table config is identified solely by its `engine` field.
  if (!("engine" in config)) {
    return false;
  }
  return config.engine === "S3Queue";
}
|
|
1554
|
+
function hasReplicatedEngine(config) {
  // True when the config selects one of the Replicated* MergeTree engines.
  if (!("engine" in config)) {
    return false;
  }
  const replicatedEngines = [
    "ReplicatedMergeTree",
    "ReplicatedReplacingMergeTree",
    "ReplicatedAggregatingMergeTree",
    "ReplicatedSummingMergeTree",
    "ReplicatedCollapsingMergeTree",
    "ReplicatedVersionedCollapsingMergeTree"
  ];
  return replicatedEngines.includes(config.engine);
}
|
|
1561
|
+
function extractEngineValue(config) {
  // Read the configured engine, defaulting to MergeTree when none is set.
  return "engine" in config ? config.engine : "MergeTree";
}
|
|
1567
|
+
function convertBasicEngineConfig(engine, config) {
  // Normalize a basic (non-replicated) table-engine config into its wire
  // shape, copying only the parameters each engine understands.
  // Returns undefined for engines not handled here.
  switch (engine) {
    case "MergeTree":
    case "AggregatingMergeTree":
      // Parameter-less engines.
      return { engine };
    case "ReplacingMergeTree":
      return { engine, ver: config.ver, isDeleted: config.isDeleted };
    case "SummingMergeTree":
      return { engine, columns: config.columns };
    case "CollapsingMergeTree":
      return { engine, sign: config.sign };
    case "VersionedCollapsingMergeTree":
      return { engine, sign: config.sign, ver: config.ver };
    default:
      return void 0;
  }
}
|
|
1607
|
+
function convertReplicatedEngineConfig(engine, config) {
  // Normalize a Replicated* engine config into its wire shape. Keeper
  // coordinates (keeperPath/replicaName) are common to every replicated
  // engine; the remaining parameters vary per engine. Returns undefined for
  // non-replicated configs or unrecognized engines.
  if (!hasReplicatedEngine(config)) {
    return void 0;
  }
  const base = {
    engine,
    keeperPath: config.keeperPath,
    replicaName: config.replicaName
  };
  switch (engine) {
    case "ReplicatedMergeTree":
    case "ReplicatedAggregatingMergeTree":
      return base;
    case "ReplicatedReplacingMergeTree":
      return { ...base, ver: config.ver, isDeleted: config.isDeleted };
    case "ReplicatedSummingMergeTree":
      return { ...base, columns: config.columns };
    case "ReplicatedCollapsingMergeTree":
      return { ...base, sign: config.sign };
    case "ReplicatedVersionedCollapsingMergeTree":
      return { ...base, sign: config.sign, ver: config.ver };
    default:
      return void 0;
  }
}
|
|
1670
|
+
/**
 * Converts a table config into the S3Queue engine-config shape.
 * Returns undefined when the config does not satisfy isS3QueueConfig.
 */
function convertS3QueueEngineConfig(config) {
  if (!isS3QueueConfig(config)) {
    return void 0;
  }
  const {
    s3Path,
    format,
    awsAccessKeyId,
    awsSecretAccessKey,
    compression,
    headers
  } = config;
  return {
    engine: "S3Queue",
    s3Path,
    format,
    awsAccessKeyId,
    awsSecretAccessKey,
    compression,
    headers
  };
}
|
|
1684
|
+
/**
 * Converts a table config into the S3 engine-config shape.
 * Returns undefined unless the config explicitly declares engine "S3".
 */
function convertS3EngineConfig(config) {
  const isS3 = "engine" in config && config.engine === "S3";
  if (!isS3) {
    return void 0;
  }
  const {
    path,
    format,
    awsAccessKeyId,
    awsSecretAccessKey,
    compression,
    partitionStrategy,
    partitionColumnsInDataFile
  } = config;
  return {
    engine: "S3",
    path,
    format,
    awsAccessKeyId,
    awsSecretAccessKey,
    compression,
    partitionStrategy,
    partitionColumnsInDataFile
  };
}
|
|
1699
|
+
/**
 * Converts a table config into the Buffer engine-config shape.
 * Returns undefined unless the config explicitly declares engine "Buffer".
 */
function convertBufferEngineConfig(config) {
  const isBuffer = "engine" in config && config.engine === "Buffer";
  if (!isBuffer) {
    return void 0;
  }
  const {
    targetDatabase,
    targetTable,
    numLayers,
    minTime,
    maxTime,
    minRows,
    maxRows,
    minBytes,
    maxBytes,
    flushTime,
    flushRows,
    flushBytes
  } = config;
  return {
    engine: "Buffer",
    targetDatabase,
    targetTable,
    numLayers,
    minTime,
    maxTime,
    minRows,
    maxRows,
    minBytes,
    maxBytes,
    flushTime,
    flushRows,
    flushBytes
  };
}
|
|
1719
|
+
/**
 * Converts a table config into the Distributed engine-config shape.
 * Returns undefined unless the config explicitly declares engine
 * "Distributed".
 */
function convertDistributedEngineConfig(config) {
  const isDistributed = "engine" in config && config.engine === "Distributed";
  if (!isDistributed) {
    return void 0;
  }
  const { cluster, targetDatabase, targetTable, shardingKey, policyName } =
    config;
  return {
    engine: "Distributed",
    cluster,
    targetDatabase,
    targetTable,
    shardingKey,
    policyName
  };
}
|
|
1732
|
+
/**
 * Converts a table config into the IcebergS3 engine-config shape.
 * Returns undefined unless the config explicitly declares engine
 * "IcebergS3".
 */
function convertIcebergS3EngineConfig(config) {
  const isIceberg = "engine" in config && config.engine === "IcebergS3";
  if (!isIceberg) {
    return void 0;
  }
  const { path, format, awsAccessKeyId, awsSecretAccessKey, compression } =
    config;
  return {
    engine: "IcebergS3",
    path,
    format,
    awsAccessKeyId,
    awsSecretAccessKey,
    compression
  };
}
|
|
1745
|
+
/**
 * Converts a table config into the Kafka engine-config shape.
 * Returns undefined unless the config explicitly declares engine "Kafka".
 */
function convertKafkaEngineConfig(config) {
  const isKafka = "engine" in config && config.engine === "Kafka";
  if (!isKafka) {
    return void 0;
  }
  const { brokerList, topicList, groupName, format } = config;
  return {
    engine: "Kafka",
    brokerList,
    topicList,
    groupName,
    format
  };
}
|
|
1757
|
+
/**
 * Normalizes an OlapTable config into its discriminated engine-config
 * object. Resolution order mirrors the engine families: basic MergeTree
 * variants first, then Replicated* variants, then the special-purpose
 * engines (S3Queue, S3, Buffer, Distributed, IcebergS3, Kafka).
 * Returns undefined for unrecognized engines.
 */
function convertTableConfigToEngineConfig(config) {
  const engine = extractEngineValue(config);
  const basic = convertBasicEngineConfig(engine, config);
  if (basic) {
    return basic;
  }
  const replicated = convertReplicatedEngineConfig(engine, config);
  if (replicated) {
    return replicated;
  }
  // Special-purpose engines dispatch on the extracted engine name; each
  // converter re-validates its own config shape.
  const specialConverters = {
    S3Queue: convertS3QueueEngineConfig,
    S3: convertS3EngineConfig,
    Buffer: convertBufferEngineConfig,
    Distributed: convertDistributedEngineConfig,
    IcebergS3: convertIcebergS3EngineConfig,
    Kafka: convertKafkaEngineConfig
  };
  if (Object.prototype.hasOwnProperty.call(specialConverters, engine)) {
    return specialConverters[engine](config);
  }
  return void 0;
}
|
|
1787
|
+
/**
 * Depth-first search for a task by name within a workflow task tree.
 * Children are reached through each task's `config.onComplete` list.
 * Returns the first matching task object, or undefined when absent.
 */
function findTaskInTree(task, targetName) {
  if (task.name === targetName) {
    return task;
  }
  const children = task.config.onComplete ?? [];
  for (const child of children) {
    const hit = findTaskInTree(child, targetName);
    if (hit !== void 0) {
      return hit;
    }
  }
  return void 0;
}
|
|
1801
|
+
// Module-scope bindings for the bundled "src/dmv2/internal.ts" module.
// They are declared here and assigned inside init_internal() below —
// the lazy-initialization pattern emitted by the bundler's __esm wrapper.
var import_process;
var fs2;
var path3;
var moose_internal;
var defaultRetentionPeriod;
var toInfraMap;
var getMooseInternal;
var dumpMooseInternal;
var loadIndex;
var getStreamingFunctions;
var getApis2;
var getWorkflows2;
var getTaskForWorkflow;
var getWebApps2;
|
|
1815
|
+
// Lazy initializer (bundler __esm wrapper) for "src/dmv2/internal.ts".
// Populates the module-scope bindings declared above on first call.
var init_internal = __esm({
  "src/dmv2/internal.ts"() {
    "use strict";
    import_process = __toESM2(require("process"));
    fs2 = __toESM2(require("fs"));
    path3 = __toESM2(require("path"));
    init_index();
    init_commons();
    init_compiler_config();
    // Per-process registry of every dmv2 resource declared by user code.
    moose_internal = {
      tables: /* @__PURE__ */ new Map(),
      streams: /* @__PURE__ */ new Map(),
      ingestApis: /* @__PURE__ */ new Map(),
      apis: /* @__PURE__ */ new Map(),
      sqlResources: /* @__PURE__ */ new Map(),
      workflows: /* @__PURE__ */ new Map(),
      webApps: /* @__PURE__ */ new Map(),
      materializedViews: /* @__PURE__ */ new Map(),
      views: /* @__PURE__ */ new Map()
    };
    // 7 days, in seconds — default stream retention.
    defaultRetentionPeriod = 60 * 60 * 24 * 7;
    // Flattens the registry into the plain-object infra map consumed by
    // the (Rust) host process. Pure transformation except for the
    // validation throw on conflicting orderBy settings.
    toInfraMap = (registry) => {
      const tables = {};
      const topics = {};
      const ingestApis = {};
      const apis = {};
      const sqlResources = {};
      const workflows = {};
      const webApps = {};
      const materializedViews = {};
      const views = {};
      registry.tables.forEach((table) => {
        // Versioned tables are keyed "<name>_<version>".
        const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
        let metadata = table.metadata;
        if (!metadata && table.config && table.pipelineParent) {
          metadata = table.pipelineParent.metadata;
        }
        const engineConfig = convertTableConfigToEngineConfig(table.config);
        let tableSettings = void 0;
        if (table.config.settings) {
          // Settings are stringified; undefined values are dropped.
          tableSettings = Object.entries(table.config.settings).reduce(
            (acc, [key, value]) => {
              if (value !== void 0) {
                acc[key] = String(value);
              }
              return acc;
            },
            {}
          );
        }
        if (engineConfig?.engine === "S3Queue") {
          if (!tableSettings) {
            tableSettings = {};
          }
          // S3Queue requires a mode; default to "unordered".
          if (!tableSettings.mode) {
            tableSettings.mode = "unordered";
          }
        }
        const hasOrderByFields = "orderByFields" in table.config && Array.isArray(table.config.orderByFields) && table.config.orderByFields.length > 0;
        const hasOrderByExpression = "orderByExpression" in table.config && typeof table.config.orderByExpression === "string" && table.config.orderByExpression.length > 0;
        if (hasOrderByFields && hasOrderByExpression) {
          throw new Error(
            `Table ${table.name}: Provide either orderByFields or orderByExpression, not both.`
          );
        }
        // orderBy is a string (expression form) or an array (field form).
        const orderBy = hasOrderByExpression && "orderByExpression" in table.config ? table.config.orderByExpression ?? "" : "orderByFields" in table.config ? table.config.orderByFields ?? [] : [];
        tables[id] = {
          name: table.name,
          columns: table.columnArray,
          orderBy,
          partitionBy: "partitionBy" in table.config ? table.config.partitionBy : void 0,
          sampleByExpression: "sampleByExpression" in table.config ? table.config.sampleByExpression : void 0,
          primaryKeyExpression: "primaryKeyExpression" in table.config ? table.config.primaryKeyExpression : void 0,
          engineConfig,
          version: table.config.version,
          metadata,
          lifeCycle: table.config.lifeCycle,
          // Map 'settings' to 'tableSettings' for internal use
          tableSettings: tableSettings && Object.keys(tableSettings).length > 0 ? tableSettings : void 0,
          indexes: table.config.indexes?.map((i) => ({
            ...i,
            granularity: i.granularity === void 0 ? 1 : i.granularity,
            arguments: i.arguments === void 0 ? [] : i.arguments
          })) || [],
          ttl: table.config.ttl,
          database: table.config.database,
          cluster: table.config.cluster
        };
      });
      registry.streams.forEach((stream) => {
        let metadata = stream.metadata;
        if (!metadata && stream.config && stream.pipelineParent) {
          metadata = stream.pipelineParent.metadata;
        }
        const transformationTargets = [];
        const consumers = [];
        stream._transformations.forEach((transforms, destinationName) => {
          transforms.forEach(([destination, _, config]) => {
            transformationTargets.push({
              kind: "stream",
              name: destinationName,
              version: config.version,
              metadata: config.metadata,
              sourceFile: config.sourceFile
            });
          });
        });
        stream._consumers.forEach((consumer) => {
          consumers.push({
            version: consumer.config.version,
            sourceFile: consumer.config.sourceFile
          });
        });
        topics[stream.name] = {
          name: stream.name,
          columns: stream.columnArray,
          targetTable: stream.config.destination?.name,
          targetTableVersion: stream.config.destination?.config.version,
          retentionPeriod: stream.config.retentionPeriod ?? defaultRetentionPeriod,
          partitionCount: stream.config.parallelism ?? 1,
          version: stream.config.version,
          transformationTargets,
          // NOTE(review): this reads as inverted — true when
          // _multipleTransformations is ABSENT; confirm against the
          // consumer of this flag before relying on the name.
          hasMultiTransform: stream._multipleTransformations === void 0,
          consumers,
          metadata,
          lifeCycle: stream.config.lifeCycle,
          schemaConfig: stream.config.schemaConfig
        };
      });
      registry.ingestApis.forEach((api) => {
        let metadata = api.metadata;
        if (!metadata && api.config && api.pipelineParent) {
          metadata = api.pipelineParent.metadata;
        }
        ingestApis[api.name] = {
          name: api.name,
          columns: api.columnArray,
          version: api.config.version,
          path: api.config.path,
          writeTo: {
            kind: "stream",
            name: api.config.destination.name
          },
          deadLetterQueue: api.config.deadLetterQueue?.name,
          metadata,
          schema: api.schema,
          allowExtraFields: api.allowExtraFields
        };
      });
      registry.apis.forEach((api, key) => {
        // Versioned APIs are keyed "<name>:<version>" on the Rust side.
        const rustKey = api.config.version ? `${api.name}:${api.config.version}` : api.name;
        apis[rustKey] = {
          name: api.name,
          queryParams: api.columnArray,
          responseSchema: api.responseSchema,
          version: api.config.version,
          path: api.config.path,
          metadata: api.metadata
        };
      });
      registry.sqlResources.forEach((sqlResource) => {
        sqlResources[sqlResource.name] = {
          name: sqlResource.name,
          setup: sqlResource.setup,
          teardown: sqlResource.teardown,
          sourceFile: sqlResource.sourceFile,
          sourceLine: sqlResource.sourceLine,
          sourceColumn: sqlResource.sourceColumn,
          pullsDataFrom: sqlResource.pullsDataFrom.map((r) => {
            if (r.kind === "OlapTable") {
              const table = r;
              const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
              return {
                id,
                kind: "Table"
              };
            } else if (r.kind === "SqlResource") {
              const resource = r;
              return {
                id: resource.name,
                kind: "SqlResource"
              };
            } else if (r.kind === "View") {
              const view = r;
              return {
                id: view.name,
                kind: "View"
              };
            } else if (r.kind === "MaterializedView") {
              const mv = r;
              return {
                id: mv.name,
                kind: "MaterializedView"
              };
            } else {
              throw new Error(`Unknown sql resource dependency type: ${r}`);
            }
          }),
          pushesDataTo: sqlResource.pushesDataTo.map((r) => {
            if (r.kind === "OlapTable") {
              const table = r;
              const id = table.config.version ? `${table.name}_${table.config.version}` : table.name;
              return {
                id,
                kind: "Table"
              };
            } else if (r.kind === "SqlResource") {
              const resource = r;
              return {
                id: resource.name,
                kind: "SqlResource"
              };
            } else if (r.kind === "View") {
              const view = r;
              return {
                id: view.name,
                kind: "View"
              };
            } else if (r.kind === "MaterializedView") {
              const mv = r;
              return {
                id: mv.name,
                kind: "MaterializedView"
              };
            } else {
              throw new Error(`Unknown sql resource dependency type: ${r}`);
            }
          })
        };
      });
      registry.workflows.forEach((workflow) => {
        workflows[workflow.name] = {
          name: workflow.name,
          retries: workflow.config.retries,
          timeout: workflow.config.timeout,
          schedule: workflow.config.schedule
        };
      });
      registry.webApps.forEach((webApp) => {
        webApps[webApp.name] = {
          name: webApp.name,
          mountPath: webApp.config.mountPath || "/",
          metadata: webApp.config.metadata
        };
      });
      registry.materializedViews.forEach((mv) => {
        materializedViews[mv.name] = {
          name: mv.name,
          selectSql: mv.selectSql,
          sourceTables: mv.sourceTables,
          targetTable: mv.targetTable.name,
          targetDatabase: mv.targetTable.config.database,
          metadata: mv.metadata
        };
      });
      registry.views.forEach((view) => {
        views[view.name] = {
          name: view.name,
          selectSql: view.selectSql,
          sourceTables: view.sourceTables,
          metadata: view.metadata
        };
      });
      return {
        topics,
        tables,
        ingestApis,
        apis,
        sqlResources,
        workflows,
        webApps,
        materializedViews,
        views,
        unloadedFiles: []
        // Will be populated by dumpMooseInternal
      };
    };
    // The registry is shared through globalThis so multiple copies of the
    // bundled module observe the same state.
    getMooseInternal = () => globalThis.moose_internal;
    if (getMooseInternal() === void 0) {
      globalThis.moose_internal = moose_internal;
    }
    // Loads user code, then prints the infra map between sentinel markers
    // for the host process to parse from stdout.
    dumpMooseInternal = async () => {
      await loadIndex();
      const infraMap = toInfraMap(getMooseInternal());
      const unloadedFiles = findUnloadedFiles();
      infraMap.unloadedFiles = unloadedFiles;
      console.log(
        "___MOOSE_STUFF___start",
        JSON.stringify(infraMap),
        "end___MOOSE_STUFF___"
      );
    };
    // (Re)loads the user's index module. In non-compiled (ts-node) mode the
    // registry and require cache are cleared first so re-loading picks up
    // source changes.
    loadIndex = async () => {
      const useCompiled2 = shouldUseCompiled();
      if (!useCompiled2) {
        const registry = getMooseInternal();
        registry.tables.clear();
        registry.streams.clear();
        registry.ingestApis.clear();
        registry.apis.clear();
        registry.sqlResources.clear();
        registry.workflows.clear();
        registry.webApps.clear();
        registry.materializedViews.clear();
        registry.views.clear();
        const appDir = `${import_process.default.cwd()}/${getSourceDir()}`;
        // Evict cached app modules so the require below re-executes them.
        Object.keys(require.cache).forEach((key) => {
          if (key.startsWith(appDir)) {
            delete require.cache[key];
          }
        });
      }
      try {
        const sourceDir = getSourceDir();
        if (useCompiled2) {
          await loadModule(
            `${import_process.default.cwd()}/.moose/compiled/${sourceDir}/index.js`
          );
        } else {
          require(`${import_process.default.cwd()}/${sourceDir}/index.ts`);
        }
      } catch (error) {
        // Rewrap well-known failure modes with actionable hints; anything
        // unrecognized is rethrown untouched.
        let hint;
        let includeDetails = true;
        const details = error instanceof Error ? error.message : String(error);
        if (details.includes("no transform has been configured") || details.includes("NoTransformConfigurationError")) {
          hint = "\u{1F534} Typia Transformation Error\n\nThis is likely a bug in Moose. The Typia type transformer failed to process your code.\n\nPlease report this issue:\n \u2022 Moose Slack: https://join.slack.com/t/moose-community/shared_invite/zt-2fjh5n3wz-cnOmM9Xe9DYAgQrNu8xKxg\n \u2022 Include the stack trace below and the file being processed\n\n";
          includeDetails = false;
        } else if (details.includes("ERR_REQUIRE_ESM") || details.includes("ES Module")) {
          hint = "The file or its dependencies are ESM-only. Switch to packages that dual-support CJS & ESM, or upgrade to Node 22.12+. If you must use Node 20, you may try Node 20.19\n\n";
        }
        if (hint === void 0) {
          throw error;
        } else {
          const errorMsg = includeDetails ? `${hint}${details}` : hint;
          const cause = error instanceof Error ? error : void 0;
          throw new Error(errorMsg, { cause });
        }
      }
    };
    // Collects transform and consumer callbacks keyed
    // "<source>_<dest>[_<version>]" (consumers use "<no-target>").
    getStreamingFunctions = async () => {
      await loadIndex();
      const registry = getMooseInternal();
      const transformFunctions = /* @__PURE__ */ new Map();
      registry.streams.forEach((stream) => {
        stream._transformations.forEach((transforms, destinationName) => {
          transforms.forEach(([_, transform, config]) => {
            const transformFunctionKey = `${stream.name}_${destinationName}${config.version ? `_${config.version}` : ""}`;
            compilerLog(`getStreamingFunctions: ${transformFunctionKey}`);
            transformFunctions.set(transformFunctionKey, [
              transform,
              config,
              stream.columnArray
            ]);
          });
        });
        stream._consumers.forEach((consumer) => {
          const consumerFunctionKey = `${stream.name}_<no-target>${consumer.config.version ? `_${consumer.config.version}` : ""}`;
          transformFunctions.set(consumerFunctionKey, [
            consumer.consumer,
            consumer.config,
            stream.columnArray
          ]);
        });
      });
      return transformFunctions;
    };
    // Collects API handlers; an unversioned API claims the bare name, and a
    // name with exactly one versioned API is also reachable by bare name.
    getApis2 = async () => {
      await loadIndex();
      const apiFunctions = /* @__PURE__ */ new Map();
      const registry = getMooseInternal();
      const versionCountByName = /* @__PURE__ */ new Map();
      const nameToSoleVersionHandler = /* @__PURE__ */ new Map();
      registry.apis.forEach((api, key) => {
        const handler = api.getHandler();
        apiFunctions.set(key, handler);
        if (!api.config.version) {
          if (!apiFunctions.has(api.name)) {
            apiFunctions.set(api.name, handler);
          }
          nameToSoleVersionHandler.delete(api.name);
          versionCountByName.delete(api.name);
        } else if (!apiFunctions.has(api.name)) {
          const count = (versionCountByName.get(api.name) ?? 0) + 1;
          versionCountByName.set(api.name, count);
          if (count === 1) {
            nameToSoleVersionHandler.set(api.name, handler);
          } else {
            nameToSoleVersionHandler.delete(api.name);
          }
        }
      });
      nameToSoleVersionHandler.forEach((handler, name) => {
        if (!apiFunctions.has(name)) {
          apiFunctions.set(name, handler);
        }
      });
      return apiFunctions;
    };
    getWorkflows2 = async () => {
      await loadIndex();
      const registry = getMooseInternal();
      return registry.workflows;
    };
    // Resolves a task by name within a workflow's onComplete tree.
    getTaskForWorkflow = async (workflowName, taskName) => {
      const workflows = await getWorkflows2();
      const workflow = workflows.get(workflowName);
      if (!workflow) {
        throw new Error(`Workflow ${workflowName} not found`);
      }
      const task = findTaskInTree(
        workflow.config.startingTask,
        taskName
      );
      if (!task) {
        throw new Error(`Task ${taskName} not found in workflow ${workflowName}`);
      }
      return task;
    };
    getWebApps2 = async () => {
      await loadIndex();
      return getMooseInternal().webApps;
    };
  }
});
|
|
2240
|
+
// Runner entry-point dependencies and eager module initialization.
var import_ts_node = require("ts-node");
init_compiler_config();
init_internal();
var import_http = __toESM2(require("http"));
init_commons();
init_helpers();
var jose = __toESM2(require("jose"));
var import_node_cluster = __toESM2(require("cluster"));
var import_node_os = require("os");
var import_node_process = require("process");
|
|
2250
|
+
// Fraction of available CPU cores used for workers when no ratio is given.
var DEFAULT_MAX_CPU_USAGE_RATIO = 0.7;
// Delay (ms) before re-forking a worker that died outside of a shutdown.
var RESTART_TIME_MS = 1e4;
var SIGTERM = "SIGTERM";
var SIGINT = "SIGINT";
// Polling interval (ms) while waiting for workers to exit during shutdown.
var SHUTDOWN_WORKERS_INTERVAL = 500;
|
|
2255
|
+
// Manages a primary/worker process pool via node:cluster: spawns workers
// sized to CPU availability, restarts crashed workers, and coordinates
// graceful shutdown on SIGTERM/SIGINT or parent-process death.
var Cluster = class {
  // Tracks if shutdown is currently in progress
  shutdownInProgress = false;
  // Tracks if workers exited cleanly during shutdown
  hasCleanWorkerExit = true;
  // String identifying if this is primary or worker process
  processStr = `${import_node_cluster.default.isPrimary ? "primary" : "worker"} process ${process.pid}`;
  // Functions for starting and stopping workers
  workerStart;
  workerStop;
  // Result from starting worker, needed for cleanup
  startOutput;
  maxCpuUsageRatio;
  usedCpuCount;
  /**
   * Creates a new cluster manager instance.
   *
   * @param options - Configuration options for the cluster
   * @param options.workerStart - Async function to execute when starting a worker
   * @param options.workerStop - Async function to execute when stopping a worker
   * @param options.maxCpuUsageRatio - Maximum ratio of CPU cores to utilize (0-1)
   * @param options.maxWorkerCount - Maximum number of workers to spawn
   * @throws {Error} If maxCpuUsageRatio is not between 0 and 1
   */
  constructor(options) {
    this.workerStart = options.workerStart;
    this.workerStop = options.workerStop;
    if (options.maxCpuUsageRatio && (options.maxCpuUsageRatio > 1 || options.maxCpuUsageRatio < 0)) {
      throw new Error("maxCpuUsageRatio must be between 0 and 1");
    }
    // `||` (not `??`): a ratio of 0 also falls back to the default.
    this.maxCpuUsageRatio = options.maxCpuUsageRatio || DEFAULT_MAX_CPU_USAGE_RATIO;
    this.usedCpuCount = this.computeCPUUsageCount(
      this.maxCpuUsageRatio,
      options.maxWorkerCount
    );
  }
  /**
   * Calculates the number of CPU cores to utilize based on available parallelism and constraints.
   *
   * @param cpuUsageRatio - Ratio of CPU cores to use (0-1)
   * @param maxWorkerCount - Optional maximum number of workers
   * @returns The number of CPU cores to utilize
   */
  computeCPUUsageCount(cpuUsageRatio, maxWorkerCount) {
    const cpuCount = (0, import_node_os.availableParallelism)();
    const maxWorkers = maxWorkerCount || cpuCount;
    // Always at least one worker, never more than maxWorkers.
    return Math.min(
      maxWorkers,
      Math.max(1, Math.floor(cpuCount * cpuUsageRatio))
    );
  }
  /**
   * Initializes the cluster by spawning worker processes and setting up signal handlers.
   * For the primary process, spawns workers and monitors parent process.
   * For worker processes, executes the worker startup function.
   *
   * @throws {Error} If worker is undefined in worker process
   */
  async start() {
    process.on(SIGTERM, this.gracefulClusterShutdown(SIGTERM));
    process.on(SIGINT, this.gracefulClusterShutdown(SIGINT));
    if (import_node_cluster.default.isPrimary) {
      const parentPid = process.ppid;
      // Poll the parent with signal 0 (existence check only); initiate
      // shutdown once the parent is gone.
      setInterval(() => {
        try {
          process.kill(parentPid, 0);
        } catch (e) {
          console.log("Parent process has exited.");
          this.gracefulClusterShutdown(SIGTERM)();
        }
      }, 1e3);
      await this.bootWorkers(this.usedCpuCount);
    } else {
      if (!import_node_cluster.default.worker) {
        throw new Error(
          "Worker is not defined, it should be defined in worker process"
        );
      }
      this.startOutput = await this.workerStart(
        import_node_cluster.default.worker,
        this.usedCpuCount
      );
    }
  }
  /**
   * Spawns worker processes and configures their lifecycle event handlers.
   * Handles worker online, exit and disconnect events.
   * Automatically restarts failed workers during normal operation.
   *
   * @param numWorkers - Number of worker processes to spawn
   */
  bootWorkers = async (numWorkers) => {
    console.info(`Setting ${numWorkers} workers...`);
    for (let i = 0; i < numWorkers; i++) {
      import_node_cluster.default.fork();
    }
    import_node_cluster.default.on("online", (worker) => {
      console.info(`worker process ${worker.process.pid} is online`);
    });
    import_node_cluster.default.on("exit", (worker, code, signal) => {
      console.info(
        `worker ${worker.process.pid} exited with code ${code} and signal ${signal}`
      );
      // Outside of shutdown, replace the dead worker after a cool-down.
      if (!this.shutdownInProgress) {
        setTimeout(() => import_node_cluster.default.fork(), RESTART_TIME_MS);
      }
      if (this.shutdownInProgress && code != 0) {
        this.hasCleanWorkerExit = false;
      }
    });
    import_node_cluster.default.on("disconnect", (worker) => {
      console.info(`worker process ${worker.process.pid} has disconnected`);
    });
  };
  /**
   * Creates a handler function for graceful shutdown on receipt of a signal.
   * Ensures only one shutdown can occur at a time.
   * Handles shutdown differently for primary and worker processes.
   *
   * @param signal - The signal triggering the shutdown (e.g. SIGTERM)
   * @returns An async function that performs the shutdown
   */
  gracefulClusterShutdown = (signal) => async () => {
    if (this.shutdownInProgress) {
      return;
    }
    this.shutdownInProgress = true;
    this.hasCleanWorkerExit = true;
    console.info(
      `Got ${signal} on ${this.processStr}. Graceful shutdown start at ${(/* @__PURE__ */ new Date()).toISOString()}`
    );
    try {
      if (import_node_cluster.default.isPrimary) {
        await this.shutdownWorkers(signal);
        console.info(`${this.processStr} - worker shutdown successful`);
        (0, import_node_process.exit)(0);
      } else {
        if (this.startOutput) {
          await this.workerStop(this.startOutput);
        } else {
          console.info(
            `${this.processStr} - shutdown before worker fully started`
          );
        }
        console.info(`${this.processStr} shutdown successful`);
        // NOTE(review): in a worker process hasCleanWorkerExit is never set
        // to false (only the primary's exit handler mutates it), so this
        // branch always exits 0 — confirm that is intended.
        this.hasCleanWorkerExit ? (0, import_node_process.exit)(0) : (0, import_node_process.exit)(1);
      }
    } catch (e) {
      console.error(`${this.processStr} - shutdown failed`, e);
      (0, import_node_process.exit)(1);
    }
  };
  /**
   * Gracefully terminates all worker processes.
   * Monitors workers until they all exit or timeout occurs.
   * Only relevant for the primary process.
   *
   * @param signal - The signal to send to worker processes
   * @returns A promise that resolves when all workers have terminated
   */
  shutdownWorkers = (signal) => {
    return new Promise((resolve3, reject) => {
      if (!import_node_cluster.default.isPrimary) {
        return resolve3();
      }
      if (!import_node_cluster.default.workers) {
        return resolve3();
      }
      const workerIds = Object.keys(import_node_cluster.default.workers);
      if (workerIds.length == 0) {
        return resolve3();
      }
      let workersAlive = 0;
      let funcRun = 0;
      // Runs every SHUTDOWN_WORKERS_INTERVAL ms; sends the signal on the
      // first pass only, then waits for all workers to report dead.
      // NOTE(review): the JSDoc above mentions a timeout, but none is
      // implemented here — this polls indefinitely and `reject` is unused;
      // confirm intended.
      const cleanWorkers = () => {
        ++funcRun;
        workersAlive = 0;
        Object.values(import_node_cluster.default.workers || {}).filter((worker) => !!worker).forEach((worker) => {
          if (worker && !worker.isDead()) {
            ++workersAlive;
            if (funcRun == 1) {
              worker.kill(signal);
            }
          }
        });
        console.info(workersAlive + " workers alive");
        if (workersAlive == 0) {
          clearInterval(interval);
          return resolve3();
        }
      };
      const interval = setInterval(cleanWorkers, SHUTDOWN_WORKERS_INTERVAL);
    });
  };
};
|
|
2450
|
+
init_sqlHelpers();
|
|
2451
|
+
init_internal();
|
|
2452
|
+
init_compiler_config();
|
|
2453
|
+
var util = __toESM2(require("util"));
|
|
2454
|
+
var import_async_hooks = require("async_hooks");
|
|
2455
|
+
function setupStructuredConsole(getContextField, contextFieldName) {
|
|
2456
|
+
const contextStorage = new import_async_hooks.AsyncLocalStorage();
|
|
2457
|
+
const originalConsole = {
|
|
2458
|
+
log: console.log,
|
|
2459
|
+
info: console.info,
|
|
2460
|
+
warn: console.warn,
|
|
2461
|
+
error: console.error,
|
|
2462
|
+
debug: console.debug
|
|
2463
|
+
};
|
|
2464
|
+
console.log = createStructuredConsoleWrapper(
|
|
2465
|
+
contextStorage,
|
|
2466
|
+
getContextField,
|
|
2467
|
+
contextFieldName,
|
|
2468
|
+
originalConsole.log,
|
|
2469
|
+
"info"
|
|
2470
|
+
);
|
|
2471
|
+
console.info = createStructuredConsoleWrapper(
|
|
2472
|
+
contextStorage,
|
|
2473
|
+
getContextField,
|
|
2474
|
+
contextFieldName,
|
|
2475
|
+
originalConsole.info,
|
|
2476
|
+
"info"
|
|
2477
|
+
);
|
|
2478
|
+
console.warn = createStructuredConsoleWrapper(
|
|
2479
|
+
contextStorage,
|
|
2480
|
+
getContextField,
|
|
2481
|
+
contextFieldName,
|
|
2482
|
+
originalConsole.warn,
|
|
2483
|
+
"warn"
|
|
2484
|
+
);
|
|
2485
|
+
console.error = createStructuredConsoleWrapper(
|
|
2486
|
+
contextStorage,
|
|
2487
|
+
getContextField,
|
|
2488
|
+
contextFieldName,
|
|
2489
|
+
originalConsole.error,
|
|
2490
|
+
"error"
|
|
2491
|
+
);
|
|
2492
|
+
console.debug = createStructuredConsoleWrapper(
|
|
2493
|
+
contextStorage,
|
|
2494
|
+
getContextField,
|
|
2495
|
+
contextFieldName,
|
|
2496
|
+
originalConsole.debug,
|
|
2497
|
+
"debug"
|
|
2498
|
+
);
|
|
2499
|
+
return contextStorage;
|
|
2500
|
+
}
|
|
2501
|
+
function emitStructuredLog(contextStorage, getContextField, contextFieldName, level, message) {
|
|
2502
|
+
const context = contextStorage.getStore();
|
|
2503
|
+
if (!context) {
|
|
2504
|
+
return false;
|
|
2505
|
+
}
|
|
2506
|
+
let ctxValue;
|
|
2507
|
+
try {
|
|
2508
|
+
ctxValue = getContextField(context);
|
|
2509
|
+
} catch {
|
|
2510
|
+
ctxValue = "unknown";
|
|
2511
|
+
}
|
|
2512
|
+
try {
|
|
2513
|
+
process.stderr.write(
|
|
2514
|
+
JSON.stringify({
|
|
2515
|
+
__moose_structured_log__: true,
|
|
2516
|
+
level,
|
|
2517
|
+
message,
|
|
2518
|
+
[contextFieldName]: ctxValue,
|
|
2519
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
2520
|
+
}) + "\n"
|
|
2521
|
+
);
|
|
2522
|
+
return true;
|
|
2523
|
+
} catch {
|
|
2524
|
+
return false;
|
|
2525
|
+
}
|
|
2526
|
+
}
|
|
2527
|
+
function safeStringify(arg) {
|
|
2528
|
+
if (typeof arg === "object" && arg !== null) {
|
|
2529
|
+
if (arg instanceof Error) {
|
|
2530
|
+
return util.inspect(arg, { depth: 2, breakLength: Infinity });
|
|
2531
|
+
}
|
|
2532
|
+
try {
|
|
2533
|
+
return JSON.stringify(arg);
|
|
2534
|
+
} catch (e) {
|
|
2535
|
+
return util.inspect(arg, { depth: 2, breakLength: Infinity });
|
|
2536
|
+
}
|
|
2537
|
+
}
|
|
2538
|
+
if (typeof arg === "string") {
|
|
2539
|
+
return arg;
|
|
2540
|
+
}
|
|
2541
|
+
return util.inspect(arg);
|
|
2542
|
+
}
|
|
2543
|
+
function createStructuredConsoleWrapper(contextStorage, getContextField, contextFieldName, originalMethod, level) {
|
|
2544
|
+
return (...args) => {
|
|
2545
|
+
const context = contextStorage.getStore();
|
|
2546
|
+
if (!context) {
|
|
2547
|
+
originalMethod(...args);
|
|
2548
|
+
return;
|
|
2549
|
+
}
|
|
2550
|
+
let ctxValue;
|
|
2551
|
+
try {
|
|
2552
|
+
ctxValue = getContextField(context);
|
|
2553
|
+
} catch {
|
|
2554
|
+
ctxValue = "unknown";
|
|
2555
|
+
}
|
|
2556
|
+
try {
|
|
2557
|
+
const message = args.map((arg) => safeStringify(arg)).join(" ");
|
|
2558
|
+
process.stderr.write(
|
|
2559
|
+
JSON.stringify({
|
|
2560
|
+
__moose_structured_log__: true,
|
|
2561
|
+
level,
|
|
2562
|
+
message,
|
|
2563
|
+
[contextFieldName]: ctxValue,
|
|
2564
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
2565
|
+
}) + "\n"
|
|
2566
|
+
);
|
|
2567
|
+
} catch {
|
|
2568
|
+
originalMethod(...args);
|
|
2569
|
+
}
|
|
2570
|
+
};
|
|
2571
|
+
}
|
|
2572
|
+
var toClientConfig2 = (config) => ({
|
|
2573
|
+
...config,
|
|
2574
|
+
useSSL: config.useSSL ? "true" : "false"
|
|
2575
|
+
});
|
|
2576
|
+
var createPath = (apisDir, path5, useCompiled2) => {
|
|
2577
|
+
const extension = useCompiled2 ? ".js" : ".ts";
|
|
2578
|
+
return `${apisDir}${path5}${extension}`;
|
|
2579
|
+
};
|
|
2580
|
+
var httpLogger = (req, res, startMs, apiName) => {
|
|
2581
|
+
const logFn = () => console.log(
|
|
2582
|
+
`${req.method} ${req.url} ${res.statusCode} ${Date.now() - startMs}ms`
|
|
2583
|
+
);
|
|
2584
|
+
if (apiName) {
|
|
2585
|
+
apiContextStorage.run({ apiName }, logFn);
|
|
2586
|
+
} else {
|
|
2587
|
+
logFn();
|
|
2588
|
+
}
|
|
2589
|
+
};
|
|
2590
|
+
var modulesCache = /* @__PURE__ */ new Map();
|
|
2591
|
+
var apiContextStorage = setupStructuredConsole(
|
|
2592
|
+
(ctx) => ctx.apiName,
|
|
2593
|
+
"api_name"
|
|
2594
|
+
);
|
|
2595
|
+
var apiHandler = async (publicKey, clickhouseClient, temporalClient, enforceAuth, jwtConfig) => {
|
|
2596
|
+
const useCompiled2 = shouldUseCompiled();
|
|
2597
|
+
const sourceDir = getSourceDir();
|
|
2598
|
+
const actualApisDir = useCompiled2 ? `${process.cwd()}/.moose/compiled/${sourceDir}/apis/` : void 0;
|
|
2599
|
+
const apis = await getApis2();
|
|
2600
|
+
return async (req, res) => {
|
|
2601
|
+
const start = Date.now();
|
|
2602
|
+
let matchedApiName;
|
|
2603
|
+
try {
|
|
2604
|
+
const url = new URL(req.url || "", "http://localhost");
|
|
2605
|
+
const fileName = url.pathname;
|
|
2606
|
+
let jwtPayload;
|
|
2607
|
+
if (publicKey && jwtConfig) {
|
|
2608
|
+
const jwt = req.headers.authorization?.split(" ")[1];
|
|
2609
|
+
if (jwt) {
|
|
2610
|
+
try {
|
|
2611
|
+
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
2612
|
+
issuer: jwtConfig.issuer,
|
|
2613
|
+
audience: jwtConfig.audience
|
|
2614
|
+
});
|
|
2615
|
+
jwtPayload = payload;
|
|
2616
|
+
} catch (error) {
|
|
2617
|
+
console.log("JWT verification failed");
|
|
2618
|
+
if (enforceAuth) {
|
|
2619
|
+
res.writeHead(401, { "Content-Type": "application/json" });
|
|
2620
|
+
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
2621
|
+
httpLogger(req, res, start);
|
|
2622
|
+
return;
|
|
2623
|
+
}
|
|
2624
|
+
}
|
|
2625
|
+
} else if (enforceAuth) {
|
|
2626
|
+
res.writeHead(401, { "Content-Type": "application/json" });
|
|
2627
|
+
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
2628
|
+
httpLogger(req, res, start);
|
|
2629
|
+
return;
|
|
2630
|
+
}
|
|
2631
|
+
} else if (enforceAuth) {
|
|
2632
|
+
res.writeHead(401, { "Content-Type": "application/json" });
|
|
2633
|
+
res.end(JSON.stringify({ error: "Unauthorized" }));
|
|
2634
|
+
httpLogger(req, res, start);
|
|
2635
|
+
return;
|
|
2636
|
+
}
|
|
2637
|
+
const pathName = actualApisDir ? createPath(actualApisDir, fileName, useCompiled2) : fileName;
|
|
2638
|
+
const paramsObject = Array.from(url.searchParams.entries()).reduce(
|
|
2639
|
+
(obj, [key, value]) => {
|
|
2640
|
+
const existingValue = obj[key];
|
|
2641
|
+
if (existingValue) {
|
|
2642
|
+
if (Array.isArray(existingValue)) {
|
|
2643
|
+
existingValue.push(value);
|
|
2644
|
+
} else {
|
|
2645
|
+
obj[key] = [existingValue, value];
|
|
2646
|
+
}
|
|
2647
|
+
} else {
|
|
2648
|
+
obj[key] = value;
|
|
2649
|
+
}
|
|
2650
|
+
return obj;
|
|
2651
|
+
},
|
|
2652
|
+
{}
|
|
2653
|
+
);
|
|
2654
|
+
const versionParam = url.searchParams.get("version");
|
|
2655
|
+
const cacheKey = versionParam ? `${pathName}:${versionParam}` : pathName;
|
|
2656
|
+
let userFuncModule;
|
|
2657
|
+
const cachedEntry = modulesCache.get(cacheKey);
|
|
2658
|
+
if (cachedEntry !== void 0) {
|
|
2659
|
+
userFuncModule = cachedEntry.module;
|
|
2660
|
+
matchedApiName = cachedEntry.apiName;
|
|
2661
|
+
} else {
|
|
2662
|
+
let lookupName = fileName.replace(/^\/+|\/+$/g, "");
|
|
2663
|
+
let version = null;
|
|
2664
|
+
userFuncModule = apis.get(lookupName);
|
|
2665
|
+
if (userFuncModule) {
|
|
2666
|
+
matchedApiName = lookupName;
|
|
2667
|
+
}
|
|
2668
|
+
if (!userFuncModule) {
|
|
2669
|
+
version = url.searchParams.get("version");
|
|
2670
|
+
if (!version && lookupName.includes("/")) {
|
|
2671
|
+
const pathParts = lookupName.split("/");
|
|
2672
|
+
if (pathParts.length >= 2) {
|
|
2673
|
+
lookupName = pathParts[0];
|
|
2674
|
+
version = pathParts.slice(1).join("/");
|
|
2675
|
+
}
|
|
2676
|
+
}
|
|
2677
|
+
if (!userFuncModule && version) {
|
|
2678
|
+
const versionedKey = `${lookupName}:${version}`;
|
|
2679
|
+
userFuncModule = apis.get(versionedKey);
|
|
2680
|
+
if (userFuncModule) {
|
|
2681
|
+
matchedApiName = lookupName;
|
|
2682
|
+
}
|
|
2683
|
+
}
|
|
2684
|
+
if (!userFuncModule) {
|
|
2685
|
+
userFuncModule = apis.get(lookupName);
|
|
2686
|
+
if (userFuncModule) {
|
|
2687
|
+
matchedApiName = lookupName;
|
|
2688
|
+
}
|
|
2689
|
+
}
|
|
2690
|
+
}
|
|
2691
|
+
if (!userFuncModule || matchedApiName === void 0) {
|
|
2692
|
+
const availableApis = Array.from(apis.keys()).map(
|
|
2693
|
+
(key) => key.replace(":", "/")
|
|
2694
|
+
);
|
|
2695
|
+
const errorMessage = version ? `API ${lookupName} with version ${version} not found. Available APIs: ${availableApis.join(", ")}` : `API ${lookupName} not found. Available APIs: ${availableApis.join(", ")}`;
|
|
2696
|
+
throw new Error(errorMessage);
|
|
2697
|
+
}
|
|
2698
|
+
modulesCache.set(cacheKey, {
|
|
2699
|
+
module: userFuncModule,
|
|
2700
|
+
apiName: matchedApiName
|
|
2701
|
+
});
|
|
2702
|
+
apiContextStorage.run({ apiName: matchedApiName }, () => {
|
|
2703
|
+
console.log(`[API] | Executing API: ${matchedApiName}`);
|
|
2704
|
+
});
|
|
2705
|
+
}
|
|
2706
|
+
const queryClient = new QueryClient(clickhouseClient, fileName);
|
|
2707
|
+
const apiName = matchedApiName;
|
|
2708
|
+
const result = await apiContextStorage.run({ apiName }, async () => {
|
|
2709
|
+
return await userFuncModule(paramsObject, {
|
|
2710
|
+
client: new MooseClient(queryClient, temporalClient),
|
|
2711
|
+
sql,
|
|
2712
|
+
jwt: jwtPayload
|
|
2713
|
+
});
|
|
2714
|
+
});
|
|
2715
|
+
let body;
|
|
2716
|
+
let status;
|
|
2717
|
+
if (Object.getPrototypeOf(result).constructor.name === "ResultSet") {
|
|
2718
|
+
body = JSON.stringify(await result.json());
|
|
2719
|
+
} else {
|
|
2720
|
+
if ("body" in result && "status" in result) {
|
|
2721
|
+
body = JSON.stringify(result.body);
|
|
2722
|
+
status = result.status;
|
|
2723
|
+
} else {
|
|
2724
|
+
body = JSON.stringify(result);
|
|
2725
|
+
}
|
|
2726
|
+
}
|
|
2727
|
+
if (status) {
|
|
2728
|
+
res.writeHead(status, { "Content-Type": "application/json" });
|
|
2729
|
+
httpLogger(req, res, start, apiName);
|
|
2730
|
+
} else {
|
|
2731
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
2732
|
+
httpLogger(req, res, start, apiName);
|
|
2733
|
+
}
|
|
2734
|
+
res.end(body);
|
|
2735
|
+
} catch (error) {
|
|
2736
|
+
const logError2 = () => console.log("error in path ", req.url, error);
|
|
2737
|
+
if (matchedApiName) {
|
|
2738
|
+
apiContextStorage.run({ apiName: matchedApiName }, logError2);
|
|
2739
|
+
} else {
|
|
2740
|
+
logError2();
|
|
2741
|
+
}
|
|
2742
|
+
if (Object.getPrototypeOf(error).constructor.name === "TypeGuardError") {
|
|
2743
|
+
res.writeHead(400, { "Content-Type": "application/json" });
|
|
2744
|
+
res.end(JSON.stringify({ error: error.message }));
|
|
2745
|
+
httpLogger(req, res, start, matchedApiName);
|
|
2746
|
+
}
|
|
2747
|
+
if (error instanceof Error) {
|
|
2748
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
2749
|
+
res.end(JSON.stringify({ error: error.message }));
|
|
2750
|
+
httpLogger(req, res, start, matchedApiName);
|
|
2751
|
+
} else {
|
|
2752
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
2753
|
+
res.end();
|
|
2754
|
+
httpLogger(req, res, start, matchedApiName);
|
|
2755
|
+
}
|
|
2756
|
+
}
|
|
2757
|
+
};
|
|
2758
|
+
};
|
|
2759
|
+
var createMainRouter = async (publicKey, clickhouseClient, temporalClient, enforceAuth, jwtConfig) => {
|
|
2760
|
+
const apiRequestHandler = await apiHandler(
|
|
2761
|
+
publicKey,
|
|
2762
|
+
clickhouseClient,
|
|
2763
|
+
temporalClient,
|
|
2764
|
+
enforceAuth,
|
|
2765
|
+
jwtConfig
|
|
2766
|
+
);
|
|
2767
|
+
const webApps = await getWebApps2();
|
|
2768
|
+
const sortedWebApps = Array.from(webApps.values()).sort((a, b) => {
|
|
2769
|
+
const pathA = a.config.mountPath || "/";
|
|
2770
|
+
const pathB = b.config.mountPath || "/";
|
|
2771
|
+
return pathB.length - pathA.length;
|
|
2772
|
+
});
|
|
2773
|
+
return async (req, res) => {
|
|
2774
|
+
const start = Date.now();
|
|
2775
|
+
const url = new URL(req.url || "", "http://localhost");
|
|
2776
|
+
const pathname = url.pathname;
|
|
2777
|
+
if (pathname === "/_moose_internal/health") {
|
|
2778
|
+
res.writeHead(200, { "Content-Type": "application/json" });
|
|
2779
|
+
res.end(
|
|
2780
|
+
JSON.stringify({
|
|
2781
|
+
status: "healthy",
|
|
2782
|
+
timestamp: (/* @__PURE__ */ new Date()).toISOString()
|
|
2783
|
+
})
|
|
2784
|
+
);
|
|
2785
|
+
return;
|
|
2786
|
+
}
|
|
2787
|
+
let jwtPayload;
|
|
2788
|
+
if (publicKey && jwtConfig) {
|
|
2789
|
+
const jwt = req.headers.authorization?.split(" ")[1];
|
|
2790
|
+
if (jwt) {
|
|
2791
|
+
try {
|
|
2792
|
+
const { payload } = await jose.jwtVerify(jwt, publicKey, {
|
|
2793
|
+
issuer: jwtConfig.issuer,
|
|
2794
|
+
audience: jwtConfig.audience
|
|
2795
|
+
});
|
|
2796
|
+
jwtPayload = payload;
|
|
2797
|
+
} catch (error) {
|
|
2798
|
+
console.log("JWT verification failed for WebApp route");
|
|
2799
|
+
}
|
|
2800
|
+
}
|
|
2801
|
+
}
|
|
2802
|
+
for (const webApp of sortedWebApps) {
|
|
2803
|
+
const mountPath = webApp.config.mountPath || "/";
|
|
2804
|
+
const normalizedMount = mountPath.endsWith("/") && mountPath !== "/" ? mountPath.slice(0, -1) : mountPath;
|
|
2805
|
+
const matches = pathname === normalizedMount || pathname.startsWith(normalizedMount + "/");
|
|
2806
|
+
if (matches) {
|
|
2807
|
+
if (webApp.config.injectMooseUtils !== false) {
|
|
2808
|
+
const { getMooseUtils: getMooseUtils2 } = await Promise.resolve().then(() => (init_standalone(), standalone_exports));
|
|
2809
|
+
req.moose = await getMooseUtils2();
|
|
2810
|
+
}
|
|
2811
|
+
let proxiedUrl = req.url;
|
|
2812
|
+
if (normalizedMount !== "/") {
|
|
2813
|
+
const pathWithoutMount = pathname.substring(normalizedMount.length) || "/";
|
|
2814
|
+
proxiedUrl = pathWithoutMount + url.search;
|
|
2815
|
+
}
|
|
2816
|
+
try {
|
|
2817
|
+
const modifiedReq = Object.assign(
|
|
2818
|
+
Object.create(Object.getPrototypeOf(req)),
|
|
2819
|
+
req,
|
|
2820
|
+
{
|
|
2821
|
+
url: proxiedUrl
|
|
2822
|
+
}
|
|
2823
|
+
);
|
|
2824
|
+
await webApp.handler(modifiedReq, res);
|
|
2825
|
+
return;
|
|
2826
|
+
} catch (error) {
|
|
2827
|
+
console.error(`Error in WebApp ${webApp.name}:`, error);
|
|
2828
|
+
if (!res.headersSent) {
|
|
2829
|
+
res.writeHead(500, { "Content-Type": "application/json" });
|
|
2830
|
+
res.end(JSON.stringify({ error: "Internal Server Error" }));
|
|
2831
|
+
}
|
|
2832
|
+
return;
|
|
2833
|
+
}
|
|
2834
|
+
}
|
|
2835
|
+
}
|
|
2836
|
+
let apiPath = pathname;
|
|
2837
|
+
if (pathname.startsWith("/api/")) {
|
|
2838
|
+
apiPath = pathname.substring(4);
|
|
2839
|
+
} else if (pathname.startsWith("/consumption/")) {
|
|
2840
|
+
apiPath = pathname.substring(13);
|
|
2841
|
+
}
|
|
2842
|
+
if (apiPath !== pathname) {
|
|
2843
|
+
const modifiedReq = Object.assign(
|
|
2844
|
+
Object.create(Object.getPrototypeOf(req)),
|
|
2845
|
+
req,
|
|
2846
|
+
{
|
|
2847
|
+
url: apiPath + url.search
|
|
2848
|
+
}
|
|
2849
|
+
);
|
|
2850
|
+
await apiRequestHandler(modifiedReq, res);
|
|
2851
|
+
return;
|
|
2852
|
+
}
|
|
2853
|
+
res.writeHead(404, { "Content-Type": "application/json" });
|
|
2854
|
+
res.end(JSON.stringify({ error: "Not Found" }));
|
|
2855
|
+
httpLogger(req, res, start);
|
|
2856
|
+
};
|
|
2857
|
+
};
|
|
2858
|
+
var runApis = async (config) => {
|
|
2859
|
+
const apisCluster = new Cluster({
|
|
2860
|
+
maxWorkerCount: (config.workerCount ?? 0) > 0 ? config.workerCount : void 0,
|
|
2861
|
+
workerStart: async () => {
|
|
2862
|
+
let temporalClient;
|
|
2863
|
+
if (config.temporalConfig) {
|
|
2864
|
+
temporalClient = await getTemporalClient(
|
|
2865
|
+
config.temporalConfig.url,
|
|
2866
|
+
config.temporalConfig.namespace,
|
|
2867
|
+
config.temporalConfig.clientCert,
|
|
2868
|
+
config.temporalConfig.clientKey,
|
|
2869
|
+
config.temporalConfig.apiKey
|
|
2870
|
+
);
|
|
2871
|
+
}
|
|
2872
|
+
const clickhouseClient = getClickhouseClient(
|
|
2873
|
+
toClientConfig2(config.clickhouseConfig)
|
|
2874
|
+
);
|
|
2875
|
+
let publicKey;
|
|
2876
|
+
if (config.jwtConfig?.secret) {
|
|
2877
|
+
console.log("Importing JWT public key...");
|
|
2878
|
+
publicKey = await jose.importSPKI(config.jwtConfig.secret, "RS256");
|
|
2879
|
+
}
|
|
2880
|
+
const runtimeQueryClient = new QueryClient(clickhouseClient, "runtime");
|
|
2881
|
+
globalThis._mooseRuntimeContext = {
|
|
2882
|
+
client: new MooseClient(runtimeQueryClient, temporalClient)
|
|
2883
|
+
};
|
|
2884
|
+
const server = import_http.default.createServer(
|
|
2885
|
+
await createMainRouter(
|
|
2886
|
+
publicKey,
|
|
2887
|
+
clickhouseClient,
|
|
2888
|
+
temporalClient,
|
|
2889
|
+
config.enforceAuth,
|
|
2890
|
+
config.jwtConfig
|
|
2891
|
+
)
|
|
2892
|
+
);
|
|
2893
|
+
const port = config.proxyPort !== void 0 ? config.proxyPort : 4001;
|
|
2894
|
+
server.listen(port, "localhost", () => {
|
|
2895
|
+
console.log(`Server running on port ${port}`);
|
|
2896
|
+
});
|
|
2897
|
+
return server;
|
|
2898
|
+
},
|
|
2899
|
+
workerStop: async (server) => {
|
|
2900
|
+
return new Promise((resolve3) => {
|
|
2901
|
+
server.close(() => resolve3());
|
|
2902
|
+
});
|
|
2903
|
+
}
|
|
2904
|
+
});
|
|
2905
|
+
apisCluster.start();
|
|
2906
|
+
};
|
|
2907
|
+
var import_node_stream2 = require("stream");
|
|
2908
|
+
var import_kafka_javascript2 = require_kafka_javascript();
|
|
2909
|
+
var import_node_buffer = require("buffer");
|
|
2910
|
+
var process3 = __toESM2(require("process"));
|
|
2911
|
+
var http2 = __toESM2(require("http"));
|
|
2912
|
+
init_commons();
|
|
2913
|
+
init_internal();
|
|
2914
|
+
init_json();
|
|
2915
|
+
var { Kafka: Kafka2 } = import_kafka_javascript2.KafkaJS;
|
|
2916
|
+
var HOSTNAME = process3.env.HOSTNAME;
|
|
2917
|
+
var AUTO_COMMIT_INTERVAL_MS = 5e3;
|
|
2918
|
+
var PARTITIONS_CONSUMED_CONCURRENTLY = 3;
|
|
2919
|
+
var MAX_RETRIES_CONSUMER = 150;
|
|
2920
|
+
var SESSION_TIMEOUT_CONSUMER = 3e4;
|
|
2921
|
+
var HEARTBEAT_INTERVAL_CONSUMER = 3e3;
|
|
2922
|
+
var DEFAULT_MAX_STREAMING_CONCURRENCY = 100;
|
|
2923
|
+
var CONSUMER_MAX_BATCH_SIZE = 1e3;
|
|
2924
|
+
var functionContextStorage = setupStructuredConsole(
|
|
2925
|
+
(ctx) => ctx.functionName,
|
|
2926
|
+
"function_name"
|
|
2927
|
+
);
|
|
2928
|
+
var MAX_STREAMING_CONCURRENCY = process3.env.MAX_STREAMING_CONCURRENCY ? parseInt(process3.env.MAX_STREAMING_CONCURRENCY, 10) : DEFAULT_MAX_STREAMING_CONCURRENCY;
|
|
2929
|
+
var metricsLog = (log) => {
|
|
2930
|
+
const req = http2.request({
|
|
2931
|
+
port: parseInt(process3.env.MOOSE_MANAGEMENT_PORT ?? "5001", 10),
|
|
2932
|
+
method: "POST",
|
|
2933
|
+
path: "/metrics-logs"
|
|
2934
|
+
});
|
|
2935
|
+
req.on("error", (err) => {
|
|
2936
|
+
console.log(
|
|
2937
|
+
`Error ${err.name} sending metrics to management port.`,
|
|
2938
|
+
err.message
|
|
2939
|
+
);
|
|
2940
|
+
});
|
|
2941
|
+
req.write(JSON.stringify({ ...log }));
|
|
2942
|
+
req.end();
|
|
2943
|
+
};
|
|
2944
|
+
var startProducer = async (logger2, producer) => {
|
|
2945
|
+
try {
|
|
2946
|
+
logger2.log("Connecting producer...");
|
|
2947
|
+
await producer.connect();
|
|
2948
|
+
logger2.log("Producer is running...");
|
|
2949
|
+
} catch (error) {
|
|
2950
|
+
logger2.error("Failed to connect producer:");
|
|
2951
|
+
if (error instanceof Error) {
|
|
2952
|
+
logError(logger2, error);
|
|
2953
|
+
}
|
|
2954
|
+
throw error;
|
|
2955
|
+
}
|
|
2956
|
+
};
|
|
2957
|
+
var stopProducer = async (logger2, producer) => {
|
|
2958
|
+
await producer.disconnect();
|
|
2959
|
+
logger2.log("Producer is shutting down...");
|
|
2960
|
+
};
|
|
2961
|
+
var stopConsumer = async (logger2, consumer, sourceTopic) => {
|
|
2962
|
+
try {
|
|
2963
|
+
logger2.log("Pausing consumer...");
|
|
2964
|
+
const partitionNumbers = Array.from(
|
|
2965
|
+
{ length: sourceTopic.partitions },
|
|
2966
|
+
(_, i) => i
|
|
2967
|
+
);
|
|
2968
|
+
await consumer.pause([
|
|
2969
|
+
{
|
|
2970
|
+
topic: sourceTopic.name,
|
|
2971
|
+
partitions: partitionNumbers
|
|
2972
|
+
}
|
|
2973
|
+
]);
|
|
2974
|
+
logger2.log("Disconnecting consumer...");
|
|
2975
|
+
await consumer.disconnect();
|
|
2976
|
+
logger2.log("Consumer is shutting down...");
|
|
2977
|
+
} catch (error) {
|
|
2978
|
+
logger2.error(`Error during consumer shutdown: ${error}`);
|
|
2979
|
+
try {
|
|
2980
|
+
await consumer.disconnect();
|
|
2981
|
+
logger2.log("Consumer disconnected after error");
|
|
2982
|
+
} catch (disconnectError) {
|
|
2983
|
+
logger2.error(`Failed to disconnect consumer: ${disconnectError}`);
|
|
2984
|
+
}
|
|
2985
|
+
}
|
|
2986
|
+
};
|
|
2987
|
+
var handleMessage = async (logger2, streamingFunctionWithConfigList, message, producer, fieldMutations, logPayloads) => {
|
|
2988
|
+
if (message.value === void 0 || message.value === null) {
|
|
2989
|
+
logger2.log(`Received message with no value, skipping...`);
|
|
2990
|
+
return void 0;
|
|
2991
|
+
}
|
|
2992
|
+
try {
|
|
2993
|
+
let payloadBuffer = message.value;
|
|
2994
|
+
if (payloadBuffer && payloadBuffer.length >= 5 && payloadBuffer[0] === 0) {
|
|
2995
|
+
payloadBuffer = payloadBuffer.subarray(5);
|
|
2996
|
+
}
|
|
2997
|
+
const parsedData = JSON.parse(payloadBuffer.toString());
|
|
2998
|
+
mutateParsedJson(parsedData, fieldMutations);
|
|
2999
|
+
if (logPayloads) {
|
|
3000
|
+
logger2.log(`[PAYLOAD:STREAM_IN] ${JSON.stringify(parsedData)}`);
|
|
3001
|
+
}
|
|
3002
|
+
const transformedData = await Promise.all(
|
|
3003
|
+
streamingFunctionWithConfigList.map(async ([fn, config]) => {
|
|
3004
|
+
try {
|
|
3005
|
+
return await fn(parsedData);
|
|
3006
|
+
} catch (e) {
|
|
3007
|
+
const deadLetterQueue = config.deadLetterQueue;
|
|
3008
|
+
if (deadLetterQueue) {
|
|
3009
|
+
const deadLetterRecord = {
|
|
3010
|
+
originalRecord: {
|
|
3011
|
+
...parsedData,
|
|
3012
|
+
// Include original Kafka message metadata
|
|
3013
|
+
__sourcePartition: message.partition,
|
|
3014
|
+
__sourceOffset: message.offset,
|
|
3015
|
+
__sourceTimestamp: message.timestamp
|
|
3016
|
+
},
|
|
3017
|
+
errorMessage: e instanceof Error ? e.message : String(e),
|
|
3018
|
+
errorType: e instanceof Error ? e.constructor.name : "Unknown",
|
|
3019
|
+
failedAt: /* @__PURE__ */ new Date(),
|
|
3020
|
+
source: "transform"
|
|
3021
|
+
};
|
|
3022
|
+
cliLog({
|
|
3023
|
+
action: "DeadLetter",
|
|
3024
|
+
message: `Sending message to DLQ ${deadLetterQueue.name}: ${e instanceof Error ? e.message : String(e)}`,
|
|
3025
|
+
message_type: "Error"
|
|
3026
|
+
});
|
|
3027
|
+
try {
|
|
3028
|
+
await producer.send({
|
|
3029
|
+
topic: deadLetterQueue.name,
|
|
3030
|
+
messages: [{ value: JSON.stringify(deadLetterRecord) }]
|
|
3031
|
+
});
|
|
3032
|
+
} catch (dlqError) {
|
|
3033
|
+
logger2.error(`Failed to send to dead letter queue: ${dlqError}`);
|
|
3034
|
+
}
|
|
3035
|
+
} else {
|
|
3036
|
+
cliLog({
|
|
3037
|
+
action: "Function",
|
|
3038
|
+
message: `Error processing message (no DLQ configured): ${e instanceof Error ? e.message : String(e)}`,
|
|
3039
|
+
message_type: "Error"
|
|
3040
|
+
});
|
|
3041
|
+
}
|
|
3042
|
+
throw e;
|
|
3043
|
+
}
|
|
3044
|
+
})
|
|
3045
|
+
);
|
|
3046
|
+
const processedMessages = transformedData.map((userFunctionOutput, i) => {
|
|
3047
|
+
const [_, config] = streamingFunctionWithConfigList[i];
|
|
3048
|
+
if (userFunctionOutput) {
|
|
3049
|
+
if (Array.isArray(userFunctionOutput)) {
|
|
3050
|
+
return userFunctionOutput.flat().filter((item) => item !== void 0 && item !== null).map((item) => ({
|
|
3051
|
+
value: JSON.stringify(item),
|
|
3052
|
+
originalValue: parsedData,
|
|
3053
|
+
originalMessage: message,
|
|
3054
|
+
dlq: config.deadLetterQueue ?? void 0
|
|
3055
|
+
}));
|
|
3056
|
+
} else {
|
|
3057
|
+
return [
|
|
3058
|
+
{
|
|
3059
|
+
value: JSON.stringify(userFunctionOutput),
|
|
3060
|
+
originalValue: parsedData,
|
|
3061
|
+
originalMessage: message,
|
|
3062
|
+
dlq: config.deadLetterQueue ?? void 0
|
|
3063
|
+
}
|
|
3064
|
+
];
|
|
3065
|
+
}
|
|
3066
|
+
}
|
|
3067
|
+
}).flat().filter((item) => item !== void 0 && item !== null);
|
|
3068
|
+
if (logPayloads) {
|
|
3069
|
+
if (processedMessages.length > 0) {
|
|
3070
|
+
const outgoingJsonStrings = processedMessages.map((msg) => msg.value);
|
|
3071
|
+
logger2.log(`[PAYLOAD:STREAM_OUT] [${outgoingJsonStrings.join(",")}]`);
|
|
3072
|
+
} else {
|
|
3073
|
+
logger2.log(`[PAYLOAD:STREAM_OUT] (no output from streaming function)`);
|
|
3074
|
+
}
|
|
3075
|
+
}
|
|
3076
|
+
return processedMessages;
|
|
3077
|
+
} catch (e) {
|
|
3078
|
+
logger2.error(`Failed to transform data`);
|
|
3079
|
+
if (e instanceof Error) {
|
|
3080
|
+
logError(logger2, e);
|
|
3081
|
+
}
|
|
3082
|
+
}
|
|
3083
|
+
return void 0;
|
|
3084
|
+
};
|
|
3085
|
+
var handleDLQForFailedMessages = async (logger2, producer, messages, error) => {
|
|
3086
|
+
let messagesHandledByDLQ = 0;
|
|
3087
|
+
let messagesWithoutDLQ = 0;
|
|
3088
|
+
let dlqErrors = 0;
|
|
3089
|
+
for (const msg of messages) {
|
|
3090
|
+
if (msg.dlq && msg.originalValue) {
|
|
3091
|
+
const deadLetterRecord = {
|
|
3092
|
+
originalRecord: {
|
|
3093
|
+
...msg.originalValue,
|
|
3094
|
+
// Include original Kafka message metadata
|
|
3095
|
+
__sourcePartition: msg.originalMessage.partition,
|
|
3096
|
+
__sourceOffset: msg.originalMessage.offset,
|
|
3097
|
+
__sourceTimestamp: msg.originalMessage.timestamp
|
|
3098
|
+
},
|
|
3099
|
+
errorMessage: error instanceof Error ? error.message : String(error),
|
|
3100
|
+
errorType: error instanceof Error ? error.constructor.name : "Unknown",
|
|
3101
|
+
failedAt: /* @__PURE__ */ new Date(),
|
|
3102
|
+
source: "transform"
|
|
3103
|
+
};
|
|
3104
|
+
cliLog({
|
|
3105
|
+
action: "DeadLetter",
|
|
3106
|
+
message: `Sending failed message to DLQ ${msg.dlq.name}: ${error instanceof Error ? error.message : String(error)}`,
|
|
3107
|
+
message_type: "Error"
|
|
3108
|
+
});
|
|
3109
|
+
try {
|
|
3110
|
+
await producer.send({
|
|
3111
|
+
topic: msg.dlq.name,
|
|
3112
|
+
messages: [{ value: JSON.stringify(deadLetterRecord) }]
|
|
3113
|
+
});
|
|
3114
|
+
logger2.log(`Sent failed message to DLQ ${msg.dlq.name}`);
|
|
3115
|
+
messagesHandledByDLQ++;
|
|
3116
|
+
} catch (dlqError) {
|
|
3117
|
+
logger2.error(`Failed to send to DLQ: ${dlqError}`);
|
|
3118
|
+
dlqErrors++;
|
|
3119
|
+
}
|
|
3120
|
+
} else if (!msg.dlq) {
|
|
3121
|
+
messagesWithoutDLQ++;
|
|
3122
|
+
logger2.warn(`Cannot send to DLQ: no DLQ configured for message`);
|
|
3123
|
+
} else {
|
|
3124
|
+
messagesWithoutDLQ++;
|
|
3125
|
+
logger2.warn(`Cannot send to DLQ: original message value not available`);
|
|
3126
|
+
}
|
|
3127
|
+
}
|
|
3128
|
+
const allMessagesHandled = messagesHandledByDLQ === messages.length && messagesWithoutDLQ === 0 && dlqErrors === 0;
|
|
3129
|
+
if (allMessagesHandled) {
|
|
3130
|
+
logger2.log(
|
|
3131
|
+
`All ${messagesHandledByDLQ} failed message(s) sent to DLQ, suppressing original error`
|
|
3132
|
+
);
|
|
3133
|
+
} else if (messagesHandledByDLQ > 0) {
|
|
3134
|
+
logger2.warn(
|
|
3135
|
+
`Partial DLQ success: ${messagesHandledByDLQ}/${messages.length} message(s) sent to DLQ`
|
|
3136
|
+
);
|
|
3137
|
+
if (messagesWithoutDLQ > 0) {
|
|
3138
|
+
logger2.error(
|
|
3139
|
+
`Cannot handle batch failure: ${messagesWithoutDLQ} message(s) have no DLQ configured or missing original value`
|
|
3140
|
+
);
|
|
3141
|
+
}
|
|
3142
|
+
if (dlqErrors > 0) {
|
|
3143
|
+
logger2.error(`${dlqErrors} message(s) failed to send to DLQ`);
|
|
3144
|
+
}
|
|
3145
|
+
}
|
|
3146
|
+
return allMessagesHandled;
|
|
3147
|
+
};
|
|
3148
|
+
var sendMessages = async (logger2, metrics, targetTopic, producer, messages) => {
|
|
3149
|
+
if (messages.length === 0) return;
|
|
3150
|
+
try {
|
|
3151
|
+
await producer.send({
|
|
3152
|
+
topic: targetTopic.name,
|
|
3153
|
+
messages
|
|
3154
|
+
});
|
|
3155
|
+
for (const msg of messages) {
|
|
3156
|
+
metrics.bytes += import_node_buffer.Buffer.byteLength(msg.value, "utf8");
|
|
3157
|
+
}
|
|
3158
|
+
metrics.count_out += messages.length;
|
|
3159
|
+
logger2.log(`Sent ${messages.length} messages to ${targetTopic.name}`);
|
|
3160
|
+
} catch (e) {
|
|
3161
|
+
logger2.error(`Failed to send transformed data`);
|
|
3162
|
+
if (e instanceof Error) {
|
|
3163
|
+
logError(logger2, e);
|
|
3164
|
+
}
|
|
3165
|
+
const allHandledByDLQ = await handleDLQForFailedMessages(
|
|
3166
|
+
logger2,
|
|
3167
|
+
producer,
|
|
3168
|
+
messages,
|
|
3169
|
+
e
|
|
3170
|
+
);
|
|
3171
|
+
if (!allHandledByDLQ) {
|
|
3172
|
+
throw e;
|
|
3173
|
+
}
|
|
3174
|
+
}
|
|
3175
|
+
};
|
|
3176
|
+
var sendMessageMetrics = (logger2, metrics) => {
|
|
3177
|
+
if (metrics.count_in > 0 || metrics.count_out > 0 || metrics.bytes > 0) {
|
|
3178
|
+
metricsLog({
|
|
3179
|
+
count_in: metrics.count_in,
|
|
3180
|
+
count_out: metrics.count_out,
|
|
3181
|
+
function_name: logger2.logPrefix,
|
|
3182
|
+
bytes: metrics.bytes,
|
|
3183
|
+
timestamp: /* @__PURE__ */ new Date()
|
|
3184
|
+
});
|
|
3185
|
+
}
|
|
3186
|
+
metrics.count_in = 0;
|
|
3187
|
+
metrics.bytes = 0;
|
|
3188
|
+
metrics.count_out = 0;
|
|
3189
|
+
setTimeout(() => sendMessageMetrics(logger2, metrics), 1e3);
|
|
3190
|
+
};
|
|
3191
|
+
/**
 * Resolves the streaming transform functions registered for a
 * source-topic/target-topic pair.
 *
 * The lookup key is `<sourceStream>_<targetStream>` (or `<no-target>` when no
 * target topic is given); any registered key that starts with that prefix is
 * considered a match. Throws (and cliLogs an Error) when nothing matches.
 *
 * Returns { functions, fieldMutations } where `functions` is a list of
 * [fn, config] pairs and `fieldMutations` is derived from the first matching
 * entry's source columns.
 */
async function loadStreamingFunction(sourceTopic, targetTopic) {
  const registry = await getStreamingFunctions();
  const targetPart = targetTopic ? topicNameToStreamName(targetTopic) : "<no-target>";
  const lookupKey = `${topicNameToStreamName(sourceTopic)}_${targetPart}`;
  const matchingEntries = [...registry.entries()].filter(([key]) =>
    key.startsWith(lookupKey)
  );
  if (matchingEntries.length === 0) {
    const message = `No functions found for ${lookupKey}`;
    cliLog({
      action: "Function",
      message,
      message_type: "Error"
    });
    throw new Error(message);
  }
  // Keep only the [fn, config] pair of each registry value.
  const functions = matchingEntries.map(([, [fn, config]]) => [fn, config]);
  // The third slot of the first match carries the source column metadata.
  const [, firstValue] = matchingEntries[0];
  const fieldMutations = buildFieldMutationsFromColumns(firstValue[2]);
  return { functions, fieldMutations };
}
|
|
3215
|
+
// Connects the Kafka consumer, loads the streaming transform functions for the
// source/target topic pair, subscribes to the source topic, and runs a batch
// loop that applies the transforms and forwards results to the target topic.
// Throws if the consumer cannot connect or no transform functions are found.
var startConsumer = async (args, logger2, metrics, _parallelism, consumer, producer, streamingFuncId) => {
  validateTopicConfig(args.sourceTopic);
  if (args.targetTopic) {
    validateTopicConfig(args.targetTopic);
  }
  try {
    logger2.log("Connecting consumer...");
    await consumer.connect();
    logger2.log("Consumer connected successfully");
  } catch (error) {
    logger2.error("Failed to connect consumer:");
    if (error instanceof Error) {
      logError(logger2, error);
    }
    // Re-throw: caller decides whether the worker dies.
    throw error;
  }
  logger2.log(
    `Starting consumer group '${streamingFuncId}' with source topic: ${args.sourceTopic.name} and target topic: ${args.targetTopic?.name || "none"}`
  );
  const result = await loadStreamingFunction(
    args.sourceTopic,
    args.targetTopic
  );
  const streamingFunctions = result.functions;
  const fieldMutations = result.fieldMutations;
  await consumer.subscribe({
    topics: [args.sourceTopic.name]
    // Use full topic name for Kafka operations
  });
  await consumer.run({
    eachBatchAutoResolve: true,
    // Enable parallel processing of partitions
    partitionsConsumedConcurrently: PARTITIONS_CONSUMED_CONCURRENTLY,
    // To be adjusted
    eachBatch: async ({ batch, heartbeat, isRunning, isStale }) => {
      // Skip work when a rebalance is in flight or the consumer is stopping.
      if (!isRunning() || isStale()) {
        return;
      }
      const functionName = logger2.logPrefix;
      // Run the whole batch inside the function's async-local context so
      // structured logs carry function_name.
      await functionContextStorage.run({ functionName }, async () => {
        metrics.count_in += batch.messages.length;
        cliLog({
          action: "Received",
          message: `${logger2.logPrefix.replace("__", " -> ")} ${batch.messages.length} message(s)`
        });
        logger2.log(`Received ${batch.messages.length} message(s)`);
        let index = 0;
        const readableStream = import_node_stream2.Readable.from(batch.messages);
        // Process messages with bounded concurrency via Readable.map.
        const processedMessages = await readableStream.map(
          async (message) => {
            index++;
            // Heartbeat periodically inside large batches to avoid session
            // timeouts.
            // NOTE(review): `index - 1 === batch.messages.length` can never be
            // true (index runs 1..length), so that branch looks dead; likely
            // intended `index === batch.messages.length` — confirm.
            if (batch.messages.length > DEFAULT_MAX_STREAMING_CONCURRENCY && index % DEFAULT_MAX_STREAMING_CONCURRENCY || index - 1 === batch.messages.length) {
              await heartbeat();
            }
            return handleMessage(
              logger2,
              streamingFunctions,
              message,
              producer,
              fieldMutations,
              args.logPayloads
            );
          },
          {
            concurrency: MAX_STREAMING_CONCURRENCY
          }
        ).toArray();
        // Drop transforms that returned nothing (filtered-out messages).
        const filteredMessages = processedMessages.flat().filter((msg) => msg !== void 0 && msg.value !== void 0);
        // NOTE(review): this early-return checks processedMessages, not
        // filteredMessages — presumably intentional (skip send only when no
        // target or the batch produced nothing at all); verify.
        if (args.targetTopic === void 0 || processedMessages.length === 0) {
          return;
        }
        await heartbeat();
        if (filteredMessages.length > 0) {
          await sendMessages(
            logger2,
            metrics,
            args.targetTopic,
            producer,
            filteredMessages
          );
        }
      });
    }
  });
  logger2.log("Consumer is running...");
};
|
|
3301
|
+
/**
 * Builds a console-backed logger for one streaming-function worker.
 *
 * The returned `logPrefix` property is the bare function name (source stream,
 * optionally joined to the target stream with "__") so it matches the
 * source_primitive.name format used for log correlation; the console output
 * additionally carries the worker id.
 */
var buildLogger = (args, workerId) => {
  const source = topicNameToStreamName(args.sourceTopic);
  const target = args.targetTopic ? topicNameToStreamName(args.targetTopic) : void 0;
  const functionName = target ? `${source}__${target}` : source;
  const consolePrefix = `${functionName} (worker ${workerId})`;
  const emit = (writer) => (message) => {
    writer(`${consolePrefix}: ${message}`);
  };
  return {
    // logPrefix is used for structured logging (function_name field)
    // Must match source_primitive.name format for log correlation
    logPrefix: functionName,
    log: emit(console.log),
    error: emit(console.error),
    warn: emit(console.warn)
  };
};
|
|
3321
|
+
// Converts a dotted version string into the topic-name suffix form,
// e.g. "1.2.3" -> "_1_2_3".
function formatVersionSuffix(version) {
  return "_" + version.split(".").join("_");
}
|
|
3324
|
+
/**
 * Recovers the bare stream name from a full Kafka topic name by stripping, in
 * order, the version suffix ("_1_2" for version "1.2") and then the namespace
 * prefix ("<namespace>."). Throws when an expected suffix/prefix is absent.
 */
function topicNameToStreamName(config) {
  let name = config.name;
  if (config.version) {
    // Inlined suffix formatting: "1.2" -> "_1_2".
    const versionSuffix = `_${config.version.replace(/\./g, "_")}`;
    if (!name.endsWith(versionSuffix)) {
      throw new Error(
        `Version suffix ${versionSuffix} not found in topic name ${name}`
      );
    }
    name = name.slice(0, -versionSuffix.length);
  }
  if (config.namespace && config.namespace !== "") {
    const prefix = `${config.namespace}.`;
    if (!name.startsWith(prefix)) {
      throw new Error(
        `Namespace prefix ${prefix} not found in topic name ${name}`
      );
    }
    name = name.slice(prefix.length);
  }
  return name;
}
|
|
3348
|
+
/**
 * Asserts that a topic config's full name is consistent with its declared
 * namespace (name starts with "<namespace>.") and version (name ends with the
 * "_x_y" suffix). Throws an Error describing the first violation found.
 */
function validateTopicConfig(config) {
  if (config.namespace && !config.name.startsWith(`${config.namespace}.`)) {
    throw new Error(
      `Topic name ${config.name} must start with namespace ${config.namespace}`
    );
  }
  if (!config.version) {
    return;
  }
  // Inlined suffix formatting: "1.2" -> "_1_2".
  const versionSuffix = `_${config.version.replace(/\./g, "_")}`;
  if (!config.name.endsWith(versionSuffix)) {
    throw new Error(
      `Topic name ${config.name} must end with version ${config.version}`
    );
  }
}
|
|
3363
|
+
// Entry point for the "streaming-functions" command: spins up a worker
// cluster where each worker owns its own Kafka client, producer, and consumer
// for one source->target streaming function, and tears them down gracefully
// on SIGTERM.
var runStreamingFunctions = async (args) => {
  validateTopicConfig(args.sourceTopic);
  if (args.targetTopic) {
    validateTopicConfig(args.targetTopic);
  }
  // Consumer-group id; shared by all workers so partitions are balanced.
  const streamingFuncId = `flow-${args.sourceTopic.name}-${args.targetTopic?.name || ""}`;
  const cluster2 = new Cluster({
    maxCpuUsageRatio: 0.5,
    maxWorkerCount: args.maxSubscriberCount,
    workerStart: async (worker, parallelism) => {
      const logger2 = buildLogger(args, worker.id);
      const functionName = logger2.logPrefix;
      // All worker setup runs inside the function's async-local log context.
      return await functionContextStorage.run({ functionName }, async () => {
        const metrics = {
          count_in: 0,
          count_out: 0,
          bytes: 0
        };
        // Kick off the self-rescheduling metrics flush loop.
        setTimeout(() => sendMessageMetrics(logger2, metrics), 1e3);
        const clientIdPrefix = HOSTNAME ? `${HOSTNAME}-` : "";
        const processId = `${clientIdPrefix}${streamingFuncId}-ts-${worker.id}`;
        const kafka = await getKafkaClient(
          {
            clientId: processId,
            broker: args.broker,
            securityProtocol: args.securityProtocol,
            saslUsername: args.saslUsername,
            saslPassword: args.saslPassword,
            saslMechanism: args.saslMechanism
          },
          logger2
        );
        const consumer = kafka.consumer({
          kafkaJS: {
            groupId: streamingFuncId,
            sessionTimeout: SESSION_TIMEOUT_CONSUMER,
            heartbeatInterval: HEARTBEAT_INTERVAL_CONSUMER,
            retry: {
              retries: MAX_RETRIES_CONSUMER
            },
            autoCommit: true,
            autoCommitInterval: AUTO_COMMIT_INTERVAL_MS,
            fromBeginning: true
          },
          "js.consumer.max.batch.size": CONSUMER_MAX_BATCH_SIZE
        });
        // Default producer message cap: 1 MiB unless the target topic says
        // otherwise.
        const maxMessageBytes = args.targetTopic?.max_message_bytes || 1024 * 1024;
        const producer = kafka.producer(
          createProducerConfig(maxMessageBytes)
        );
        try {
          logger2.log("Starting producer...");
          await startProducer(logger2, producer);
          try {
            logger2.log("Starting consumer...");
            await startConsumer(
              args,
              logger2,
              metrics,
              parallelism,
              consumer,
              producer,
              streamingFuncId
            );
          } catch (e) {
            logger2.error("Failed to start kafka consumer: ");
            if (e instanceof Error) {
              logError(logger2, e);
            }
            throw e;
          }
        } catch (e) {
          logger2.error("Failed to start kafka producer: ");
          if (e instanceof Error) {
            logError(logger2, e);
          }
          throw e;
        }
        // Handed to workerStop for graceful shutdown.
        return [logger2, producer, consumer];
      });
    },
    workerStop: async ([logger2, producer, consumer]) => {
      const functionName = logger2.logPrefix;
      await functionContextStorage.run({ functionName }, async () => {
        logger2.log(`Received SIGTERM, shutting down gracefully...`);
        // Stop intake first, then drain, then stop the producer.
        logger2.log("Stopping consumer first...");
        await stopConsumer(logger2, consumer, args.sourceTopic);
        logger2.log("Waiting for in-flight messages to complete...");
        await new Promise((resolve3) => setTimeout(resolve3, 2e3));
        logger2.log("Stopping producer...");
        await stopProducer(logger2, producer);
        logger2.log("Graceful shutdown completed");
      });
    }
  });
  cluster2.start();
};
|
|
3460
|
+
init_compiler_config();
|
|
3461
|
+
/**
 * Loads the module at `targetModel` and prints its exports as JSON to stdout.
 *
 * In compiled mode the path is rewritten from the source tree into
 * `.moose/compiled/<sourceDir>/` and the `.ts` extension swapped for `.js`
 * before loading.
 */
async function runExportSerializer(targetModel) {
  const useCompiled2 = shouldUseCompiled();
  const sourceDir = getSourceDir();
  let modulePath = targetModel;
  if (useCompiled2) {
    const sourcePattern = `/${sourceDir}/`;
    // Redirect source-tree paths to their compiled counterparts.
    modulePath = modulePath.includes(sourcePattern)
      ? modulePath.replace(sourcePattern, `/.moose/compiled/${sourceDir}/`).replace(/\.ts$/, ".js")
      : modulePath.replace(/\.ts$/, ".js");
  }
  const exports_list = await loadModule(modulePath);
  console.log(JSON.stringify(exports_list));
}
|
|
3478
|
+
var import_process2 = __toESM2(require("process"));
|
|
3479
|
+
init_compiler_config();
|
|
3480
|
+
/**
 * Loads the consumption API module named `targetModel` (compiled .js or
 * source .ts depending on mode) and prints its attached input/output JSON
 * schemas ({ inputSchema, outputSchema }, null when absent) to stdout.
 */
async function runApiTypeSerializer(targetModel) {
  const sourceDir = getSourceDir();
  const useCompiled2 = shouldUseCompiled();
  const base = import_process2.default.cwd();
  const apiPath = useCompiled2
    ? `${base}/.moose/compiled/${sourceDir}/apis/${targetModel}.js`
    : `${base}/${sourceDir}/apis/${targetModel}.ts`;
  const module2 = await loadModule(apiPath);
  const func = module2.default;
  // Schemas are attached to the default export by the compiler plugin;
  // fall back to null when missing.
  const inputSchema = func["moose_input_schema"] || null;
  const outputSchema = func["moose_output_schema"] || null;
  const payload = { inputSchema, outputSchema };
  console.log(JSON.stringify(payload));
}
|
|
3495
|
+
var import_worker2 = require_worker();
|
|
3496
|
+
var path4 = __toESM2(require("path"));
|
|
3497
|
+
var fs3 = __toESM2(require("fs"));
|
|
3498
|
+
init_internal();
|
|
3499
|
+
var import_activity = require_activity();
|
|
3500
|
+
var import_workflow3 = require_workflow();
|
|
3501
|
+
init_internal();
|
|
3502
|
+
init_json();
|
|
3503
|
+
// Async-local storage carrying the current task's name so structured console
// logs can attach it as the "task_name" field.
var taskContextStorage = setupStructuredConsole(
  (ctx) => ctx.taskName,
  "task_name"
);
// Field name and accessor reused by the Temporal logger below.
var TASK_CONTEXT_FIELD_NAME = "task_name";
var getTaskContextField = (ctx) => ctx.taskName;
|
|
3509
|
+
// Temporal activity implementations. Errors are serialized as JSON strings
// ({ error, details, stack }) in the thrown Error message so the workflow side
// can parse them.
var activities = {
  // Returns whether a workflow with this name is registered; never throws
  // (lookup failures are logged and reported as false).
  async hasWorkflow(name) {
    try {
      const workflows = await getWorkflows2();
      const hasWorkflow = workflows.has(name);
      import_activity.log.info(`Found workflow:: ${hasWorkflow}`);
      return hasWorkflow;
    } catch (error) {
      import_activity.log.error(`Failed to check if workflow ${name} exists: ${error}`);
      return false;
    }
  },
  // Looks up a registered workflow by name; throws a JSON-encoded error when
  // missing or when the registry cannot be loaded.
  async getWorkflowByName(name) {
    try {
      import_activity.log.info(`Getting workflow ${name}`);
      const workflows = await getWorkflows2();
      if (workflows.has(name)) {
        import_activity.log.info(`Workflow ${name} found`);
        return workflows.get(name);
      } else {
        const errorData = {
          error: "Workflow not found",
          details: `Workflow ${name} not found`,
          stack: void 0
        };
        const errorMsg = JSON.stringify(errorData);
        import_activity.log.error(errorMsg);
        // Thrown inside the try, so the catch below re-wraps it.
        throw new Error(errorMsg);
      }
    } catch (error) {
      const errorData = {
        error: "Failed to get workflow",
        details: error instanceof Error ? error.message : String(error),
        stack: error instanceof Error ? error.stack : void 0
      };
      const errorMsg = JSON.stringify(errorData);
      import_activity.log.error(errorMsg);
      throw new Error(errorMsg);
    }
  },
  // Resolves a task definition inside a workflow; throws a JSON-encoded error
  // on failure. Delegates to the module-level getTaskForWorkflow helper.
  async getTaskForWorkflow(workflowName, taskName) {
    try {
      import_activity.log.info(`Getting task ${taskName} from workflow ${workflowName}`);
      const task = await getTaskForWorkflow(workflowName, taskName);
      import_activity.log.info(`Task ${taskName} found in workflow ${workflowName}`);
      return task;
    } catch (error) {
      const errorData = {
        error: "Failed to get task",
        details: error instanceof Error ? error.message : String(error),
        stack: error instanceof Error ? error.stack : void 0
      };
      const errorMsg = JSON.stringify(errorData);
      import_activity.log.error(errorMsg);
      throw new Error(errorMsg);
    }
  },
  // Runs one task's `run` handler inside the task-name log context, with a 5s
  // periodic Temporal heartbeat. Cancellation short-circuits via
  // Promise.race(context.cancelled), invokes the task's onCancel handler if
  // present, and resolves to []. All other failures are rethrown JSON-encoded.
  async executeTask(workflow, task, inputData) {
    const context = import_activity.Context.current();
    // Mutable state object shared between run and onCancel for this execution.
    const taskState = {};
    const taskIdentifier = workflow.name;
    return await taskContextStorage.run(
      { taskName: taskIdentifier },
      async () => {
        let heartbeatInterval = null;
        const startPeriodicHeartbeat = () => {
          heartbeatInterval = setInterval(() => {
            context.heartbeat(`Task ${task.name} in progress`);
          }, 5e3);
        };
        const stopPeriodicHeartbeat = () => {
          if (heartbeatInterval) {
            clearInterval(heartbeatInterval);
            heartbeatInterval = null;
          }
        };
        try {
          import_activity.log.info(
            `Task ${task.name} received input: ${JSON.stringify(inputData)}`
          );
          context.heartbeat(`Starting task: ${task.name}`);
          const fullTask = await getTaskForWorkflow(workflow.name, task.name);
          // Round-trip through JSON to revive Date-like strings in the input.
          const revivedInputData = inputData ? JSON.parse(JSON.stringify(inputData), jsonDateReviver) : inputData;
          try {
            startPeriodicHeartbeat();
            // Race the task body against Temporal cancellation.
            const result = await Promise.race([
              fullTask.config.run({
                state: taskState,
                input: revivedInputData
              }),
              context.cancelled
            ]);
            return result;
          } catch (error) {
            if ((0, import_workflow3.isCancellation)(error)) {
              import_activity.log.info(
                `Task ${task.name} cancelled, calling onCancel handler if it exists`
              );
              if (fullTask.config.onCancel) {
                await fullTask.config.onCancel({
                  state: taskState,
                  input: revivedInputData
                });
              }
              return [];
            } else {
              throw error;
            }
          } finally {
            // Always stop the heartbeat timer, success or failure.
            stopPeriodicHeartbeat();
          }
        } catch (error) {
          const errorData = {
            error: "Task execution failed",
            details: error instanceof Error ? error.message : String(error),
            stack: error instanceof Error ? error.stack : void 0
          };
          const errorMsg = JSON.stringify(errorData);
          import_activity.log.error(errorMsg);
          throw new Error(errorMsg);
        }
      }
    );
  }
};
|
|
3634
|
+
// Wraps the shared executeTask activity under a script-specific key so
// Temporal can dispatch to it by the "<workflow>/<task>" activity name.
function createActivityForScript(scriptName) {
  const activityMap = {};
  activityMap[scriptName] = activities.executeTask;
  return activityMap;
}
|
|
3639
|
+
var import_worker = require_worker();
|
|
3640
|
+
// Lazily constructs a single Temporal DefaultLogger that routes worker log
// records through the structured-console emitter, and installs it (plus
// telemetry filters) into the Temporal Runtime exactly once.
var LoggerSingleton = class _LoggerSingleton {
  // Shared instance; null until initializeLogger() is first called.
  static instance = null;
  constructor() {
  }
  // Creates and installs the logger on first call; later calls return the
  // cached instance without touching Runtime again.
  static initializeLogger() {
    if (!_LoggerSingleton.instance) {
      _LoggerSingleton.instance = new import_worker.DefaultLogger(
        "DEBUG",
        ({ level, message }) => {
          const structuredLevel = level.toLowerCase();
          // Try structured emission first (attaches task_name from the
          // async-local context); fall back to plain console output.
          const emitted = emitStructuredLog(
            taskContextStorage,
            getTaskContextField,
            TASK_CONTEXT_FIELD_NAME,
            structuredLevel,
            message
          );
          if (!emitted) {
            console.log(`${level} | ${message}`);
          }
        }
      );
      import_worker.Runtime.install({
        logger: _LoggerSingleton.instance,
        telemetryOptions: {
          logging: {
            filter: (0, import_worker.makeTelemetryFilterString)({ core: "INFO", other: "INFO" }),
            forward: {}
          }
        }
      });
    }
    return _LoggerSingleton.instance;
  }
  // Returns the cached instance (may be null if never initialized).
  static getInstance() {
    return _LoggerSingleton.instance;
  }
};
|
|
3678
|
+
var initializeLogger = LoggerSingleton.initializeLogger;
|
|
3679
|
+
var ALREADY_REGISTERED = /* @__PURE__ */ new Set();
|
|
3680
|
+
/**
 * Builds the list of activity names ("<workflow>/<startingTask>") for every
 * registered workflow, logging each registration as it goes.
 */
function collectActivities(logger2, workflows) {
  logger2.info(`Collecting tasks from workflows`);
  return Array.from(workflows.entries(), ([workflowName, workflow]) => {
    const startingTaskName = workflow.config.startingTask.name;
    logger2.info(
      `Registering workflow: ${workflowName} with starting task: ${startingTaskName}`
    );
    return `${workflowName}/${startingTaskName}`;
  });
}
|
|
3691
|
+
// Opens a NativeConnection to the Temporal server with up to 5 attempts and
// exponential backoff (1s, 2s, 4s, ...). Supports three auth modes: mTLS via
// client cert/key files, API key (which also rewrites the address to Temporal
// Cloud's us-west1 endpoint), or plain.
async function createTemporalConnection(logger2, temporalConfig) {
  logger2.info(
    `Using temporal_url: ${temporalConfig.url} and namespace: ${temporalConfig.namespace}`
  );
  let connectionOptions = {
    address: temporalConfig.url
  };
  if (temporalConfig.clientCert && temporalConfig.clientKey) {
    logger2.info("Using TLS for secure Temporal");
    // NOTE(review): readFileSync is synchronous — the `await` here is a no-op.
    const cert = await fs3.readFileSync(temporalConfig.clientCert);
    const key = await fs3.readFileSync(temporalConfig.clientKey);
    connectionOptions.tls = {
      clientCertPair: {
        crt: cert,
        key
      }
    };
  } else if (temporalConfig.apiKey) {
    logger2.info(`Using API key for secure Temporal`);
    // API-key auth always targets Temporal Cloud's fixed regional endpoint,
    // overriding the configured url.
    connectionOptions.address = "us-west1.gcp.api.temporal.io:7233";
    connectionOptions.apiKey = temporalConfig.apiKey;
    connectionOptions.tls = {};
    connectionOptions.metadata = {
      "temporal-namespace": temporalConfig.namespace
    };
  }
  logger2.info(`Connecting to Temporal at ${connectionOptions.address}`);
  const maxRetries = 5;
  const baseDelay = 1e3;
  let attempt = 0;
  while (true) {
    try {
      const connection = await import_worker2.NativeConnection.connect(connectionOptions);
      logger2.info("Connected to Temporal server");
      return connection;
    } catch (err) {
      attempt++;
      logger2.error(`Connection attempt ${attempt} failed: ${err}`);
      if (attempt >= maxRetries) {
        logger2.error(`Failed to connect after ${attempt} attempts`);
        throw err;
      }
      // Exponential backoff: 1s, 2s, 4s, 8s.
      const backoff = baseDelay * Math.pow(2, attempt - 1);
      logger2.warn(`Retrying connection in ${backoff}ms...`);
      await new Promise((resolve3) => setTimeout(resolve3, backoff));
    }
  }
}
|
|
3739
|
+
// Discovers registered workflows, wraps each starting task as a dynamically
// named Temporal activity, bundles the workflow code, and creates a Temporal
// Worker on the "typescript-script-queue" task queue. Returns null when there
// is nothing to register (no temporal config, no workflows, or no tasks).
async function registerWorkflows(logger2, config) {
  logger2.info(`Registering workflows`);
  if (!config.temporalConfig) {
    logger2.info(`Temporal config not provided, skipping workflow registration`);
    return null;
  }
  const allScriptPaths = [];
  const dynamicActivities = [];
  try {
    const workflows = await getWorkflows2();
    if (workflows.size > 0) {
      logger2.info(`Found ${workflows.size} workflows`);
      allScriptPaths.push(...collectActivities(logger2, workflows));
      if (allScriptPaths.length === 0) {
        logger2.info(`No tasks found in workflows`);
        return null;
      }
      logger2.info(`Found ${allScriptPaths.length} tasks in workflows`);
      for (const activityName of allScriptPaths) {
        // ALREADY_REGISTERED is module-level, so re-invocation within this
        // process will not double-register an activity name.
        if (!ALREADY_REGISTERED.has(activityName)) {
          const activity = await createActivityForScript(activityName);
          dynamicActivities.push(activity);
          ALREADY_REGISTERED.add(activityName);
          logger2.info(`Registered task ${activityName}`);
        }
      }
      if (dynamicActivities.length === 0) {
        logger2.info(`No dynamic activities found in workflows`);
        return null;
      }
      logger2.info(
        `Found ${dynamicActivities.length} dynamic activities in workflows`
      );
    }
    // NOTE(review): the two checks below repeat the guards above; they only
    // matter when workflows.size === 0 (both lists empty -> early null).
    if (allScriptPaths.length === 0) {
      logger2.info(`No workflows found`);
      return null;
    }
    logger2.info(`Found ${allScriptPaths.length} workflows`);
    if (dynamicActivities.length === 0) {
      logger2.info(`No tasks found`);
      return null;
    }
    logger2.info(`Found ${dynamicActivities.length} task(s)`);
    const connection = await createTemporalConnection(
      logger2,
      config.temporalConfig
    );
    // Quiet logger handed to the bundler: only errors are forwarded.
    const silentLogger = {
      info: () => {
      },
      // Suppress info logs (webpack output)
      debug: () => {
      },
      // Suppress debug logs
      warn: () => {
      },
      // Suppress warnings if desired
      log: () => {
      },
      // Suppress general logs
      trace: () => {
      },
      // Suppress trace logs
      error: (message, meta) => {
        logger2.error(message, meta);
      }
    };
    const workflowBundle = await (0, import_worker2.bundleWorkflowCode)({
      workflowsPath: path4.resolve(__dirname, "scripts/workflow.js"),
      logger: silentLogger
    });
    const worker = await import_worker2.Worker.create({
      connection,
      namespace: config.temporalConfig.namespace,
      taskQueue: "typescript-script-queue",
      workflowBundle,
      activities: {
        // Static activities plus one entry per dynamically named script.
        ...activities,
        ...Object.fromEntries(
          dynamicActivities.map((activity) => [
            Object.keys(activity)[0],
            Object.values(activity)[0]
          ])
        )
      }
    });
    return worker;
  } catch (error) {
    logger2.error(`Error registering workflows: ${error}`);
    throw error;
  }
}
|
|
3832
|
+
// Entry point for the "scripts" command: registers workflows as a Temporal
// worker, wires signal handlers for graceful shutdown (3s timeout), and runs
// the worker until it stops. Exits the process on fatal errors or when no
// workflows exist.
async function runScripts(config) {
  const logger2 = initializeLogger();
  process.on("uncaughtException", (error) => {
    console.error(`[PROCESS] Uncaught Exception: ${error}`);
    process.exit(1);
  });
  const worker = await registerWorkflows(logger2, config);
  if (!worker) {
    logger2.warn(
      `No workflows found. To disable workflow infrastructure, set workflows=false in moose.config.toml`
    );
    process.exit(0);
  }
  // Guards against running the shutdown sequence twice on repeated signals.
  let isShuttingDown = false;
  async function handleSignal(signal) {
    console.log(`[SHUTDOWN] Received ${signal}`);
    if (isShuttingDown) {
      return;
    }
    isShuttingDown = true;
    try {
      // NOTE(review): worker is always truthy here (checked above, and the
      // null path exits the process), so this branch appears unreachable.
      if (!worker) {
        process.exit(0);
      }
      // Give the worker 3s to drain before forcing exit.
      await Promise.race([
        worker.shutdown(),
        new Promise(
          (_, reject) => setTimeout(() => reject(new Error("Shutdown timeout")), 3e3)
        )
      ]);
      process.exit(0);
    } catch (error) {
      console.log(`[SHUTDOWN] Error: ${error}`);
      process.exit(1);
    }
  }
  ["SIGTERM", "SIGINT", "SIGHUP", "SIGQUIT"].forEach((signal) => {
    process.on(signal, () => {
      handleSignal(signal).catch((error) => {
        console.log(`[SHUTDOWN] Error: ${error}`);
        process.exit(1);
      });
    });
  });
  logger2.info("Starting TypeScript worker...");
  try {
    // Blocks until the worker shuts down.
    await worker.run();
  } catch (error) {
    console.log(`[SHUTDOWN] Error: ${error}`);
    process.exit(1);
  }
  return worker;
}
|
|
3885
|
+
// --- CLI bootstrap ---------------------------------------------------------
var import_process3 = __toESM2(require("process"));
var import_commander = require("commander");
// When running against TypeScript sources (not the precompiled bundle),
// install ts-node — with the Moose compiler plugins only for commands that
// need them.
var useCompiled = shouldUseCompiled();
if (!useCompiled) {
  const command = import_process3.default.argv[2];
  const needsPlugins = COMMANDS_REQUIRING_PLUGINS.includes(command);
  if (needsPlugins) {
    (0, import_ts_node.register)({
      require: ["tsconfig-paths/register"],
      esm: true,
      experimentalTsImportSpecifiers: true,
      compiler: "ts-patch/compiler",
      compilerOptions: {
        plugins: [...MOOSE_COMPILER_PLUGINS],
        experimentalDecorators: true
      }
    });
  } else {
    (0, import_ts_node.register)({
      esm: true,
      experimentalTsImportSpecifiers: true
    });
  }
}
// Command definitions. Each subcommand delegates to one of the run* entry
// points defined above.
var program = new import_commander.Command();
program.name("moose-runner").description("Moose runner for various operations").version("1.0.0");
// Dumps the DMv2 internal registry as JSON.
program.command("dmv2-serializer").description("Load DMv2 index").action(async () => {
  await dumpMooseInternal();
});
// Serializes the exports of a target model module.
program.command("export-serializer").description("Run export serializer").argument("<target-model>", "Target model to serialize").action(async (targetModel) => {
  await runExportSerializer(targetModel);
});
// Starts the consumption API server cluster backed by ClickHouse.
program.command("consumption-apis").description("Run consumption APIs").argument("<clickhouse-db>", "Clickhouse database name").argument("<clickhouse-host>", "Clickhouse host").argument("<clickhouse-port>", "Clickhouse port").argument("<clickhouse-username>", "Clickhouse username").argument("<clickhouse-password>", "Clickhouse password").option("--clickhouse-use-ssl", "Use SSL for Clickhouse connection", false).option("--jwt-secret <secret>", "JWT public key for verification").option("--jwt-issuer <issuer>", "Expected JWT issuer").option("--jwt-audience <audience>", "Expected JWT audience").option(
  "--enforce-auth",
  "Enforce authentication on all consumption APIs",
  false
).option("--temporal-url <url>", "Temporal server URL").option("--temporal-namespace <namespace>", "Temporal namespace").option("--client-cert <path>", "Path to client certificate").option("--client-key <path>", "Path to client key").option("--api-key <key>", "API key for authentication").option("--proxy-port <port>", "Port to run the proxy server on", parseInt).option(
  "--worker-count <count>",
  "Number of worker processes for the consumption API cluster",
  parseInt
).action(
  (clickhouseDb, clickhouseHost, clickhousePort, clickhouseUsername, clickhousePassword, options) => {
    runApis({
      clickhouseConfig: {
        database: clickhouseDb,
        host: clickhouseHost,
        port: clickhousePort,
        username: clickhouseUsername,
        password: clickhousePassword,
        useSSL: options.clickhouseUseSsl
      },
      jwtConfig: {
        secret: options.jwtSecret,
        issuer: options.jwtIssuer,
        audience: options.jwtAudience
      },
      // Temporal is optional; only configured when a URL is supplied.
      temporalConfig: options.temporalUrl ? {
        url: options.temporalUrl,
        namespace: options.temporalNamespace,
        clientCert: options.clientCert,
        clientKey: options.clientKey,
        apiKey: options.apiKey
      } : void 0,
      enforceAuth: options.enforceAuth,
      proxyPort: options.proxyPort,
      workerCount: options.workerCount
    });
  }
);
// Runs a Kafka source->target streaming function worker cluster.
program.command("streaming-functions").description("Run streaming functions").argument("<source-topic>", "Source topic configuration as JSON").argument("<function-file-path>", "Path to the function file").argument(
  "<broker>",
  "Kafka broker address(es) - comma-separated for multiple brokers (e.g., 'broker1:9092, broker2:9092'). Whitespace around commas is automatically trimmed."
).argument("<max-subscriber-count>", "Maximum number of subscribers").option("--target-topic <target-topic>", "Target topic configuration as JSON").option("--sasl-username <username>", "SASL username").option("--sasl-password <password>", "SASL password").option("--sasl-mechanism <mechanism>", "SASL mechanism").option("--security-protocol <protocol>", "Security protocol").option("--log-payloads", "Log payloads for debugging", false).action(
  (sourceTopic, functionFilePath, broker, maxSubscriberCount, options) => {
    const config = {
      // Topic configs arrive as JSON strings on the command line.
      sourceTopic: JSON.parse(sourceTopic),
      targetTopic: options.targetTopic ? JSON.parse(options.targetTopic) : void 0,
      functionFilePath,
      broker,
      maxSubscriberCount: parseInt(maxSubscriberCount),
      logPayloads: options.logPayloads,
      saslUsername: options.saslUsername,
      saslPassword: options.saslPassword,
      saslMechanism: options.saslMechanism,
      securityProtocol: options.securityProtocol
    };
    runStreamingFunctions(config);
  }
);
// Prints the input/output schemas of a consumption API module.
program.command("consumption-type-serializer").description("Run consumption type serializer").argument("<target-model>", "Target model to serialize").action(async (targetModel) => {
  await runApiTypeSerializer(targetModel);
});
// Runs workflow scripts as a Temporal worker.
program.command("scripts").description("Run scripts").option("--temporal-url <url>", "Temporal server URL").option("--temporal-namespace <namespace>", "Temporal namespace").option("--client-cert <path>", "Path to client certificate").option("--client-key <path>", "Path to client key").option("--api-key <key>", "API key for authentication").action((options) => {
  runScripts({
    temporalConfig: options.temporalUrl ? {
      url: options.temporalUrl,
      namespace: options.temporalNamespace,
      clientCert: options.clientCert,
      clientKey: options.clientKey,
      apiKey: options.apiKey
    } : void 0
  });
});
program.parse();
|