@514labs/moose-lib 0.6.321-ci-5-ga23d35fe → 0.6.322-ci-1-gb296906d
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/browserCompatible-BVw4gSAN.d.mts +184 -0
- package/dist/browserCompatible-rK8ei0bt.d.ts +184 -0
- package/dist/browserCompatible.d.mts +3 -7
- package/dist/browserCompatible.d.ts +3 -7
- package/dist/browserCompatible.js +180 -197
- package/dist/browserCompatible.js.map +1 -1
- package/dist/browserCompatible.mjs +180 -196
- package/dist/browserCompatible.mjs.map +1 -1
- package/dist/compilerPlugin.js.map +1 -1
- package/dist/compilerPlugin.mjs.map +1 -1
- package/dist/dataModels/toDataModels.js.map +1 -1
- package/dist/dataModels/toDataModels.mjs.map +1 -1
- package/dist/dmv2/index.d.mts +1 -2
- package/dist/dmv2/index.d.ts +1 -2
- package/dist/dmv2/index.js +24 -41
- package/dist/dmv2/index.js.map +1 -1
- package/dist/dmv2/index.mjs +24 -40
- package/dist/dmv2/index.mjs.map +1 -1
- package/dist/{index-Aq9KzsRd.d.mts → index-C4miZc-A.d.mts} +33 -181
- package/dist/{index-Aq9KzsRd.d.ts → index-C4miZc-A.d.ts} +33 -181
- package/dist/index.d.mts +4 -4
- package/dist/index.d.ts +4 -4
- package/dist/index.js +202 -176
- package/dist/index.js.map +1 -1
- package/dist/index.mjs +199 -176
- package/dist/index.mjs.map +1 -1
- package/dist/moose-runner.js +257 -88
- package/dist/moose-runner.js.map +1 -1
- package/dist/moose-runner.mjs +258 -89
- package/dist/moose-runner.mjs.map +1 -1
- package/dist/scripts/workflow.js +11 -9
- package/dist/scripts/workflow.js.map +1 -1
- package/dist/scripts/workflow.mjs +11 -9
- package/dist/scripts/workflow.mjs.map +1 -1
- package/package.json +1 -1
package/dist/moose-runner.mjs
CHANGED
@@ -37,13 +37,6 @@ var init_dataModelTypes = __esm({
   }
 });
 
-// src/dataModels/types.ts
-var init_types = __esm({
-"src/dataModels/types.ts"() {
-"use strict";
-}
-});
-
 // src/sqlHelpers.ts
 function sql(strings, ...values) {
 return new Sql(strings, values);
@@ -192,6 +185,14 @@ var init_sqlHelpers = __esm({
 }
 });
 
+// src/blocks/helpers.ts
+var init_helpers = __esm({
+"src/blocks/helpers.ts"() {
+"use strict";
+init_sqlHelpers();
+}
+});
+
 // src/dmv2/sdk/olapTable.ts
 import { Readable } from "stream";
 import { createHash } from "crypto";
@@ -200,7 +201,7 @@ var init_olapTable = __esm({
 "use strict";
 init_typedBase();
 init_dataModelTypes();
-
+init_helpers();
 init_internal();
 init_sqlHelpers();
 }
@@ -251,7 +252,7 @@ var init_ingestPipeline = __esm({
 init_stream();
 init_olapTable();
 init_ingestApi();
-
+init_helpers();
 }
 });
 
@@ -267,7 +268,7 @@ var init_etlPipeline = __esm({
 var init_materializedView = __esm({
 "src/dmv2/sdk/materializedView.ts"() {
 "use strict";
-
+init_helpers();
 init_sqlHelpers();
 init_olapTable();
 init_internal();
@@ -324,7 +325,6 @@ var init_dmv2 = __esm({
 "src/dmv2/index.ts"() {
 "use strict";
 init_olapTable();
-init_types();
 init_stream();
 init_workflow();
 init_ingestApi();
@@ -340,6 +340,13 @@ var init_dmv2 = __esm({
 }
 });
 
+// src/dataModels/types.ts
+var init_types = __esm({
+"src/dataModels/types.ts"() {
+"use strict";
+}
+});
+
 // src/browserCompatible.ts
 var init_browserCompatible = __esm({
 "src/browserCompatible.ts"() {
@@ -543,7 +550,7 @@ async function getTemporalClient(temporalUrl, namespace, clientCert, clientKey,
 }
 }
 var MooseClient, QueryClient, WorkflowClient;
-var
+var init_helpers2 = __esm({
 "src/consumption-apis/helpers.ts"() {
 "use strict";
 init_internal();
@@ -666,11 +673,11 @@ var init_helpers = __esm({
 }
 async getWorkflowConfig(name) {
 const workflows = await getWorkflows2();
-const
-if (
+const dmv2Workflow = workflows.get(name);
+if (dmv2Workflow) {
 return {
-retries:
-timeout:
+retries: dmv2Workflow.config.retries || 3,
+timeout: dmv2Workflow.config.timeout || "1h"
 };
 }
 throw new Error(`Workflow config not found for ${name}`);
@@ -916,11 +923,11 @@ var init_cluster_utils = __esm({
 import path from "path";
 import * as toml from "toml";
 async function findConfigFile(startDir = process.cwd()) {
-const
+const fs5 = await import("fs");
 let currentDir = path.resolve(startDir);
 while (true) {
 const configPath = path.join(currentDir, "moose.config.toml");
-if (
+if (fs5.existsSync(configPath)) {
 return configPath;
 }
 const parentDir = path.dirname(currentDir);
@@ -932,7 +939,7 @@ async function findConfigFile(startDir = process.cwd()) {
 return null;
 }
 async function readProjectConfig() {
-const
+const fs5 = await import("fs");
 const configPath = await findConfigFile();
 if (!configPath) {
 throw new ConfigError(
@@ -940,7 +947,7 @@ async function readProjectConfig() {
 );
 }
 try {
-const configContent =
+const configContent = fs5.readFileSync(configPath, "utf-8");
 const config = toml.parse(configContent);
 return config;
 } catch (error) {
@@ -1176,7 +1183,7 @@ var standaloneUtils, initPromise, toClientConfig;
 var init_standalone = __esm({
 "src/consumption-apis/standalone.ts"() {
 "use strict";
-
+init_helpers2();
 init_commons();
 init_sqlHelpers();
 standaloneUtils = null;
@@ -1191,12 +1198,12 @@ var init_standalone = __esm({
 // src/consumption-apis/runner.ts
 import http2 from "http";
 import * as jose from "jose";
-var toClientConfig2, httpLogger, modulesCache, apiHandler, createMainRouter, runApis;
+var toClientConfig2, createPath, httpLogger, modulesCache, apiHandler, createMainRouter, runApis;
 var init_runner = __esm({
 "src/consumption-apis/runner.ts"() {
 "use strict";
 init_commons();
-
+init_helpers2();
 init_cluster_utils();
 init_sqlHelpers();
 init_internal();
@@ -1204,14 +1211,15 @@ var init_runner = __esm({
 ...config,
 useSSL: config.useSSL ? "true" : "false"
 });
+createPath = (apisDir, path5) => `${apisDir}${path5}.ts`;
 httpLogger = (req, res, startMs) => {
 console.log(
 `${req.method} ${req.url} ${res.statusCode} ${Date.now() - startMs}ms`
 );
 };
 modulesCache = /* @__PURE__ */ new Map();
-apiHandler = async (publicKey, clickhouseClient, temporalClient, enforceAuth, jwtConfig) => {
-const apis = await getApis2();
+apiHandler = async (publicKey, clickhouseClient, temporalClient, apisDir, enforceAuth, isDmv2, jwtConfig) => {
+const apis = isDmv2 ? await getApis2() : /* @__PURE__ */ new Map();
 return async (req, res) => {
 const start = Date.now();
 try {
@@ -1248,6 +1256,7 @@ var init_runner = __esm({
 httpLogger(req, res, start);
 return;
 }
+const pathName = createPath(apisDir, fileName);
 const paramsObject = Array.from(url.searchParams.entries()).reduce(
 (obj, [key, value]) => {
 const existingValue = obj[key];
@@ -1264,44 +1273,53 @@ var init_runner = __esm({
 },
 {}
 );
-let userFuncModule = modulesCache.get(
+let userFuncModule = modulesCache.get(pathName);
 if (userFuncModule === void 0) {
-
-
-
-
-
-
-
-
-
-
-
-
+if (isDmv2) {
+let apiName = fileName.replace(/^\/+|\/+$/g, "");
+let version = null;
+userFuncModule = apis.get(apiName);
+if (!userFuncModule) {
+version = url.searchParams.get("version");
+if (!version && apiName.includes("/")) {
+const pathParts = apiName.split("/");
+if (pathParts.length >= 2) {
+userFuncModule = apis.get(apiName);
+if (!userFuncModule) {
+apiName = pathParts[0];
+version = pathParts.slice(1).join("/");
+}
+}
+}
+if (!userFuncModule) {
+if (version) {
+const versionedKey = `${apiName}:${version}`;
+userFuncModule = apis.get(versionedKey);
+} else {
+userFuncModule = apis.get(apiName);
 }
 }
 }
 if (!userFuncModule) {
-
-
-
-}
-
-}
+const availableApis = Array.from(apis.keys()).map(
+(key) => key.replace(":", "/")
+);
+const errorMessage = version ? `API ${apiName} with version ${version} not found. Available APIs: ${availableApis.join(", ")}` : `API ${apiName} not found. Available APIs: ${availableApis.join(", ")}`;
+throw new Error(errorMessage);
 }
+modulesCache.set(pathName, userFuncModule);
+console.log(`[API] | Executing API: ${apiName}`);
+} else {
+userFuncModule = __require(pathName);
+modulesCache.set(pathName, userFuncModule);
 }
-if (!userFuncModule) {
-const availableApis = Array.from(apis.keys()).map(
-(key) => key.replace(":", "/")
-);
-const errorMessage = version ? `API ${apiName} with version ${version} not found. Available APIs: ${availableApis.join(", ")}` : `API ${apiName} not found. Available APIs: ${availableApis.join(", ")}`;
-throw new Error(errorMessage);
-}
-modulesCache.set(fileName, userFuncModule);
-console.log(`[API] | Executing API: ${apiName}`);
 }
 const queryClient = new QueryClient(clickhouseClient, fileName);
-let result = await userFuncModule(paramsObject, {
+let result = isDmv2 ? await userFuncModule(paramsObject, {
+client: new MooseClient(queryClient, temporalClient),
+sql,
+jwt: jwtPayload
+}) : await userFuncModule.default(paramsObject, {
 client: new MooseClient(queryClient, temporalClient),
 sql,
 jwt: jwtPayload
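
The hunk above changes how the DMv2 consumption API handler resolves a handler from the registered `apis` map. The following is a minimal TypeScript sketch of that lookup order, assuming the same key scheme (`name` or `name:version`) used by `getApis2()`; the `resolveApi` helper and its types are illustrative only and are not part of moose-lib:

// Illustrative only: mirrors the lookup order in the hunk above, not an exported API.
type ApiFn = (params: Record<string, unknown>, ctx: unknown) => Promise<unknown>;

function resolveApi(
  apis: Map<string, ApiFn>,
  rawPath: string,             // e.g. "/bar" or "/bar/1.2"
  queryVersion: string | null, // e.g. url.searchParams.get("version")
): ApiFn | undefined {
  let apiName = rawPath.replace(/^\/+|\/+$/g, "");
  // 1. Exact match on the full (possibly slash-containing) name.
  const direct = apis.get(apiName);
  if (direct) return direct;
  // 2. Take the version from ?version=..., or treat trailing path segments as the version.
  let version = queryVersion;
  if (!version && apiName.includes("/")) {
    const parts = apiName.split("/");
    apiName = parts[0];
    version = parts.slice(1).join("/");
  }
  // 3. Try the "name:version" key, otherwise fall back to the bare name.
  return version ? apis.get(`${apiName}:${version}`) : apis.get(apiName);
}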
@@ -1345,15 +1363,17 @@ var init_runner = __esm({
 }
 };
 };
-createMainRouter = async (publicKey, clickhouseClient, temporalClient, enforceAuth, jwtConfig) => {
+createMainRouter = async (publicKey, clickhouseClient, temporalClient, apisDir, enforceAuth, isDmv2, jwtConfig) => {
 const apiRequestHandler = await apiHandler(
 publicKey,
 clickhouseClient,
 temporalClient,
+apisDir,
 enforceAuth,
+isDmv2,
 jwtConfig
 );
-const webApps = await getWebApps2();
+const webApps = isDmv2 ? await getWebApps2() : /* @__PURE__ */ new Map();
 const sortedWebApps = Array.from(webApps.values()).sort((a, b) => {
 const pathA = a.config.mountPath || "/";
 const pathB = b.config.mountPath || "/";
@@ -1475,7 +1495,9 @@ var init_runner = __esm({
 publicKey,
 clickhouseClient,
 temporalClient,
+config.apisDir,
 config.enforceAuth,
+config.isDmv2,
 config.jwtConfig
 )
 );
@@ -1673,14 +1695,15 @@ var init_index = __esm({
 "src/index.ts"() {
 "use strict";
 init_browserCompatible();
+init_helpers();
 init_commons();
 init_secrets();
-
+init_helpers2();
 init_webAppHelpers();
 init_task();
 init_runner();
 init_redisClient();
-
+init_helpers2();
 init_standalone();
 init_sqlHelpers();
 init_utilities();
@@ -2395,9 +2418,120 @@ var init_internal = __esm({
 
 // src/moose-runner.ts
 init_internal();
-init_runner();
 import { register } from "ts-node";
 
+// src/blocks/runner.ts
+init_commons();
+import fastq from "fastq";
+import fs3 from "fs";
+import path3 from "path";
+var walkDir = (dir, fileExtension, fileList) => {
+const files = fs3.readdirSync(dir);
+files.forEach((file) => {
+if (fs3.statSync(path3.join(dir, file)).isDirectory()) {
+fileList = walkDir(path3.join(dir, file), fileExtension, fileList);
+} else if (file.endsWith(fileExtension)) {
+fileList.push(path3.join(dir, file));
+}
+});
+return fileList;
+};
+var DependencyError = class extends Error {
+constructor(message) {
+super(message);
+this.name = "DependencyError";
+}
+};
+var toClientConfig3 = (config) => ({
+...config,
+useSSL: config.useSSL ? "true" : "false"
+});
+var createBlocks = async (chClient, blocks) => {
+for (const query of blocks.setup) {
+try {
+console.log(`Creating block using query ${query}`);
+await chClient.command({
+query,
+clickhouse_settings: {
+wait_end_of_query: 1
+// Ensure at least once delivery and DDL acknowledgment
+}
+});
+} catch (err) {
+cliLog({
+action: "Blocks",
+message: `Failed to create blocks: ${err}`,
+message_type: "Error"
+});
+if (err && JSON.stringify(err).includes(`UNKNOWN_TABLE`)) {
+throw new DependencyError(err.toString());
+}
+}
+}
+};
+var deleteBlocks = async (chClient, blocks) => {
+for (const query of blocks.teardown) {
+try {
+console.log(`Deleting block using query ${query}`);
+await chClient.command({
+query,
+clickhouse_settings: {
+wait_end_of_query: 1
+// Ensure at least once delivery and DDL acknowledgment
+}
+});
+} catch (err) {
+cliLog({
+action: "Blocks",
+message: `Failed to delete blocks: ${err}`,
+message_type: "Error"
+});
+}
+}
+};
+var asyncWorker = async (task) => {
+await deleteBlocks(task.chClient, task.blocks);
+await createBlocks(task.chClient, task.blocks);
+};
+var runBlocks = async (config) => {
+const chClient = getClickhouseClient(toClientConfig3(config.clickhouseConfig));
+console.log(`Connected`);
+const blocksFiles = walkDir(config.blocksDir, ".ts", []);
+const numOfBlockFiles = blocksFiles.length;
+console.log(`Found ${numOfBlockFiles} blocks files`);
+const queue = fastq.promise(asyncWorker, 1);
+queue.error((err, task) => {
+if (err && task.retries > 0) {
+if (err instanceof DependencyError) {
+queue.push({ ...task, retries: task.retries - 1 });
+}
+}
+});
+for (const path5 of blocksFiles) {
+console.log(`Adding to queue: ${path5}`);
+try {
+const blocks = __require(path5).default;
+queue.push({
+chClient,
+blocks,
+retries: numOfBlockFiles
+});
+} catch (err) {
+cliLog({
+action: "Blocks",
+message: `Failed to import blocks from ${path5}: ${err}`,
+message_type: "Error"
+});
+}
+}
+while (!queue.idle()) {
+await new Promise((resolve3) => setTimeout(resolve3, 1e3));
+}
+};
+
+// src/moose-runner.ts
+init_runner();
+
 // src/streaming-functions/runner.ts
 init_commons();
 init_cluster_utils();
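
The blocks runner reintroduced above walks `config.blocksDir` for `.ts` files and requires each file's default export, iterating its `setup` queries in `createBlocks` and its `teardown` queries in `deleteBlocks`. A hedged sketch of such a file follows; the `Blocks` interface name and the sample ClickHouse DDL are assumptions for illustration, not taken from this diff:

// Illustrative shape of a file consumed by runBlocks/createBlocks/deleteBlocks above.
interface Blocks {
  setup: string[];    // statements run by createBlocks
  teardown: string[]; // statements run first by deleteBlocks
}

const dailyTotals: Blocks = {
  setup: [
    `CREATE MATERIALIZED VIEW IF NOT EXISTS daily_totals
       ENGINE = MergeTree() ORDER BY day
       AS SELECT toDate(timestamp) AS day, count() AS total
       FROM events GROUP BY day`,
  ],
  teardown: [`DROP VIEW IF EXISTS daily_totals`],
};

export default dailyTotals;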
@@ -2680,7 +2814,17 @@ var sendMessageMetrics = (logger2, metrics) => {
 metrics.count_out = 0;
 setTimeout(() => sendMessageMetrics(logger2, metrics), 1e3);
 };
-
+function loadStreamingFunction(functionFilePath) {
+let streamingFunctionImport;
+try {
+streamingFunctionImport = __require(functionFilePath.substring(0, functionFilePath.length - 3));
+} catch (e) {
+cliLog({ action: "Function", message: `${e}`, message_type: "Error" });
+throw e;
+}
+return streamingFunctionImport.default;
+}
+async function loadStreamingFunctionV2(sourceTopic, targetTopic) {
 const transformFunctions = await getStreamingFunctions();
 const transformFunctionKey = `${topicNameToStreamName(sourceTopic)}_${targetTopic ? topicNameToStreamName(targetTopic) : "<no-target>"}`;
 const matchingEntries = Array.from(transformFunctions.entries()).filter(
@@ -2725,12 +2869,17 @@ var startConsumer = async (args, logger2, metrics, _parallelism, consumer, produ
 );
 let streamingFunctions;
 let fieldMutations;
-
-
-
-
-
-
+if (args.isDmv2) {
+const result = await loadStreamingFunctionV2(
+args.sourceTopic,
+args.targetTopic
+);
+streamingFunctions = result.functions;
+fieldMutations = result.fieldMutations;
+} else {
+streamingFunctions = [[loadStreamingFunction(args.functionFilePath), {}]];
+fieldMutations = void 0;
+}
 await consumer.subscribe({
 topics: [args.sourceTopic.name]
 // Use full topic name for Kafka operations
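
In the non-DMv2 branch above, `loadStreamingFunction` requires the file at `args.functionFilePath` and wraps its default export as the single transform for the source topic. A rough sketch of that default-export shape; the event types here are invented for illustration and are not defined by this package:

// Illustrative default export loaded by loadStreamingFunction for the non-DMv2 path.
interface RawEvent { id: string; timestamp: string }
interface EnrichedEvent extends RawEvent { processedAt: string }

export default function transform(event: RawEvent): EnrichedEvent {
  // The runner uses this function as the sole streaming transform for the source topic.
  return { ...event, processedAt: new Date().toISOString() };
}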
@@ -2969,8 +3118,8 @@ import {
 Worker,
 bundleWorkflowCode
 } from "@temporalio/worker";
-import * as
-import * as
+import * as path4 from "path";
+import * as fs4 from "fs";
 
 // src/scripts/activity.ts
 init_internal();
@@ -2978,7 +3127,7 @@ init_json();
 import { log as logger, Context } from "@temporalio/activity";
 import { isCancellation } from "@temporalio/workflow";
 var activities = {
-async
+async hasDmv2Workflow(name) {
 try {
 const workflows = await getWorkflows2();
 const hasWorkflow = workflows.has(name);
@@ -2989,7 +3138,7 @@ var activities = {
 return false;
 }
 },
-async
+async getDmv2Workflow(name) {
 try {
 logger.info(`Getting workflow ${name}`);
 const workflows = await getWorkflows2();
@@ -3034,7 +3183,7 @@ var activities = {
 throw new Error(errorMsg);
 }
 },
-async
+async executeDmv2Task(workflow, task, inputData) {
 const context = Context.current();
 const taskState = {};
 let heartbeatInterval = null;
@@ -3095,7 +3244,7 @@ var activities = {
 };
 function createActivityForScript(scriptName) {
 return {
-[scriptName]: activities.
+[scriptName]: activities.executeDmv2Task
 };
 }
 
@@ -3137,12 +3286,12 @@ var initializeLogger = LoggerSingleton.initializeLogger;
 
 // src/scripts/runner.ts
 var ALREADY_REGISTERED = /* @__PURE__ */ new Set();
-function
-logger2.info(`<
+function collectActivitiesDmv2(logger2, workflows) {
+logger2.info(`<DMV2WF> Collecting tasks from dmv2 workflows`);
 const scriptNames = [];
 for (const [name, workflow] of workflows.entries()) {
 logger2.info(
-`<
+`<DMV2WF> Registering dmv2 workflow: ${name} with starting task: ${workflow.config.startingTask.name}`
 );
 scriptNames.push(`${name}/${workflow.config.startingTask.name}`);
 }
@@ -3157,8 +3306,8 @@ async function createTemporalConnection(logger2, temporalConfig) {
 };
 if (temporalConfig.clientCert && temporalConfig.clientKey) {
 logger2.info("Using TLS for secure Temporal");
-const cert = await
-const key = await
+const cert = await fs4.readFileSync(temporalConfig.clientCert);
+const key = await fs4.readFileSync(temporalConfig.clientKey);
 connectionOptions.tls = {
 clientCertPair: {
 crt: cert,
@@ -3205,27 +3354,29 @@ async function registerWorkflows(logger2, config) {
 try {
 const workflows = await getWorkflows2();
 if (workflows.size > 0) {
-logger2.info(`<
-allScriptPaths.push(...
+logger2.info(`<DMV2WF> Found ${workflows.size} dmv2 workflows`);
+allScriptPaths.push(...collectActivitiesDmv2(logger2, workflows));
 if (allScriptPaths.length === 0) {
-logger2.info(`<
+logger2.info(`<DMV2WF> No tasks found in dmv2 workflows`);
 return null;
 }
-logger2.info(
+logger2.info(
+`<DMV2WF> Found ${allScriptPaths.length} tasks in dmv2 workflows`
+);
 for (const activityName of allScriptPaths) {
 if (!ALREADY_REGISTERED.has(activityName)) {
 const activity = await createActivityForScript(activityName);
 dynamicActivities.push(activity);
 ALREADY_REGISTERED.add(activityName);
-logger2.info(`<
+logger2.info(`<DMV2WF> Registered task ${activityName}`);
 }
 }
 if (dynamicActivities.length === 0) {
-logger2.info(`<
+logger2.info(`<DMV2WF> No dynamic activities found in dmv2 workflows`);
 return null;
 }
 logger2.info(
-`<
+`<DMV2WF> Found ${dynamicActivities.length} dynamic activities in dmv2 workflows`
 );
 }
 if (allScriptPaths.length === 0) {
@@ -3263,7 +3414,7 @@ async function registerWorkflows(logger2, config) {
 }
 };
 const workflowBundle = await bundleWorkflowCode({
-workflowsPath:
+workflowsPath: path4.resolve(__dirname, "scripts/workflow.js"),
 logger: silentLogger
 });
 const worker = await Worker.create({
@@ -3378,17 +3529,33 @@ program.command("dmv2-serializer").description("Load DMv2 index").action(() => {
 program.command("export-serializer").description("Run export serializer").argument("<target-model>", "Target model to serialize").action((targetModel) => {
 runExportSerializer(targetModel);
 });
-program.command("
+program.command("blocks").description("Run blocks").argument("<blocks-dir>", "Directory containing blocks").argument("<clickhouse-db>", "Clickhouse database name").argument("<clickhouse-host>", "Clickhouse host").argument("<clickhouse-port>", "Clickhouse port").argument("<clickhouse-username>", "Clickhouse username").argument("<clickhouse-password>", "Clickhouse password").option("--clickhouse-use-ssl", "Use SSL for Clickhouse connection", false).action(
+(blocksDir, clickhouseDb, clickhouseHost, clickhousePort, clickhouseUsername, clickhousePassword, options) => {
+runBlocks({
+blocksDir,
+clickhouseConfig: {
+database: clickhouseDb,
+host: clickhouseHost,
+port: clickhousePort,
+username: clickhouseUsername,
+password: clickhousePassword,
+useSSL: options.clickhouseUseSsl
+}
+});
+}
+);
+program.command("consumption-apis").description("Run consumption APIs").argument("<consumption-dir>", "Directory containing consumption APIs").argument("<clickhouse-db>", "Clickhouse database name").argument("<clickhouse-host>", "Clickhouse host").argument("<clickhouse-port>", "Clickhouse port").argument("<clickhouse-username>", "Clickhouse username").argument("<clickhouse-password>", "Clickhouse password").option("--clickhouse-use-ssl", "Use SSL for Clickhouse connection", false).option("--jwt-secret <secret>", "JWT public key for verification").option("--jwt-issuer <issuer>", "Expected JWT issuer").option("--jwt-audience <audience>", "Expected JWT audience").option(
 "--enforce-auth",
 "Enforce authentication on all consumption APIs",
 false
-).option("--temporal-url <url>", "Temporal server URL").option("--temporal-namespace <namespace>", "Temporal namespace").option("--client-cert <path>", "Path to client certificate").option("--client-key <path>", "Path to client key").option("--api-key <key>", "API key for authentication").option("--proxy-port <port>", "Port to run the proxy server on", parseInt).option(
+).option("--temporal-url <url>", "Temporal server URL").option("--temporal-namespace <namespace>", "Temporal namespace").option("--client-cert <path>", "Path to client certificate").option("--client-key <path>", "Path to client key").option("--api-key <key>", "API key for authentication").option("--is-dmv2", "Whether this is a DMv2 consumption", false).option("--proxy-port <port>", "Port to run the proxy server on", parseInt).option(
 "--worker-count <count>",
 "Number of worker processes for the consumption API cluster",
 parseInt
 ).action(
-(clickhouseDb, clickhouseHost, clickhousePort, clickhouseUsername, clickhousePassword, options) => {
+(apisDir, clickhouseDb, clickhouseHost, clickhousePort, clickhouseUsername, clickhousePassword, options) => {
 runApis({
+apisDir,
 clickhouseConfig: {
 database: clickhouseDb,
 host: clickhouseHost,
@@ -3410,6 +3577,7 @@ program.command("consumption-apis").description("Run consumption APIs").argument
 apiKey: options.apiKey
 },
 enforceAuth: options.enforceAuth,
+isDmv2: options.isDmv2,
 proxyPort: options.proxyPort,
 workerCount: options.workerCount
 });
@@ -3418,7 +3586,7 @@ program.command("consumption-apis").description("Run consumption APIs").argument
 program.command("streaming-functions").description("Run streaming functions").argument("<source-topic>", "Source topic configuration as JSON").argument("<function-file-path>", "Path to the function file").argument(
 "<broker>",
 "Kafka broker address(es) - comma-separated for multiple brokers (e.g., 'broker1:9092, broker2:9092'). Whitespace around commas is automatically trimmed."
-).argument("<max-subscriber-count>", "Maximum number of subscribers").option("--target-topic <target-topic>", "Target topic configuration as JSON").option("--sasl-username <username>", "SASL username").option("--sasl-password <password>", "SASL password").option("--sasl-mechanism <mechanism>", "SASL mechanism").option("--security-protocol <protocol>", "Security protocol").option("--log-payloads", "Log payloads for debugging", false).action(
+).argument("<max-subscriber-count>", "Maximum number of subscribers").option("--target-topic <target-topic>", "Target topic configuration as JSON").option("--sasl-username <username>", "SASL username").option("--sasl-password <password>", "SASL password").option("--sasl-mechanism <mechanism>", "SASL mechanism").option("--security-protocol <protocol>", "Security protocol").option("--is-dmv2", "Whether this is a DMv2 function", false).option("--log-payloads", "Log payloads for debugging", false).action(
 (sourceTopic, functionFilePath, broker, maxSubscriberCount, options) => {
 const config = {
 sourceTopic: JSON.parse(sourceTopic),
@@ -3426,6 +3594,7 @@ program.command("streaming-functions").description("Run streaming functions").ar
|
|
|
3426
3594
|
functionFilePath,
|
|
3427
3595
|
broker,
|
|
3428
3596
|
maxSubscriberCount: parseInt(maxSubscriberCount),
|
|
3597
|
+
isDmv2: options.isDmv2,
|
|
3429
3598
|
logPayloads: options.logPayloads,
|
|
3430
3599
|
saslUsername: options.saslUsername,
|
|
3431
3600
|
saslPassword: options.saslPassword,
|