@forzalabs/remora 1.1.13 → 1.1.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/index.js +61 -29
- package/package.json +1 -1
- package/workers/ExecutorWorker.js +61 -29
package/index.js
CHANGED
|
@@ -13500,7 +13500,7 @@ var import_promises = __toESM(require("fs/promises"), 1);
|
|
|
13500
13500
|
|
|
13501
13501
|
// ../../packages/constants/src/Constants.ts
|
|
13502
13502
|
var CONSTANTS = {
|
|
13503
|
-
cliVersion: "1.1.13",
|
|
13503
|
+
cliVersion: "1.1.14",
|
|
13504
13504
|
backendVersion: 1,
|
|
13505
13505
|
backendPort: 5088,
|
|
13506
13506
|
workerVersion: 2,
|
|
@@ -18911,18 +18911,24 @@ var ConsumerExecutorClass = class {
|
|
|
18911
18911
|
for (const field of fields) {
|
|
18912
18912
|
const { cField } = field;
|
|
18913
18913
|
const fieldKey = cField.alias ?? cField.key;
|
|
18914
|
-
|
|
18915
|
-
|
|
18916
|
-
if (
|
|
18917
|
-
|
|
18918
|
-
|
|
18919
|
-
|
|
18920
|
-
|
|
18921
|
-
|
|
18922
|
-
|
|
18923
|
-
|
|
18924
|
-
|
|
18925
|
-
|
|
18914
|
+
try {
|
|
18915
|
+
const dimension = dimensions.find((x) => x.name === cField.key);
|
|
18916
|
+
if (!dimension) {
|
|
18917
|
+
if (cField.fixed && Algo_default.hasVal(cField.default))
|
|
18918
|
+
record[fieldKey] = cField.default;
|
|
18919
|
+
else if (cField.copyFrom)
|
|
18920
|
+
record[fieldKey] = record[cField.copyFrom];
|
|
18921
|
+
else
|
|
18922
|
+
throw new Error(`The requested field "${cField.key}" from the consumer is not present in the underlying producer "${producer.name}" (${dimensions.map((x) => x.name).join(", ")})`);
|
|
18923
|
+
}
|
|
18924
|
+
if (cField.alias && cField.alias !== dimension.name) {
|
|
18925
|
+
record[cField.alias] = record[dimension.name];
|
|
18926
|
+
delete record[dimension.name];
|
|
18927
|
+
}
|
|
18928
|
+
} catch (error) {
|
|
18929
|
+
const err = new Error(`Field mapping failed for field "${fieldKey}" of producer "${producer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18930
|
+
Logger_default.error(err);
|
|
18931
|
+
throw err;
|
|
18926
18932
|
}
|
|
18927
18933
|
}
|
|
18928
18934
|
for (const field of fields) {
|
|
@@ -18941,28 +18947,51 @@ var ConsumerExecutorClass = class {
|
|
|
18941
18947
|
case "skip":
|
|
18942
18948
|
continue;
|
|
18943
18949
|
case "fail":
|
|
18944
|
-
default:
|
|
18945
|
-
|
|
18950
|
+
default: {
|
|
18951
|
+
const err = new Error(errorMessage, { cause: error });
|
|
18952
|
+
Logger_default.error(err);
|
|
18953
|
+
throw err;
|
|
18954
|
+
}
|
|
18946
18955
|
}
|
|
18947
18956
|
} else {
|
|
18948
|
-
|
|
18957
|
+
const err = new Error(errorMessage, { cause: error });
|
|
18958
|
+
Logger_default.error(err);
|
|
18959
|
+
throw err;
|
|
18949
18960
|
}
|
|
18950
18961
|
}
|
|
18951
18962
|
}
|
|
18952
|
-
|
|
18953
|
-
const
|
|
18954
|
-
|
|
18955
|
-
|
|
18963
|
+
try {
|
|
18964
|
+
for (const dimension of dimensions) {
|
|
18965
|
+
const field = fields.find((x) => x.cField.key === dimension.name);
|
|
18966
|
+
if (!field)
|
|
18967
|
+
delete record[dimension.name];
|
|
18968
|
+
}
|
|
18969
|
+
} catch (error) {
|
|
18970
|
+
const err = new Error(`Removing unmapped dimensions failed for producer "${producer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18971
|
+
Logger_default.error(err);
|
|
18972
|
+
throw err;
|
|
18956
18973
|
}
|
|
18957
|
-
|
|
18958
|
-
|
|
18959
|
-
|
|
18960
|
-
|
|
18974
|
+
try {
|
|
18975
|
+
if (consumer.filters && consumer.filters.length > 0) {
|
|
18976
|
+
const isKept = consumer.filters.every((x) => RequestExecutor_default.evaluateFilter(record, x.rule));
|
|
18977
|
+
if (!isKept)
|
|
18978
|
+
return null;
|
|
18979
|
+
}
|
|
18980
|
+
} catch (error) {
|
|
18981
|
+
const err = new Error(`Consumer filter evaluation failed for consumer "${consumer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18982
|
+
Logger_default.error(err);
|
|
18983
|
+
throw err;
|
|
18961
18984
|
}
|
|
18962
|
-
|
|
18963
|
-
|
|
18964
|
-
|
|
18965
|
-
|
|
18985
|
+
try {
|
|
18986
|
+
if (requestOptions && requestOptions.filters) {
|
|
18987
|
+
const isKept = requestOptions.filters.every((x) => RequestExecutor_default.evaluateFilter(record, x));
|
|
18988
|
+
if (!isKept)
|
|
18989
|
+
return null;
|
|
18990
|
+
}
|
|
18991
|
+
} catch (error) {
|
|
18992
|
+
const err = new Error(`Request filter evaluation failed for consumer "${consumer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18993
|
+
Logger_default.error(err);
|
|
18994
|
+
throw err;
|
|
18966
18995
|
}
|
|
18967
18996
|
return record;
|
|
18968
18997
|
};
|
|
@@ -19479,6 +19508,9 @@ var ExecutorOrchestratorClass = class {
|
|
|
19479
19508
|
Logger_default.log(`[${usageId}] Spawning worker ${workerId} for producer "${prod.name}" \u2014 chunk ${chunk.start}-${chunk.end} (${Math.round((chunk.end - chunk.start) / 1024)}KB)`);
|
|
19480
19509
|
workerThreads.push(pool.exec("executor", [workerData], {
|
|
19481
19510
|
on: (payload) => this.onWorkAdvanced(payload, currentWorkerIndex, _progress)
|
|
19511
|
+
}).catch((error) => {
|
|
19512
|
+
Logger_default.error(error);
|
|
19513
|
+
return null;
|
|
19482
19514
|
}));
|
|
19483
19515
|
}
|
|
19484
19516
|
Logger_default.log(`[${usageId}] Waiting for ${workerThreads.length} worker(s) to complete`);
|
|
@@ -19544,7 +19576,7 @@ var ExecutorOrchestratorClass = class {
|
|
|
19544
19576
|
return finalResult;
|
|
19545
19577
|
} catch (error) {
|
|
19546
19578
|
Logger_default.log(`[${usageId}] Consumer "${consumer.name}" failed: ${Helper_default.asError(error).message}`);
|
|
19547
|
-
Logger_default.error(
|
|
19579
|
+
Logger_default.error(error);
|
|
19548
19580
|
await pool.terminate();
|
|
19549
19581
|
await ConsumerOnFinishManager_default.onConsumerError(consumer, usageId);
|
|
19550
19582
|
Logger_default.log(`[${usageId}] Running cleanup after failure`);
|
package/workers/ExecutorWorker.js
CHANGED
|
@@ -13494,7 +13494,7 @@ var import_promises = __toESM(require("fs/promises"), 1);
|
|
|
13494
13494
|
|
|
13495
13495
|
// ../../packages/constants/src/Constants.ts
|
|
13496
13496
|
var CONSTANTS = {
|
|
13497
|
-
cliVersion: "1.1.13",
|
|
13497
|
+
cliVersion: "1.1.14",
|
|
13498
13498
|
backendVersion: 1,
|
|
13499
13499
|
backendPort: 5088,
|
|
13500
13500
|
workerVersion: 2,
|
|
@@ -18510,18 +18510,24 @@ var ConsumerExecutorClass = class {
|
|
|
18510
18510
|
for (const field of fields) {
|
|
18511
18511
|
const { cField } = field;
|
|
18512
18512
|
const fieldKey = cField.alias ?? cField.key;
|
|
18513
|
-
|
|
18514
|
-
|
|
18515
|
-
if (
|
|
18516
|
-
|
|
18517
|
-
|
|
18518
|
-
|
|
18519
|
-
|
|
18520
|
-
|
|
18521
|
-
|
|
18522
|
-
|
|
18523
|
-
|
|
18524
|
-
|
|
18513
|
+
try {
|
|
18514
|
+
const dimension = dimensions.find((x) => x.name === cField.key);
|
|
18515
|
+
if (!dimension) {
|
|
18516
|
+
if (cField.fixed && Algo_default.hasVal(cField.default))
|
|
18517
|
+
record[fieldKey] = cField.default;
|
|
18518
|
+
else if (cField.copyFrom)
|
|
18519
|
+
record[fieldKey] = record[cField.copyFrom];
|
|
18520
|
+
else
|
|
18521
|
+
throw new Error(`The requested field "${cField.key}" from the consumer is not present in the underlying producer "${producer.name}" (${dimensions.map((x) => x.name).join(", ")})`);
|
|
18522
|
+
}
|
|
18523
|
+
if (cField.alias && cField.alias !== dimension.name) {
|
|
18524
|
+
record[cField.alias] = record[dimension.name];
|
|
18525
|
+
delete record[dimension.name];
|
|
18526
|
+
}
|
|
18527
|
+
} catch (error) {
|
|
18528
|
+
const err = new Error(`Field mapping failed for field "${fieldKey}" of producer "${producer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18529
|
+
Logger_default.error(err);
|
|
18530
|
+
throw err;
|
|
18525
18531
|
}
|
|
18526
18532
|
}
|
|
18527
18533
|
for (const field of fields) {
|
|
@@ -18540,28 +18546,51 @@ var ConsumerExecutorClass = class {
|
|
|
18540
18546
|
case "skip":
|
|
18541
18547
|
continue;
|
|
18542
18548
|
case "fail":
|
|
18543
|
-
default:
|
|
18544
|
-
|
|
18549
|
+
default: {
|
|
18550
|
+
const err = new Error(errorMessage, { cause: error });
|
|
18551
|
+
Logger_default.error(err);
|
|
18552
|
+
throw err;
|
|
18553
|
+
}
|
|
18545
18554
|
}
|
|
18546
18555
|
} else {
|
|
18547
|
-
|
|
18556
|
+
const err = new Error(errorMessage, { cause: error });
|
|
18557
|
+
Logger_default.error(err);
|
|
18558
|
+
throw err;
|
|
18548
18559
|
}
|
|
18549
18560
|
}
|
|
18550
18561
|
}
|
|
18551
|
-
|
|
18552
|
-
const
|
|
18553
|
-
|
|
18554
|
-
|
|
18562
|
+
try {
|
|
18563
|
+
for (const dimension of dimensions) {
|
|
18564
|
+
const field = fields.find((x) => x.cField.key === dimension.name);
|
|
18565
|
+
if (!field)
|
|
18566
|
+
delete record[dimension.name];
|
|
18567
|
+
}
|
|
18568
|
+
} catch (error) {
|
|
18569
|
+
const err = new Error(`Removing unmapped dimensions failed for producer "${producer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18570
|
+
Logger_default.error(err);
|
|
18571
|
+
throw err;
|
|
18555
18572
|
}
|
|
18556
|
-
|
|
18557
|
-
|
|
18558
|
-
|
|
18559
|
-
|
|
18573
|
+
try {
|
|
18574
|
+
if (consumer.filters && consumer.filters.length > 0) {
|
|
18575
|
+
const isKept = consumer.filters.every((x) => RequestExecutor_default.evaluateFilter(record, x.rule));
|
|
18576
|
+
if (!isKept)
|
|
18577
|
+
return null;
|
|
18578
|
+
}
|
|
18579
|
+
} catch (error) {
|
|
18580
|
+
const err = new Error(`Consumer filter evaluation failed for consumer "${consumer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18581
|
+
Logger_default.error(err);
|
|
18582
|
+
throw err;
|
|
18560
18583
|
}
|
|
18561
|
-
|
|
18562
|
-
|
|
18563
|
-
|
|
18564
|
-
|
|
18584
|
+
try {
|
|
18585
|
+
if (requestOptions && requestOptions.filters) {
|
|
18586
|
+
const isKept = requestOptions.filters.every((x) => RequestExecutor_default.evaluateFilter(record, x));
|
|
18587
|
+
if (!isKept)
|
|
18588
|
+
return null;
|
|
18589
|
+
}
|
|
18590
|
+
} catch (error) {
|
|
18591
|
+
const err = new Error(`Request filter evaluation failed for consumer "${consumer.name}" (index: ${recordIndex}): ${error.message}`, { cause: error });
|
|
18592
|
+
Logger_default.error(err);
|
|
18593
|
+
throw err;
|
|
18565
18594
|
}
|
|
18566
18595
|
return record;
|
|
18567
18596
|
};
|
|
@@ -19238,6 +19267,9 @@ var ExecutorOrchestratorClass = class {
|
|
|
19238
19267
|
Logger_default.log(`[${usageId}] Spawning worker ${workerId} for producer "${prod.name}" \u2014 chunk ${chunk.start}-${chunk.end} (${Math.round((chunk.end - chunk.start) / 1024)}KB)`);
|
|
19239
19268
|
workerThreads.push(pool.exec("executor", [workerData], {
|
|
19240
19269
|
on: (payload) => this.onWorkAdvanced(payload, currentWorkerIndex, _progress)
|
|
19270
|
+
}).catch((error) => {
|
|
19271
|
+
Logger_default.error(error);
|
|
19272
|
+
return null;
|
|
19241
19273
|
}));
|
|
19242
19274
|
}
|
|
19243
19275
|
Logger_default.log(`[${usageId}] Waiting for ${workerThreads.length} worker(s) to complete`);
|
|
@@ -19303,7 +19335,7 @@ var ExecutorOrchestratorClass = class {
|
|
|
19303
19335
|
return finalResult;
|
|
19304
19336
|
} catch (error) {
|
|
19305
19337
|
Logger_default.log(`[${usageId}] Consumer "${consumer.name}" failed: ${Helper_default.asError(error).message}`);
|
|
19306
|
-
Logger_default.error(
|
|
19338
|
+
Logger_default.error(error);
|
|
19307
19339
|
await pool.terminate();
|
|
19308
19340
|
await ConsumerOnFinishManager_default.onConsumerError(consumer, usageId);
|
|
19309
19341
|
Logger_default.log(`[${usageId}] Running cleanup after failure`);
|