@forzalabs/remora 0.2.6 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Constants.js +10 -2
- package/README.md +0 -14
- package/actions/debug.js +1 -0
- package/actions/deploy.js +1 -0
- package/actions/run.js +17 -13
- package/actions/sample.js +1 -1
- package/core/Algo.js +8 -4
- package/definitions/ExecutorDefinitions.js +2 -0
- package/definitions/json_schemas/consumer-schema.json +1 -1
- package/definitions/json_schemas/producer-schema.json +1 -1
- package/definitions/temp.js +2 -0
- package/drivers/DeltaShareDriver.js +4 -0
- package/drivers/DriverFactory.js +10 -10
- package/drivers/DriverHelper.js +33 -10
- package/drivers/HttpApiDriver.js +4 -0
- package/drivers/LocalDriver.js +73 -6
- package/drivers/RedshiftDriver.js +4 -0
- package/drivers/S3Driver.js +36 -52
- package/drivers/files/LocalDestinationDriver.js +200 -0
- package/drivers/files/LocalSourceDriver.js +394 -0
- package/drivers/s3/S3DestinationDriver.js +159 -0
- package/drivers/s3/S3SourceDriver.js +455 -0
- package/engines/ai/LLM.js +0 -11
- package/engines/consumer/ConsumerEngine.js +0 -77
- package/engines/consumer/ConsumerManager.js +61 -36
- package/engines/consumer/ConsumerOnFinishManager.js +14 -0
- package/engines/consumer/PostProcessor.js +1 -7
- package/engines/dataset/Dataset.js +0 -61
- package/engines/dataset/DatasetManager.js +16 -76
- package/engines/dataset/DatasetRecord.js +4 -3
- package/engines/deployment/DeploymentPlanner.js +0 -7
- package/engines/execution/ExecutionPlanner.js +2 -2
- package/engines/execution/RequestExecutor.js +4 -45
- package/engines/file/FileExporter.js +7 -32
- package/engines/parsing/CSVParser.js +27 -26
- package/engines/parsing/LineParser.js +52 -0
- package/engines/parsing/XMLParser.js +1 -1
- package/engines/producer/ProducerEngine.js +0 -45
- package/engines/scheduler/CronScheduler.js +12 -4
- package/engines/scheduler/QueueManager.js +11 -4
- package/engines/sql/SQLCompiler.js +4 -4
- package/engines/transform/JoinEngine.js +3 -3
- package/engines/transform/TransformationEngine.js +3 -89
- package/engines/usage/UsageManager.js +8 -6
- package/engines/validation/Validator.js +12 -18
- package/executors/ConsumerExecutor.js +152 -0
- package/executors/Executor.js +168 -0
- package/executors/ExecutorOrchestrator.js +315 -0
- package/executors/ExecutorPerformance.js +17 -0
- package/executors/ExecutorProgress.js +52 -0
- package/executors/OutputExecutor.js +118 -0
- package/executors/ProducerExecutor.js +108 -0
- package/package.json +3 -3
- package/workers/ExecutorWorker.js +48 -0
|
@@ -0,0 +1,52 @@
|
|
|
1
|
+
"use strict";
// TypeScript-compiled CommonJS preamble: marks the module for ESM interop.
Object.defineProperty(exports, "__esModule", { value: true });
class ExecutorProgress {
    /**
     * Renders one console progress bar per registered worker, throttled to
     * `_FPS` repaints per second. Progress values are fractions in [0, 1].
     *
     * @param {boolean} isEnabled - when false, `render` is a no-op but
     *   progress state is still tracked in `this.workers`.
     */
    constructor(isEnabled) {
        this._isEnabled = false;
        this._FPS = 2; // maximum repaints per second
        this._lastRenderTime = 0;
        this._lastRenderedLines = -1;
        /** Registers a worker under `name`, starting at 0 progress. */
        this.register = (name) => {
            this.workers[name] = 0;
        };
        /**
         * Records a worker's progress fraction and repaints if at least one
         * frame interval (1000 / _FPS ms) has elapsed since the last repaint.
         */
        this.update = (name, value) => {
            this.workers[name] = value;
            const now = Date.now();
            const interval = 1000 / this._FPS;
            if (now - this._lastRenderTime >= interval) {
                this._lastRenderTime = now;
                this.render();
            }
        };
        /** Forces every worker to 100% and repaints immediately. */
        this.complete = () => {
            for (const key of Object.keys(this.workers)) {
                this.workers[key] = 1;
            }
            this.render();
        };
        /** Repaints all bars, rewinding the cursor over the previous frame. */
        this.render = () => {
            if (!this._isEnabled)
                return;
            // moveCursor/clearLine only exist when stdout is a TTY; when the
            // output is piped/redirected they are undefined, so fall back to
            // plainly appending lines instead of crashing.
            const canRewind = typeof process.stdout.moveCursor === 'function'
                && typeof process.stdout.clearLine === 'function';
            if (canRewind && this._lastRenderedLines > 0) {
                for (let i = 0; i < this._lastRenderedLines; i++) {
                    process.stdout.moveCursor(0, -1);
                    process.stdout.clearLine(1);
                }
            }
            this._lastRenderedLines = 0;
            for (const key of Object.keys(this.workers)) {
                const wrk = this.workers[key] * 100;
                // clamp so an out-of-range fraction can never overflow the bar
                const percentage = Math.min(100, Math.max(0, wrk));
                const barWidth = 30;
                const filledWidth = Math.floor((percentage / 100) * barWidth);
                const emptyWidth = barWidth - filledWidth;
                const bar = '#'.repeat(filledWidth) + '-'.repeat(emptyWidth);
                console.log(`Worker ${key.padStart(2, '0')}: [${bar}] ${percentage.toFixed(2)}%`);
                this._lastRenderedLines++;
            }
        };
        this._isEnabled = isEnabled;
        this.workers = {};
    }
}
|
|
52
|
+
// Default export is the class itself (callers construct their own instance).
exports.default = ExecutorProgress;
|
@@ -0,0 +1,118 @@
|
|
|
1
|
+
"use strict";
// TypeScript-compiled async/await helper (generator-based downlevel emit).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Interop shim: wraps plain CommonJS exports as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const Algo_1 = __importDefault(require("../core/Algo"));
const DSTE_1 = __importDefault(require("../core/dste/DSTE"));
const DriverFactory_1 = __importDefault(require("../drivers/DriverFactory"));
const Environment_1 = __importDefault(require("../engines/Environment"));
const CSVParser_1 = __importDefault(require("../engines/parsing/CSVParser"));
const ConsumerExecutor_1 = __importDefault(require("./ConsumerExecutor"));
|
|
21
|
+
class OutputExecutorClass {
    /**
     * Serializes records into the consumer's output format and moves the
     * staged result file to its export destination.
     */
    constructor() {
        /**
         * Resolves the on-disk format used while staging the result:
         * API is staged as JSON lines and PARQUET as CSV; anything else
         * is staged as-is.
         * NOTE(review): only outputs[0] is consulted — this assumes every
         * output of a consumer shares one internal format; confirm.
         */
        this._getInternalRecordFormat = (consumer) => {
            const output = consumer.outputs[0];
            const format = output.format === 'API'
                ? 'JSON'
                : output.format === 'PARQUET'
                    ? 'CSV'
                    : output.format;
            return format;
        };
        /**
         * Serializes a single record into the consumer's internal format.
         * @throws {Error} for formats other than CSV/JSON.
         */
        this.outputRecord = (record, consumer, fields) => {
            const format = this._getInternalRecordFormat(consumer);
            switch (format) {
                case 'CSV':
                    return this.toCSV(record, fields, ',');
                case 'JSON':
                    return this.toJSON(record, fields);
                default:
                    throw new Error(`Export format ${format} not implemented yet.`);
            }
        };
        /**
         * Renders a record as one CSV line: hidden fields are dropped,
         * every value is quoted, and embedded double quotes are doubled
         * (RFC 4180 style). Null/undefined values render as empty strings.
         */
        this.toCSV = (record, fields, delimiter) => {
            const myDelimiter = delimiter !== null && delimiter !== void 0 ? delimiter : ',';
            // remove the not wanted dimension
            const line = fields
                .filter(x => !x.cField.hidden)
                .map(x => { var _a, _b; return `"${Algo_1.default.replaceAll((_b = (_a = record[x.finalKey]) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : '', '"', '""')}"`; })
                .join(myDelimiter);
            return line;
        };
        /**
         * Renders a record as one JSON line. Hidden fields are deleted from
         * the record (in place) before stringifying.
         */
        this.toJSON = (record, fields) => {
            if (fields.some(x => x.cField.hidden)) {
                // remove the un-wanted dimensions
                for (const dim of fields) {
                    if (dim.cField.hidden)
                        delete record[dim.finalKey];
                }
                return JSON.stringify(record);
            }
            else {
                return JSON.stringify(record);
            }
        };
        /**
         * Moves the staged result file to the configured destination,
         * converting between the internal staging format and the requested
         * output format when they differ.
         * NOTE(review): every branch returns on the first loop iteration, so
         * despite iterating `consumer.outputs` only outputs[0] is exported —
         * confirm whether multi-output export is intended.
         * @throws {Error} for PARQUET and other unimplemented target formats.
         */
        this.exportResult = (consumer, executionId, fields) => __awaiter(this, void 0, void 0, function* () {
            const internalFormat = this._getInternalRecordFormat(consumer);
            for (const output of consumer.outputs) {
                const destination = Environment_1.default.getSource(output.exportDestination);
                const driver = yield DriverFactory_1.default.instantiateDestination(destination);
                const currentPath = ConsumerExecutor_1.default._getWorkPath(consumer, executionId);
                const destinationName = this._composeFileName(consumer, output, this._getExtension(output), executionId);
                if (output.format === internalFormat) {
                    // staged format already matches: plain move, no transform
                    return yield driver.move(currentPath, destinationName);
                }
                else {
                    switch (output.format) {
                        case 'CSV':
                            // JSONL -> CSV: quote and escape each value the same
                            // way toCSV does, so embedded quotes cannot break the
                            // row and null/undefined become empty fields.
                            return yield driver.transformAndMove(currentPath, line => {
                                const parsed = JSON.parse(line);
                                return Object.keys(parsed)
                                    .map(x => { var _a, _b; return `"${Algo_1.default.replaceAll((_b = (_a = parsed[x]) === null || _a === void 0 ? void 0 : _a.toString()) !== null && _b !== void 0 ? _b : '', '"', '""')}"`; })
                                    .join(',');
                            }, destinationName);
                        case 'API':
                        case 'JSON':
                            // CSV -> JSONL: map each column back to its field key
                            return yield driver.transformAndMove(currentPath, line => {
                                const parts = CSVParser_1.default.parseRow(line, ',');
                                const value = {};
                                for (const [index, field] of fields.entries())
                                    value[field.finalKey] = parts[index];
                                return JSON.stringify(value);
                            }, destinationName);
                        case 'PARQUET':
                        default:
                            throw new Error(`Export result to format ${output.format} not implemented yet.`);
                    }
                }
            }
        });
        /** Maps an output format to its file extension ('txt' fallback). */
        this._getExtension = (output) => {
            return output.format === 'CSV'
                ? 'csv'
                : output.format === 'JSON'
                    ? 'jsonl'
                    : 'txt';
        };
        /**
         * Builds the destination file name: a sanitized `exportName` when
         * configured, otherwise `<consumer>_<timestamp>[_<executionId>]`.
         */
        this._composeFileName = (consumer, output, extension, executionId) => {
            if (output.exportName && output.exportName.trim().length > 0) {
                // Ensure no extension duplication
                const sanitized = output.exportName.replace(/\.[^.]+$/, '');
                return `${sanitized}.${extension}`;
            }
            const baseTs = Algo_1.default.replaceAll(DSTE_1.default.now().toISOString().split('.')[0], ':', '-');
            const suffix = executionId ? `_${executionId}` : '';
            return `${consumer.name}_${baseTs}${suffix}.${extension}`;
        };
    }
}
|
|
117
|
+
// Module-level singleton shared by all importers.
const OutputExecutor = new OutputExecutorClass();
exports.default = OutputExecutor;
|
|
@@ -0,0 +1,108 @@
|
|
|
1
|
+
"use strict";
// TypeScript-compiled async/await helper (generator-based downlevel emit).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Interop shim: wraps plain CommonJS exports as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const Affirm_1 = __importDefault(require("../core/Affirm"));
const DriverFactory_1 = __importDefault(require("../drivers/DriverFactory"));
const Environment_1 = __importDefault(require("../engines/Environment"));
const CSVParser_1 = __importDefault(require("../engines/parsing/CSVParser"));
const ProducerManager_1 = __importDefault(require("../engines/producer/ProducerManager"));
const Algo_1 = __importDefault(require("../core/Algo"));
const LineParser_1 = __importDefault(require("../engines/parsing/LineParser"));
const CryptoEngine_1 = __importDefault(require("../engines/CryptoEngine"));
/**
 * Producer-side half of an execution: readiness checks, header discovery,
 * header/dimension reconciliation and per-line record parsing.
 */
class ProducerExecutorClass {
    constructor() {
        /**
         * Asserts the producer and its configured source are valid, then
         * delegates the readiness check to the source driver.
         */
        this.ready = (producer) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(producer, 'Invalid producer');
            const source = Environment_1.default.getSource(producer.source);
            (0, Affirm_1.default)(source, `Invalid source ${producer.source} on producer ${producer.name}`);
            const driver = yield DriverFactory_1.default.instantiateSource(source);
            return yield driver.ready(producer);
        });
        /**
         * Derives the dataset's column names from its first line, according
         * to the producer's file type.
         */
        this.processHeader = (line, producer) => {
            const { settings: { fileType, hasHeaderRow, delimiter } } = producer;
            switch (fileType) {
                // PARQUET/XML/XLS/XLSX fall through to the CSV branch.
                // NOTE(review): presumably the source drivers normalize these
                // formats to delimited rows before this point — confirm.
                case 'PARQUET':
                case 'XML':
                case 'XLS':
                case 'XLSX':
                case 'CSV': {
                    const parts = CSVParser_1.default.parseRow(line, delimiter);
                    return parts;
                }
                case 'TXT': {
                    if (hasHeaderRow) {
                        const parts = CSVParser_1.default.parseRow(line, delimiter);
                        return parts;
                    }
                    else {
                        // no header row: use the configured dimension names/aliases
                        return producer.dimensions.map(x => { var _a; return (_a = x.alias) !== null && _a !== void 0 ? _a : x.name; });
                    }
                }
                case 'JSON':
                case 'JSONL': {
                    // header = keys of the first JSON object
                    const keys = Object.keys(JSON.parse(line));
                    return keys;
                }
                // NOTE(review): there is no default case — an unlisted fileType
                // returns undefined from this function.
            }
        };
        /**
         * Maps each producer dimension to its column index in `header`.
         * Synthetic source-filename dimensions are appended to the header
         * first so they resolve to trailing indices.
         * @throws {Error} when a dimension's key is absent from the header.
         */
        this.reconcileHeader = (header, producer) => {
            var _a;
            const myHeader = [...header];
            const producerDimensions = producer.dimensions;
            if (producerDimensions.some(x => x.sourceFilename === true))
                myHeader.push(...producerDimensions.filter(x => x.sourceFilename === true).map(x => x.name));
            const dimensions = [];
            for (const dimension of producerDimensions) {
                // dimensions are matched by alias when present, name otherwise
                const key = (_a = dimension.alias) !== null && _a !== void 0 ? _a : dimension.name;
                const index = myHeader.findIndex(x => x === key);
                if (index < 0)
                    throw new Error(`The dimension "${dimension.name}" (with key "${key}") of producer "${producer.name}" doesn't exist in the underlying dataset.`);
                dimensions.push({
                    index,
                    name: dimension.name,
                    prodDimension: dimension
                });
            }
            return dimensions;
        };
        /**
         * Parses one raw line into a record, stamps source-filename
         * dimensions, and applies masking where configured.
         * Timings are reported through `tracker.measure`.
         * Returns null for an empty line.
         */
        this.processLine = (options) => {
            var _a;
            const { line, dimensions, index, producer, tracker } = options;
            Affirm_1.default.hasValue(line, 'Invalid line');
            Affirm_1.default.hasValue(index, 'Invalid index');
            (0, Affirm_1.default)(producer, 'Invalid producer');
            if (!line)
                return null;
            // 1. map the underlying aliases to the new names AND cast to the correct type
            let counter = performance.now();
            const record = LineParser_1.default.parse(line.trim(), producer, dimensions, tracker);
            tracker.measure('process-line:line-parse', performance.now() - counter);
            counter = performance.now();
            for (const dimension of dimensions) {
                // 2. apply source file name
                // TODO: to replace with the actual full filename and not just the fileKey
                if (dimension.prodDimension.sourceFilename === true)
                    record[dimension.name] = producer.settings.fileKey;
                // 3. mask
                const maskType = ProducerManager_1.default.getMask(dimension.prodDimension);
                if (Algo_1.default.hasVal(maskType))
                    record[dimension.name] = CryptoEngine_1.default.hashValue(maskType, (_a = record[dimension.name]) === null || _a === void 0 ? void 0 : _a.toString(), dimension.prodDimension.type);
            }
            tracker.measure('process-line:dimensions-filename-mask', performance.now() - counter);
            return record;
        };
    }
}
// Module-level singleton shared by all importers.
const ProducerExecutor = new ProducerExecutorClass();
exports.default = ProducerExecutor;
|
package/package.json
CHANGED
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "@forzalabs/remora",
|
|
3
|
-
"version": "0.2.6",
|
|
3
|
+
"version": "1.0.1",
|
|
4
4
|
"description": "A powerful CLI tool for seamless data translation.",
|
|
5
5
|
"main": "index.js",
|
|
6
6
|
"private": false,
|
|
@@ -11,7 +11,7 @@
|
|
|
11
11
|
"sync": "cd ../dev_ops && npm run sync",
|
|
12
12
|
"dev": "clear && npm run fast-build && clear && npx tsx scripts/dev.ts",
|
|
13
13
|
"dev:w": "clear && npm run fast-build && clear && npx tsx scripts/dev.ts",
|
|
14
|
-
"
|
|
14
|
+
"ts-check": "npx tsc --noemit",
|
|
15
15
|
"init": "npx tsx ./src/index.ts init",
|
|
16
16
|
"version": "npx tsx ./src/index.ts -v",
|
|
17
17
|
"run": "npx tsx ./src/index.ts run",
|
|
@@ -23,7 +23,7 @@
|
|
|
23
23
|
"copy-static-file": "npx tsx ./scripts/CopyStaticFile.js",
|
|
24
24
|
"build": "npm i && npm run sync && tsc --outDir .build && npm run copy-static-file",
|
|
25
25
|
"fast-build": "tsc --outDir .build",
|
|
26
|
-
"upload": "npm run build && cd .build && npm publish --access=public"
|
|
26
|
+
"upload": "npm run build && cd .build && npm login && npm publish --access=public"
|
|
27
27
|
},
|
|
28
28
|
"keywords": [
|
|
29
29
|
"nextjs",
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"use strict";
// TypeScript-compiled async/await helper (generator-based downlevel emit).
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
// Interop shim: wraps plain CommonJS exports as { default: mod }.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const workerpool_1 = __importDefault(require("workerpool"));
const dotenv_1 = __importDefault(require("dotenv"));
const Affirm_1 = __importDefault(require("../core/Affirm"));
const Environment_1 = __importDefault(require("../engines/Environment"));
const Executor_1 = __importDefault(require("../executors/Executor"));
// Load .env into process.env for this worker process.
dotenv_1.default.configDotenv();
/**
 * Worker-pool task: runs one Executor over a chunk of data and forwards
 * progress packets to the pool owner via workerEmit.
 *
 * @param workerData - { workerId, chunk, consumer, producer, prodDimensions, options }
 * @returns the executor's result, or null when execution throws.
 */
const run = (workerData) => __awaiter(void 0, void 0, void 0, function* () {
    // each worker process loads its own environment from the cwd
    Environment_1.default.load('./');
    try {
        const { workerId, chunk, consumer, producer, prodDimensions, options } = workerData;
        // NOTE(review): if Affirm asserts truthiness, a numeric workerId of 0
        // would be rejected here — confirm ids start at 1 or are strings.
        (0, Affirm_1.default)(workerId, `Invalid worker id`);
        (0, Affirm_1.default)(consumer, `Invalid consumer`);
        (0, Affirm_1.default)(producer, `Invalid producer`);
        (0, Affirm_1.default)(chunk, `Invalid chunk`);
        const executor = new Executor_1.default();
        const res = yield executor.run({
            consumer,
            producer,
            prodDimensions,
            workerId,
            chunk,
            options,
            // forward progress packets to the parent process
            reportWork: packet => workerpool_1.default.workerEmit(packet)
        });
        return res;
    }
    catch (error) {
        // NOTE(review): failures are logged here and collapsed to a null
        // result, so the pool owner cannot distinguish an error from an empty
        // result — consider rethrowing so workerpool surfaces the rejection.
        // Confirm callers handle a null return.
        console.error(error);
        return null;
    }
});
// Register the task with the pool under the method name 'executor'.
workerpool_1.default.worker({
    executor: run
});
|