@forzalabs/remora 1.0.1 → 1.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/engines/scheduler/CronScheduler.js +2 -2
  2. package/engines/scheduler/QueueManager.js +2 -2
  3. package/package.json +1 -1
  4. package/settings.js +12 -0
  5. package/documentation/default_resources/schema.json +0 -36
  6. package/drivers/LocalDriver.js +0 -542
  7. package/drivers/S3Driver.js +0 -563
  8. package/drivers/S3SourceDriver.js +0 -132
  9. package/engines/DataframeManager.js +0 -55
  10. package/engines/ParseManager.js +0 -75
  11. package/engines/ProducerEngine.js +0 -160
  12. package/engines/UsageDataManager.js +0 -110
  13. package/engines/UsageManager.js +0 -61
  14. package/engines/Validator.js +0 -157
  15. package/engines/consumer/ConsumerEngine.js +0 -128
  16. package/engines/consumer/PostProcessor.js +0 -253
  17. package/engines/dataset/ParallelDataset.js +0 -184
  18. package/engines/dataset/TransformWorker.js +0 -2
  19. package/engines/dataset/definitions.js +0 -2
  20. package/engines/dataset/example-parallel-transform.js +0 -2
  21. package/engines/dataset/test-parallel.js +0 -2
  22. package/engines/deployment/DeploymentPlanner.js +0 -39
  23. package/engines/execution/ExecutionEnvironment.js +0 -209
  24. package/engines/execution/ExecutionPlanner.js +0 -131
  25. package/engines/file/FileCompiler.js +0 -29
  26. package/engines/file/FileContentBuilder.js +0 -34
  27. package/engines/schema/SchemaEngine.js +0 -33
  28. package/engines/sql/SQLBuilder.js +0 -96
  29. package/engines/sql/SQLCompiler.js +0 -141
  30. package/engines/sql/SQLUtils.js +0 -22
  31. package/workers/FilterWorker.js +0 -62
  32. package/workers/ProjectionWorker.js +0 -63
  33. package/workers/TransformWorker.js +0 -63
  34. package/workers/TsWorker.js +0 -14
@@ -1,33 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- const Affirm_1 = __importDefault(require("../../core/Affirm"));
7
- const SchemaValidator_1 = __importDefault(require("./SchemaValidator"));
8
- const ConsumerEngine_1 = __importDefault(require("../consumer/ConsumerEngine"));
9
- class SchemaEngineClass {
10
- constructor() {
11
- /**
12
- * If the consumer has a ".schema" then checks that the output shape of the consumer complies with what the JSON schema says
13
- */
14
- this.enforceConsumerOutputSchema = (consumer) => {
15
- var _a;
16
- (0, Affirm_1.default)(consumer, `Invalid consumer`);
17
- if (!consumer.schema)
18
- return true;
19
- const validator = SchemaValidator_1.default.getSchema(consumer.schema);
20
- const schema = validator.schema;
21
- // TODO: right now I just check that all the required properites are there...
22
- // in the future this needs to be more fleshed (types, ...)
23
- const properties = ConsumerEngine_1.default.compile(consumer);
24
- const matches = (_a = schema.required) === null || _a === void 0 ? void 0 : _a.map(x => ({
25
- exists: properties.find(k => k.consumerAlias === x) ? true : false,
26
- property: x
27
- }));
28
- return matches.every(x => x.exists);
29
- };
30
- }
31
- }
32
- const SchemaEngine = new SchemaEngineClass();
33
- exports.default = SchemaEngine;
@@ -1,96 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- const Affirm_1 = __importDefault(require("../../core/Affirm"));
7
- class SQLBuilderClass {
8
- constructor() {
9
- this.getSQLOperator = (operator) => {
10
- switch (operator) {
11
- case 'equals': return '=';
12
- case 'notEquals': return '!=';
13
- case 'contains': return 'LIKE';
14
- case 'notContains': return 'NOT LIKE';
15
- case 'startsWith': return 'LIKE';
16
- case 'endsWith': return 'LIKE';
17
- case 'greaterThan': return '>';
18
- case 'greaterThanOrEquals': return '>=';
19
- case 'lessThan': return '<';
20
- case 'lessThanOrEquals': return '<=';
21
- case 'in': return 'IN';
22
- case 'notIn': return 'NOT IN';
23
- case 'between': return 'BETWEEN';
24
- case 'notBetween': return 'NOT BETWEEN';
25
- case 'isNull': return 'IS NULL';
26
- case 'isNotNull': return 'IS NOT NULL';
27
- case 'true': return '= TRUE';
28
- case 'false': return '= FALSE';
29
- case 'matches': return '~';
30
- case 'notMatches': return '!~';
31
- case 'sql': return ''; // Custom SQL should be handled separately
32
- default: throw new Error(`Unsupported operator: ${operator}`);
33
- }
34
- };
35
- this.buildFilterClause = (filter) => {
36
- var _a, _b;
37
- const operator = this.getSQLOperator(filter.operator);
38
- let baseClause;
39
- if (filter.operator === 'in' || filter.operator === 'notIn') {
40
- baseClause = `${filter.member} ${operator} (${filter.values.map(value => `'${value}'`).join(', ')})`;
41
- }
42
- else if (filter.operator === 'between' || filter.operator === 'notBetween') {
43
- baseClause = `${filter.member} ${operator} '${filter.values[0]}' AND '${filter.values[1]}'`;
44
- }
45
- else if (filter.operator === 'isNull' || filter.operator === 'isNotNull' || filter.operator === 'true' || filter.operator === 'false') {
46
- baseClause = `${filter.member} ${operator}`;
47
- }
48
- else if (filter.operator === 'contains' || filter.operator === 'notContains') {
49
- baseClause = `${filter.member} ${operator} '%${filter.values[0]}%'`;
50
- }
51
- else if (filter.operator === 'startsWith') {
52
- baseClause = `${filter.member} ${operator} '${filter.values[0]}%'`;
53
- }
54
- else if (filter.operator === 'endsWith') {
55
- baseClause = `${filter.member} ${operator} '%${filter.values[0]}'`;
56
- }
57
- else if (filter.operator === 'sql') {
58
- baseClause = `(${filter.values[0]})`;
59
- }
60
- else {
61
- baseClause = `${filter.member} ${operator} '${filter.values[0]}'`;
62
- }
63
- const orClauses = (_a = filter.or) === null || _a === void 0 ? void 0 : _a.map(this.buildFilterClause).join(' OR ');
64
- const andClauses = (_b = filter.and) === null || _b === void 0 ? void 0 : _b.map(this.buildFilterClause).join(' AND ');
65
- let combinedClause = baseClause;
66
- if (orClauses) {
67
- combinedClause = `(${combinedClause} OR ${orClauses})`;
68
- }
69
- if (andClauses) {
70
- combinedClause = `(${combinedClause} AND ${andClauses})`;
71
- }
72
- return combinedClause;
73
- };
74
- this.buildConsumerQuery = (request) => {
75
- (0, Affirm_1.default)(request, `Invalid build consumer request`);
76
- let query = ``;
77
- if (request.filters && request.filters.length > 0) {
78
- const filterClauses = request.filters.map(this.buildFilterClause);
79
- query += ` WHERE ${filterClauses.join(' AND ')}`;
80
- }
81
- if (request.order && request.order.length > 0) {
82
- const orderClauses = request.order.map(([field, direction]) => `${field} ${direction}`);
83
- query += ` ORDER BY ${orderClauses.join(', ')}`;
84
- }
85
- if (request.limit) {
86
- query += ` LIMIT ${request.limit}`;
87
- }
88
- if (request.offset) {
89
- query += ` OFFSET ${request.offset}`;
90
- }
91
- return query;
92
- };
93
- }
94
- }
95
- const SQLBuilder = new SQLBuilderClass();
96
- exports.default = SQLBuilder;
@@ -1,141 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- const Affirm_1 = __importDefault(require("../../core/Affirm"));
7
- const Algo_1 = __importDefault(require("../../core/Algo"));
8
- const ConsumerEngine_1 = __importDefault(require("../consumer/ConsumerEngine"));
9
- const CryptoEngine_1 = __importDefault(require("../CryptoEngine"));
10
- const Environment_1 = __importDefault(require("../Environment"));
11
- const ProducerManager_1 = __importDefault(require("../producer/ProducerManager"));
12
- const SQLUtils_1 = __importDefault(require("../sql/SQLUtils"));
13
- class SQLCompilerClass {
14
- constructor() {
15
- this.compileProducer = (producer, source) => {
16
- var _a, _b;
17
- (0, Affirm_1.default)(producer, `Invalid producer`);
18
- (0, Affirm_1.default)(source, `Invalid source`);
19
- if (Algo_1.default.hasVal(producer.settings.sql)) {
20
- const libraryItem = Environment_1.default.getSqlInLibrary(producer.settings.sql);
21
- (0, Affirm_1.default)(libraryItem, `Misconfiguration for producer "${producer.name}": SQL library item is missing. Required SQL item named "${producer.settings.sql}" was not found.`);
22
- return libraryItem.sql;
23
- }
24
- (0, Affirm_1.default)(producer.settings.sqlTable, `Misconfiguration for producer "${producer.name}": SQL table name is missing. Required since no override to the SQL was used.`);
25
- const dimensions = producer.dimensions.map(x => {
26
- var _a;
27
- const maskType = ProducerManager_1.default.getMask(x);
28
- const columnReference = (_a = x.alias) !== null && _a !== void 0 ? _a : x.name;
29
- const fieldReference = `"${producer.settings.sqlTable}"."${columnReference}"`;
30
- if (maskType && maskType === 'hash')
31
- return CryptoEngine_1.default.hashQuery(maskType, fieldReference, x.name);
32
- else
33
- return `${fieldReference} AS "${x.name}"`;
34
- });
35
- const measures = (_b = (_a = producer.measures) === null || _a === void 0 ? void 0 : _a.map(measure => {
36
- const thisProducer = `"${producer.settings.sqlTable}"`;
37
- const sanitized = measure.sql.replace('P', thisProducer).replace('PROD', thisProducer).replace('PRODUCER', thisProducer)
38
- .replace('${', '').replace('}', '');
39
- return `${sanitized} AS "${measure.name}"`;
40
- })) !== null && _b !== void 0 ? _b : [];
41
- const columns = dimensions.concat(measures);
42
- const sql = `SELECT ${columns.join(', ')} FROM "${source.authentication['schema']}"."${producer.settings.sqlTable}"`;
43
- if (measures.length > 0) {
44
- const groupBys = `GROUP BY ${dimensions.map((_, i) => i + 1).join(', ')}`;
45
- return `${sql} ${groupBys}`;
46
- }
47
- return sql;
48
- };
49
- this.deployProducer = (producer, source) => {
50
- (0, Affirm_1.default)(producer, `Invalid producer`);
51
- (0, Affirm_1.default)(source, `Invalid source`);
52
- const sql = this.compileProducer(producer, source);
53
- if (producer.settings.direct) {
54
- return sql;
55
- }
56
- else {
57
- const internalSchema = Environment_1.default.get('REMORA_SCHEMA');
58
- (0, Affirm_1.default)(internalSchema, `Missing "REMORA_SCHEMA" on project settings (needed due to "${producer.name}" wanting to create a view)`);
59
- return `CREATE OR REPLACE VIEW "${internalSchema}"."${producer.name}" AS ${sql}`;
60
- }
61
- };
62
- /**
63
- * Returns the SQL reference to this producer, used in FROM when constructing SQL statements of consumers (and others).
64
- * This might easily just return a reference to a view or the underlying SQL of a producer if the view is not available.
65
- */
66
- this.getProducerReference = (producer) => {
67
- (0, Affirm_1.default)(producer, 'Invalid producer');
68
- const source = Environment_1.default.getSource(producer.source);
69
- (0, Affirm_1.default)(source, `No source found for producer "${producer.name}"`);
70
- if (producer.settings.direct) {
71
- return this.compileProducer(producer, source);
72
- }
73
- else {
74
- const internalSchema = Environment_1.default.get('REMORA_SCHEMA');
75
- (0, Affirm_1.default)(internalSchema, `Missing "REMORA_SCHEMA" on project settings (needed due to "${producer.name}" wanting to create a view)`);
76
- return `SELECT * FROM "${internalSchema}"."${producer.name}"`;
77
- }
78
- };
79
- this.getConsumerReference = (consumer) => {
80
- (0, Affirm_1.default)(consumer, 'Invalid consumer');
81
- // if (consumer.outputs.some(x => x.format === 'SQL' && x.accelerated))
82
- // return `SELECT * FROM "av_remora_${SQLUtils.sanitizeName(consumer.name)}"`
83
- // if (consumer.outputs.some(x => x.format === 'SQL' && !x.direct))
84
- // return `SELECT * FROM "v_remora_${SQLUtils.sanitizeName(consumer.name)}"`
85
- return `SELECT * FROM (${this.compileConsumer(consumer)})`;
86
- };
87
- this.compileConsumer = (consumer) => {
88
- var _a;
89
- (0, Affirm_1.default)(consumer, `Invalid consumer`);
90
- (0, Affirm_1.default)(consumer.producers.length > 0, `Consumer has no producers to draw data from ("${consumer.name}")`);
91
- const subqueries = consumer.producers.map(cProd => {
92
- const producer = Environment_1.default.getProducer(cProd.name);
93
- if (!producer) {
94
- const consumer = Environment_1.default.getConsumer(cProd.name);
95
- (0, Affirm_1.default)(consumer, `No producer found for consumer "${consumer.name}" with name "${cProd.name}"`);
96
- return `"${cProd.name}" AS (${this.getConsumerReference(consumer)})`;
97
- }
98
- return `"${cProd.name}" AS (${this.getProducerReference(producer)})`;
99
- });
100
- const columns = ConsumerEngine_1.default.compile(consumer);
101
- const sqlColumns = columns.map(x => { var _a, _b, _c; return `"${x.owner}"."${(_b = (_a = x.dimension) === null || _a === void 0 ? void 0 : _a.name) !== null && _b !== void 0 ? _b : (_c = x.measure) === null || _c === void 0 ? void 0 : _c.name}" AS "${x.consumerAlias}"`; });
102
- const columnQualifiers = sqlColumns.map(x => Algo_1.default.replaceAll(x.split('.')[1], '"', ''));
103
- const dupes = Algo_1.default.duplicates(columnQualifiers);
104
- (0, Affirm_1.default)(dupes.length === 0, `Wrong consumer configuration: duplicate columns where found for consumer "${consumer.name}" ("${dupes.join(', ')}")`);
105
- const joins = consumer.producers.filter(x => x.joins && x.joins.length > 0).flatMap(cProd => {
106
- return cProd.joins.map(x => {
107
- const otherProd = consumer.producers.find(k => k.name === x.otherName);
108
- (0, Affirm_1.default)(otherProd, `Invalid JOIN relantionship: the producer "${cProd.name}" is asking for a join with "${x.otherName}", but this one doesn't exists.`);
109
- const starts = Algo_1.default.locations(x.sql, '${');
110
- const ends = Algo_1.default.locations(x.sql, '}');
111
- (0, Affirm_1.default)(starts.length === ends.length, `Invalid JOIN SQL: number of condition parameters does not match on consumer "${consumer.name}" producer "${cProd.name}" ("${x.sql}")`);
112
- const params = starts.map((start, index) => x.sql.substring(start, ends[index] + 1));
113
- const thisProducer = `"${cProd.name}"`;
114
- const sqlParams = params.map(param => param.replace('P', thisProducer).replace('PROD', thisProducer).replace('PRODUCER', thisProducer)
115
- .replace('${', '').replace('}', ''));
116
- let sqlCondition = x.sql;
117
- params.forEach((p, i) => sqlCondition = sqlCondition.replace(p, sqlParams[i]));
118
- return `LEFT JOIN "${otherProd.name}" ON ${sqlCondition}`;
119
- });
120
- });
121
- const filters = (_a = consumer.filters) === null || _a === void 0 ? void 0 : _a.map(x => {
122
- const starts = Algo_1.default.locations(x.sql, '${');
123
- const ends = Algo_1.default.locations(x.sql, '}');
124
- (0, Affirm_1.default)(starts.length === ends.length, `Invalid filter SQL: number of condition parameters does not match on consumer "${consumer.name}" ("${x.sql}")`);
125
- const params = starts.map((start, index) => x.sql.substring(start, ends[index] + 1));
126
- const sqlParams = params.map(param => SQLUtils_1.default.findDimension(columns, param));
127
- let sqlFilter = x.sql;
128
- params.forEach((p, i) => sqlFilter = sqlFilter.replace(p, sqlParams[i].consumerAlias));
129
- return sqlFilter;
130
- });
131
- let sql = `WITH ${subqueries.join(',\n')}\n\nSELECT ${sqlColumns.join(', ')} FROM "${consumer.producers[0].name}"`;
132
- if (joins && joins.length > 0)
133
- sql += ` ${joins.join(', ')}`;
134
- if (filters && filters.length > 0)
135
- sql += ` WHERE ${filters.join(', ')}`;
136
- return sql;
137
- };
138
- }
139
- }
140
- const SQLCompiler = new SQLCompilerClass();
141
- exports.default = SQLCompiler;
@@ -1,22 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- const Affirm_1 = __importDefault(require("../../core/Affirm"));
7
- const Algo_1 = __importDefault(require("../../core/Algo"));
8
- const SQLUtils = {
9
- viewName: (name) => 'v_remora_' + SQLUtils.sanitizeName(name),
10
- acceleratedViewName: (name) => 'av_remora_' + SQLUtils.sanitizeName(name),
11
- findDimension: (compiledDimensions, dimensionQualifier) => {
12
- const cleanedQualifier = Algo_1.default.replaceAll(dimensionQualifier.split('.')[1], '}', '');
13
- const hit = compiledDimensions.find(x => x.consumerAlias === cleanedQualifier);
14
- (0, Affirm_1.default)(hit, `Unable to find the column referenced by "${dimensionQualifier}" in the compiled list of columns (${compiledDimensions.map(x => `"${x.consumerAlias}"`).join(', ')})`);
15
- return hit;
16
- },
17
- sanitizeName: (string) => {
18
- (0, Affirm_1.default)(string, 'Invalid string to sanitize');
19
- return string.replace(/[^a-z0-9]/gi, '_');
20
- }
21
- };
22
- exports.default = SQLUtils;
@@ -1,62 +0,0 @@
1
- "use strict";
2
- var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
- return new (P || (P = Promise))(function (resolve, reject) {
5
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
- step((generator = generator.apply(thisArg, _arguments || [])).next());
9
- });
10
- };
11
- var __importDefault = (this && this.__importDefault) || function (mod) {
12
- return (mod && mod.__esModule) ? mod : { "default": mod };
13
- };
14
- Object.defineProperty(exports, "__esModule", { value: true });
15
- const workerpool_1 = __importDefault(require("workerpool"));
16
- const dotenv_1 = __importDefault(require("dotenv"));
17
- const Affirm_1 = __importDefault(require("../core/Affirm"));
18
- const Dataset_1 = __importDefault(require("../engines/dataset/Dataset"));
19
- const Environment_1 = __importDefault(require("../engines/Environment"));
20
- const RequestExecutor_1 = __importDefault(require("../engines/execution/RequestExecutor"));
21
- dotenv_1.default.configDotenv();
22
- const run = (workerData) => __awaiter(void 0, void 0, void 0, function* () {
23
- Environment_1.default.load('./');
24
- try {
25
- const { datasetName, fromLine, toLine, workerId, executionId, datasetFile, datasetDimensions, datasetDelimiter, filterData: filter } = workerData;
26
- Affirm_1.default.hasValue(fromLine, `Invalid from line`);
27
- Affirm_1.default.hasValue(toLine, `Invalid to line`);
28
- (0, Affirm_1.default)(datasetName, `Invalid dataset name`);
29
- (0, Affirm_1.default)(workerId, `Invalid worker id`);
30
- (0, Affirm_1.default)(datasetFile, `Invalid dataset file`);
31
- (0, Affirm_1.default)(datasetDimensions, `Invalid dataset dimensions`);
32
- (0, Affirm_1.default)(filter, `Invalid filter data`);
33
- (0, Affirm_1.default)(datasetDelimiter, `Invalid dataset delimter`);
34
- const dataset = new Dataset_1.default({ name: datasetName, file: datasetFile, baseProducer: null, executionId });
35
- dataset
36
- .setDimensions(datasetDimensions)
37
- .setDelimiter(datasetDelimiter);
38
- const outputPath = dataset['_tempPath'] + workerId;
39
- const rules = filter.rules.map(x => x.rule);
40
- yield RequestExecutor_1.default.applyFilters(dataset, rules, { outputPath, range: { fromLine, toLine } });
41
- const result = {
42
- success: true,
43
- datasetDelimiter: dataset.getDelimiter(),
44
- datasetDimensions: dataset.getDimensions(),
45
- datasetFile: dataset.getFile(),
46
- datasetName: dataset.name,
47
- datasetPath: outputPath
48
- };
49
- return result;
50
- }
51
- catch (error) {
52
- console.error(error);
53
- const result = {
54
- success: false,
55
- error
56
- };
57
- return result;
58
- }
59
- });
60
- workerpool_1.default.worker({
61
- filter: run
62
- });
@@ -1,63 +0,0 @@
1
- "use strict";
2
- var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
- return new (P || (P = Promise))(function (resolve, reject) {
5
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
- step((generator = generator.apply(thisArg, _arguments || [])).next());
9
- });
10
- };
11
- var __importDefault = (this && this.__importDefault) || function (mod) {
12
- return (mod && mod.__esModule) ? mod : { "default": mod };
13
- };
14
- Object.defineProperty(exports, "__esModule", { value: true });
15
- const workerpool_1 = __importDefault(require("workerpool"));
16
- const dotenv_1 = __importDefault(require("dotenv"));
17
- const Affirm_1 = __importDefault(require("../core/Affirm"));
18
- const Dataset_1 = __importDefault(require("../engines/dataset/Dataset"));
19
- const Environment_1 = __importDefault(require("../engines/Environment"));
20
- const PostProcessor_1 = __importDefault(require("../engines/consumer/PostProcessor"));
21
- dotenv_1.default.configDotenv();
22
- const run = (workerData) => __awaiter(void 0, void 0, void 0, function* () {
23
- Environment_1.default.load('./');
24
- try {
25
- const { datasetName, fromLine, toLine, workerId, datasetFile, executionId, datasetDimensions, datasetDelimiter, projectionData } = workerData;
26
- Affirm_1.default.hasValue(fromLine, `Invalid from line`);
27
- Affirm_1.default.hasValue(toLine, `Invalid to line`);
28
- (0, Affirm_1.default)(datasetName, `Invalid dataset name`);
29
- (0, Affirm_1.default)(workerId, `Invalid worker id`);
30
- (0, Affirm_1.default)(datasetFile, `Invalid dataset file`);
31
- (0, Affirm_1.default)(datasetDimensions, `Invalid dataset dimensions`);
32
- (0, Affirm_1.default)(projectionData, `Invalid projection data`);
33
- (0, Affirm_1.default)(datasetDelimiter, `Invalid dataset delimter`);
34
- const consumer = Environment_1.default.getConsumer(projectionData.consumerName);
35
- (0, Affirm_1.default)(consumer, `Wrong consumer name sent to projection worker: "${projectionData.consumerName}" not found.`);
36
- const dataset = new Dataset_1.default({ name: datasetName, file: datasetFile, baseProducer: null, executionId });
37
- dataset
38
- .setDimensions(datasetDimensions)
39
- .setDelimiter(datasetDelimiter);
40
- const outputPath = dataset['_tempPath'] + workerId;
41
- yield PostProcessor_1.default.doProjection(consumer, dataset, { outputPath, range: { fromLine, toLine } });
42
- const result = {
43
- success: true,
44
- datasetDelimiter: dataset.getDelimiter(),
45
- datasetDimensions: dataset.getDimensions(),
46
- datasetFile: dataset.getFile(),
47
- datasetName: dataset.name,
48
- datasetPath: outputPath
49
- };
50
- return result;
51
- }
52
- catch (error) {
53
- console.error(error);
54
- const result = {
55
- success: false,
56
- error
57
- };
58
- return result;
59
- }
60
- });
61
- workerpool_1.default.worker({
62
- projection: run
63
- });
@@ -1,63 +0,0 @@
1
- "use strict";
2
- var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
3
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
4
- return new (P || (P = Promise))(function (resolve, reject) {
5
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
6
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
7
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
8
- step((generator = generator.apply(thisArg, _arguments || [])).next());
9
- });
10
- };
11
- var __importDefault = (this && this.__importDefault) || function (mod) {
12
- return (mod && mod.__esModule) ? mod : { "default": mod };
13
- };
14
- Object.defineProperty(exports, "__esModule", { value: true });
15
- const workerpool_1 = __importDefault(require("workerpool"));
16
- const dotenv_1 = __importDefault(require("dotenv"));
17
- const Affirm_1 = __importDefault(require("../core/Affirm"));
18
- const Dataset_1 = __importDefault(require("../engines/dataset/Dataset"));
19
- const Environment_1 = __importDefault(require("../engines/Environment"));
20
- const TransformationEngine_1 = __importDefault(require("../engines/transform/TransformationEngine"));
21
- dotenv_1.default.configDotenv();
22
- const run = (workerData) => __awaiter(void 0, void 0, void 0, function* () {
23
- Environment_1.default.load('./');
24
- try {
25
- const { datasetName, fromLine, toLine, workerId, executionId, datasetFile, datasetDimensions, datasetDelimiter, transformData } = workerData;
26
- Affirm_1.default.hasValue(fromLine, `Invalid from line`);
27
- Affirm_1.default.hasValue(toLine, `Invalid to line`);
28
- (0, Affirm_1.default)(datasetName, `Invalid dataset name`);
29
- (0, Affirm_1.default)(workerId, `Invalid worker id`);
30
- (0, Affirm_1.default)(datasetFile, `Invalid dataset file`);
31
- (0, Affirm_1.default)(datasetDimensions, `Invalid dataset dimensions`);
32
- (0, Affirm_1.default)(transformData, `Invalid transform data`);
33
- (0, Affirm_1.default)(datasetDelimiter, `Invalid dataset delimter`);
34
- const consumer = Environment_1.default.getConsumer(transformData.consumerName);
35
- (0, Affirm_1.default)(consumer, `Wrong consumer name sent to projection worker: "${transformData.consumerName}" not found.`);
36
- const dataset = new Dataset_1.default({ name: datasetName, file: datasetFile, baseProducer: null, executionId });
37
- dataset
38
- .setDimensions(datasetDimensions)
39
- .setDelimiter(datasetDelimiter);
40
- const outputPath = dataset['_tempPath'] + workerId;
41
- yield TransformationEngine_1.default.apply(consumer, dataset, { outputPath, range: { fromLine, toLine } });
42
- const result = {
43
- success: true,
44
- datasetDelimiter: dataset.getDelimiter(),
45
- datasetDimensions: dataset.getDimensions(),
46
- datasetFile: dataset.getFile(),
47
- datasetName: dataset.name,
48
- datasetPath: outputPath
49
- };
50
- return result;
51
- }
52
- catch (error) {
53
- console.error(error);
54
- const result = {
55
- success: false,
56
- error
57
- };
58
- return result;
59
- }
60
- });
61
- workerpool_1.default.worker({
62
- transform: run
63
- });
@@ -1,14 +0,0 @@
1
- "use strict";
2
- var __importDefault = (this && this.__importDefault) || function (mod) {
3
- return (mod && mod.__esModule) ? mod : { "default": mod };
4
- };
5
- Object.defineProperty(exports, "__esModule", { value: true });
6
- const workerpool_1 = __importDefault(require("workerpool"));
7
- const DatasetRecord_1 = __importDefault(require("../engines/dataset/DatasetRecord"));
8
- workerpool_1.default.worker({
9
- ts: () => {
10
- const tt = new DatasetRecord_1.default('bububub,bububbu', [{ hidden: false, index: 0, key: '11', name: '11' }], ',');
11
- console.log(tt);
12
- console.log('hello form typescript', tt.stringify());
13
- }
14
- });