@forzalabs/remora 1.0.21 → 1.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/actions/automap.js +26 -42
- package/actions/compile.js +27 -43
- package/actions/create_consumer.js +24 -40
- package/actions/create_producer.js +16 -32
- package/actions/debug.js +18 -34
- package/actions/deploy.js +30 -46
- package/actions/discover.js +13 -29
- package/actions/init.js +29 -45
- package/actions/mock.js +16 -32
- package/actions/run.js +34 -52
- package/actions/sample.js +42 -58
- package/index.js +38 -43
- package/package.json +4 -4
- package/workers/ExecutorWorker.js +18 -32
- package/Constants.js +0 -34
- package/core/Affirm.js +0 -42
- package/core/Algo.js +0 -160
- package/core/dste/DSTE.js +0 -113
- package/core/logger/DebugLogService.js +0 -48
- package/core/logger/DevelopmentLogService.js +0 -70
- package/core/logger/LocalLogService.js +0 -70
- package/core/logger/Logger.js +0 -54
- package/database/DatabaseEngine.js +0 -149
- package/database/DatabaseStructure.js +0 -27
- package/definitions/DatasetDefinitions.js +0 -2
- package/definitions/ExecutorDefinitions.js +0 -2
- package/definitions/ProcessENV.js +0 -2
- package/definitions/agents/DestinationDriver.js +0 -2
- package/definitions/agents/SourceDriver.js +0 -2
- package/definitions/cli.js +0 -2
- package/definitions/database/ApiKeys.js +0 -2
- package/definitions/database/Stored.js +0 -7
- package/definitions/database/UsageStat.js +0 -2
- package/definitions/database/User.js +0 -2
- package/definitions/json_schemas/consumer-schema.json +0 -1226
- package/definitions/json_schemas/producer-schema.json +0 -308
- package/definitions/json_schemas/project-schema.json +0 -100
- package/definitions/json_schemas/source-schema.json +0 -249
- package/definitions/requests/ConsumerRequest.js +0 -2
- package/definitions/requests/Developer.js +0 -2
- package/definitions/requests/Mapping.js +0 -2
- package/definitions/requests/ProducerRequest.js +0 -2
- package/definitions/requests/Request.js +0 -2
- package/definitions/resources/Compiled.js +0 -2
- package/definitions/resources/Consumer.js +0 -2
- package/definitions/resources/Environment.js +0 -2
- package/definitions/resources/Library.js +0 -2
- package/definitions/resources/Producer.js +0 -2
- package/definitions/resources/Project.js +0 -2
- package/definitions/resources/Schema.js +0 -2
- package/definitions/resources/Source.js +0 -2
- package/definitions/temp.js +0 -2
- package/definitions/transform/Transformations.js +0 -2
- package/drivers/DeltaShareDriver.js +0 -186
- package/drivers/DriverFactory.js +0 -72
- package/drivers/DriverHelper.js +0 -248
- package/drivers/HttpApiDriver.js +0 -208
- package/drivers/RedshiftDriver.js +0 -184
- package/drivers/files/LocalDestinationDriver.js +0 -146
- package/drivers/files/LocalSourceDriver.js +0 -405
- package/drivers/s3/S3DestinationDriver.js +0 -197
- package/drivers/s3/S3SourceDriver.js +0 -495
- package/engines/CryptoEngine.js +0 -75
- package/engines/Environment.js +0 -170
- package/engines/ProcessENVManager.js +0 -83
- package/engines/RandomEngine.js +0 -47
- package/engines/SecretManager.js +0 -23
- package/engines/UserManager.js +0 -66
- package/engines/ai/AutoMapperEngine.js +0 -37
- package/engines/ai/DeveloperEngine.js +0 -497
- package/engines/ai/LLM.js +0 -255
- package/engines/consumer/ConsumerManager.js +0 -218
- package/engines/consumer/ConsumerOnFinishManager.js +0 -202
- package/engines/dataset/Dataset.js +0 -824
- package/engines/dataset/DatasetManager.js +0 -211
- package/engines/dataset/DatasetRecord.js +0 -120
- package/engines/dataset/DatasetRecordPool.js +0 -77
- package/engines/execution/RequestExecutor.js +0 -67
- package/engines/parsing/CSVParser.js +0 -60
- package/engines/parsing/LineParser.js +0 -71
- package/engines/parsing/ParseCompression.js +0 -101
- package/engines/parsing/ParseHelper.js +0 -18
- package/engines/parsing/ParseManager.js +0 -54
- package/engines/parsing/XLSParser.js +0 -87
- package/engines/parsing/XMLParser.js +0 -115
- package/engines/producer/ProducerEngine.js +0 -127
- package/engines/producer/ProducerManager.js +0 -43
- package/engines/scheduler/CronScheduler.js +0 -222
- package/engines/scheduler/QueueManager.js +0 -314
- package/engines/schema/SchemaValidator.js +0 -67
- package/engines/transform/JoinEngine.js +0 -232
- package/engines/transform/TransformationEngine.js +0 -277
- package/engines/transform/TypeCaster.js +0 -59
- package/engines/usage/DataframeManager.js +0 -55
- package/engines/usage/UsageDataManager.js +0 -151
- package/engines/usage/UsageManager.js +0 -65
- package/engines/validation/Validator.js +0 -216
- package/executors/ConsumerExecutor.js +0 -280
- package/executors/Executor.js +0 -177
- package/executors/ExecutorOrchestrator.js +0 -331
- package/executors/ExecutorPerformance.js +0 -17
- package/executors/ExecutorProgress.js +0 -54
- package/executors/ExecutorScope.js +0 -52
- package/executors/OutputExecutor.js +0 -118
- package/executors/ProducerExecutor.js +0 -108
- package/helper/Helper.js +0 -149
- package/helper/Logger.js +0 -84
- package/helper/Runtime.js +0 -20
- package/helper/Settings.js +0 -13
- package/licencing/LicenceManager.js +0 -64
- package/settings.js +0 -12
|
@@ -1,232 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
|
|
3
|
-
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
|
|
4
|
-
return new (P || (P = Promise))(function (resolve, reject) {
|
|
5
|
-
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
|
|
6
|
-
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
|
|
7
|
-
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
|
|
8
|
-
step((generator = generator.apply(thisArg, _arguments || [])).next());
|
|
9
|
-
});
|
|
10
|
-
};
|
|
11
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
12
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
13
|
-
};
|
|
14
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
15
|
-
const Affirm_1 = __importDefault(require("../../core/Affirm"));
|
|
16
|
-
const Environment_1 = __importDefault(require("../Environment"));
|
|
17
|
-
const Dataset_1 = __importDefault(require("../dataset/Dataset"));
|
|
18
|
-
const DatasetRecord_1 = __importDefault(require("../dataset/DatasetRecord"));
|
|
19
|
-
const ConsumerManager_1 = __importDefault(require("../consumer/ConsumerManager"));
|
|
20
|
-
class JoinEngineClass {
|
|
21
|
-
constructor() {
|
|
22
|
-
this.validateFieldInProducer = (fieldName, producerName) => {
|
|
23
|
-
var _a, _b, _c, _d;
|
|
24
|
-
const producer = Environment_1.default.getProducer(producerName);
|
|
25
|
-
if (!producer) {
|
|
26
|
-
throw new Error(`Producer ${producerName} not found`);
|
|
27
|
-
}
|
|
28
|
-
// Check dimensions
|
|
29
|
-
const hasDimension = producer.dimensions.some(d => d.name === fieldName);
|
|
30
|
-
// Check measures
|
|
31
|
-
const hasMeasure = (_b = (_a = producer.measures) === null || _a === void 0 ? void 0 : _a.some(m => m.name === fieldName)) !== null && _b !== void 0 ? _b : false;
|
|
32
|
-
if (!hasDimension && !hasMeasure) {
|
|
33
|
-
throw new Error(`Field '${fieldName}' not found in producer '${producerName}'. Available fields: ${producer.dimensions.map(d => d.name).concat((_d = (_c = producer.measures) === null || _c === void 0 ? void 0 : _c.map(m => m.name)) !== null && _d !== void 0 ? _d : []).join(', ')}`);
|
|
34
|
-
}
|
|
35
|
-
};
|
|
36
|
-
this.validateFieldInConsumer = (fieldName, consumerShape) => {
|
|
37
|
-
const hasField = consumerShape.dimensions.find(x => x.name === fieldName);
|
|
38
|
-
if (!hasField)
|
|
39
|
-
throw new Error(`Field '${fieldName}' not found in consumer '${consumerShape.name}'. Your join condition must be on fields that are present in the consumer.`);
|
|
40
|
-
};
|
|
41
|
-
this.parseJoinCondition = (sql, producer) => {
|
|
42
|
-
// Extract field names from SQL condition like ${P.id} = ${orders.user_id}
|
|
43
|
-
const regex = /\${([^}]+)}/g;
|
|
44
|
-
const matches = Array.from(sql.matchAll(regex));
|
|
45
|
-
if (matches.length !== 2)
|
|
46
|
-
throw new Error(`Invalid join condition: ${sql}. Expected format: \${P.field} = \${producer.field}`);
|
|
47
|
-
const [left, right] = matches.map(m => m[1]);
|
|
48
|
-
const [leftProducer, leftField] = left.split('.');
|
|
49
|
-
const [rightProducer, rightField] = right.split('.');
|
|
50
|
-
if (!leftField || !rightField)
|
|
51
|
-
throw new Error(`Invalid join condition: ${sql}. Both sides must specify a field name after the dot.`);
|
|
52
|
-
// Replace P with actual producer name
|
|
53
|
-
const actualLeftProducer = leftProducer === 'P' ? producer.name : leftProducer;
|
|
54
|
-
const actualRightProducer = rightProducer === 'P' ? producer.name : rightProducer;
|
|
55
|
-
// Validate both fields exist in their respective producers
|
|
56
|
-
this.validateFieldInProducer(leftField, actualLeftProducer);
|
|
57
|
-
this.validateFieldInProducer(rightField, actualRightProducer);
|
|
58
|
-
return {
|
|
59
|
-
leftProducer: actualLeftProducer,
|
|
60
|
-
leftField: leftField,
|
|
61
|
-
rightProducer: actualRightProducer,
|
|
62
|
-
rightField: rightField
|
|
63
|
-
};
|
|
64
|
-
};
|
|
65
|
-
this.findProducerData = (producerName, producedData) => {
|
|
66
|
-
const data = producedData.find(pd => pd.producerKey === producerName);
|
|
67
|
-
if (!data)
|
|
68
|
-
throw new Error(`No data found for producer: ${producerName}`);
|
|
69
|
-
return data.dataset;
|
|
70
|
-
};
|
|
71
|
-
this.createLookupMap = (dataset, key) => __awaiter(this, void 0, void 0, function* () {
|
|
72
|
-
const map = new Map();
|
|
73
|
-
yield dataset.streamBatches((batch) => __awaiter(this, void 0, void 0, function* () {
|
|
74
|
-
var _a;
|
|
75
|
-
for (const record of batch) {
|
|
76
|
-
const keyValue = (_a = record.getValue(key)) === null || _a === void 0 ? void 0 : _a.toString();
|
|
77
|
-
if (keyValue === undefined)
|
|
78
|
-
continue;
|
|
79
|
-
const existing = map.get(keyValue);
|
|
80
|
-
if (existing) {
|
|
81
|
-
existing.push(record);
|
|
82
|
-
}
|
|
83
|
-
else {
|
|
84
|
-
map.set(keyValue, [record]);
|
|
85
|
-
}
|
|
86
|
-
}
|
|
87
|
-
}));
|
|
88
|
-
return map;
|
|
89
|
-
});
|
|
90
|
-
this.join = (consumer, producedData) => __awaiter(this, void 0, void 0, function* () {
|
|
91
|
-
(0, Affirm_1.default)(consumer, 'Invalid consumer');
|
|
92
|
-
(0, Affirm_1.default)(producedData, 'Invalid produced data');
|
|
93
|
-
if (consumer.producers.length <= 1)
|
|
94
|
-
return this.findProducerData(consumer.producers[0].name, producedData);
|
|
95
|
-
if (consumer.producers.some(x => x.union))
|
|
96
|
-
return yield this.union(consumer, producedData);
|
|
97
|
-
const consumerShape = ConsumerManager_1.default.getOutputShape(consumer);
|
|
98
|
-
const consumerColumns = ConsumerManager_1.default.compile(consumer);
|
|
99
|
-
// Create a new dataset for the joined result
|
|
100
|
-
const resultDataset = new Dataset_1.default({
|
|
101
|
-
name: `joined_${consumer.name}`,
|
|
102
|
-
file: {
|
|
103
|
-
fileKey: 'temp',
|
|
104
|
-
fileType: 'CSV'
|
|
105
|
-
},
|
|
106
|
-
baseProducer: Environment_1.default.getProducer(consumer.producers[0].name),
|
|
107
|
-
executionId: producedData[0].dataset.getExecutionId()
|
|
108
|
-
});
|
|
109
|
-
// Get dimensions for the result dataset based on consumer columns
|
|
110
|
-
const resultDimensions = consumerColumns.map((col, index) => {
|
|
111
|
-
var _a, _b;
|
|
112
|
-
return ({
|
|
113
|
-
name: col.consumerAlias || col.consumerKey,
|
|
114
|
-
key: col.consumerAlias || col.consumerKey,
|
|
115
|
-
index,
|
|
116
|
-
type: (_b = (_a = col.dimension) === null || _a === void 0 ? void 0 : _a.type) !== null && _b !== void 0 ? _b : 'string',
|
|
117
|
-
hidden: null
|
|
118
|
-
});
|
|
119
|
-
});
|
|
120
|
-
// Initialize the result dataset with proper dimensions
|
|
121
|
-
resultDataset.getDimensions().length = 0;
|
|
122
|
-
resultDataset.getDimensions().push(...resultDimensions);
|
|
123
|
-
// Process joins sequentially
|
|
124
|
-
for (let i = 0; i < consumer.producers.length; i++) {
|
|
125
|
-
const producer = consumer.producers[i];
|
|
126
|
-
if (!producer.joins)
|
|
127
|
-
continue;
|
|
128
|
-
for (const join of producer.joins) {
|
|
129
|
-
const otherProducer = consumer.producers.find(p => p.name === join.otherName);
|
|
130
|
-
if (!otherProducer) {
|
|
131
|
-
throw new Error(`Producer ${join.otherName} not found`);
|
|
132
|
-
}
|
|
133
|
-
const condition = this.parseJoinCondition(join.sql, producer);
|
|
134
|
-
this.validateFieldInConsumer(condition.leftField, consumerShape);
|
|
135
|
-
this.validateFieldInConsumer(condition.rightField, consumerShape);
|
|
136
|
-
const leftDataset = this.findProducerData(condition.leftProducer, producedData);
|
|
137
|
-
const rightDataset = this.findProducerData(condition.rightProducer, producedData);
|
|
138
|
-
// Create lookup map for the right dataset (smaller dataset)
|
|
139
|
-
const rightLookup = yield this.createLookupMap(rightDataset, condition.rightField);
|
|
140
|
-
// Perform streaming join
|
|
141
|
-
yield this.performStreamingJoin(leftDataset, rightLookup, condition, join.relationship, consumerColumns, resultDataset);
|
|
142
|
-
}
|
|
143
|
-
}
|
|
144
|
-
return resultDataset;
|
|
145
|
-
});
|
|
146
|
-
this.union = (consumer, producedData) => __awaiter(this, void 0, void 0, function* () {
|
|
147
|
-
const getDimensionsKey = (ds) => ds.getDimensions().map(x => x.name.trim()).join(';').trim();
|
|
148
|
-
const mainDataset = producedData[0].dataset;
|
|
149
|
-
const mainDimKey = getDimensionsKey(mainDataset);
|
|
150
|
-
const otherProducedData = producedData.slice(1);
|
|
151
|
-
for (const prodData of otherProducedData) {
|
|
152
|
-
const prodDimKey = getDimensionsKey(prodData.dataset);
|
|
153
|
-
if (mainDimKey !== prodDimKey)
|
|
154
|
-
throw new Error(`On consumer "${consumer.name}", can't union the dataset "${prodData.dataset.name}" (producer: ${prodData.producerKey}) because the dimensions are different from the main dataset "${mainDataset.name}" (producer: ${producedData[0].producerKey}). "${mainDimKey}" != "${prodDimKey}"`);
|
|
155
|
-
yield prodData.dataset.streamBatches((batch) => __awaiter(this, void 0, void 0, function* () {
|
|
156
|
-
yield mainDataset.append(batch);
|
|
157
|
-
}));
|
|
158
|
-
}
|
|
159
|
-
return mainDataset;
|
|
160
|
-
});
|
|
161
|
-
this.performStreamingJoin = (leftDataset, rightLookup, condition, relationship, consumerColumns, resultDataset) => __awaiter(this, void 0, void 0, function* () {
|
|
162
|
-
const joinedRecords = [];
|
|
163
|
-
const batchSize = leftDataset.getBatchSize();
|
|
164
|
-
yield leftDataset.streamBatches((leftBatch) => __awaiter(this, void 0, void 0, function* () {
|
|
165
|
-
var _a;
|
|
166
|
-
for (const leftRecord of leftBatch) {
|
|
167
|
-
const leftValue = (_a = leftRecord.getValue(condition.leftField)) === null || _a === void 0 ? void 0 : _a.toString();
|
|
168
|
-
if (leftValue === undefined)
|
|
169
|
-
continue;
|
|
170
|
-
const rightRecords = rightLookup.get(leftValue) || [];
|
|
171
|
-
if (rightRecords.length === 0) {
|
|
172
|
-
// Handle cases where there's no match
|
|
173
|
-
if (relationship !== 'one-to-many') {
|
|
174
|
-
// For one-to-one and many-to-one, keep rows even without matches
|
|
175
|
-
const mergedRecord = this.createMergedRecord(leftRecord, null, condition, consumerColumns, resultDataset);
|
|
176
|
-
if (mergedRecord) {
|
|
177
|
-
joinedRecords.push(mergedRecord);
|
|
178
|
-
}
|
|
179
|
-
}
|
|
180
|
-
continue;
|
|
181
|
-
}
|
|
182
|
-
// Create joined records for each match
|
|
183
|
-
for (const rightRecord of rightRecords) {
|
|
184
|
-
const mergedRecord = this.createMergedRecord(leftRecord, rightRecord, condition, consumerColumns, resultDataset);
|
|
185
|
-
if (mergedRecord) {
|
|
186
|
-
joinedRecords.push(mergedRecord);
|
|
187
|
-
}
|
|
188
|
-
}
|
|
189
|
-
// Write batch if it's getting large
|
|
190
|
-
if (joinedRecords.length >= batchSize) {
|
|
191
|
-
yield resultDataset.append(joinedRecords);
|
|
192
|
-
joinedRecords.length = 0;
|
|
193
|
-
}
|
|
194
|
-
}
|
|
195
|
-
}));
|
|
196
|
-
// Write remaining records
|
|
197
|
-
if (joinedRecords.length > 0) {
|
|
198
|
-
yield resultDataset.append(joinedRecords);
|
|
199
|
-
}
|
|
200
|
-
});
|
|
201
|
-
this.createMergedRecord = (leftRecord, rightRecord, condition, consumerColumns, resultDataset) => {
|
|
202
|
-
const mergedValues = {};
|
|
203
|
-
// Map each field from the appropriate source
|
|
204
|
-
for (const column of consumerColumns) {
|
|
205
|
-
const fieldName = column.consumerAlias || column.consumerKey;
|
|
206
|
-
if (column.owner === condition.leftProducer) {
|
|
207
|
-
// Get value from left dataset
|
|
208
|
-
const leftFieldName = column.nameInProducer || fieldName;
|
|
209
|
-
mergedValues[fieldName] = leftRecord.getValue(leftFieldName);
|
|
210
|
-
}
|
|
211
|
-
else if (column.owner === condition.rightProducer) {
|
|
212
|
-
// Get value from right dataset (if exists)
|
|
213
|
-
if (rightRecord) {
|
|
214
|
-
const rightFieldName = column.nameInProducer || fieldName;
|
|
215
|
-
mergedValues[fieldName] = rightRecord.getValue(rightFieldName);
|
|
216
|
-
}
|
|
217
|
-
else {
|
|
218
|
-
mergedValues[fieldName] = null;
|
|
219
|
-
}
|
|
220
|
-
}
|
|
221
|
-
}
|
|
222
|
-
// Create the merged record
|
|
223
|
-
const dimensions = resultDataset.getDimensions();
|
|
224
|
-
const delimiter = resultDataset.getDelimiter();
|
|
225
|
-
const values = dimensions.map(dim => mergedValues[dim.name] || '');
|
|
226
|
-
const recordString = values.join(delimiter);
|
|
227
|
-
return new DatasetRecord_1.default(recordString, dimensions, delimiter);
|
|
228
|
-
};
|
|
229
|
-
}
|
|
230
|
-
}
|
|
231
|
-
const JoinEngine = new JoinEngineClass();
|
|
232
|
-
exports.default = JoinEngine;
|
|
@@ -1,277 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
const Algo_1 = __importDefault(require("../../core/Algo"));
|
|
7
|
-
const TypeCaster_1 = __importDefault(require("./TypeCaster"));
|
|
8
|
-
const CryptoEngine_1 = __importDefault(require("../CryptoEngine"));
|
|
9
|
-
const DeveloperEngine_1 = __importDefault(require("../ai/DeveloperEngine"));
|
|
10
|
-
class TransformationEngineClass {
|
|
11
|
-
constructor() {
|
|
12
|
-
this.applyTransformations = (value, transformations, field, record) => {
|
|
13
|
-
var _a;
|
|
14
|
-
if (Array.isArray(transformations)) {
|
|
15
|
-
// Process array transformations without creating intermediate arrays
|
|
16
|
-
let result = value;
|
|
17
|
-
for (const transform of transformations) {
|
|
18
|
-
result = this.applyTransformations(result, transform, field, record);
|
|
19
|
-
}
|
|
20
|
-
return result;
|
|
21
|
-
}
|
|
22
|
-
// Single transformation
|
|
23
|
-
if ('cast' in transformations) {
|
|
24
|
-
const { cast, format } = transformations;
|
|
25
|
-
const casted = TypeCaster_1.default.cast(value, cast, format);
|
|
26
|
-
if (cast === 'number' && isNaN(casted))
|
|
27
|
-
throw new Error(`Cannot cast non-numeric value in field '${field.key}'`);
|
|
28
|
-
if (cast === 'datetime' && casted instanceof Date && isNaN(casted.getTime()))
|
|
29
|
-
throw new Error(`Cannot cast value to date in field '${field.key}'`);
|
|
30
|
-
return casted;
|
|
31
|
-
}
|
|
32
|
-
if ('multiply' in transformations) {
|
|
33
|
-
const num = TypeCaster_1.default.cast(value, 'number');
|
|
34
|
-
if (isNaN(num))
|
|
35
|
-
throw new Error(`Cannot multiply non-numeric value in field '${field.key}'`);
|
|
36
|
-
return num * transformations.multiply;
|
|
37
|
-
}
|
|
38
|
-
if ('multiplyBy' in transformations) {
|
|
39
|
-
if (!record) {
|
|
40
|
-
throw new Error(`Cannot apply combine_fields transformation without record context in field '${field.key}'`);
|
|
41
|
-
}
|
|
42
|
-
const { fields } = transformations.multiplyBy;
|
|
43
|
-
const fieldValues = fields.map(fieldName => {
|
|
44
|
-
const fieldValue = record[fieldName];
|
|
45
|
-
return fieldValue !== null && fieldValue !== undefined ? TypeCaster_1.default.cast(fieldValue, 'number') : 1;
|
|
46
|
-
});
|
|
47
|
-
const product = fieldValues.reduce((accumulator, value) => accumulator * value, 1);
|
|
48
|
-
return product;
|
|
49
|
-
}
|
|
50
|
-
if ('add' in transformations) {
|
|
51
|
-
const num = TypeCaster_1.default.cast(value, 'number');
|
|
52
|
-
if (isNaN(num))
|
|
53
|
-
throw new Error(`Cannot add to non-numeric value in field '${field.key}'`);
|
|
54
|
-
return num + transformations.add;
|
|
55
|
-
}
|
|
56
|
-
if ('addBy' in transformations) {
|
|
57
|
-
if (!record) {
|
|
58
|
-
throw new Error(`Cannot apply combine_fields transformation without record context in field '${field.key}'`);
|
|
59
|
-
}
|
|
60
|
-
const { fields } = transformations.addBy;
|
|
61
|
-
const fieldValues = fields.map(fieldName => {
|
|
62
|
-
const fieldValue = record[fieldName];
|
|
63
|
-
return fieldValue !== null && fieldValue !== undefined ? TypeCaster_1.default.cast(fieldValue, 'number') : 1;
|
|
64
|
-
});
|
|
65
|
-
const sum = fieldValues.reduce((accumulator, value) => accumulator + value);
|
|
66
|
-
return sum;
|
|
67
|
-
}
|
|
68
|
-
if ('extract' in transformations) {
|
|
69
|
-
const date = TypeCaster_1.default.cast(value, 'date');
|
|
70
|
-
if (isNaN(date.getTime()))
|
|
71
|
-
throw new Error(`Invalid date for extraction in field '${field.key}'`);
|
|
72
|
-
switch (transformations.extract) {
|
|
73
|
-
case 'year': return date.getFullYear();
|
|
74
|
-
case 'month': return date.getMonth() + 1; // 1-based month
|
|
75
|
-
case 'day': return date.getDate();
|
|
76
|
-
case 'hour': return date.getHours();
|
|
77
|
-
case 'minute': return date.getMinutes();
|
|
78
|
-
}
|
|
79
|
-
}
|
|
80
|
-
if ('concat' in transformations) {
|
|
81
|
-
if (!Array.isArray(value))
|
|
82
|
-
throw new Error(`Cannot concat non-array value in field '${field.key}'`);
|
|
83
|
-
return value.join(transformations.concat.separator);
|
|
84
|
-
}
|
|
85
|
-
if ('split' in transformations) {
|
|
86
|
-
if (typeof value !== 'string')
|
|
87
|
-
throw new Error(`Cannot split non-string value in field '${field.key}'`);
|
|
88
|
-
const parts = value.split(transformations.split.separator);
|
|
89
|
-
if (transformations.split.index >= parts.length) {
|
|
90
|
-
throw new Error(`Split index ${transformations.split.index} out of bounds in field '${field.key}'`);
|
|
91
|
-
}
|
|
92
|
-
return parts[transformations.split.index];
|
|
93
|
-
}
|
|
94
|
-
if ('regex_match' in transformations) {
|
|
95
|
-
if (typeof value !== 'string')
|
|
96
|
-
throw new Error(`Cannot apply regex_match to non-string value in field '${field.key}'`);
|
|
97
|
-
try {
|
|
98
|
-
const regex = new RegExp(transformations.regex_match.pattern, transformations.regex_match.flags);
|
|
99
|
-
return regex.test(value);
|
|
100
|
-
}
|
|
101
|
-
catch (error) {
|
|
102
|
-
throw new Error(`Invalid regex pattern in field '${field.key}': ${error.message}`);
|
|
103
|
-
}
|
|
104
|
-
}
|
|
105
|
-
if ('regex_replace' in transformations) {
|
|
106
|
-
if (typeof value !== 'string')
|
|
107
|
-
throw new Error(`Cannot apply regex_replace to non-string value in field '${field.key}'`);
|
|
108
|
-
try {
|
|
109
|
-
const regex = new RegExp(transformations.regex_replace.pattern, transformations.regex_replace.flags);
|
|
110
|
-
return value.replace(regex, transformations.regex_replace.replacement);
|
|
111
|
-
}
|
|
112
|
-
catch (error) {
|
|
113
|
-
throw new Error(`Invalid regex pattern in field '${field.key}': ${error.message}`);
|
|
114
|
-
}
|
|
115
|
-
}
|
|
116
|
-
if ('regex_extract' in transformations) {
|
|
117
|
-
if (typeof value !== 'string')
|
|
118
|
-
throw new Error(`Cannot apply regex_extract to non-string value in field '${field.key}'`);
|
|
119
|
-
try {
|
|
120
|
-
const regex = new RegExp(transformations.regex_extract.pattern, transformations.regex_extract.flags);
|
|
121
|
-
const matches = value.match(regex);
|
|
122
|
-
if (!matches)
|
|
123
|
-
return null;
|
|
124
|
-
const groupIndex = transformations.regex_extract.group;
|
|
125
|
-
return (_a = matches[groupIndex]) !== null && _a !== void 0 ? _a : null;
|
|
126
|
-
}
|
|
127
|
-
catch (error) {
|
|
128
|
-
throw new Error(`Invalid regex pattern in field '${field.key}': ${error.message}`);
|
|
129
|
-
}
|
|
130
|
-
}
|
|
131
|
-
if ('trim' in transformations) {
|
|
132
|
-
if (typeof value !== 'string')
|
|
133
|
-
throw new Error(`Cannot trim non-string value in field '${field.key}'`);
|
|
134
|
-
return value.trim();
|
|
135
|
-
}
|
|
136
|
-
if ('to_lowercase' in transformations) {
|
|
137
|
-
if (typeof value !== 'string')
|
|
138
|
-
throw new Error(`Cannot convert non-string value to lowercase in field '${field.key}'`);
|
|
139
|
-
return value.toLowerCase();
|
|
140
|
-
}
|
|
141
|
-
if ('to_uppercase' in transformations) {
|
|
142
|
-
if (typeof value !== 'string')
|
|
143
|
-
throw new Error(`Cannot convert non-string value to uppercase in field '${field.key}'`);
|
|
144
|
-
return value.toUpperCase();
|
|
145
|
-
}
|
|
146
|
-
if ('capitalize' in transformations) {
|
|
147
|
-
if (typeof value !== 'string')
|
|
148
|
-
throw new Error(`Cannot capitalize non-string value in field '${field.key}'`);
|
|
149
|
-
return value.charAt(0).toUpperCase() + value.slice(1);
|
|
150
|
-
}
|
|
151
|
-
if ('substring' in transformations) {
|
|
152
|
-
if (!Algo_1.default.hasVal(value))
|
|
153
|
-
return '';
|
|
154
|
-
if (typeof value !== 'string')
|
|
155
|
-
throw new Error(`Cannot take substring of non-string value in field '${field.key}'`);
|
|
156
|
-
const { start, end } = transformations.substring;
|
|
157
|
-
return end !== undefined ? value.substring(start, end) : value.substring(start);
|
|
158
|
-
}
|
|
159
|
-
if ('pad_start' in transformations) {
|
|
160
|
-
if (typeof value !== 'string')
|
|
161
|
-
throw new Error(`Cannot pad non-string value in field '${field.key}'`);
|
|
162
|
-
const { length, char } = transformations.pad_start;
|
|
163
|
-
if (char.length !== 1)
|
|
164
|
-
throw new Error(`Pad character must be exactly one character in field '${field.key}'`);
|
|
165
|
-
return value.padStart(length, char);
|
|
166
|
-
}
|
|
167
|
-
if ('pad_end' in transformations) {
|
|
168
|
-
if (typeof value !== 'string')
|
|
169
|
-
throw new Error(`Cannot pad non-string value in field '${field.key}'`);
|
|
170
|
-
const { length, char } = transformations.pad_end;
|
|
171
|
-
if (char.length !== 1)
|
|
172
|
-
throw new Error(`Pad character must be exactly one character in field '${field.key}'`);
|
|
173
|
-
return value.padEnd(length, char);
|
|
174
|
-
}
|
|
175
|
-
if ('prepend' in transformations)
|
|
176
|
-
return transformations.prepend + TypeCaster_1.default.cast(value, 'string');
|
|
177
|
-
if ('append' in transformations)
|
|
178
|
-
return TypeCaster_1.default.cast(value, 'string') + transformations.append;
|
|
179
|
-
if ('combine_fields' in transformations) {
|
|
180
|
-
if (!record) {
|
|
181
|
-
throw new Error(`Cannot apply combine_fields transformation without record context in field '${field.key}'`);
|
|
182
|
-
}
|
|
183
|
-
const { fields, separator = '', template } = transformations.combine_fields;
|
|
184
|
-
// Get values from the specified fields
|
|
185
|
-
const fieldValues = fields.map(fieldName => {
|
|
186
|
-
const fieldValue = record[fieldName];
|
|
187
|
-
return fieldValue !== null && fieldValue !== undefined ? String(fieldValue) : '';
|
|
188
|
-
});
|
|
189
|
-
// If template is provided, use it for formatting
|
|
190
|
-
if (template) {
|
|
191
|
-
let result = template;
|
|
192
|
-
for (let i = 0; i < fields.length; i++) {
|
|
193
|
-
const placeholder = `{${fields[i]}}`;
|
|
194
|
-
result = result.replace(new RegExp(placeholder, 'g'), fieldValues[i]);
|
|
195
|
-
}
|
|
196
|
-
return result;
|
|
197
|
-
}
|
|
198
|
-
else {
|
|
199
|
-
// Otherwise, join with separator
|
|
200
|
-
return fieldValues.join(separator);
|
|
201
|
-
}
|
|
202
|
-
}
|
|
203
|
-
if ('mask' in transformations) {
|
|
204
|
-
return this.applyMasking(value, transformations.mask, field);
|
|
205
|
-
}
|
|
206
|
-
if ('conditional' in transformations) {
|
|
207
|
-
for (const clause of transformations.conditional.clauses) {
|
|
208
|
-
if (this.evaluateCondition(value, clause.if)) {
|
|
209
|
-
return clause.then;
|
|
210
|
-
}
|
|
211
|
-
}
|
|
212
|
-
return transformations.conditional.else !== undefined ? transformations.conditional.else : value;
|
|
213
|
-
}
|
|
214
|
-
return value;
|
|
215
|
-
};
|
|
216
|
-
this.evaluateCondition = (value, condition) => {
|
|
217
|
-
if ('greater_than' in condition) {
|
|
218
|
-
return TypeCaster_1.default.cast(value, 'number') > condition.greater_than;
|
|
219
|
-
}
|
|
220
|
-
if ('greater_than_or_equal' in condition) {
|
|
221
|
-
return TypeCaster_1.default.cast(value, 'number') >= condition.greater_than_or_equal;
|
|
222
|
-
}
|
|
223
|
-
if ('less_than' in condition) {
|
|
224
|
-
return TypeCaster_1.default.cast(value, 'number') < condition.less_than;
|
|
225
|
-
}
|
|
226
|
-
if ('less_than_or_equal' in condition) {
|
|
227
|
-
return TypeCaster_1.default.cast(value, 'number') <= condition.less_than_or_equal;
|
|
228
|
-
}
|
|
229
|
-
if ('equals' in condition) {
|
|
230
|
-
return value === condition.equals;
|
|
231
|
-
}
|
|
232
|
-
if ('not_equals' in condition) {
|
|
233
|
-
return value !== condition.not_equals;
|
|
234
|
-
}
|
|
235
|
-
if ('in' in condition) {
|
|
236
|
-
return condition.in.includes(value);
|
|
237
|
-
}
|
|
238
|
-
if ('not_in' in condition) {
|
|
239
|
-
return !condition.not_in.includes(value);
|
|
240
|
-
}
|
|
241
|
-
if ('starts_with' in condition) {
|
|
242
|
-
return TypeCaster_1.default.cast(value, 'string').startsWith(condition.starts_with);
|
|
243
|
-
}
|
|
244
|
-
if ('ends_with' in condition) {
|
|
245
|
-
return TypeCaster_1.default.cast(value, 'string').endsWith(condition.ends_with);
|
|
246
|
-
}
|
|
247
|
-
if ('contains' in condition) {
|
|
248
|
-
return TypeCaster_1.default.cast(value, 'string').includes(condition.contains);
|
|
249
|
-
}
|
|
250
|
-
if ('not_contains' in condition) {
|
|
251
|
-
return !TypeCaster_1.default.cast(value, 'string').includes(condition.not_contains);
|
|
252
|
-
}
|
|
253
|
-
if ('is_empty' in condition) {
|
|
254
|
-
return value === null || value === undefined || TypeCaster_1.default.cast(value, 'string').trim() === '';
|
|
255
|
-
}
|
|
256
|
-
if ('is_not_empty' in condition) {
|
|
257
|
-
return value !== null && value !== undefined && TypeCaster_1.default.cast(value, 'string').trim() !== '';
|
|
258
|
-
}
|
|
259
|
-
return false;
|
|
260
|
-
};
|
|
261
|
-
this.applyMasking = (value, maskType, field) => {
|
|
262
|
-
if (!Algo_1.default.hasVal(value))
|
|
263
|
-
return value;
|
|
264
|
-
if (maskType === 'none')
|
|
265
|
-
return value;
|
|
266
|
-
const valueType = DeveloperEngine_1.default.inferDimensionType(value);
|
|
267
|
-
try {
|
|
268
|
-
return CryptoEngine_1.default.hashValue(maskType, String(value), valueType);
|
|
269
|
-
}
|
|
270
|
-
catch (error) {
|
|
271
|
-
throw new Error(`Failed to apply masking transformation '${maskType}' to field '${field.key}': ${error.message}`);
|
|
272
|
-
}
|
|
273
|
-
};
|
|
274
|
-
}
|
|
275
|
-
}
|
|
276
|
-
// Module-level singleton instance exposed as the CommonJS default export.
const TransformationEngine = new TransformationEngineClass();
exports.default = TransformationEngine;
|
|
@@ -1,59 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
const Algo_1 = __importDefault(require("../../core/Algo"));
|
|
7
|
-
const dayjs_1 = __importDefault(require("dayjs"));
|
|
8
|
-
const customParseFormat_1 = __importDefault(require("dayjs/plugin/customParseFormat"));
|
|
9
|
-
const utc_1 = __importDefault(require("dayjs/plugin/utc"));
|
|
10
|
-
dayjs_1.default.extend(customParseFormat_1.default);
|
|
11
|
-
dayjs_1.default.extend(utc_1.default);
|
|
12
|
-
class TypeCasterClass {
    /**
     * Casts the value to the requested type (only if needed).
     * Optional format parameter currently supports:
     * - Parsing dates (type 'date'/'datetime') from string with tokens: yyyy, mm, dd
     * - Formatting dates when casting to string with same tokens
     *
     * @param {*} value - raw value; null/undefined pass through untouched
     * @param {'boolean'|'date'|'datetime'|'number'|'string'} type - target type
     * @param {string} [format] - strict parse format applied to string date inputs
     * @returns {*} the cast value; dates are returned as ISO-8601 strings.
     *   An unrecognized type falls through the switch and yields `undefined`
     *   (preserved behavior from the original implementation).
     * @throws {Error} when a date value cannot be parsed by either path
     */
    cast(value, type, format) {
        if (!Algo_1.default.hasVal(value))
            return value;
        switch (type) {
            case 'boolean': {
                if (typeof value === 'boolean')
                    return value;
                // NOTE(review): Boolean('false') === true — any non-empty string
                // coerces to true here; confirm this is the intended contract.
                return Boolean(value);
            }
            case 'datetime':
            case 'date': {
                let dateValue = null;
                try {
                    if (format && typeof value === 'string')
                        // Strict UTC parse so out-of-range inputs fail instead of rolling over.
                        dateValue = dayjs_1.default.utc(value, format, true).toDate();
                    else
                        dateValue = new Date(value);
                    // An Invalid Date makes toISOString() throw, landing in the catch below.
                    return dateValue.toISOString();
                }
                catch (error) {
                    // Strict-format parse failed; retry with native Date parsing.
                    dateValue = new Date(value);
                    // Fix: explicit validity check via getTime() instead of the
                    // coercing global isNaN(dateObject).
                    if (!Number.isNaN(dateValue.getTime()))
                        return dateValue.toISOString();
                    throw new Error(`Error casting "${value}" to date with format "${format}": ${error}`);
                }
            }
            case 'number': {
                if (typeof value === 'number')
                    return value;
                return Number(value);
            }
            case 'string': {
                if (typeof value === 'string')
                    return value;
                return String(value);
            }
        }
    }
}
|
|
58
|
-
// Stateless singleton: cast() keeps no per-call state, so one shared
// instance is exported as the CommonJS default.
const TypeCaster = new TypeCasterClass();
exports.default = TypeCaster;
|
|
@@ -1,55 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
|
-
};
|
|
5
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
6
|
-
const Algo_1 = __importDefault(require("../../core/Algo"));
|
|
7
|
-
const Helper_1 = __importDefault(require("../../helper/Helper"));
|
|
8
|
-
class DataframeManagerClass {
    /**
     * Buckets time-series points into a contiguous monthly series.
     *
     * Builds one zero-initialized bucket per month in [from/min, to/max],
     * then folds each point into its month bucket.
     *
     * @param {Array<{x: string|Date, y: number}>} points - raw points; x is a date-like value
     * @param {Date} [from] - range start; defaults to the earliest point (or one year back)
     * @param {Date} [to] - range end; defaults to the latest point (or now)
     * @param {boolean} [onlyLastValue] - overwrite the bucket instead of summing into it
     * @param {boolean} [maintainLastValue] - carry a bucket's value forward into all later months
     * @returns {Array<{x: string, y: number}>} one point per month, x formatted as YYYY-MM
     */
    fill(points, from, to, onlyLastValue, maintainLastValue) {
        const min = from !== null && from !== void 0 ? from : this.getMinDate(points);
        const max = to !== null && to !== void 0 ? to : this.getMaxDate(points);
        const orderPoints = points.length > 0 ? Algo_1.default.orderBy(points, 'x') : [];
        const filledPoints = [];
        // Month key -> position in filledPoints. Replaces the original per-point
        // linear find/findIndex pair (accidental O(n*m)) with O(1) lookups, and
        // computes the month key once per point instead of twice.
        const indexByMonth = new Map();
        const currentDate = new Date(min);
        while (currentDate <= max) {
            const monthKey = Helper_1.default.formatDateToYYYYMM(currentDate);
            indexByMonth.set(monthKey, filledPoints.length);
            filledPoints.push({ x: monthKey, y: 0 });
            currentDate.setMonth(currentDate.getMonth() + 1);
        }
        for (const point of orderPoints) {
            const monthKey = Helper_1.default.formatDateToYYYYMM(new Date(point.x));
            const index = indexByMonth.get(monthKey);
            if (index === undefined)
                continue; // point falls outside [min, max]
            const filledPoint = filledPoints[index];
            if (onlyLastValue)
                filledPoint.y = point.y;
            else
                filledPoint.y += point.y;
            if (maintainLastValue) {
                // Propagate this bucket's (just-updated) value into every later month.
                for (let k = index; k < filledPoints.length; k++)
                    filledPoints[k].y = filledPoint.y;
            }
        }
        return filledPoints;
    }
    /**
     * Earliest point date, or one year before today when there are no points.
     * @param {Array<{x: string|Date}>} points
     * @returns {Date}
     */
    getMinDate(points) {
        if (!points || points.length === 0) {
            const currentDate = new Date();
            return new Date(currentDate.getFullYear() - 1, currentDate.getMonth(), currentDate.getDate());
        }
        return points.reduce((min, point) => (new Date(point.x) < min ? new Date(point.x) : min), new Date(points[0].x));
    }
    /**
     * Latest point date, or now when there are no points.
     * @param {Array<{x: string|Date}>} points
     * @returns {Date}
     */
    getMaxDate(points) {
        if (!points || points.length === 0) {
            return new Date();
        }
        return points.reduce((max, point) => (new Date(point.x) > max ? new Date(point.x) : max), new Date(points[0].x));
    }
}
|
|
54
|
-
// Stateless singleton: the class holds no instance fields, so one shared
// instance is exported as the CommonJS default.
const DataframeManager = new DataframeManagerClass();
exports.default = DataframeManager;
|