@forzalabs/remora 0.1.6-nasco.3 → 0.1.7-nasco.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/Constants.js +1 -1
- package/database/DatabaseEngine.js +17 -14
- package/definitions/json_schemas/consumer-schema.json +68 -3
- package/drivers/DeltaShareDriver.js +9 -5
- package/drivers/LocalDriver.js +59 -0
- package/drivers/S3Driver.js +51 -0
- package/engines/Environment.js +4 -0
- package/engines/consumer/ConsumerOnFinishManager.js +188 -0
- package/engines/dataset/ParallelDataset.js +3 -0
- package/engines/execution/ExecutionEnvironment.js +11 -0
- package/engines/execution/ExecutionPlanner.js +3 -0
- package/engines/producer/ProducerEngine.js +1 -0
- package/engines/scheduler/CronScheduler.js +215 -0
- package/engines/scheduler/QueueManager.js +307 -0
- package/engines/usage/UsageDataManager.js +41 -0
- package/package.json +3 -1
package/Constants.js
CHANGED

package/database/DatabaseEngine.js
CHANGED

@@ -20,23 +20,28 @@ class DatabaseEngineClass {
         this.MAX_TRY_CONNECTION = 3;
         this.db = () => this._db;
         this.connect = () => __awaiter(this, void 0, void 0, function* () {
+            var _a;
+            // WARNING: this was changed during the deployment to ECS... I've reverted, but maybe it needs to be changed or looked into...
+            this._uri = ((_a = process.env.MONGO_URI) !== null && _a !== void 0 ? _a : Helper_1.default.isDev())
+                ? 'mongodb://mongo:27017/remora'
+                : 'mongodb://localhost:27017/remora';
+            this._client = new mongodb_1.MongoClient(this._uri);
             const errors = [];
             for (let i = 0; i < this.MAX_TRY_CONNECTION; i++) {
-
-
-
-
-
-
-
-                catch (error) {
-                    errors.push(i + '° connection to MongoDB throws this error:', error);
-                }
+                try {
+                    console.log(`Attempting to connect to mongo: "${this._uri}"`);
+                    yield this._client.connect();
+                    this._db = this._client.db(Settings_1.default.db.name);
+                    this._connected = true;
+                    console.log('Connected to MongoDB');
+                    break;
+                }
                 }
-
-
+                catch (error) {
+                    errors.push((i + 1) + ': connection to MongoDB throws this error:', error);
+                }
             }
             }
+            if (!this._connected)
+                console.error(`Despite ${this.MAX_TRY_CONNECTION} retries it was not possible to connect to mongoDb, these are the errors encountered:\n` + errors.join('\n'));
         });
         this.disconnect = () => __awaiter(this, void 0, void 0, function* () {
             try {

@@ -121,8 +126,6 @@ class DatabaseEngineClass {
             throw error;
         }
         });
-        const uri = Helper_1.default.isDev() ? 'mongodb://mongo:27017/remora' : 'mongodb://localhost:27017/remora';
-        this._client = new mongodb_1.MongoClient(uri);
     }
 }
 const DatabaseEngine = new DatabaseEngineClass();
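Reading the compiled expression above: `process.env.MONGO_URI` is only consulted as the ternary's *condition* (via the lowered `??`), so its value never becomes the connection URI, and any truthy `MONGO_URI` selects `mongodb://mongo:27017/remora`. That is plausibly what the inline WARNING refers to. A de-sugared sketch; the `isDev` stand-in for `Helper.isDev()` is an assumption:

```js
// Stand-in for Helper.isDev(); the real check is an assumption here.
const isDev = () => process.env.NODE_ENV !== 'production';

// What the compiled connect() above evaluates: MONGO_URI acts only as the
// condition, never as the URI value itself.
const uri = (process.env.MONGO_URI ?? isDev())
    ? 'mongodb://mongo:27017/remora'
    : 'mongodb://localhost:27017/remora';

// If the intent was "honor MONGO_URI when set, otherwise pick by environment",
// the expression would instead read:
const intendedUri = process.env.MONGO_URI
    ?? (isDev() ? 'mongodb://mongo:27017/remora' : 'mongodb://localhost:27017/remora');
```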
package/definitions/json_schemas/consumer-schema.json
CHANGED

@@ -235,13 +235,47 @@
         "type": "string",
         "enum": [
           "CRON",
-          "API"
+          "API",
+          "QUEUE"
         ],
-        "description": "The type of trigger schedule"
+        "description": "The type of trigger schedule. CRON: time-based scheduling. API: HTTP endpoint trigger. QUEUE: SQS queue message trigger (supports shared queues with message type filtering)."
       },
       "value": {
         "type": "string",
-        "description": "The value for the trigger (e.g.,
+        "description": "The value for the trigger. For CRON: cron expression (e.g., '0 0 * * *'). For API: endpoint path. For QUEUE: SQS queue URL or queue name (will construct full URL using metadata.region and metadata.accountId if needed)."
+      },
+      "metadata": {
+        "type": "object",
+        "description": "Additional metadata for the trigger (e.g., AWS credentials, message type filter)",
+        "properties": {
+          "messageType": {
+            "type": "string",
+            "description": "Optional message type filter for QUEUE triggers. Only messages with matching 'type', 'messageType', or 'eventType' fields will be processed by this consumer. Messages without a matching type will be left in the queue for other consumers, enabling shared queue usage."
+          },
+          "region": {
+            "type": "string",
+            "description": "AWS region for the queue (for QUEUE triggers)"
+          },
+          "accountId": {
+            "type": "string",
+            "description": "AWS account ID for constructing queue URL (for QUEUE triggers)"
+          },
+          "accessKeyId": {
+            "type": "string",
+            "description": "AWS access key ID for queue authentication (for QUEUE triggers)"
+          },
+          "secretAccessKey": {
+            "type": "string",
+            "description": "AWS secret access key for queue authentication (for QUEUE triggers)"
+          },
+          "sessionToken": {
+            "type": "string",
+            "description": "AWS session token for temporary credentials (for QUEUE triggers)"
+          }
+        },
+        "additionalProperties": {
+          "type": "string"
+        }
       }
     },
     "required": [

@@ -249,6 +283,20 @@
       "value"
     ],
     "additionalProperties": false
+    },
+    "onSuccess": {
+      "type": "array",
+      "description": "Actions to perform when the output operation completes successfully",
+      "items": {
+        "$ref": "#/definitions/consumerOutputOnFinish"
+      }
+    },
+    "onError": {
+      "type": "array",
+      "description": "Actions to perform when the output operation fails",
+      "items": {
+        "$ref": "#/definitions/consumerOutputOnFinish"
+      }
     }
   },
   "required": [

@@ -792,6 +840,23 @@
       "additionalProperties": false
     }
   ]
+  },
+  "consumerOutputOnFinish": {
+    "type": "object",
+    "description": "Actions to perform when output operations complete",
+    "properties": {
+      "action": {
+        "type": "string",
+        "enum": ["move-file"],
+        "description": "The action to perform"
+      },
+      "moveToDestination": {
+        "type": "string",
+        "description": "If the action is 'move-file', this specifies the source destination in remora where the source file should be moved"
+      }
+    },
+    "required": ["action"],
+    "additionalProperties": false
   }
 },
 "examples": [
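A hypothetical consumer output exercising the new schema fields above (a QUEUE trigger with metadata, plus `onSuccess`/`onError` move-file actions). All names and values here are placeholders, not taken from the package; the shape follows the schema additions:

```js
// Illustrative consumer output config (shown as a JS object; the package
// validates the JSON form of this against consumer-schema.json).
const output = {
    trigger: {
        type: 'QUEUE',
        value: 'orders-events', // queue name; URL built from metadata.region/accountId
        metadata: {
            messageType: 'order-created', // only matching messages trigger this consumer
            region: 'us-east-1',
            accountId: '123456789012'
        }
    },
    onSuccess: [
        // After a successful run, move the producer's source file(s) to the
        // "processed-files" source defined in the environment.
        { action: 'move-file', moveToDestination: 'processed-files' }
    ],
    onError: [
        { action: 'move-file', moveToDestination: 'failed-files' }
    ]
};
```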
package/drivers/DeltaShareDriver.js
CHANGED

@@ -13,6 +13,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 const Affirm_1 = __importDefault(require("../core/Affirm"));
+const SecretManager_1 = __importDefault(require("../engines/SecretManager"));
 const DriverHelper_1 = __importDefault(require("./DriverHelper"));
 /**
  * Delta Share (Databricks Delta Sharing) Source Driver

@@ -31,7 +32,7 @@ class DeltaShareSourceDriver {
         const { authentication } = source;
         (0, Affirm_1.default)(authentication, 'Invalid authentication for delta-share source');
         this._shareUrl = authentication.host;
-        this._bearerToken = authentication.bearerToken || authentication.sessionToken || authentication.password;
+        this._bearerToken = SecretManager_1.default.replaceSecret(authentication.bearerToken || authentication.sessionToken || authentication.password);
         this._share = authentication.share;
         this._schema = authentication.schema;
         this._table = authentication.table;

@@ -58,7 +59,8 @@ class DeltaShareSourceDriver {
         (0, Affirm_1.default)(request, `Invalid download request`);
         (0, Affirm_1.default)(!request.fileKey.includes('%'), `On a delta-share the file key can not include "%"`);
         const deltaFiles = yield this._getAllFilesInTables(this._table);
-        const
+        const hyparquet = yield import('hyparquet');
+        const { asyncBufferFromUrl, parquetReadObjects } = hyparquet;
         const lines = [];
         for (const deltaFile of deltaFiles) {
             const byteLength = (_b = (_a = deltaFile.file.deltaSingleAction.add) === null || _a === void 0 ? void 0 : _a.size) !== null && _b !== void 0 ? _b : (_c = deltaFile.file.deltaSingleAction.remove) === null || _c === void 0 ? void 0 : _c.size;

@@ -75,7 +77,8 @@ class DeltaShareSourceDriver {
         (0, Affirm_1.default)(request.options.lineFrom !== undefined && request.options.lineTo !== undefined, 'Missing read range');
         const deltaFiles = yield this._getAllFilesInTables(this._table);
         const { options: { lineFrom, lineTo } } = request;
-        const
+        const hyparquet = yield import('hyparquet');
+        const { asyncBufferFromUrl, parquetReadObjects } = hyparquet;
         const lines = [];
         let index = 0;
         for (const deltaFile of deltaFiles) {

@@ -96,7 +99,8 @@ class DeltaShareSourceDriver {
         var _a, _b, _c;
         (0, Affirm_1.default)(dataset, 'Invalid dataset');
         const deltaFiles = yield this._getAllFilesInTables(this._table);
-        const
+        const hyparquet = yield import('hyparquet');
+        const { asyncBufferFromUrl, parquetReadObjects } = hyparquet;
         // For each file, download it with the hyparquet package, read lines, then save locally to create the dataset
         let index = 0;
         let totalLineCount = 0;

@@ -142,7 +146,7 @@ class DeltaShareSourceDriver {
             Authorization: `Bearer ${this._bearerToken}`
         }
         });
-        (0, Affirm_1.default)(res.ok, `Error fetching version from the delta share: ${res.status} ${res.statusText}`);
+        (0, Affirm_1.default)(res.ok, `Error fetching version from the delta share: ${res.status} ${res.statusText} (${yield res.text()})`);
         const version = res.headers['delta-table-version'];
         return version;
         });
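All three read paths now load hyparquet with a dynamic `import()` instead of a top-level `require()`, the usual pattern when a CommonJS build consumes an ESM-only package. A minimal sketch of the same pattern; the URL and byte length are placeholders, and `asyncBufferFromUrl`/`parquetReadObjects` are the hyparquet exports used above:

```js
// Dynamic import of the ESM-only hyparquet package from CommonJS code.
async function readParquetRows(url, byteLength) {
    const { asyncBufferFromUrl, parquetReadObjects } = await import('hyparquet');
    const file = await asyncBufferFromUrl({ url, byteLength }); // range-request backed buffer
    return parquetReadObjects({ file });                        // resolves to an array of row objects
}
```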
package/drivers/LocalDriver.js
CHANGED

@@ -320,6 +320,36 @@ class LocalSourceDriver {
             throw new Error(`Failed to list files in directory "${this._path}": ${error.message}`);
         }
         };
+        this.readFile = (fileKey) => {
+            (0, Affirm_1.default)(this._path, 'Path not initialized');
+            (0, Affirm_1.default)(fileKey, 'Invalid file key');
+            const filePath = path_1.default.join(this._path, fileKey);
+            (0, Affirm_1.default)(fs.existsSync(filePath), `Source file does not exist: ${filePath}`);
+            return fs.readFileSync(filePath);
+        };
+        this.deleteFile = (fileKey) => {
+            (0, Affirm_1.default)(this._path, 'Path not initialized');
+            (0, Affirm_1.default)(fileKey, 'Invalid file key');
+            const filePath = path_1.default.join(this._path, fileKey);
+            if (fs.existsSync(filePath)) {
+                fs.unlinkSync(filePath);
+            }
+        };
+        this.moveFile = (sourceFileKey, destinationPath, destinationFileKey) => {
+            (0, Affirm_1.default)(this._path, 'Path not initialized');
+            (0, Affirm_1.default)(sourceFileKey, 'Invalid source file key');
+            (0, Affirm_1.default)(destinationPath, 'Invalid destination path');
+            (0, Affirm_1.default)(destinationFileKey, 'Invalid destination file key');
+            const sourceFilePath = path_1.default.join(this._path, sourceFileKey);
+            const destinationFilePath = path_1.default.join(destinationPath, destinationFileKey);
+            (0, Affirm_1.default)(fs.existsSync(sourceFilePath), `Source file does not exist: ${sourceFilePath}`);
+            // Ensure destination directory exists
+            const destinationDir = path_1.default.dirname(destinationFilePath);
+            if (!fs.existsSync(destinationDir)) {
+                fs.mkdirSync(destinationDir, { recursive: true });
+            }
+            fs.renameSync(sourceFilePath, destinationFilePath);
+        };
     }
 }
 exports.LocalSourceDriver = LocalSourceDriver;

@@ -385,6 +415,35 @@ class LocalDestinationDriver {
             throw new Error(`Failed to complete local multipart upload for "${options.name}": ${error.message}`);
         }
         });
+        this.saveFile = (fileKey, content) => {
+            (0, Affirm_1.default)(this._path, 'Path not initialized');
+            (0, Affirm_1.default)(fileKey, 'Invalid file key');
+            (0, Affirm_1.default)(content, 'Invalid content');
+            const filePath = path_1.default.join(this._path, fileKey);
+            const directory = path_1.default.dirname(filePath);
+            // Create directory if it doesn't exist
+            if (!fs.existsSync(directory)) {
+                fs.mkdirSync(directory, { recursive: true });
+            }
+            fs.writeFileSync(filePath, content);
+            return Promise.resolve();
+        };
+        this.copyFromLocal = (sourceFilePath, destinationFileKey) => {
+            (0, Affirm_1.default)(this._path, 'Path not initialized');
+            (0, Affirm_1.default)(sourceFilePath, 'Invalid source file path');
+            (0, Affirm_1.default)(destinationFileKey, 'Invalid destination file key');
+            const destinationFilePath = path_1.default.join(this._path, destinationFileKey);
+            const destinationDir = path_1.default.dirname(destinationFilePath);
+            // Ensure destination directory exists
+            if (!fs.existsSync(destinationDir)) {
+                fs.mkdirSync(destinationDir, { recursive: true });
+            }
+            fs.copyFileSync(sourceFilePath, destinationFilePath);
+        };
+        this.copyFromS3 = (s3Driver, sourceFileKey, destinationFileKey) => __awaiter(this, void 0, void 0, function* () {
+            const fileContent = yield s3Driver.downloadFile(sourceFileKey);
+            yield this.saveFile(destinationFileKey, fileContent);
+        });
     }
 }
 exports.LocalDestinationDriver = LocalDestinationDriver;
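One caveat with the new `moveFile`: `fs.renameSync` cannot cross filesystem boundaries and throws `EXDEV` (for example between Docker volumes or separate mounts). A common fallback, sketched here rather than taken from the package, is copy-then-unlink:

```js
const fs = require('fs');

// Rename where possible; fall back to copy + unlink across filesystems.
function moveFileAcrossDevices(sourcePath, destinationPath) {
    try {
        fs.renameSync(sourcePath, destinationPath);
    }
    catch (error) {
        if (error.code !== 'EXDEV')
            throw error; // some other failure: surface it
        fs.copyFileSync(sourcePath, destinationPath);
        fs.unlinkSync(sourcePath);
    }
}
```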
package/drivers/S3Driver.js
CHANGED

@@ -134,6 +134,27 @@ class S3DestinationDriver {
             throw error;
         }
         });
+        this.copyFromS3 = (sourceBucket, sourceFileKey, destinationFileKey) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
+            (0, Affirm_1.default)(sourceBucket, 'Invalid source bucket');
+            (0, Affirm_1.default)(sourceFileKey, 'Invalid source file key');
+            (0, Affirm_1.default)(destinationFileKey, 'Invalid destination file key');
+            yield this._client.send(new client_s3_1.CopyObjectCommand({
+                CopySource: `${sourceBucket}/${sourceFileKey}`,
+                Bucket: this._bucketName,
+                Key: destinationFileKey
+            }));
+        });
+        this.saveFile = (fileKey, content) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
+            (0, Affirm_1.default)(fileKey, 'Invalid file key');
+            (0, Affirm_1.default)(content, 'Invalid content');
+            yield this._client.send(new client_s3_1.PutObjectCommand({
+                Bucket: this._bucketName,
+                Key: fileKey,
+                Body: content
+            }));
+        });
     }
 }
 exports.S3DestinationDriver = S3DestinationDriver;

@@ -491,6 +512,36 @@ class S3SourceDriver {
         } while (continuationToken);
         return allFiles;
         });
+        this.downloadFile = (fileKey) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
+            (0, Affirm_1.default)(fileKey, 'Invalid file key');
+            const response = yield this._client.send(new client_s3_1.GetObjectCommand({
+                Bucket: this._bucketName,
+                Key: fileKey
+            }));
+            (0, Affirm_1.default)(response.Body, 'Failed to fetch object from S3');
+            const content = yield response.Body.transformToByteArray();
+            return Buffer.from(content);
+        });
+        this.deleteFile = (fileKey) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
+            (0, Affirm_1.default)(fileKey, 'Invalid file key');
+            yield this._client.send(new client_s3_1.DeleteObjectCommand({
+                Bucket: this._bucketName,
+                Key: fileKey
+            }));
+        });
+        this.copyFile = (sourceFileKey, destinationBucket, destinationFileKey) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
+            (0, Affirm_1.default)(sourceFileKey, 'Invalid source file key');
+            (0, Affirm_1.default)(destinationBucket, 'Invalid destination bucket');
+            (0, Affirm_1.default)(destinationFileKey, 'Invalid destination file key');
+            yield this._client.send(new client_s3_1.CopyObjectCommand({
+                CopySource: `${this._bucketName}/${sourceFileKey}`,
+                Bucket: destinationBucket,
+                Key: destinationFileKey
+            }));
+        });
     }
 }
 exports.S3SourceDriver = S3SourceDriver;
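A note on the `CopyObjectCommand` usage above: AWS expects `CopySource` to be URL-encoded, so keys containing spaces or special characters can fail with the raw `${bucket}/${key}` interpolation. A hedged variant, not the package's code, that encodes each path segment while keeping the separators:

```js
const { S3Client, CopyObjectCommand } = require('@aws-sdk/client-s3');

// Encode each key segment (but not the '/' separators) for CopySource.
const encodeKey = (key) => key.split('/').map(encodeURIComponent).join('/');

async function copyObject(client, sourceBucket, sourceKey, destinationBucket, destinationKey) {
    await client.send(new CopyObjectCommand({
        CopySource: `${sourceBucket}/${encodeKey(sourceKey)}`,
        Bucket: destinationBucket,
        Key: destinationKey
    }));
}
```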
package/engines/Environment.js
CHANGED

@@ -128,6 +128,10 @@ class EnvironmentClass {
         (0, Affirm_1.default)(consumerName, `Invalid consumer name`);
         return this._env.consumers.find(x => x.name.toLowerCase() === consumerName.toLowerCase());
         };
+        this.getAllConsumers = () => {
+            (0, Affirm_1.default)(this._env, 'Environment not initialized');
+            return this._env.consumers || [];
+        };
         this.getSchema = (schemaName) => {
         (0, Affirm_1.default)(schemaName, 'Invalid schema name');
         return this._env.schemas.find(x => x.title === schemaName);
package/engines/consumer/ConsumerOnFinishManager.js
ADDED

@@ -0,0 +1,188 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const Affirm_1 = __importDefault(require("../../core/Affirm"));
+const Environment_1 = __importDefault(require("../Environment"));
+const DriverFactory_1 = __importDefault(require("../../drivers/DriverFactory"));
+class ConsumerOnFinishManagerClass {
+    constructor() {
+        this.performOnSuccessActions = (consumer, output) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(consumer, 'Invalid consumer');
+            (0, Affirm_1.default)(output, 'Invalid output');
+            if (!output.onSuccess || output.onSuccess.length === 0)
+                return;
+            for (const onSuccess of output.onSuccess) {
+                switch (onSuccess.action) {
+                    case 'move-file': {
+                        yield this.moveSourceFiles(consumer, onSuccess.moveToDestination);
+                        break;
+                    }
+                    default:
+                        throw new Error(`On success action "${onSuccess.action}" is not implemented yet.`);
+                }
+            }
+        });
+        this.moveSourceFiles = (consumer, moveDestination) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(moveDestination, 'Move destination is required for move-file action');
+            const destinationSource = Environment_1.default.getSource(moveDestination);
+            (0, Affirm_1.default)(destinationSource, `Destination source "${moveDestination}" not found`);
+            // Get all unique source files from all producers used by this consumer
+            const sourceFilesToMove = [];
+            for (const consumerProducer of consumer.producers) {
+                const producer = Environment_1.default.getProducer(consumerProducer.name);
+                (0, Affirm_1.default)(producer, `Producer "${consumerProducer.name}" not found`);
+                const source = Environment_1.default.getSource(producer.source);
+                (0, Affirm_1.default)(source, `Source "${producer.source}" not found`);
+                // Only handle file-based sources that have fileKey
+                if (producer.settings.fileKey) {
+                    // Handle wildcard patterns (files with %)
+                    if (producer.settings.fileKey.includes('%')) {
+                        // Get all files matching the pattern
+                        const matchingFiles = yield this.getMatchingFiles(source, producer.settings.fileKey);
+                        for (const fileKey of matchingFiles) {
+                            sourceFilesToMove.push({
+                                sourceName: producer.source,
+                                filePath: this.getSourceFilePath(source, fileKey),
+                                fileKey
+                            });
+                        }
+                    }
+                    else {
+                        // Single file
+                        sourceFilesToMove.push({
+                            sourceName: producer.source,
+                            filePath: this.getSourceFilePath(source, producer.settings.fileKey),
+                            fileKey: producer.settings.fileKey
+                        });
+                    }
+                }
+            }
+            // Move all files to destination
+            yield this.moveFiles(sourceFilesToMove, destinationSource);
+        });
+        this.getMatchingFiles = (source, fileKeyPattern) => __awaiter(this, void 0, void 0, function* () {
+            const sourceDriver = yield DriverFactory_1.default.instantiateSource(source);
+            if (source.engine === 'aws-s3') {
+                // S3 driver has a public listFiles method that handles patterns
+                const s3Driver = sourceDriver;
+                return yield s3Driver.listFiles(fileKeyPattern);
+            }
+            else if (source.engine === 'local') {
+                // Local driver now has a public listFiles method consistent with S3
+                const localDriver = sourceDriver;
+                return localDriver.listFiles(fileKeyPattern);
+            }
+            throw new Error(`Unsupported source engine for file listing: ${source.engine}`);
+        });
+        this.getSourceFilePath = (source, fileKey) => {
+            if (source.engine === 'local') {
+                return `${source.authentication['path']}/${fileKey}`;
+            }
+            else if (source.engine === 'aws-s3') {
+                // For S3, we return the key as the path since S3 uses keys instead of file paths
+                return fileKey;
+            }
+            throw new Error(`Unsupported source engine for file move: ${source.engine}`);
+        };
+        this.moveFiles = (files, destinationSource) => __awaiter(this, void 0, void 0, function* () {
+            for (const file of files) {
+                const sourceSource = Environment_1.default.getSource(file.sourceName);
+                if (sourceSource.engine === 'local' && destinationSource.engine === 'local') {
+                    // Local to Local move
+                    yield this.moveLocalToLocal(file.filePath, destinationSource, file.fileKey);
+                }
+                else if (sourceSource.engine === 'local' && destinationSource.engine === 'aws-s3') {
+                    // Local to S3 move
+                    yield this.moveLocalToS3(file.filePath, destinationSource, file.fileKey);
+                }
+                else if (sourceSource.engine === 'aws-s3' && destinationSource.engine === 'local') {
+                    // S3 to Local move
+                    yield this.moveS3ToLocal(sourceSource, file.fileKey, destinationSource);
+                }
+                else if (sourceSource.engine === 'aws-s3' && destinationSource.engine === 'aws-s3') {
+                    // S3 to S3 move
+                    yield this.moveS3ToS3(sourceSource, file.fileKey, destinationSource);
+                }
+                else {
+                    throw new Error(`Unsupported move operation from ${sourceSource.engine} to ${destinationSource.engine}`);
+                }
+            }
+        });
+        this.moveLocalToLocal = (sourceFilePath, destinationSource, fileKey) => __awaiter(this, void 0, void 0, function* () {
+            const sourceDriver = yield DriverFactory_1.default.instantiateSource(this.findSourceForPath(sourceFilePath));
+            const destinationDriver = yield DriverFactory_1.default.instantiateDestination(destinationSource);
+            // Read file from source
+            const fileContent = sourceDriver.readFile(fileKey);
+            // Save to destination
+            yield destinationDriver.saveFile(fileKey, fileContent);
+            // Delete from source
+            sourceDriver.deleteFile(fileKey);
+        });
+        this.findSourceForPath = (filePath) => {
+            // Extract directory from file path for source creation
+            const directory = filePath.substring(0, filePath.lastIndexOf('/'));
+            return {
+                name: 'temp-source',
+                engine: 'local',
+                authentication: { path: directory }
+            };
+        };
+        this.moveLocalToS3 = (sourceFilePath, destinationSource, fileKey) => __awaiter(this, void 0, void 0, function* () {
+            const sourceDriver = yield DriverFactory_1.default.instantiateSource(this.findSourceForPath(sourceFilePath));
+            const destinationDriver = yield DriverFactory_1.default.instantiateDestination(destinationSource);
+            // Read file from local source
+            const fileContent = sourceDriver.readFile(fileKey);
+            // Upload to S3 destination
+            yield destinationDriver.saveFile(fileKey, fileContent);
+            // Remove source file after successful upload
+            sourceDriver.deleteFile(fileKey);
+        });
+        this.moveS3ToLocal = (sourceSource, fileKey, destinationSource) => __awaiter(this, void 0, void 0, function* () {
+            const sourceDriver = yield DriverFactory_1.default.instantiateSource(sourceSource);
+            const destinationDriver = yield DriverFactory_1.default.instantiateDestination(destinationSource);
+            // Download from S3
+            const content = yield sourceDriver.downloadFile(fileKey);
+            // Save to local destination
+            yield destinationDriver.saveFile(fileKey, content);
+            // Delete from S3 source
+            yield sourceDriver.deleteFile(fileKey);
+        });
+        this.moveS3ToS3 = (sourceSource, fileKey, destinationSource) => __awaiter(this, void 0, void 0, function* () {
+            const sourceDriver = yield DriverFactory_1.default.instantiateSource(sourceSource);
+            const destinationDriver = yield DriverFactory_1.default.instantiateDestination(destinationSource);
+            // Copy from source S3 to destination S3
+            yield destinationDriver.copyFromS3(sourceSource.authentication['bucket'], fileKey, fileKey);
+            // Delete from source S3
+            yield sourceDriver.deleteFile(fileKey);
+        });
+        this.performOnErrorActions = (consumer, output) => __awaiter(this, void 0, void 0, function* () {
+            (0, Affirm_1.default)(consumer, 'Invalid consumer');
+            (0, Affirm_1.default)(output, 'Invalid output');
+            if (!output.onError || output.onError.length === 0)
+                return;
+            for (const onError of output.onError) {
+                switch (onError.action) {
+                    case 'move-file': {
+                        yield this.moveSourceFiles(consumer, onError.moveToDestination);
+                        break;
+                    }
+                    default:
+                        throw new Error(`On success action "${onError.action}" is not implemented yet.`);
+                }
+            }
+        });
+    }
+}
+const ConsumerOnFinishManager = new ConsumerOnFinishManagerClass();
+exports.default = ConsumerOnFinishManager;
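Two observations on the new manager. First, a move is implemented as copy-then-delete, so it is not atomic: a failure between the two calls leaves the file in both places. Second, the `default` branch of `performOnErrorActions` reuses the "On success action" wording in its error message. A sketch of the S3-to-S3 branch it executes (driver construction elided; method names mirror the compiled code above):

```js
// Server-side S3 move: CopyObject via the destination driver, then
// DeleteObject via the source driver. The object bytes never transit this
// process, but the two steps are not atomic.
async function moveS3ToS3(sourceDriver, destinationDriver, sourceBucket, fileKey) {
    await destinationDriver.copyFromS3(sourceBucket, fileKey, fileKey); // CopyObjectCommand
    await sourceDriver.deleteFile(fileKey);                             // DeleteObjectCommand
}
```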
package/engines/dataset/ParallelDataset.js
CHANGED

@@ -43,6 +43,9 @@ class ParallelDatasetClass {
         const currentDir = __dirname;
         if (process.env.NODE_ENV === 'dev' || process.env.NODE_ENV === 'development')
             return path_1.default.resolve('./.build/workers');
+        const forcedPath = process.env.REMORA_WORKERS_PATH;
+        if (forcedPath && forcedPath.length > 0)
+            return path_1.default.join(__dirname, forcedPath);
         // Check if we're in a published npm package (no .build in path)
         if (!currentDir.includes('.build')) {
             // We're in the published package, workers are relative to package root
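Note that `REMORA_WORKERS_PATH` is joined onto the module's `__dirname`, so it resolves relative to the installed package directory, not the process working directory. A hypothetical layout (the values below are illustrative, not taken from the package):

```js
const path = require('path');

// Hypothetical override -- a relative value is what the join above expects.
process.env.REMORA_WORKERS_PATH = '../workers';
// If __dirname is .../node_modules/@forzalabs/remora/engines/dataset,
// the resolved workers path becomes .../@forzalabs/remora/engines/workers:
const workersPath = path.join(__dirname, process.env.REMORA_WORKERS_PATH);
```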
package/engines/execution/ExecutionEnvironment.js
CHANGED

@@ -27,6 +27,7 @@ const Environment_1 = __importDefault(require("../Environment"));
 const Algo_1 = __importDefault(require("../../core/Algo"));
 const Logger_1 = __importDefault(require("../../helper/Logger"));
 const ParallelDataset_1 = __importDefault(require("../dataset/ParallelDataset"));
+const ConsumerOnFinishManager_1 = __importDefault(require("../consumer/ConsumerOnFinishManager"));
 class ExecutionEnvironment {
     constructor(consumer, executionId) {
         this.run = (options) => __awaiter(this, void 0, void 0, function* () {

@@ -127,6 +128,10 @@ class ExecutionEnvironment {
                 this._resultingDataset = yield PostProcessor_1.default.distinct(this._resultingDataset);
                 break;
             }
+            case 'perform-on-success-actions': {
+                yield ConsumerOnFinishManager_1.default.performOnSuccessActions(this._consumer, planStep.output);
+                break;
+            }
             case 'save-execution-stats': {
                 (0, Affirm_1.default)(this._resultingDataset, `Invalid result dataset in save-execution-stats`);
                 result._stats = {

@@ -156,6 +161,12 @@ class ExecutionEnvironment {
             if (ds)
                 Logger_1.default.log(`Failed execution of consumer at step ${currentStep.type}:\n\tSize: ${ds.getCount()}\n\tCycles: ${ds.getCycles()}\n\tOperations: ${Logger_1.default.formatList(ds.getOperations())}`);
             Logger_1.default.log(`\tFailed step: ${currentStep.type}->\n\t${error}`);
+            try {
+                yield ConsumerOnFinishManager_1.default.performOnErrorActions(this._consumer, currentStep.output);
+            }
+            catch (error) {
+                Logger_1.default.log(`Error when trying to perform onError actions on failed consumer ${error}`);
+            }
             // IMPORTANT: cleanup all the datasets to not leave any data around and to avoid memory leaks
             const datasets = [
                 ...this._producedData.map(x => x.dataset),
package/engines/execution/ExecutionPlanner.js
CHANGED

@@ -13,6 +13,7 @@ class ExecutionPlannerClas {
         switch (engine) {
             case 'aws-dynamodb': return 'no-sql';
             case 'aws-redshift':
+            case 'delta-share':
             case 'postgres': return 'sql';
             case 'aws-s3': return 'file';
             case 'local': return 'local';

@@ -65,6 +66,8 @@ class ExecutionPlannerClas {
             default:
                 throw new Error(`Output format "${output.format}" not supported`);
         }
+        if (output.onSuccess && output.onSuccess.length > 0)
+            plan.push({ type: 'perform-on-success-actions', output });
     }
     plan.push({ type: 'clean-datasets' });
     plan.push({ type: 'save-execution-stats' });
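With this change, an output that declares `onSuccess` actions gets one extra plan step appended right after its format-specific steps and before the shared tail. The resulting plan shape, with payloads simplified and step names mirroring the compiled code:

```js
// Illustrative plan for one output that declares onSuccess actions.
const plan = [
    // ...format-specific produce/write steps for the output...
    { type: 'perform-on-success-actions', output: { /* the output config */ } },
    { type: 'clean-datasets' },
    { type: 'save-execution-stats' }
];
```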
package/engines/scheduler/CronScheduler.js
ADDED

@@ -0,0 +1,215 @@
+"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    var desc = Object.getOwnPropertyDescriptor(m, k);
+    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+        desc = { enumerable: true, get: function() { return m[k]; } };
+    }
+    Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+    Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+    o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || (function () {
+    var ownKeys = function(o) {
+        ownKeys = Object.getOwnPropertyNames || function (o) {
+            var ar = [];
+            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
+            return ar;
+        };
+        return ownKeys(o);
+    };
+    return function (mod) {
+        if (mod && mod.__esModule) return mod;
+        var result = {};
+        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
+        __setModuleDefault(result, mod);
+        return result;
+    };
+})();
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const cron = __importStar(require("node-cron"));
+const Environment_1 = __importDefault(require("../Environment"));
+const ConsumerEngine_1 = __importDefault(require("../consumer/ConsumerEngine"));
+const UserManager_1 = __importDefault(require("../UserManager"));
+class CronScheduler {
+    constructor() {
+        this.scheduledJobs = new Map();
+        this.isInitialized = false;
+    }
+    /**
+     * Initialize the CRON scheduler by scanning all consumers and scheduling those with CRON triggers
+     */
+    initialize() {
+        if (this.isInitialized) {
+            console.log('CRON scheduler already initialized');
+            return;
+        }
+        console.log('Initializing CRON scheduler...');
+        try {
+            const consumers = Environment_1.default.getAllConsumers();
+            let cronJobCount = 0;
+            for (const consumer of consumers) {
+                if (this.hasCronTrigger(consumer)) {
+                    this.scheduleConsumer(consumer);
+                    cronJobCount++;
+                }
+            }
+            this.isInitialized = true;
+            console.log(`CRON scheduler initialized with ${cronJobCount} scheduled jobs`);
+        }
+        catch (error) {
+            console.error('Failed to initialize CRON scheduler:', error);
+            throw error;
+        }
+    }
+    /**
+     * Check if a consumer has any CRON triggers configured
+     */
+    hasCronTrigger(consumer) {
+        return consumer.outputs.some(output => { var _a; return ((_a = output.trigger) === null || _a === void 0 ? void 0 : _a.type) === 'CRON' && output.trigger.value; });
+    }
+    /**
+     * Schedule a consumer with CRON triggers
+     */
+    scheduleConsumer(consumer) {
+        consumer.outputs.forEach((output, index) => {
+            var _a;
+            if (((_a = output.trigger) === null || _a === void 0 ? void 0 : _a.type) === 'CRON' && output.trigger.value) {
+                const jobKey = `${consumer.name}_output_${index}`;
+                const cronExpression = output.trigger.value;
+                try {
+                    // Validate CRON expression
+                    if (!cron.validate(cronExpression)) {
+                        console.error(`Invalid CRON expression for consumer ${consumer.name}: ${cronExpression}`);
+                        return;
+                    }
+                    // Schedule the job
+                    const task = cron.schedule(cronExpression, () => __awaiter(this, void 0, void 0, function* () {
+                        yield this.executeConsumerOutput(consumer, output, index);
+                    }));
+                    // Don't start the task immediately, we'll start it manually
+                    task.stop();
+                    this.scheduledJobs.set(jobKey, task);
+                    task.start();
+                    console.log(`Scheduled CRON job for consumer "${consumer.name}" output ${index} with expression: ${cronExpression}`);
+                }
+                catch (error) {
+                    console.error(`Failed to schedule CRON job for consumer ${consumer.name}:`, error);
+                }
+            }
+        });
+    }
+    /**
+     * Execute a consumer output when triggered by CRON
+     */
+    executeConsumerOutput(consumer, output, outputIndex) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                console.log(`Executing CRON job for consumer "${consumer.name}" output ${outputIndex}`);
+                const user = UserManager_1.default.getUser();
+                // Execute the consumer with default options
+                const result = yield ConsumerEngine_1.default.execute(consumer, {}, user);
+                console.log(`CRON job completed successfully for consumer "${consumer.name}" output ${outputIndex}`);
+                // Log execution statistics
+                if (result && result._stats) {
+                    console.log(`CRON job stats: ${result._stats.elapsedMS}ms, size: ${result._stats.size}, cycles: ${result._stats.cycles}`);
+                }
+            }
+            catch (error) {
+                console.error(`CRON job failed for consumer "${consumer.name}" output ${outputIndex}:`, error);
+                // Optionally, you could implement error handling strategies here:
+                // - Send notifications
+                // - Log to a monitoring system
+                // - Retry logic
+                // - Disable the job after repeated failures
+            }
+        });
+    }
+    /**
+     * Add or update a CRON job for a specific consumer
+     */
+    updateConsumerSchedule(consumer) {
+        // First, remove any existing schedules for this consumer
+        this.removeConsumerSchedule(consumer.name);
+        // Then, add new schedules if they have CRON triggers
+        if (this.hasCronTrigger(consumer)) {
+            this.scheduleConsumer(consumer);
+        }
+    }
+    /**
+     * Remove all scheduled jobs for a consumer
+     */
+    removeConsumerSchedule(consumerName) {
+        const jobsToRemove = Array.from(this.scheduledJobs.keys()).filter(key => key.startsWith(`${consumerName}_output_`));
+        jobsToRemove.forEach(jobKey => {
+            const task = this.scheduledJobs.get(jobKey);
+            if (task) {
+                task.stop();
+                task.destroy();
+                this.scheduledJobs.delete(jobKey);
+                console.log(`Removed CRON job: ${jobKey}`);
+            }
+        });
+    }
+    /**
+     * Get information about all scheduled jobs
+     */
+    getScheduledJobs() {
+        return Array.from(this.scheduledJobs.entries()).map(([jobKey, task]) => ({
+            jobKey,
+            isRunning: task.getStatus() === 'scheduled'
+        }));
+    }
+    /**
+     * Stop all scheduled jobs
+     */
+    stopAllJobs() {
+        console.log('Stopping all CRON jobs...');
+        this.scheduledJobs.forEach((task, jobKey) => {
+            task.stop();
+            task.destroy();
+            console.log(`Stopped CRON job: ${jobKey}`);
+        });
+        this.scheduledJobs.clear();
+        this.isInitialized = false;
+        console.log('All CRON jobs stopped');
+    }
+    /**
+     * Restart the scheduler (useful for configuration reloads)
+     */
+    restart() {
+        console.log('Restarting CRON scheduler...');
+        this.stopAllJobs();
+        this.initialize();
+    }
+    /**
+     * Get the scheduler status
+     */
+    getStatus() {
+        return {
+            initialized: this.isInitialized,
+            jobCount: this.scheduledJobs.size,
+            jobs: this.getScheduledJobs()
+        };
+    }
+}
+// Export a singleton instance
+exports.default = new CronScheduler();
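The scheduler leans on two node-cron entry points: `cron.validate()` to reject bad expressions and `cron.schedule()` to register the job. A minimal standalone sketch; the expression and callback are placeholders:

```js
const cron = require('node-cron');

const expression = '0 2 * * *'; // hypothetical: every day at 02:00
if (cron.validate(expression)) {
    const task = cron.schedule(expression, () => {
        // a consumer output would be executed here
        console.log('tick');
    });
    // Later, e.g. on shutdown or a configuration reload:
    task.stop();
}
```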
package/engines/scheduler/QueueManager.js
ADDED

@@ -0,0 +1,307 @@
+"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
+    });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const client_sqs_1 = require("@aws-sdk/client-sqs");
+const Environment_1 = __importDefault(require("../Environment"));
+const ConsumerEngine_1 = __importDefault(require("../consumer/ConsumerEngine"));
+const UserManager_1 = __importDefault(require("../UserManager"));
+const SecretManager_1 = __importDefault(require("../SecretManager"));
+class QueueManager {
+    constructor() {
+        this.queueMappings = new Map();
+        this.pollingIntervals = new Map();
+        this.isInitialized = false;
+        this.POLLING_INTERVAL_MS = 5000; // Poll every 5 seconds
+        this.MAX_MESSAGES = 10; // Maximum messages to receive in one poll
+        // Initialize SQS client with default configuration
+        // Will be reconfigured when we know the specific queue details
+        this.sqsClient = new client_sqs_1.SQSClient({});
+    }
+    /**
+     * Initialize the Queue Manager by scanning all consumers and setting up queue listeners for those with QUEUE triggers
+     */
+    initialize() {
+        if (this.isInitialized) {
+            console.log('Queue Manager already initialized');
+            return;
+        }
+        console.log('Initializing Queue Manager...');
+        try {
+            const consumers = Environment_1.default.getAllConsumers();
+            let queueTriggerCount = 0;
+            for (const consumer of consumers) {
+                if (this.hasQueueTrigger(consumer)) {
+                    this.setupQueueListeners(consumer);
+                    queueTriggerCount++;
+                }
+            }
+            this.isInitialized = true;
+            console.log(`Queue Manager initialized with ${queueTriggerCount} queue triggers`);
+        }
+        catch (error) {
+            console.error('Failed to initialize Queue Manager:', error);
+            throw error;
+        }
+    }
+    /**
+     * Check if a consumer has any QUEUE triggers configured
+     */
+    hasQueueTrigger(consumer) {
+        return consumer.outputs.some(output => { var _a; return ((_a = output.trigger) === null || _a === void 0 ? void 0 : _a.type) === 'QUEUE' && output.trigger.value; });
+    }
+    /**
+     * Setup queue listeners for a consumer with QUEUE triggers
+     */
+    setupQueueListeners(consumer) {
+        consumer.outputs.forEach((output, index) => {
+            var _a;
+            if (((_a = output.trigger) === null || _a === void 0 ? void 0 : _a.type) === 'QUEUE' && output.trigger.value) {
+                try {
+                    const queueConfig = this.parseQueueConfig(output.trigger.value, output.trigger.metadata);
+                    const mapping = {
+                        consumer,
+                        outputIndex: index,
+                        queueUrl: queueConfig.queueUrl,
+                        messageType: queueConfig.messageType
+                    };
+                    // Add to mappings
+                    if (!this.queueMappings.has(queueConfig.queueUrl)) {
+                        this.queueMappings.set(queueConfig.queueUrl, []);
+                    }
+                    this.queueMappings.get(queueConfig.queueUrl).push(mapping);
+                    // Start polling for this queue if not already started
+                    if (!this.pollingIntervals.has(queueConfig.queueUrl)) {
+                        this.startQueuePolling(queueConfig.queueUrl, queueConfig.region, queueConfig.credentials);
+                    }
+                    console.log(`Setup queue listener for consumer "${consumer.name}" output ${index} on queue: ${queueConfig.queueUrl}`);
+                }
+                catch (error) {
+                    console.error(`Failed to setup queue listener for consumer ${consumer.name}:`, error);
+                }
+            }
+        });
+    }
+    /**
+     * Parse queue configuration from trigger value and metadata
+     */
+    parseQueueConfig(triggerValue, metadata) {
+        // triggerValue should be the queue URL or queue name
+        let queueUrl = triggerValue;
+        // If it's not a full URL, construct it
+        if (!queueUrl.startsWith('https://')) {
+            const region = (metadata === null || metadata === void 0 ? void 0 : metadata.region) || process.env.AWS_DEFAULT_REGION || 'us-east-1';
+            const accountId = (metadata === null || metadata === void 0 ? void 0 : metadata.accountId) || process.env.AWS_ACCOUNT_ID;
+            if (!accountId) {
+                throw new Error('AWS Account ID is required for queue trigger. Set it in metadata.accountId or AWS_ACCOUNT_ID environment variable');
+            }
+            queueUrl = `https://sqs.${region}.amazonaws.com/${accountId}/${triggerValue}`;
+        }
+        // Extract region from URL if not provided in metadata
+        const urlParts = queueUrl.match(/https:\/\/sqs\.([^.]+)\.amazonaws\.com\//);
+        const region = (metadata === null || metadata === void 0 ? void 0 : metadata.region) || (urlParts === null || urlParts === void 0 ? void 0 : urlParts[1]) || 'us-east-1';
+        // Get credentials from metadata or environment
+        let credentials;
+        const accessKeyId = (metadata === null || metadata === void 0 ? void 0 : metadata.accessKeyId) || process.env.AWS_ACCESS_KEY_ID;
+        const secretAccessKey = (metadata === null || metadata === void 0 ? void 0 : metadata.secretAccessKey) || process.env.AWS_SECRET_ACCESS_KEY;
+        const sessionToken = (metadata === null || metadata === void 0 ? void 0 : metadata.sessionToken) || process.env.AWS_SESSION_TOKEN;
+        if (accessKeyId && secretAccessKey) {
+            credentials = {
+                accessKeyId: SecretManager_1.default.replaceSecret(accessKeyId),
+                secretAccessKey: SecretManager_1.default.replaceSecret(secretAccessKey),
+                sessionToken: sessionToken ? SecretManager_1.default.replaceSecret(sessionToken) : undefined
+            };
+        }
+        return {
+            queueUrl,
+            messageType: metadata === null || metadata === void 0 ? void 0 : metadata.messageType,
+            region,
+            credentials
+        };
+    }
+    /**
+     * Start polling a specific queue for messages
+     */
+    startQueuePolling(queueUrl, region, credentials) {
+        // Create SQS client for this specific queue
+        const sqsClient = new client_sqs_1.SQSClient({
+            region,
+            credentials
+        });
+        const pollQueue = () => __awaiter(this, void 0, void 0, function* () {
+            try {
+                const command = new client_sqs_1.ReceiveMessageCommand({
+                    QueueUrl: queueUrl,
+                    MaxNumberOfMessages: this.MAX_MESSAGES,
+                    WaitTimeSeconds: 20, // Long polling
+                    VisibilityTimeout: 300 // 5 minutes to process the message
+                });
+                const response = yield sqsClient.send(command);
+                if (response.Messages && response.Messages.length > 0) {
+                    console.log(`Received ${response.Messages.length} messages from queue: ${queueUrl}`);
+                    for (const message of response.Messages) {
+                        yield this.processMessage(queueUrl, message, sqsClient);
+                    }
+                }
+            }
+            catch (error) {
+                console.error(`Error polling queue ${queueUrl}:`, error);
+            }
+        });
+        // Start continuous polling
+        const interval = setInterval(pollQueue, this.POLLING_INTERVAL_MS);
+        this.pollingIntervals.set(queueUrl, interval);
+        // Start immediately
+        pollQueue();
+        console.log(`Started polling queue: ${queueUrl}`);
+    }
+    /**
+     * Process a message from the queue
+     */
+    processMessage(queueUrl, message, sqsClient) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const mappings = this.queueMappings.get(queueUrl);
+                if (!mappings || mappings.length === 0) {
+                    console.log(`No consumer mappings found for queue: ${queueUrl}`);
+                    return;
+                }
+                // Parse message body
+                let messageData;
+                try {
+                    messageData = JSON.parse(message.Body || '{}');
+                }
+                catch (_a) {
+                    console.warn(`Failed to parse message body as JSON for queue ${queueUrl}. Using raw body.`);
+                    messageData = { body: message.Body };
+                }
+                let messageProcessedByAnyConsumer = false;
+                // Process message for each mapped consumer that matches the message criteria
+                for (const mapping of mappings) {
+                    try {
+                        // Check if message type matches (if specified)
+                        if (mapping.messageType) {
+                            const messageType = messageData.type || messageData.messageType || messageData.eventType;
+                            if (messageType !== mapping.messageType) {
+                                console.log(`Message type ${messageType} does not match expected ${mapping.messageType} for consumer ${mapping.consumer.name} - skipping`);
+                                continue;
+                            }
+                        }
+                        console.log(`Processing queue message for consumer "${mapping.consumer.name}" output ${mapping.outputIndex}`);
+                        const user = UserManager_1.default.getUser();
+                        // Execute the consumer with default options
+                        const result = yield ConsumerEngine_1.default.execute(mapping.consumer, {}, user);
+                        console.log(`Queue trigger completed successfully for consumer "${mapping.consumer.name}" output ${mapping.outputIndex}`);
+                        // Log execution statistics
+                        if (result && result._stats) {
+                            console.log(`Queue trigger stats: ${result._stats.elapsedMS}ms, size: ${result._stats.size}, cycles: ${result._stats.cycles}`);
+                        }
+                        messageProcessedByAnyConsumer = true;
+                    }
+                    catch (error) {
+                        console.error(`Queue trigger failed for consumer "${mapping.consumer.name}" output ${mapping.outputIndex}:`, error);
+                        // Continue processing for other consumers even if one fails
+                    }
+                }
+                // Only delete message from queue if it was processed by at least one consumer
+                // This ensures messages intended for other consumers or systems remain in the queue
+                if (messageProcessedByAnyConsumer && message.ReceiptHandle) {
+                    yield sqsClient.send(new client_sqs_1.DeleteMessageCommand({
+                        QueueUrl: queueUrl,
+                        ReceiptHandle: message.ReceiptHandle
+                    }));
+                    console.log(`Deleted processed message ${message.MessageId} from queue`);
+                }
+                else if (!messageProcessedByAnyConsumer) {
+                    console.log(`Message ${message.MessageId} was not processed by any consumer - leaving in queue for other consumers or systems`);
+                }
+            }
+            catch (error) {
+                console.error(`Error processing message from queue ${queueUrl}:`, error);
+                // Message will remain in queue and be retried or go to DLQ based on queue configuration
+            }
+        });
+    }
+    /**
+     * Add or update queue listeners for a specific consumer
+     */
+    updateConsumerSchedule(consumer) {
+        // First, remove any existing listeners for this consumer
+        this.removeConsumerSchedule(consumer.name);
+        // Then, add new listeners if they have QUEUE triggers
+        if (this.hasQueueTrigger(consumer)) {
+            this.setupQueueListeners(consumer);
+        }
+    }
+    /**
+     * Remove all queue listeners for a consumer
+     */
+    removeConsumerSchedule(consumerName) {
+        // Remove mappings for this consumer
+        for (const [queueUrl, mappings] of this.queueMappings.entries()) {
+            const updatedMappings = mappings.filter(mapping => mapping.consumer.name !== consumerName);
+            if (updatedMappings.length === 0) {
+                // No more consumers listening to this queue, stop polling
+                const interval = this.pollingIntervals.get(queueUrl);
+                if (interval) {
+                    clearInterval(interval);
+                    this.pollingIntervals.delete(queueUrl);
+                    console.log(`Stopped polling queue: ${queueUrl}`);
+                }
+                this.queueMappings.delete(queueUrl);
+            }
+            else {
+                this.queueMappings.set(queueUrl, updatedMappings);
+            }
+        }
+    }
+    /**
+     * Stop all queue polling
+     */
+    stopAllQueues() {
+        console.log('Stopping all queue polling...');
+        this.pollingIntervals.forEach((interval, queueUrl) => {
+            clearInterval(interval);
+            console.log(`Stopped polling queue: ${queueUrl}`);
+        });
+        this.pollingIntervals.clear();
+        this.queueMappings.clear();
+        this.isInitialized = false;
+        console.log('All queue polling stopped');
+    }
+    /**
+     * Restart the queue manager (useful for configuration reloads)
+     */
+    restart() {
+        console.log('Restarting Queue Manager...');
+        this.stopAllQueues();
+        this.initialize();
+    }
+    /**
+     * Get the queue manager status
+     */
+    getStatus() {
+        const queues = Array.from(this.queueMappings.entries()).map(([queueUrl, mappings]) => ({
+            queueUrl,
+            consumerCount: mappings.length
+        }));
+        return {
+            initialized: this.isInitialized,
+            queueCount: this.queueMappings.size,
+            consumerCount: Array.from(this.queueMappings.values()).flat().length,
+            queues
+        };
+    }
+}
+// Export a singleton instance
+exports.default = new QueueManager();
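One timing note on the poller: each receive long-polls for up to 20 seconds (`WaitTimeSeconds: 20`), but `setInterval` fires a new poll every 5 seconds regardless, so receive calls against the same queue can overlap and accumulate. A sequential loop avoids that; a sketch under assumed names, not the package's implementation:

```js
const { SQSClient, ReceiveMessageCommand } = require('@aws-sdk/client-sqs');

// Each iteration waits for the previous ReceiveMessage to resolve, so at
// most one poll is in flight per queue.
async function pollForever(client, queueUrl, handleMessage) {
    for (;;) {
        const { Messages = [] } = await client.send(new ReceiveMessageCommand({
            QueueUrl: queueUrl,
            MaxNumberOfMessages: 10,
            WaitTimeSeconds: 20 // long poll; the loop iterates as each call returns
        }));
        for (const message of Messages)
            await handleMessage(message);
    }
}
```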
|
@@ -106,5 +106,46 @@ class UsageDataManager {
|
|
|
106
106
|
};
|
|
107
107
|
});
|
|
108
108
|
}
|
|
109
|
+
getFilteredUsageDetails(from, to, filters) {
|
|
110
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
111
|
+
const now = DSTE_1.default.now();
|
|
112
|
+
const fromDate = from || new Date(now.getTime() - 30 * 24 * 60 * 60 * 1000);
|
|
113
|
+
const toDate = to || now;
|
|
114
|
+
const collection = 'usage';
|
|
115
|
+
// Build match criteria
|
|
116
|
+
const matchCriteria = {
|
|
117
|
+
startedAt: { $gte: fromDate, $lte: toDate }
|
|
118
|
+
};
|
|
119
|
+
if (filters === null || filters === void 0 ? void 0 : filters.consumer) {
|
|
120
|
+
matchCriteria.consumer = filters.consumer;
|
|
121
|
+
}
|
|
122
|
+
if (filters === null || filters === void 0 ? void 0 : filters.user) {
|
|
123
|
+
matchCriteria['executedBy.name'] = filters.user;
|
|
124
|
+
}
|
|
125
|
+
if (filters === null || filters === void 0 ? void 0 : filters.status) {
|
|
126
|
+
matchCriteria.status = filters.status;
|
|
127
|
+
}
|
|
128
|
+
// Query with filters and sorting
|
|
129
|
+
const usageDetails = yield DatabaseEngine_1.default.query(collection, matchCriteria, {
|
|
130
|
+
sort: { startedAt: -1 },
|
|
131
|
+
limit: 1000 // Reasonable limit to prevent large result sets
|
|
132
|
+
});
|
|
133
|
+
return usageDetails;
|
|
134
|
+
});
|
|
135
|
+
}
|
|
136
|
+
getUsageById(id) {
|
|
137
|
+
return __awaiter(this, void 0, void 0, function* () {
|
|
138
|
+
const collection = 'usage';
|
|
139
|
+
try {
|
|
140
|
+
const usageDetail = yield DatabaseEngine_1.default.get(collection, id);
|
|
141
|
+
return usageDetail;
|
|
142
|
+
}
|
|
143
|
+
catch (error) {
|
|
144
|
+
// If document not found or invalid ID, return null
|
|
145
|
+
console.error('Error fetching usage by ID:', error);
|
|
146
|
+
return null;
|
|
147
|
+
}
|
|
148
|
+
});
|
|
149
|
+
}
|
|
109
150
|
}
|
|
110
151
|
exports.default = new UsageDataManager();
|
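A hypothetical call into the new filtered query; the filter keys mirror the `matchCriteria` built above (`user` is matched against `executedBy.name`), and `DatabaseEngine.query` is assumed to proxy a MongoDB find with `sort`/`limit` options:

```js
// Placeholder values throughout -- illustrative usage only.
async function lastMonthFailures(usageDataManager) {
    return usageDataManager.getFilteredUsageDetails(
        new Date(Date.now() - 30 * 24 * 60 * 60 * 1000), // from: 30 days ago
        new Date(),                                      // to: now
        { consumer: 'daily-orders', status: 'failed' }
    );
}
```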
package/package.json
CHANGED

@@ -1,6 +1,6 @@
 {
   "name": "@forzalabs/remora",
-  "version": "0.1.6-nasco.3",
+  "version": "0.1.7-nasco.3",
   "description": "A powerful CLI tool for seamless data translation.",
   "main": "index.js",
   "private": false,

@@ -35,6 +35,7 @@
   "dependencies": {
     "@aws-sdk/client-redshift-data": "^3.699.0",
     "@aws-sdk/client-s3": "^3.701.0",
+    "@aws-sdk/client-sqs": "^3.886.0",
     "adm-zip": "^0.5.16",
     "ajv": "^8.17.1",
     "ajv-formats": "^3.0.1",

@@ -54,6 +55,7 @@
     "knex": "^2.4.2",
     "mongodb": "^6.15.0",
     "next": "^13.4.1",
+    "node-cron": "^4.2.1",
     "ora": "^5.4.1",
     "react": "^18.2.0",
     "react-dom": "^18.2.0",