@forzalabs/remora 0.2.5 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. package/Constants.js +10 -2
  2. package/actions/debug.js +1 -0
  3. package/actions/deploy.js +1 -0
  4. package/actions/run.js +17 -13
  5. package/actions/sample.js +1 -1
  6. package/core/Algo.js +8 -4
  7. package/definitions/ExecutorDefinitions.js +2 -0
  8. package/definitions/json_schemas/consumer-schema.json +1 -1
  9. package/definitions/json_schemas/producer-schema.json +1 -1
  10. package/definitions/temp.js +2 -0
  11. package/drivers/DeltaShareDriver.js +4 -0
  12. package/drivers/DriverFactory.js +10 -10
  13. package/drivers/DriverHelper.js +33 -10
  14. package/drivers/HttpApiDriver.js +4 -0
  15. package/drivers/LocalDriver.js +72 -5
  16. package/drivers/RedshiftDriver.js +4 -0
  17. package/drivers/S3Driver.js +36 -52
  18. package/drivers/files/LocalDestinationDriver.js +200 -0
  19. package/drivers/files/LocalSourceDriver.js +394 -0
  20. package/drivers/s3/S3DestinationDriver.js +159 -0
  21. package/drivers/s3/S3SourceDriver.js +455 -0
  22. package/engines/ai/LLM.js +0 -11
  23. package/engines/consumer/ConsumerEngine.js +0 -77
  24. package/engines/consumer/ConsumerManager.js +61 -36
  25. package/engines/consumer/ConsumerOnFinishManager.js +14 -0
  26. package/engines/consumer/PostProcessor.js +1 -7
  27. package/engines/dataset/Dataset.js +0 -61
  28. package/engines/dataset/DatasetManager.js +16 -76
  29. package/engines/dataset/DatasetRecord.js +4 -3
  30. package/engines/deployment/DeploymentPlanner.js +0 -7
  31. package/engines/execution/ExecutionPlanner.js +2 -2
  32. package/engines/execution/RequestExecutor.js +4 -45
  33. package/engines/file/FileExporter.js +7 -32
  34. package/engines/parsing/CSVParser.js +27 -26
  35. package/engines/parsing/LineParser.js +52 -0
  36. package/engines/parsing/XMLParser.js +1 -1
  37. package/engines/producer/ProducerEngine.js +0 -45
  38. package/engines/scheduler/CronScheduler.js +12 -4
  39. package/engines/scheduler/QueueManager.js +11 -4
  40. package/engines/sql/SQLCompiler.js +4 -4
  41. package/engines/transform/JoinEngine.js +3 -3
  42. package/engines/transform/TransformationEngine.js +3 -86
  43. package/engines/usage/UsageManager.js +8 -6
  44. package/engines/validation/Validator.js +12 -18
  45. package/executors/ConsumerExecutor.js +152 -0
  46. package/executors/Executor.js +168 -0
  47. package/executors/ExecutorOrchestrator.js +315 -0
  48. package/executors/ExecutorPerformance.js +17 -0
  49. package/executors/ExecutorProgress.js +52 -0
  50. package/executors/OutputExecutor.js +118 -0
  51. package/executors/ProducerExecutor.js +108 -0
  52. package/package.json +3 -3
  53. package/workers/ExecutorWorker.js +48 -0
@@ -0,0 +1,159 @@
1
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const client_s3_1 = require("@aws-sdk/client-s3");
const Affirm_1 = __importDefault(require("../../core/Affirm"));
const SecretManager_1 = __importDefault(require("../../engines/SecretManager"));
const FileExporter_1 = __importDefault(require("../../engines/file/FileExporter"));
/**
 * Destination driver that writes consumer output to an S3 bucket.
 *
 * `init()` must be called before any other method: it resolves credentials
 * (via SecretManager secret replacement) and creates the S3 client.
 */
class S3DestinationDriver {
    constructor() {
        /**
         * Initialize the S3 client from the source's authentication block
         * (`bucket`, `region`, `accessKey`, `secretKey`, optional `sessionToken`).
         * Returns `this` for chaining.
         */
        this.init = (source) => __awaiter(this, void 0, void 0, function* () {
            this._bucketName = source.authentication['bucket'];
            const sessionToken = SecretManager_1.default.replaceSecret(source.authentication['sessionToken']);
            const config = {
                region: source.authentication['region'],
                credentials: {
                    accessKeyId: SecretManager_1.default.replaceSecret(source.authentication['accessKey']),
                    secretAccessKey: SecretManager_1.default.replaceSecret(source.authentication['secretKey']),
                    sessionToken: sessionToken ? sessionToken : undefined
                }
            };
            this._client = new client_s3_1.S3Client(config);
            // TODO: is there a way to test if the connection was successful? like a query or scan that I can do?
            return this;
        });
        /**
         * Upload a complete in-memory file (`options.content`) to the bucket
         * under key `options.name` with a single PutObject call.
         */
        this.uploadFile = (options) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(options, `Invalid upload options`);
            const { content, name } = options;
            const commandParams = {
                Bucket: this._bucketName,
                Key: name,
                Body: content
            };
            const command = new client_s3_1.PutObjectCommand(commandParams);
            const res = yield this._client.send(command);
            (0, Affirm_1.default)(res.$metadata.httpStatusCode === 200, `Failed to upload the file "${name}" to the bucket "${this._bucketName}": status code ${res.$metadata.httpStatusCode}`);
            return { res: true, key: name, bucket: this._bucketName };
        });
        /**
         * Stream a dataset to S3 as a multipart upload. Batches are serialized via
         * FileExporter.prepareBatch and accumulated until they reach the S3 minimum
         * part size (5 MB); the final part may be smaller. On any failure the
         * multipart upload is aborted (best-effort) so incomplete parts are not
         * left behind in the bucket, then the original error is rethrown.
         */
        this.uploadStream = (options) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(options, `Invalid upload options`);
            const { dataset, name, recordProjection } = options;
            (0, Affirm_1.default)(dataset, 'No streaming dataset');
            (0, Affirm_1.default)(name, 'No filename provided for upload stream');
            (0, Affirm_1.default)(recordProjection, 'No recordProjection for upload stream');
            // Hoisted out of the try-block so the catch handler can abort the upload.
            let uploadId;
            try {
                // Create the multipart upload
                const createMultipartUploadRes = yield this._client.send(new client_s3_1.CreateMultipartUploadCommand({
                    Bucket: this._bucketName,
                    Key: name
                }));
                uploadId = createMultipartUploadRes.UploadId;
                (0, Affirm_1.default)(uploadId, 'Failed to initiate multipart upload');
                const uploadedParts = [];
                let partNumber = 1;
                const MIN_PART_SIZE = 5 * 1024 * 1024; // 5MB — S3 minimum for every part except the last
                let accumulatedBuffer = Buffer.alloc(0);
                // Upload one part and record its ETag for the completion call.
                const uploadPart = (buffer) => __awaiter(this, void 0, void 0, function* () {
                    const uploadPartRes = yield this._client.send(new client_s3_1.UploadPartCommand({
                        Bucket: this._bucketName,
                        Key: name,
                        UploadId: uploadId,
                        PartNumber: partNumber,
                        Body: buffer
                    }));
                    uploadedParts.push({
                        PartNumber: partNumber,
                        ETag: uploadPartRes.ETag
                    });
                    partNumber++;
                });
                yield dataset.streamBatches((batch) => __awaiter(this, void 0, void 0, function* () {
                    const chunks = FileExporter_1.default.prepareBatch(batch, options);
                    for (const chunk of chunks) {
                        const chunkBuffer = Buffer.from(chunk);
                        accumulatedBuffer = Buffer.concat([accumulatedBuffer, chunkBuffer]);
                        // If accumulated buffer is at least 5MB, upload it as a part
                        if (accumulatedBuffer.length >= MIN_PART_SIZE) {
                            yield uploadPart(accumulatedBuffer);
                            accumulatedBuffer = Buffer.alloc(0);
                        }
                    }
                }));
                // Upload any remaining data as the final part (even if smaller than 5MB).
                // Also runs when no part has been uploaded at all (empty dataset):
                // CompleteMultipartUpload rejects an empty Parts list, so we always
                // send at least one (possibly zero-byte) part.
                if (accumulatedBuffer.length > 0 || uploadedParts.length === 0) {
                    yield uploadPart(accumulatedBuffer);
                }
                // Complete the multipart upload
                const completeRes = yield this._client.send(new client_s3_1.CompleteMultipartUploadCommand({
                    Bucket: this._bucketName,
                    Key: options.name,
                    UploadId: uploadId,
                    MultipartUpload: {
                        Parts: uploadedParts
                    }
                }));
                (0, Affirm_1.default)(completeRes.$metadata.httpStatusCode === 200, `Failed to complete multipart upload for "${options.name}": status code ${completeRes.$metadata.httpStatusCode}`);
                return { res: true, key: options.name, bucket: this._bucketName };
            }
            catch (error) {
                // BUG FIX: the previous code checked `error.UploadId`, but AWS SDK
                // errors do not carry the upload id, so failed uploads were never
                // aborted and S3 kept (billable) incomplete parts. Abort using the
                // uploadId captured in this scope instead.
                if (uploadId) {
                    try {
                        yield this._client.send(new client_s3_1.AbortMultipartUploadCommand({
                            Bucket: this._bucketName,
                            Key: options.name,
                            UploadId: uploadId
                        }));
                    }
                    catch (abortError) {
                        // Best-effort cleanup: never mask the original failure.
                    }
                }
                throw error;
            }
        });
        /**
         * Server-side copy of an object from another bucket into this driver's
         * bucket (no data is downloaded locally).
         */
        this.copyFromS3 = (sourceBucket, sourceFileKey, destinationFileKey) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
            (0, Affirm_1.default)(sourceBucket, 'Invalid source bucket');
            (0, Affirm_1.default)(sourceFileKey, 'Invalid source file key');
            (0, Affirm_1.default)(destinationFileKey, 'Invalid destination file key');
            yield this._client.send(new client_s3_1.CopyObjectCommand({
                CopySource: `${sourceBucket}/${sourceFileKey}`,
                Bucket: this._bucketName,
                Key: destinationFileKey
            }));
        });
        /**
         * Write `content` to the bucket under `fileKey` with a single PutObject.
         * NOTE(review): the `content` affirm rejects empty strings/buffers too —
         * presumably intentional; confirm if zero-byte files should be allowed.
         */
        this.saveFile = (fileKey, content) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
            (0, Affirm_1.default)(fileKey, 'Invalid file key');
            (0, Affirm_1.default)(content, 'Invalid content');
            yield this._client.send(new client_s3_1.PutObjectCommand({
                Bucket: this._bucketName,
                Key: fileKey,
                Body: content
            }));
        });
        // The remaining interface methods are not supported by this driver yet.
        this.ready = (destinationPath) => {
            void destinationPath;
            throw new Error('Not implemented yet');
        };
        this.move = (fromPath, toName) => {
            void fromPath;
            void toName;
            throw new Error('Not implemented yet');
        };
        this.transformAndMove = (fromPath, transform, toName) => {
            void fromPath;
            void toName;
            void transform;
            throw new Error('Not implemented yet');
        };
    }
}
exports.default = S3DestinationDriver;
@@ -0,0 +1,455 @@
1
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
var __asyncValues = (this && this.__asyncValues) || function (o) {
    if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
    var m = o[Symbol.asyncIterator], i;
    return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
};
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const client_s3_1 = require("@aws-sdk/client-s3");
const Affirm_1 = __importDefault(require("../../core/Affirm"));
const SecretManager_1 = __importDefault(require("../../engines/SecretManager"));
const readline_1 = __importDefault(require("readline"));
const path_1 = __importDefault(require("path"));
const Algo_1 = __importDefault(require("../../core/Algo"));
const xlsx_1 = __importDefault(require("xlsx"));
const XMLParser_1 = __importDefault(require("../../engines/parsing/XMLParser"));
const Helper_1 = __importDefault(require("../../helper/Helper"));
const ParseHelper_1 = __importDefault(require("../../engines/parsing/ParseHelper"));
const DriverHelper_1 = __importDefault(require("../DriverHelper"));
const Logger_1 = __importDefault(require("../../helper/Logger"));
const Constants_1 = __importDefault(require("../../Constants"));
const XLSParser_1 = __importDefault(require("../../engines/parsing/XLSParser"));
/**
 * Source driver that reads producer files from an S3 bucket.
 *
 * File keys may contain a SQL-LIKE '%' wildcard; such patterns are expanded
 * via `listFiles()` and each matched object is read individually.
 * `init()` must be called before any other method: it resolves credentials
 * (via SecretManager secret replacement) and creates the S3 client.
 */
class S3SourceDriver {
    constructor() {
        /**
         * Initialize the S3 client from the source's authentication block
         * (`bucket`, `region`, `accessKey`, `secretKey`, optional `sessionToken`).
         * Returns `this` for chaining.
         */
        this.init = (source) => __awaiter(this, void 0, void 0, function* () {
            this._bucketName = source.authentication['bucket'];
            const sessionToken = SecretManager_1.default.replaceSecret(source.authentication['sessionToken']);
            const config = {
                region: source.authentication['region'],
                credentials: {
                    accessKeyId: SecretManager_1.default.replaceSecret(source.authentication['accessKey']),
                    secretAccessKey: SecretManager_1.default.replaceSecret(source.authentication['secretKey']),
                    sessionToken: sessionToken ? sessionToken : undefined
                }
            };
            this._client = new client_s3_1.S3Client(config);
            // TODO: is there a way to test if the connection was successful? like a query or scan that I can do?
            return this;
        });
        /**
         * Read every line of the file(s) addressed by `request.fileKey`.
         * A '%' pattern fans out to all matches (capped at 50 files) read in
         * parallel; results are flattened in listFiles() order.
         */
        this.readAll = (request) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "connect()" first');
            (0, Affirm_1.default)(request, `Invalid download request`);
            (0, Affirm_1.default)(request.fileKey, `Invalid file key for download request`);
            const { fileKey } = request;
            if (fileKey.includes('%')) {
                const allFileKeys = yield this.listFiles(fileKey);
                (0, Affirm_1.default)(allFileKeys.length < 50, `Pattern ${fileKey} of producer requested to S3 matches more than 50 files (${allFileKeys.length}), this is more than the S3 allowed limit. Please refine your pattern, remove some files or use a separate bucket.`);
                const promises = allFileKeys.map((x, i) => this._get(Object.assign(Object.assign({}, request), { fileKey: x }), i));
                const results = yield Promise.all(promises);
                return results.flat();
            }
            else {
                return yield this._get(request);
            }
        });
        /**
         * Read only the line range given in `request.options` (lineFrom/lineTo,
         * handled inside `_get`). Same '%' fan-out as readAll().
         * NOTE(review): unlike readAll(), this path does not enforce the
         * 50-file pattern limit — confirm whether that is intentional.
         */
        this.readLinesInRange = (request) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "connect()" first');
            (0, Affirm_1.default)(request, 'Invalid read request');
            (0, Affirm_1.default)(request.options, 'Invalid read request options');
            const { fileKey } = request;
            if (fileKey.includes('%')) {
                const allFileKeys = yield this.listFiles(fileKey);
                const promises = allFileKeys.map((x, i) => this._get(Object.assign(Object.assign({}, request), { fileKey: x }), i));
                const results = yield Promise.all(promises);
                return results.flat();
            }
            else {
                return yield this._get(request);
            }
        });
        /**
         * Download the dataset's file(s) from S3 into a single unified local
         * file (via DriverHelper.appendToUnifiedFile), set the dataset's header
         * line and total line count, and return the dataset.
         * For '%' patterns the matched files are appended sequentially after the
         * first file's header; XLS/XLSX bodies are converted to a line stream
         * through XLSParser first.
         */
        this.download = (dataset) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "connect()" first');
            (0, Affirm_1.default)(dataset, 'Invalid dataset');
            const file = dataset.getFile();
            (0, Affirm_1.default)(file, 'Invalid dataset file');
            (0, Affirm_1.default)(file.fileKey, 'Invalid file key');
            (0, Affirm_1.default)(file.fileType, `Invalid file type`);
            const includeSourceFilename = file.includeSourceFilename === true;
            // Fetch one S3 object and append its contents to the unified local
            // dataset file; returns the number of lines written (per DriverHelper).
            const downloadLocally = (fileUrl_1, headerLine_1, ...args_1) => __awaiter(this, [fileUrl_1, headerLine_1, ...args_1], void 0, function* (fileUrl, headerLine, appendMode = false, sourceFilename) {
                // Download and validate header in a single stream pass
                const command = new client_s3_1.GetObjectCommand({
                    Bucket: this._bucketName,
                    Key: fileUrl
                });
                const response = yield this._client.send(command);
                (0, Affirm_1.default)(response.Body, 'Failed to fetch object from S3');
                let stream;
                switch (file.fileType) {
                    case 'XLS':
                    case 'XLSX':
                        // Spreadsheets are converted to a row stream before appending.
                        stream = yield XLSParser_1.default.parseXLSStream(response.Body, file.sheetName);
                        break;
                    default:
                        stream = response.Body;
                        break;
                }
                return DriverHelper_1.default.appendToUnifiedFile({
                    stream,
                    fileKey: fileUrl,
                    destinationPath: dataset.getPath(),
                    append: appendMode,
                    headerLine,
                    fileType: file.fileType,
                    hasHeaderRow: file.hasHeaderRow,
                    delimiter: dataset.getDelimiter(),
                    sourceFilename
                });
            });
            const { fileKey } = file;
            // Extract the first line (header) from a body stream, record it on the
            // dataset, and return it. Consumes the stream it is given.
            const setFirstLineFromStream = (stream) => __awaiter(this, void 0, void 0, function* () {
                var _a, e_1, _b, _c;
                const rl = readline_1.default.createInterface({ input: stream, crlfDelay: Infinity });
                let firstLine = '';
                switch (file.fileType) {
                    case 'XLSX':
                    case 'XLS':
                        firstLine = yield XLSParser_1.default.getHeaderXlsFromStream(stream, file.sheetName);
                        break;
                    case 'CSV':
                    case 'JSON':
                    case 'JSONL':
                    case 'TXT':
                        // Downleveled `for await` over readline: take the first line only.
                        try {
                            for (var _d = true, rl_1 = __asyncValues(rl), rl_1_1; rl_1_1 = yield rl_1.next(), _a = rl_1_1.done, !_a; _d = true) {
                                _c = rl_1_1.value;
                                _d = false;
                                const line = _c;
                                firstLine = line;
                                break;
                            }
                        }
                        catch (e_1_1) { e_1 = { error: e_1_1 }; }
                        finally {
                            try {
                                if (!_d && !_a && (_b = rl_1.return)) yield _b.call(rl_1);
                            }
                            finally { if (e_1) throw e_1.error; }
                        }
                        rl.close();
                        break;
                }
                // If including source filename, append a placeholder column name to the header
                if (file.includeSourceFilename) {
                    firstLine = firstLine + dataset.getDelimiter() + Constants_1.default.SOURCE_FILENAME_COLUMN;
                }
                dataset.setFirstLine(firstLine);
                return firstLine;
            });
            if (fileKey.includes('%')) {
                const allFileKeys = yield this.listFiles(fileKey);
                Logger_1.default.log(`Matched ${allFileKeys.length} files, copying locally and creating unified dataset.`);
                Affirm_1.default.hasItems(allFileKeys, `The file key "${fileKey}" doesn't have any matches in bucket "${this._bucketName}".`);
                // Get header line from the first file
                const firstFileCommand = new client_s3_1.GetObjectCommand({
                    Bucket: this._bucketName,
                    Key: allFileKeys[0]
                });
                const firstFileResponse = yield this._client.send(firstFileCommand);
                (0, Affirm_1.default)(firstFileResponse.Body, 'Failed to fetch first file from S3');
                const firstFileStream = firstFileResponse.Body;
                const headerLine = yield setFirstLineFromStream(firstFileStream);
                let totalLineCount = 0;
                // Download files sequentially to avoid file conflicts
                for (let i = 0; i < allFileKeys.length; i++) {
                    const currentFileKey = allFileKeys[i];
                    // Pass the filename (just the basename) if includeSourceFilename is enabled
                    const sourceFilename = includeSourceFilename ? path_1.default.basename(currentFileKey) : undefined;
                    totalLineCount += yield downloadLocally(currentFileKey, headerLine, i > 0, sourceFilename); // Append mode for subsequent files
                }
                dataset.setCount(totalLineCount);
                return dataset;
            }
            else {
                // Get header line from the single file
                const firstFileCommand = new client_s3_1.GetObjectCommand({
                    Bucket: this._bucketName,
                    Key: fileKey
                });
                const firstFileResponse = yield this._client.send(firstFileCommand);
                (0, Affirm_1.default)(firstFileResponse.Body, 'Failed to fetch first file from S3');
                const firstFileStream = firstFileResponse.Body;
                const headerLine = yield setFirstLineFromStream(firstFileStream);
                // Pass the filename if includeSourceFilename is enabled
                const sourceFilename = includeSourceFilename ? path_1.default.basename(fileKey) : undefined;
                const totalLineCount = yield downloadLocally(fileKey, headerLine, false, sourceFilename);
                dataset.setCount(totalLineCount);
                return dataset;
            }
        });
        /**
         * Return true if the producer's fileKey exists in the bucket.
         * A '%' pattern is satisfied by at least one match; a plain key is
         * probed with HeadObject, mapping 404/NotFound to false and rethrowing
         * any other error.
         */
        this.exist = (producer) => __awaiter(this, void 0, void 0, function* () {
            var _a;
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "connect()" first');
            (0, Affirm_1.default)(producer, 'Invalid read producer');
            const bucket = this._bucketName;
            const fileKey = producer.settings.fileKey;
            (0, Affirm_1.default)(fileKey, `Invalid file key for download request`);
            if (fileKey.includes('%')) {
                const allFileKeys = yield this.listFiles(fileKey);
                return allFileKeys.length > 0;
            }
            else {
                try {
                    yield this._client.send(new client_s3_1.HeadObjectCommand({ Bucket: bucket, Key: fileKey }));
                    return true;
                }
                catch (error) {
                    if (((_a = error.$metadata) === null || _a === void 0 ? void 0 : _a.httpStatusCode) === 404 || error.name === 'NotFound')
                        return false;
                    throw error;
                }
            }
        });
        /**
         * Collect non-empty lines from a text stream. When both lineFrom and
         * lineTo are provided, returns lines with index >= lineFrom and
         * < lineTo (lineTo is EXCLUSIVE here) and stops reading early at
         * lineTo; otherwise returns all non-empty lines.
         */
        this._readLines = (stream, lineFrom, lineTo) => __awaiter(this, void 0, void 0, function* () {
            var _a, e_2, _b, _c;
            const reader = readline_1.default.createInterface({ input: stream, crlfDelay: Infinity });
            const lines = [];
            let lineCounter = 0;
            // Downleveled `for await` over readline with early break at lineTo.
            try {
                for (var _d = true, reader_1 = __asyncValues(reader), reader_1_1; reader_1_1 = yield reader_1.next(), _a = reader_1_1.done, !_a; _d = true) {
                    _c = reader_1_1.value;
                    _d = false;
                    const line = _c;
                    if (Algo_1.default.hasVal(lineFrom) && Algo_1.default.hasVal(lineTo)) {
                        if (lineCounter >= lineFrom && lineCounter < lineTo) {
                            if (line && line.length > 0)
                                lines.push(line);
                        }
                        lineCounter++;
                        if (lineCounter >= lineTo)
                            break;
                    }
                    else {
                        if (line && line.length > 0)
                            lines.push(line);
                    }
                }
            }
            catch (e_2_1) { e_2 = { error: e_2_1 }; }
            finally {
                try {
                    if (!_d && !_a && (_b = reader_1.return)) yield _b.call(reader_1);
                }
                finally { if (e_2) throw e_2.error; }
            }
            reader.close();
            return lines;
        });
        /**
         * Buffer an entire XLS/XLSX stream, convert the named sheet to CSV and
         * return its lines.
         * NOTE(review): the range here is slice(lineFrom, lineTo + 1), i.e.
         * lineTo is INCLUSIVE — inconsistent with _readLines (exclusive) and
         * empty lines are not filtered; confirm which semantics callers expect.
         */
        this._readExcelLines = (stream, sheetName, lineFrom, lineTo) => __awaiter(this, void 0, void 0, function* () {
            var _a, stream_1, stream_1_1;
            var _b, e_3, _c, _d;
            (0, Affirm_1.default)(sheetName, `Invalid sheetname`);
            const chunks = [];
            // Downleveled `for await`: drain the stream into memory.
            try {
                for (_a = true, stream_1 = __asyncValues(stream); stream_1_1 = yield stream_1.next(), _b = stream_1_1.done, !_b; _a = true) {
                    _d = stream_1_1.value;
                    _a = false;
                    const chunk = _d;
                    chunks.push(chunk);
                }
            }
            catch (e_3_1) { e_3 = { error: e_3_1 }; }
            finally {
                try {
                    if (!_a && !_b && (_c = stream_1.return)) yield _c.call(stream_1);
                }
                finally { if (e_3) throw e_3.error; }
            }
            const buffer = Buffer.concat(chunks);
            const excel = xlsx_1.default.read(buffer, { type: 'buffer' });
            (0, Affirm_1.default)(excel.SheetNames.includes(sheetName), `The sheet "${sheetName}" doesn't exist in the excel (available: ${excel.SheetNames.join(', ')})`);
            const sheet = excel.Sheets[sheetName];
            const csv = xlsx_1.default.utils.sheet_to_csv(sheet);
            const lines = csv.split('\n');
            if (Algo_1.default.hasVal(lineFrom) && Algo_1.default.hasVal(lineTo))
                return lines.slice(lineFrom, lineTo + 1);
            else
                return lines;
        });
        /**
         * Buffer an entire XML stream, convert it to JSON via XMLParser and
         * return one JSON-stringified line per top-level item (or a single line
         * for a non-array result). Range semantics match _readExcelLines
         * (lineTo inclusive).
         */
        this._readXmlLines = (stream, lineFrom, lineTo) => __awaiter(this, void 0, void 0, function* () {
            var _a, stream_2, stream_2_1;
            var _b, e_4, _c, _d;
            const chunks = [];
            // Downleveled `for await`: drain the stream into memory.
            try {
                for (_a = true, stream_2 = __asyncValues(stream); stream_2_1 = yield stream_2.next(), _b = stream_2_1.done, !_b; _a = true) {
                    _d = stream_2_1.value;
                    _a = false;
                    const chunk = _d;
                    chunks.push(chunk);
                }
            }
            catch (e_4_1) { e_4 = { error: e_4_1 }; }
            finally {
                try {
                    if (!_a && !_b && (_c = stream_2.return)) yield _c.call(stream_2);
                }
                finally { if (e_4) throw e_4.error; }
            }
            const buffer = Buffer.concat(chunks);
            const jsonData = XMLParser_1.default.xmlToJson(buffer);
            // Convert JSON data to string lines. This might need adjustment based on XML structure.
            let lines = Array.isArray(jsonData) ? jsonData.map(item => JSON.stringify(item)) : [JSON.stringify(jsonData)];
            if (Algo_1.default.hasVal(lineFrom) && Algo_1.default.hasVal(lineTo)) {
                lines = lines.slice(lineFrom, lineTo + 1);
            }
            return lines;
        });
        /**
         * Fetch a single object and parse it into lines according to fileType,
         * dispatching to the matching _read* helper. `index` is the file's
         * position within a pattern fan-out; for every file after the first the
         * header row is stripped so the merged result has a single header.
         * Unlisted file types fall through and return an empty array.
         */
        this._get = (request, index) => __awaiter(this, void 0, void 0, function* () {
            const { fileKey, fileType, options } = request;
            const bucket = this._bucketName;
            let lineFrom, lineTo, sheetName, hasHeaderRow;
            if (options) {
                lineFrom = options.lineFrom;
                lineTo = options.lineTo;
                sheetName = options.sheetName;
                hasHeaderRow = options.hasHeaderRow;
            }
            const response = yield this._client.send(new client_s3_1.GetObjectCommand({
                Bucket: bucket,
                Key: fileKey
            }));
            (0, Affirm_1.default)(response.Body, 'Failed to fetch object from S3');
            const stream = response.Body;
            let lines = [];
            switch (fileType) {
                case 'CSV':
                case 'JSON':
                case 'JSONL':
                case 'TXT':
                    if (Algo_1.default.hasVal(lineFrom) && Algo_1.default.hasVal(lineTo))
                        lines = yield this._readLines(stream, lineFrom, lineTo);
                    else
                        lines = yield this._readLines(stream);
                    break;
                case 'XLS':
                case 'XLSX':
                    if (Algo_1.default.hasVal(lineFrom) && Algo_1.default.hasVal(lineTo))
                        lines = yield this._readExcelLines(stream, sheetName, lineFrom, lineTo);
                    else
                        lines = yield this._readExcelLines(stream, sheetName);
                    break;
                case 'XML':
                    if (Algo_1.default.hasVal(lineFrom) && Algo_1.default.hasVal(lineTo))
                        lines = yield this._readXmlLines(stream, lineFrom, lineTo);
                    else
                        lines = yield this._readXmlLines(stream);
                    break;
            }
            // If this is not the first file read in a pattern match AND the file type has an header,
            // then I need to remove the header from the resulting lines or the header will be duplicated
            if (index > 0 && ParseHelper_1.default.shouldHaveHeader(fileType, hasHeaderRow)) {
                lines = lines.slice(1);
            }
            return lines;
        });
        /**
         * One page of ListObjectsV2 results. A literal prefix (everything
         * before the first '%', or the whole key when there is no '%') is sent
         * to S3 to narrow the listing; the full pattern is then applied
         * client-side via Helper.matchPattern. Returns the matching keys plus
         * the continuation token for the next page, if any.
         */
        this._listFiles = (fileKeyPattern, maxKeys, continuationToken) => __awaiter(this, void 0, void 0, function* () {
            var _a;
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "connect()" first');
            // Convert SQL-like pattern to prefix and pattern parts for filtering
            let prefix = '';
            if (fileKeyPattern) {
                if (fileKeyPattern.includes('%')) {
                    const parts = fileKeyPattern.split('%').filter(part => part.length > 0);
                    // If pattern starts with text before first %, use it as prefix for S3 optimization
                    if (!fileKeyPattern.startsWith('%') && parts[0]) {
                        prefix = parts[0];
                    }
                }
                else {
                    // No wildcard, use the entire pattern as prefix
                    prefix = fileKeyPattern;
                }
            }
            const listParams = {
                Bucket: this._bucketName,
                Prefix: prefix || undefined,
                MaxKeys: maxKeys || 10000,
                ContinuationToken: continuationToken
            };
            try {
                const response = yield this._client.send(new client_s3_1.ListObjectsV2Command(listParams));
                const files = ((_a = response.Contents) === null || _a === void 0 ? void 0 : _a.map(obj => obj.Key).filter(key => key !== undefined)) || [];
                const matchingFiles = Helper_1.default.matchPattern(fileKeyPattern, files);
                return {
                    files: matchingFiles,
                    nextContinuationToken: response.NextContinuationToken
                };
            }
            catch (error) {
                // Rewrapped for a bucket-specific message (original stack is lost).
                throw new Error(`Failed to list files in bucket "${this._bucketName}": ${error.message}`);
            }
        });
        /**
         * List every key matching `fileKeyPattern`, following continuation
         * tokens across pages; when `maxKeys` is given the result is truncated
         * to at most that many keys.
         */
        this.listFiles = (fileKeyPattern, maxKeys) => __awaiter(this, void 0, void 0, function* () {
            const allFiles = [];
            let continuationToken = undefined;
            do {
                const result = yield this._listFiles(fileKeyPattern, maxKeys, continuationToken);
                allFiles.push(...result.files);
                continuationToken = result.nextContinuationToken;
                // If maxKeys is specified and we've reached the limit, break
                if (maxKeys && allFiles.length >= maxKeys) {
                    return allFiles.slice(0, maxKeys);
                }
            } while (continuationToken);
            return allFiles;
        });
        /**
         * Download one object fully into memory and return it as a Buffer.
         */
        this.downloadFile = (fileKey) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
            (0, Affirm_1.default)(fileKey, 'Invalid file key');
            const response = yield this._client.send(new client_s3_1.GetObjectCommand({
                Bucket: this._bucketName,
                Key: fileKey
            }));
            (0, Affirm_1.default)(response.Body, 'Failed to fetch object from S3');
            const content = yield response.Body.transformToByteArray();
            return Buffer.from(content);
        });
        /**
         * Delete one object from the bucket.
         */
        this.deleteFile = (fileKey) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
            (0, Affirm_1.default)(fileKey, 'Invalid file key');
            yield this._client.send(new client_s3_1.DeleteObjectCommand({
                Bucket: this._bucketName,
                Key: fileKey
            }));
        });
        /**
         * Server-side copy of an object from this driver's bucket into another
         * bucket (no data is downloaded locally).
         */
        this.copyFile = (sourceFileKey, destinationBucket, destinationFileKey) => __awaiter(this, void 0, void 0, function* () {
            (0, Affirm_1.default)(this._client, 'S3 client not yet initialized, call "init()" first');
            (0, Affirm_1.default)(sourceFileKey, 'Invalid source file key');
            (0, Affirm_1.default)(destinationBucket, 'Invalid destination bucket');
            (0, Affirm_1.default)(destinationFileKey, 'Invalid destination file key');
            yield this._client.send(new client_s3_1.CopyObjectCommand({
                CopySource: `${this._bucketName}/${sourceFileKey}`,
                Bucket: destinationBucket,
                Key: destinationFileKey
            }));
        });
        // Not supported by this driver yet.
        this.ready = (producer) => {
            void producer;
            throw new Error('Not implemented yet');
        };
    }
}
exports.default = S3SourceDriver;
package/engines/ai/LLM.js CHANGED
@@ -244,17 +244,6 @@ class LLM {
244
244
  const res = yield this._client.chat.completions.create(item);
245
245
  const msg = res.choices[0].message;
246
246
  const finalDraft = JSON.parse(msg.content);
247
- // Do some manual adjustments cause some things still don't work...
248
- if (finalDraft && finalDraft.consumers) {
249
- for (const cons of finalDraft.consumers) {
250
- for (const field of cons.fields) {
251
- if (field.grouping) {
252
- if (!field.grouping.groupingKey || field.grouping.groupingKey.length === 0)
253
- field.grouping = undefined;
254
- }
255
- }
256
- }
257
- }
258
247
  return finalDraft;
259
248
  });
260
249
  this._client = new openai_1.default({