@forzalabs/remora 0.0.12

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. package/Constants.js +8 -0
  2. package/actions/automap.js +73 -0
  3. package/actions/compile.js +57 -0
  4. package/actions/debug.js +61 -0
  5. package/actions/deploy.js +95 -0
  6. package/actions/discover.js +36 -0
  7. package/actions/init.js +78 -0
  8. package/actions/run.js +51 -0
  9. package/auth/AdminManager.js +48 -0
  10. package/auth/ApiKeysManager.js +45 -0
  11. package/auth/JWTManager.js +56 -0
  12. package/core/Affirm.js +42 -0
  13. package/core/Algo.js +155 -0
  14. package/core/dste/DSTE.js +113 -0
  15. package/core/logger/DebugLogService.js +48 -0
  16. package/core/logger/DevelopmentLogService.js +70 -0
  17. package/core/logger/LocalLogService.js +70 -0
  18. package/core/logger/Logger.js +54 -0
  19. package/database/DatabaseEngine.js +119 -0
  20. package/database/DatabaseInitializer.js +80 -0
  21. package/database/DatabaseStructure.js +27 -0
  22. package/definitions/agents/DestinationDriver.js +2 -0
  23. package/definitions/agents/SourceDriver.js +2 -0
  24. package/definitions/cli.js +2 -0
  25. package/definitions/database/ApiKeys.js +2 -0
  26. package/definitions/database/Stored.js +7 -0
  27. package/definitions/database/UsageStat.js +2 -0
  28. package/definitions/database/User.js +2 -0
  29. package/definitions/json_schemas/consumer-schema.json +423 -0
  30. package/definitions/json_schemas/producer-schema.json +236 -0
  31. package/definitions/json_schemas/project-schema.json +59 -0
  32. package/definitions/json_schemas/source-schema.json +109 -0
  33. package/definitions/requests/ConsumerRequest.js +2 -0
  34. package/definitions/requests/Developer.js +2 -0
  35. package/definitions/requests/Mapping.js +2 -0
  36. package/definitions/requests/ProducerRequest.js +2 -0
  37. package/definitions/requests/Request.js +2 -0
  38. package/definitions/resources/Compiled.js +2 -0
  39. package/definitions/resources/Consumer.js +2 -0
  40. package/definitions/resources/Environment.js +2 -0
  41. package/definitions/resources/Library.js +2 -0
  42. package/definitions/resources/Producer.js +2 -0
  43. package/definitions/resources/Project.js +2 -0
  44. package/definitions/resources/Schema.js +2 -0
  45. package/definitions/resources/Source.js +2 -0
  46. package/documentation/README.md +123 -0
  47. package/documentation/default_resources/consumer.json +52 -0
  48. package/documentation/default_resources/producer.json +32 -0
  49. package/documentation/default_resources/project.json +14 -0
  50. package/documentation/default_resources/schema.json +36 -0
  51. package/documentation/default_resources/source.json +15 -0
  52. package/drivers/DriverFactory.js +56 -0
  53. package/drivers/LocalDriver.js +122 -0
  54. package/drivers/RedshiftDriver.js +179 -0
  55. package/drivers/S3Driver.js +47 -0
  56. package/drivers/S3SourceDriver.js +127 -0
  57. package/engines/CryptoEngine.js +46 -0
  58. package/engines/Environment.js +139 -0
  59. package/engines/ParseManager.js +38 -0
  60. package/engines/ProducerEngine.js +150 -0
  61. package/engines/UsageManager.js +61 -0
  62. package/engines/UserManager.js +43 -0
  63. package/engines/Validator.js +154 -0
  64. package/engines/ai/AutoMapperEngine.js +37 -0
  65. package/engines/ai/DeveloperEngine.js +70 -0
  66. package/engines/ai/LLM.js +299 -0
  67. package/engines/consumer/ConsumerEngine.js +204 -0
  68. package/engines/consumer/ConsumerManager.js +155 -0
  69. package/engines/consumer/PostProcessor.js +143 -0
  70. package/engines/deployment/DeploymentPlanner.js +46 -0
  71. package/engines/execution/ExecutionEnvironment.js +114 -0
  72. package/engines/execution/ExecutionPlanner.js +92 -0
  73. package/engines/execution/RequestExecutor.js +100 -0
  74. package/engines/file/FileCompiler.js +28 -0
  75. package/engines/file/FileExporter.js +116 -0
  76. package/engines/schema/SchemaEngine.js +33 -0
  77. package/engines/schema/SchemaValidator.js +67 -0
  78. package/engines/sql/SQLBuilder.js +96 -0
  79. package/engines/sql/SQLCompiler.js +140 -0
  80. package/engines/sql/SQLUtils.js +22 -0
  81. package/engines/validation/Validator.js +151 -0
  82. package/helper/Helper.js +64 -0
  83. package/helper/Settings.js +13 -0
  84. package/index.js +63 -0
  85. package/package.json +77 -0
package/engines/Environment.js
@@ -0,0 +1,139 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const fs_1 = __importDefault(require("fs"));
+ const path_1 = __importDefault(require("path"));
+ const Affirm_1 = __importDefault(require("../core/Affirm"));
+ const SchemaValidator_1 = __importDefault(require("./schema/SchemaValidator"));
+ const Validator_1 = __importDefault(require("./Validator"));
+ class EnvironmentClass {
+     constructor() {
+         this._env = null;
+         this.init = (env) => {
+             this._env = env;
+         };
+         this.load = (remoraPath) => {
+             const envPath = path_1.default.join(remoraPath, 'remora');
+             // Read and parse project.json
+             const projectPath = path_1.default.join(envPath, 'project.json');
+             if (!fs_1.default.existsSync(projectPath))
+                 throw new Error('Project configuration file not found');
+             const projectConfig = JSON.parse(fs_1.default.readFileSync(projectPath, 'utf-8'));
+             // Validate project schema
+             const isValid = SchemaValidator_1.default.validate('project-schema', projectConfig);
+             if (!isValid)
+                 throw new Error('Invalid project configuration');
+             const loadConfigurations = (basePath, paths) => {
+                 const configs = [];
+                 for (const configPath of paths) {
+                     const fullPath = path_1.default.join(basePath, configPath);
+                     try {
+                         if (fs_1.default.statSync(fullPath).isDirectory()) {
+                             const files = fs_1.default.readdirSync(fullPath).filter(file => file.endsWith('.json'));
+                             for (const file of files) {
+                                 const config = JSON.parse(fs_1.default.readFileSync(path_1.default.join(fullPath, file), 'utf-8'));
+                                 configs.push(config);
+                             }
+                         }
+                         else {
+                             const config = JSON.parse(fs_1.default.readFileSync(fullPath, 'utf-8'));
+                             configs.push(config);
+                         }
+                     }
+                     catch (error) {
+                         throw new Error(`Error loading configuration from ${configPath}: ${error.message}`);
+                     }
+                 }
+                 return configs;
+             };
+             // Unload all the schemas other than the default ones, because otherwise the RW could keep loaded schemas that are not saved in the remora folder
+             SchemaValidator_1.default.unloadSchemas();
+             // Load all schemas first since they're needed for validation
+             const schemas = loadConfigurations(envPath, projectConfig.schemas);
+             schemas.forEach(schema => {
+                 SchemaValidator_1.default.addSchema(schema, schema.title);
+             });
+             // Load configurations with validation
+             const sources = loadConfigurations(envPath, projectConfig.sources);
+             sources.forEach(source => {
+                 if (!SchemaValidator_1.default.validate('source-schema', source))
+                     throw new Error(`Invalid source configuration: ${source.name}`);
+             });
+             const producers = loadConfigurations(envPath, projectConfig.producers);
+             producers.forEach(producer => {
+                 if (!SchemaValidator_1.default.validate('producer-schema', producer))
+                     throw new Error(`Invalid producer configuration: ${producer.name}`);
+             });
+             const consumers = loadConfigurations(envPath, projectConfig.consumers);
+             consumers.forEach(consumer => {
+                 if (!SchemaValidator_1.default.validate('consumer-schema', consumer))
+                     throw new Error(`Invalid consumer configuration: ${consumer.name}`);
+             });
+             // Initialize environment
+             this.init({
+                 settings: new Map(Object.entries({
+                     SQL_MAX_QUERY_ROWS: projectConfig.settings.SQL_MAX_QUERY_ROWS.toString()
+                 })),
+                 sources,
+                 producers,
+                 consumers,
+                 schemas,
+                 sqlLibrary: [] // TODO: Add SQL library loading if needed
+             });
+         };
+         this.get = (setting) => {
+             return this._env.settings.get(setting);
+         };
+         this.getSource = (sourceName) => {
+             (0, Affirm_1.default)(sourceName, 'Invalid source name');
+             return this._env.sources.find(x => x.name === sourceName);
+         };
+         /**
+          * A consumer can reference another consumer; in that case keep digging to find the real producer
+          */
+         this.getFirstProducer = (producerName) => {
+             (0, Affirm_1.default)(producerName, `Invalid producer name`);
+             const prod = this._env.producers.find(x => x.name === producerName);
+             if (!prod) {
+                 const consumer = this.getConsumer(producerName);
+                 (0, Affirm_1.default)(consumer, `Invalid producer name`);
+                 return this.getFirstProducer(consumer.producers[0].name);
+             }
+             return prod;
+         };
+         this.getProducer = (producerName) => {
+             (0, Affirm_1.default)(producerName, `Invalid producer name`);
+             return this._env.producers.find(x => x.name === producerName);
+         };
+         this.getConsumer = (consumerName) => {
+             (0, Affirm_1.default)(consumerName, `Invalid consumer name`);
+             return this._env.consumers.find(x => x.name === consumerName);
+         };
+         this.getSchema = (schemaName) => {
+             (0, Affirm_1.default)(schemaName, 'Invalid schema name');
+             return this._env.schemas.find(x => x.title === schemaName);
+         };
+         this.getSqlInLibrary = (name) => {
+             (0, Affirm_1.default)(name, `Invalid sql library item name`);
+             return this._env.sqlLibrary.find(x => x.name === name);
+         };
+         this.validate = () => {
+             (0, Affirm_1.default)(this._env, 'Invalid environment');
+             let errors = [];
+             try {
+                 errors = [...errors, ...Validator_1.default.validateSources(this._env.sources)];
+                 errors = [...errors, ...Validator_1.default.validateProducers(this._env.producers)];
+                 errors = [...errors, ...Validator_1.default.validateConsumers(this._env.consumers)];
+             }
+             catch (e) {
+                 if (errors.length === 0)
+                     errors.push(`There was an error in the validation Environment. (error: ${e})`);
+             }
+             return errors;
+         };
+     }
+ }
+ const Environment = new EnvironmentClass();
+ exports.default = Environment;
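Editor's note: Environment is a singleton that loads a project from a `remora/` folder (project.json plus the schema, source, producer, and consumer JSON files it references), validates each document against its JSON schema, and then serves lookups by name. A minimal usage sketch follows; it is not part of the package diff, the deep-require path is an assumption about how the module resolves, and the resource names are hypothetical.

// Sketch only: load a project and run cross-resource validation.
// The deep-require path is assumed; adjust to however the package exposes this module.
const Environment = require('@forzalabs/remora/engines/Environment').default;

// load() expects <project root>/remora/project.json, whose entries point at the
// schema/source/producer/consumer JSON files that get schema-validated on load.
Environment.load(process.cwd());

const errors = Environment.validate();
if (errors.length > 0)
    console.error('Environment validation failed:', errors);

// Lookups are by name (or schema title) once the environment is loaded.
const source = Environment.getSource('my-source');         // hypothetical source name
const producer = Environment.getFirstProducer('my-feed');  // follows consumer -> producer chains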
package/engines/ParseManager.js
@@ -0,0 +1,38 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const Affirm_1 = __importDefault(require("../core/Affirm"));
+ class ParseManagerClass {
+     constructor() {
+         this.csvToJson = (csv, producer) => {
+             (0, Affirm_1.default)(csv, 'Invalid csv content');
+             Affirm_1.default.hasValue(csv.length, 'Invalid csv content length');
+             const fileRows = csv.split('\n');
+             return this.csvLinesToJson(fileRows, producer);
+         };
+         this.csvLinesToJson = (lines, producer) => {
+             var _a;
+             (0, Affirm_1.default)(lines, 'Invalid csv lines');
+             Affirm_1.default.hasValue(lines.length, 'Invalid csv lines length');
+             const delimiterChar = (_a = producer.settings.delimiter) !== null && _a !== void 0 ? _a : ',';
+             const fileRows = lines;
+             const columns = fileRows[0].split(delimiterChar).map(x => x.trim());
+             const rows = fileRows.slice(1).map(x => x.split(delimiterChar).map(k => k.trim()));
+             const result = [];
+             for (let i = 0; i < rows.length; i++) {
+                 const row = rows[i];
+                 const rowObject = {};
+                 for (let j = 0; j < columns.length; j++) {
+                     const column = columns[j];
+                     rowObject[column] = row[j];
+                 }
+                 result.push(rowObject);
+             }
+             return result;
+         };
+     }
+ }
+ const ParseManager = new ParseManagerClass();
+ exports.default = ParseManager;
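Editor's note: ParseManager does a naive CSV-to-JSON conversion: the first line supplies the column names, every following line becomes one object, and the delimiter comes from `producer.settings.delimiter` (defaulting to `,`). There is no quoting or escaping support, so delimiters inside quoted values will split incorrectly. A small illustrative call follows; the producer object is a hypothetical stub containing only the fields the parser reads.

// Sketch only: the producer is a stub with just the settings the parser touches.
const ParseManager = require('@forzalabs/remora/engines/ParseManager').default;

const producer = { settings: { delimiter: ';' } };
const rows = ParseManager.csvToJson('id;name\n1;Ada\n2;Linus', producer);
// rows => [{ id: '1', name: 'Ada' }, { id: '2', name: 'Linus' }]
// Note: all values stay strings; there is no type coercion or quote handling.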
package/engines/ProducerEngine.js
@@ -0,0 +1,150 @@
+ "use strict";
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+     return new (P || (P = Promise))(function (resolve, reject) {
+         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+         step((generator = generator.apply(thisArg, _arguments || [])).next());
+     });
+ };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const Affirm_1 = __importDefault(require("../core/Affirm"));
+ const DriverFactory_1 = __importDefault(require("../drivers/DriverFactory"));
+ const DeploymentPlanner_1 = __importDefault(require("./deployment/DeploymentPlanner"));
+ const Environment_1 = __importDefault(require("./Environment"));
+ const FileCompiler_1 = __importDefault(require("./file/FileCompiler"));
+ const SQLCompiler_1 = __importDefault(require("./sql/SQLCompiler"));
+ const SQLUtils_1 = __importDefault(require("./sql/SQLUtils"));
+ const ParseManager_1 = __importDefault(require("./ParseManager"));
+ class ProducerEngineClass {
+     constructor() {
+         this.compile = (producer) => {
+             (0, Affirm_1.default)(producer, 'Invalid producer');
+             const source = Environment_1.default.getSource(producer.source);
+             (0, Affirm_1.default)(source, `No source found for producer "${producer.name}" with name "${producer.source}"`);
+             switch (source.engine) {
+                 case 'aws-redshift':
+                 case 'postgres': {
+                     const sql = SQLCompiler_1.default.compileProducer(producer, source);
+                     (0, Affirm_1.default)(sql, `Invalid SQL from compilation for producer "${producer.name}"`);
+                     return sql;
+                 }
+                 case 'aws-s3': {
+                     const columns = FileCompiler_1.default.compileProducer(producer, source);
+                     (0, Affirm_1.default)(columns, `Invalid columns from compilation for producer "${producer.name}"`);
+                     break;
+                 }
+                 default: throw new Error(`Invalid engine type "${source.engine}" for producer "${producer.name}": not implemented yet`);
+             }
+         };
+         this.deploy = (producer) => __awaiter(this, void 0, void 0, function* () {
+             (0, Affirm_1.default)(producer, 'Invalid producer');
+             const source = Environment_1.default.getSource(producer.source);
+             (0, Affirm_1.default)(source, `No source found for producer "${producer.name}" with name "${producer.source}"`);
+             const driver = yield DriverFactory_1.default.instantiateSource(source);
+             (0, Affirm_1.default)(driver, `No driver found for producer "${producer.name}" with driver type "${source.engine}"`);
+             const plan = DeploymentPlanner_1.default.planProducer(producer);
+             for (const planStep of plan) {
+                 switch (planStep.type) {
+                     case 'create-view': {
+                         const internalSchema = Environment_1.default.get('schema');
+                         (0, Affirm_1.default)(internalSchema, `Invalid schema set on the authentication for source "${source.name}"`);
+                         const sql = SQLCompiler_1.default.compileProducer(producer, source);
+                         const vSQL = `CREATE OR REPLACE VIEW "${internalSchema}"."${SQLUtils_1.default.viewName(producer.name)}" AS ${sql}`;
+                         yield driver.execute(vSQL);
+                         break;
+                     }
+                     default: throw new Error(`Invalid execution consumer plan step type "${planStep.type}"`);
+                 }
+             }
+         });
+         this.execute = (producer) => __awaiter(this, void 0, void 0, function* () {
+             (0, Affirm_1.default)(producer, 'Invalid producer');
+             const source = Environment_1.default.getSource(producer.source);
+             (0, Affirm_1.default)(source, `No source found for producer "${producer.name}" with name "${producer.source}"`);
+             const driver = yield DriverFactory_1.default.instantiateSource(source);
+             (0, Affirm_1.default)(driver, `No driver found for producer "${producer.name}" with driver type "${source.engine}"`);
+             switch (source.engine) {
+                 case 'aws-redshift': {
+                     const sql = SQLCompiler_1.default.compileProducer(producer, source);
+                     (0, Affirm_1.default)(sql, `Invalid SQL from deployment compilation for producer "${producer.name}"`);
+                     const res = yield driver.query(sql);
+                     return { data: res.rows, dataType: 'array-of-json' };
+                 }
+                 case 'aws-s3': {
+                     return this.readFile(producer, { readmode: 'all' });
+                 }
+                 default: throw new Error(`Invalid engine type "${source.engine}" for producer "${producer.name}": not supported`);
+             }
+         });
+         this.readFile = (producer, options) => __awaiter(this, void 0, void 0, function* () {
+             (0, Affirm_1.default)(producer, 'Invalid producer');
+             (0, Affirm_1.default)(options, 'Invalid options');
+             if (options.readmode === 'lines')
+                 (0, Affirm_1.default)(options.lines, 'Invalid lines');
+             const source = Environment_1.default.getSource(producer.source);
+             (0, Affirm_1.default)(source, `No source found for producer "${producer.name}" with name "${producer.source}"`);
+             const driver = yield DriverFactory_1.default.instantiateSource(source);
+             (0, Affirm_1.default)(driver, `No driver found for producer "${producer.name}" with driver type "${source.engine}"`);
+             let lines = [];
+             if (options.readmode === 'lines')
+                 lines = yield driver.readLinesInRange({ fileKey: producer.settings.fileKey, lineFrom: options.lines.from, lineTo: options.lines.to });
+             else
+                 lines = [(yield driver.download({ fileKey: producer.settings.fileKey }))];
+             switch (producer.settings.fileType.toUpperCase()) {
+                 case 'CSV': {
+                     return { data: lines, dataType: 'lines-of-text' };
+                 }
+                 case 'JSONL':
+                 case 'JSON': {
+                     const json = lines.map(x => JSON.parse(x));
+                     return { data: json, dataType: 'array-of-json' };
+                 }
+                 default:
+                     throw new Error(`Invalid file type "${producer.settings.fileType}" for engine type "${source.engine}" for producer "${producer.name}": not supported`);
+             }
+         });
+         this.readSampleData = (producer_1, ...args_1) => __awaiter(this, [producer_1, ...args_1], void 0, function* (producer, sampleSize = 10) {
+             var _a, _b, _c;
+             (0, Affirm_1.default)(producer, 'Invalid producer');
+             (0, Affirm_1.default)(sampleSize > 0, 'Sample size must be greater than 0');
+             const source = Environment_1.default.getSource(producer.source);
+             (0, Affirm_1.default)(source, `No source found for producer "${producer.name}" with name "${producer.source}"`);
+             let sampleData = [];
+             switch (source.engine) {
+                 case 'aws-redshift': {
+                     const sql = `SELECT * FROM "${source.authentication['schema']}"."${producer.settings.sqlTable}" LIMIT ${sampleSize}`;
+                     (0, Affirm_1.default)(sql, `Invalid SQL from deployment compilation for producer "${producer.name}"`);
+                     const driver = yield DriverFactory_1.default.instantiateSource(source);
+                     (0, Affirm_1.default)(driver, `No driver found for producer "${producer.name}" with driver type "${source.engine}"`);
+                     const res = yield driver.query(sql);
+                     sampleData = res.rows;
+                     break;
+                 }
+                 case 'local':
+                 case 'aws-s3': {
+                     const fileData = yield this.readFile(producer, { readmode: 'lines', lines: { from: 0, to: sampleSize } });
+                     if (((_a = producer.settings.fileType) === null || _a === void 0 ? void 0 : _a.toUpperCase()) === 'CSV') {
+                         sampleData = ParseManager_1.default.csvLinesToJson(fileData.data, producer);
+                     }
+                     else if (((_b = producer.settings.fileType) === null || _b === void 0 ? void 0 : _b.toUpperCase()) === 'JSON' || ((_c = producer.settings.fileType) === null || _c === void 0 ? void 0 : _c.toUpperCase()) === 'JSONL') {
+                         sampleData = fileData.data.map(line => JSON.parse(line));
+                     }
+                     else {
+                         sampleData = fileData.data;
+                     }
+                     break;
+                 }
+                 default:
+                     throw new Error(`Invalid engine type "${source.engine}" for producer "${producer.name}": not supported`);
+             }
+             return sampleData;
+         });
+     }
+ }
+ const ProducerEngine = new ProducerEngineClass();
+ exports.default = ProducerEngine;
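Editor's note: ProducerEngine dispatches on `source.engine`: SQL engines (aws-redshift, postgres) compile a SELECT via SQLCompiler, while file engines (aws-s3, local) go through the driver's line/download reads and ParseManager. As written, the `aws-s3` branch of `compile()` validates the compiled columns but falls through without returning them. A hedged usage sketch follows; paths and the producer name are assumptions, not from the diff.

// Sketch only: compile and sample a producer that the Environment has already loaded.
const Environment = require('@forzalabs/remora/engines/Environment').default;
const ProducerEngine = require('@forzalabs/remora/engines/ProducerEngine').default;

async function previewProducer(name) {
    const producer = Environment.getProducer(name);
    // For SQL engines this returns the compiled SELECT; for aws-s3 it currently returns undefined.
    const sql = ProducerEngine.compile(producer);
    console.log('Compiled SQL:', sql);

    // readSampleData() pulls the first N rows/lines and parses CSV/JSON(L) into objects.
    const sample = await ProducerEngine.readSampleData(producer, 5);
    console.table(sample);
}

previewProducer('orders'); // 'orders' is a hypothetical producer name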
package/engines/UsageManager.js
@@ -0,0 +1,61 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const Affirm_1 = __importDefault(require("../core/Affirm"));
+ const DSTE_1 = __importDefault(require("../core/dste/DSTE"));
+ const Helper_1 = __importDefault(require("../helper/Helper"));
+ const DatabaseEngine_1 = __importDefault(require("../database/DatabaseEngine"));
+ const Settings_1 = __importDefault(require("../helper/Settings"));
+ class UsageManagerClass {
+     constructor() {
+         /**
+          * TODO: I need to group the usage stats into a bucket daily. When and how I do it is still a question...
+          */
+         this.getTodayBucketId = (consumer) => {
+             (0, Affirm_1.default)(consumer, `Invalid consumer`);
+             const now = DSTE_1.default.now();
+             return `${consumer.name}_${now.getUTCFullYear()}_${now.getUTCMonth()}_${now.getUTCDate()}`.toLowerCase();
+         };
+         this.startUsage = (consumer, user) => {
+             const newUsage = {
+                 _id: Helper_1.default.uuid(),
+                 consumer: consumer.name,
+                 startedAt: DSTE_1.default.now(),
+                 executedBy: { name: user.name, _id: user._id },
+                 itemsCount: -1,
+                 status: 'started',
+                 _signature: ''
+             };
+             if (Helper_1.default.isDev())
+                 return { usageId: newUsage._id, usage: Promise.resolve(newUsage) };
+             const updateRes = DatabaseEngine_1.default.upsert(Settings_1.default.db.collections.usage, newUsage._id, newUsage);
+             return { usageId: newUsage._id, usage: updateRes };
+         };
+         this.endUsage = (usageId, itemsCount) => {
+             const update = {
+                 itemsCount: itemsCount,
+                 status: 'success',
+                 finishedAt: DSTE_1.default.now()
+             };
+             if (Helper_1.default.isDev())
+                 return { usageId: null, usage: Promise.resolve(update) };
+             const updateRes = DatabaseEngine_1.default.upsert(Settings_1.default.db.collections.usage, usageId, update);
+             return { usageId: usageId, usage: updateRes };
+         };
+         this.failUsage = (usageId, error) => {
+             const update = {
+                 status: 'failed',
+                 error: error,
+                 finishedAt: DSTE_1.default.now()
+             };
+             if (Helper_1.default.isDev())
+                 return { usageId: null, usage: Promise.resolve(update) };
+             const updateRes = DatabaseEngine_1.default.upsert(Settings_1.default.db.collections.usage, usageId, update);
+             return { usageId: usageId, usage: updateRes };
+         };
+     }
+ }
+ const UsageManager = new UsageManagerClass();
+ exports.default = UsageManager;
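Editor's note: UsageManager records a start/success/failure lifecycle per consumer run, upserting into the usage collection unless the process is in dev mode (in which case it resolves locally without touching the database). A hedged sketch of how the three calls might be wired around a run; the consumer and user objects are hypothetical stubs with only the fields the manager reads.

// Sketch only: wrap a consumer run in usage tracking.
const UsageManager = require('@forzalabs/remora/engines/UsageManager').default;

async function trackedRun(consumer, user, work) {
    const { usageId } = UsageManager.startUsage(consumer, user);
    try {
        const items = await work();                 // the actual consumer execution
        UsageManager.endUsage(usageId, items.length);
        return items;
    }
    catch (err) {
        UsageManager.failUsage(usageId, String(err));
        throw err;
    }
}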
package/engines/UserManager.js
@@ -0,0 +1,43 @@
+ "use strict";
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+     return new (P || (P = Promise))(function (resolve, reject) {
+         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+         step((generator = generator.apply(thisArg, _arguments || [])).next());
+     });
+ };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const DatabaseEngine_1 = __importDefault(require("../database/DatabaseEngine"));
+ const Helper_1 = __importDefault(require("../helper/Helper"));
+ const Settings_1 = __importDefault(require("../helper/Settings"));
+ class UserManagerClass {
+     constructor() {
+         this.getUser = () => {
+             if (Helper_1.default.isDev())
+                 return DEV_USER;
+             // TODO: figure out how to handle users
+         };
+         this.findOIDC = (oid) => __awaiter(this, void 0, void 0, function* () {
+             return yield DatabaseEngine_1.default.findOne(Settings_1.default.db.collections.users, { 'auth.oid': oid });
+         });
+         this.update = (user) => __awaiter(this, void 0, void 0, function* () {
+             return yield DatabaseEngine_1.default.upsert(Settings_1.default.db.collections.users, user._id, user);
+         });
+     }
+ }
+ const UserManager = new UserManagerClass();
+ exports.default = UserManager;
+ const DEV_USER = {
+     _id: '__dev__',
+     auth: { oid: '', provider: 'azure' },
+     email: '',
+     name: 'Dev',
+     roles: ['admin'],
+     _signature: '',
+     lastLogin: new Date().toJSON()
+ };
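Editor's note: UserManager only fully resolves users in development, where `getUser()` returns the built-in DEV_USER (admin role); outside dev mode user handling is still a TODO, and `findOIDC()` looks a user up by the OIDC object id stored under `auth.oid`. A brief hedged sketch, assuming the database has been initialized elsewhere:

// Sketch only: resolve a user by OIDC id, falling back to the dev user.
const UserManager = require('@forzalabs/remora/engines/UserManager').default;

async function resolveUser(oid) {
    const existing = await UserManager.findOIDC(oid);
    if (existing)
        return existing;
    // In dev mode this returns DEV_USER; otherwise it is currently undefined (TODO in the package).
    return UserManager.getUser();
}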
package/engines/Validator.js
@@ -0,0 +1,154 @@
+ "use strict";
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const Affirm_1 = __importDefault(require("../core/Affirm"));
+ const Algo_1 = __importDefault(require("../core/Algo"));
+ const ConsumerManager_1 = __importDefault(require("./consumer/ConsumerManager"));
+ const Environment_1 = __importDefault(require("./Environment"));
+ const ExecutionPlanner_1 = __importDefault(require("./execution/ExecutionPlanner"));
+ class ValidatorClass {
+     constructor() {
+         this.validateSources = (sources) => {
+             (0, Affirm_1.default)(sources, 'Invalid sources');
+             const errors = [];
+             try {
+                 const dupes = Algo_1.default.duplicatesObject(sources, 'name');
+                 if (dupes.length > 0)
+                     errors.push(`Duplicate name(s) found in sources: "${dupes.map(x => x.name).join(', ')}"`);
+                 for (let i = 0; i < sources.length; i++) {
+                     const source = sources[i];
+                     if (source.engine === 'local' && !source.authentication.path)
+                         errors.push(`For source ${source.name}, the path has not been configured`);
+                 }
+             }
+             catch (e) {
+                 if (errors.length === 0)
+                     errors.push(`There was an error in the validation Sources. (error: ${e})`);
+             }
+             return errors;
+         };
+         this.validateProducers = (producers) => {
+             (0, Affirm_1.default)(producers, 'Invalid producers');
+             const errors = [];
+             try {
+                 const dupes = Algo_1.default.duplicatesObject(producers, 'name');
+                 if (dupes.length > 0)
+                     errors.push(`Duplicate name(s) found in producers: "${dupes.map(x => x.name).join(', ')}"`);
+             }
+             catch (e) {
+                 if (errors.length === 0)
+                     errors.push(`There was an error in the validation Producers. (error: ${e})`);
+             }
+             return errors;
+         };
+         this.validateProducer = (producer) => {
+             (0, Affirm_1.default)(producer, 'Invalid producer');
+             const errors = [];
+             try {
+                 if (!producer.source || producer.source.length === 0)
+                     errors.push(`Missing parameter "source" in producer`);
+                 if (producer.dimensions.some(x => x.name.includes('{') || x.name.includes('[')))
+                     errors.push(`Invalid dimension name found in producer "${producer.name}": can't use characters "{" or "[" in dimension names`);
+             }
+             catch (e) {
+                 if (errors.length === 0)
+                     errors.push(`There was an error in the validation Producer. (error: ${e})`);
+             }
+             return errors;
+         };
+         this.validateConsumers = (consumers) => {
+             (0, Affirm_1.default)(consumers, 'Invalid consumers');
+             const errors = [];
+             try {
+                 const dupes = Algo_1.default.duplicatesObject(consumers, 'name');
+                 if (dupes.length > 0)
+                     errors.push(`Duplicate name(s) found in consumers: "${dupes.map(x => x.name).join(', ')}"`);
+                 errors.push(...consumers.flatMap(x => this.validateConsumer(x)));
+             }
+             catch (e) {
+                 if (errors.length === 0)
+                     errors.push(`There was an error in the validation Consumers. (error: ${e})`);
+             }
+             return errors;
+         };
+         this.validateConsumer = (consumer) => {
+             (0, Affirm_1.default)(consumer, 'Invalid consumer');
+             const errors = [];
+             try {
+                 // TODO: check that a consumer doesn't consume itself
+                 const allFieldsWithNoFrom = consumer.fields.filter(x => x.key === '*' && !x.from);
+                 if (allFieldsWithNoFrom.length > 0 && consumer.producers.length > 1)
+                     errors.push(`Field with key "*" was used without specifying the "from" producer and multiple producers were found.`);
+                 if (consumer.fields.some(x => x.key === '*' && x.grouping))
+                     errors.push(`Field with key "*" can't be used for "grouping". Either remove the grouping or change the key.`);
+                 // Validation on producers
+                 if (consumer.producers.length === 0)
+                     errors.push(`Consumer must have at least 1 producer.`);
+                 const producers = consumer.producers.map(x => Environment_1.default.getProducer(x.name));
+                 if (producers.length === 0)
+                     errors.push('No producers found');
+                 if (producers.some(x => !x))
+                     errors.push(`Invalid producer found in consumer "${consumer.name}"`);
+                 // Validation on sources
+                 const sources = producers.map(x => Environment_1.default.getSource(x.source));
+                 if (sources.length === 0)
+                     errors.push('No sources found');
+                 if (sources.some(x => !x))
+                     errors.push(`Invalid source found in consumer "${consumer.name}"`);
+                 // For now we only support connecting producers of the same engine type to a consumer, so we give an error if we detect different ones
+                 const uniqEngines = Algo_1.default.uniqBy(sources, 'engine');
+                 if (uniqEngines.length !== 1)
+                     errors.push(`Sources with different engines were used in the consumer "${consumer.name}" (${uniqEngines.join(', ')})`);
+                 // For now we also only support consumers whose producers ALL have the same exact source
+                 const uniqNames = Algo_1.default.uniqBy(sources, 'name');
+                 if (uniqNames.length !== 1)
+                     errors.push(`Producers with different sources were used in the consumer "${consumer.name}" (${uniqNames.join(', ')})`);
+                 if (consumer.filters && consumer.filters.length > 0) {
+                     if (consumer.filters.some(x => x.sql && x.rule))
+                         errors.push(`A single consumer can't have both filters based on SQL and filters based on rules.`);
+                     const [source] = ConsumerManager_1.default.getSource(consumer);
+                     const engineClass = ExecutionPlanner_1.default.getEngineClass(source.engine);
+                     if (engineClass === 'file' && consumer.filters.some(x => x.sql))
+                         errors.push(`Filters based on SQL are only valid for SQL based sources. (source: ${source.name})`);
+                     if (engineClass === 'sql' && consumer.filters.some(x => x.rule))
+                         errors.push(`Filters based on rules are only valid for non-SQL based sources. (source: ${source.name})`);
+                 }
+                 // Validation on fields
+                 const validateGroupingLevels = (fields, level = 0) => {
+                     let errors = [];
+                     const groupingFields = fields.filter(x => x.grouping);
+                     if (groupingFields.length > 1)
+                         errors.push(`There can't be 2 fields with grouping defined at the same level (${groupingFields.map(x => x.key).join(', ')}). Level: ${level}`);
+                     groupingFields.forEach(field => {
+                         if (field.grouping)
+                             errors = [...errors, ...validateGroupingLevels(field.grouping.subFields, level + 1)];
+                     });
+                     return errors;
+                 };
+                 errors.push(...validateGroupingLevels(consumer.fields));
+                 // Validation on outputs
+                 const duplicatesOutputs = Algo_1.default.duplicatesObject(consumer.outputs, 'format');
+                 if (duplicatesOutputs.length > 0) {
+                     const duplicatesTypes = Algo_1.default.uniq(duplicatesOutputs.map(x => x.format));
+                     errors.push(`There are outputs with the same type. (duplicates type: ${duplicatesTypes.join(' and ')})`);
+                 }
+                 for (let i = 0; i < consumer.outputs.length; i++) {
+                     const output = consumer.outputs[i];
+                     if (output.format === 'SQL' && output.accellerated && output.direct)
+                         errors.push(`An output SQL cannot be both direct and accelerated (output: ${output.format})`);
+                     if ((output.format === 'CSV' || output.format === 'JSON' || output.format === 'PARQUET') && !output.exportDestination)
+                         errors.push(`A static file output must have an export destination set (${output.format})`);
+                 }
+             }
+             catch (e) {
+                 if (errors.length === 0)
+                     errors.push(`There was an error in the validation Consumer. (error: ${e})`);
+             }
+             return errors;
+         };
+     }
+ }
+ const Validator = new ValidatorClass();
+ exports.default = Validator;
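Editor's note: Validator collects human-readable error strings rather than throwing: duplicate names, missing source paths, wildcard-field misuse, mixed engines or sources across a consumer's producers, SQL-vs-rule filter mismatches, nested grouping conflicts, and output constraints. `Environment.validate()` aggregates these, but the per-resource validators can also be called directly, as in the hedged sketch below (the consumer name is hypothetical and the deep-require paths are assumptions).

// Sketch only: run one of the validators that Environment.validate() aggregates.
const Environment = require('@forzalabs/remora/engines/Environment').default;
const Validator = require('@forzalabs/remora/engines/Validator').default;

Environment.load(process.cwd());
const consumer = Environment.getConsumer('daily-report'); // hypothetical consumer name
// Returns an array of error strings; an empty array means the consumer passed validation.
const errors = Validator.validateConsumer(consumer);
errors.forEach(e => console.warn(e));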
package/engines/ai/AutoMapperEngine.js
@@ -0,0 +1,37 @@
+ "use strict";
+ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+     return new (P || (P = Promise))(function (resolve, reject) {
+         function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+         function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+         function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+         step((generator = generator.apply(thisArg, _arguments || [])).next());
+     });
+ };
+ var __importDefault = (this && this.__importDefault) || function (mod) {
+     return (mod && mod.__esModule) ? mod : { "default": mod };
+ };
+ Object.defineProperty(exports, "__esModule", { value: true });
+ const Affirm_1 = __importDefault(require("../../core/Affirm"));
+ const LLM_1 = __importDefault(require("./LLM"));
+ class AutoMapperEngineClass {
+     constructor() {
+         /**
+          * input: the first ten lines of the uploaded file
+          * outputs: the selected schemas
+          */
+         this.map = (input, outputs) => __awaiter(this, void 0, void 0, function* () {
+             (0, Affirm_1.default)(input, 'Invalid input');
+             (0, Affirm_1.default)(outputs, 'Invalid outputs');
+             const llm = new LLM_1.default();
+             const producersRes = yield llm.inferProducers(input, outputs);
+             const consumersRes = yield llm.inferConsumers(producersRes.producers, outputs);
+             return {
+                 consumers: consumersRes.consumers,
+                 producers: producersRes.producers
+             };
+         });
+     }
+ }
+ const AutoMapperEngine = new AutoMapperEngineClass();
+ exports.default = AutoMapperEngine;
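Editor's note: AutoMapperEngine chains two LLM calls: it infers producer configurations from a sample of the uploaded data, then infers consumer configurations from those producers and the selected target schemas. A hedged sketch of the call shape; whatever credentials or configuration the LLM module needs are assumed to be set up elsewhere, and the argument values are illustrative.

// Sketch only: infer producer/consumer configs from sample data.
const AutoMapperEngine = require('@forzalabs/remora/engines/ai/AutoMapperEngine').default;

async function autoMap(sampleLines, targetSchemas) {
    // sampleLines: e.g. the first ten lines of an uploaded file; targetSchemas: the selected JSON schemas.
    const { producers, consumers } = await AutoMapperEngine.map(sampleLines, targetSchemas);
    return { producers, consumers };
}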