retold-data-service 2.0.13 → 2.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/example_applications/data-cloner/data/cloned.sqlite +0 -0
- package/example_applications/data-cloner/data/cloned.sqlite-shm +0 -0
- package/example_applications/data-cloner/data/cloned.sqlite-wal +0 -0
- package/example_applications/data-cloner/data-cloner-web.html +935 -0
- package/example_applications/data-cloner/data-cloner.js +1047 -0
- package/example_applications/data-cloner/package.json +19 -0
- package/package.json +13 -9
- package/source/Retold-Data-Service.js +225 -73
- package/source/services/Retold-Data-Service-ConnectionManager.js +277 -0
- package/source/services/Retold-Data-Service-MeadowEndpoints.js +217 -0
- package/source/services/Retold-Data-Service-ModelManager.js +335 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-CSVCheck.js +85 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-CSVTransform.js +180 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionIntersect.js +153 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionPush.js +190 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionToArray.js +113 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionToCSV.js +211 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-EntityFromTabularFolder.js +244 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-JSONArrayTransform.js +213 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-TSVCheck.js +80 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-TSVTransform.js +166 -0
- package/source/services/meadow-integration/Retold-Data-Service-MeadowIntegration.js +113 -0
- package/source/services/migration-manager/MigrationManager-Command-Connections.js +220 -0
- package/source/services/migration-manager/MigrationManager-Command-DiffMigrate.js +169 -0
- package/source/services/migration-manager/MigrationManager-Command-Schemas.js +532 -0
- package/source/services/migration-manager/MigrationManager-Command-WebUI.js +123 -0
- package/source/services/migration-manager/Retold-Data-Service-MigrationManager.js +357 -0
- package/source/services/stricture/Retold-Data-Service-Stricture.js +303 -0
- package/source/services/stricture/Stricture-Command-Compile.js +39 -0
- package/source/services/stricture/Stricture-Command-Generate-AuthorizationChart.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-DictionaryCSV.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-LaTeX.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Markdown.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Meadow.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-ModelGraph.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-MySQL.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-MySQLMigrate.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Pict.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-TestObjectContainers.js +14 -0
- package/test/RetoldDataService_tests.js +161 -1
package/source/services/meadow-integration/MeadowIntegration-Command-EntityFromTabularFolder.js
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - Entity From Tabular Folder
 *
 * POST /1.0/Retold/MeadowIntegration/Entity/FromTabularFolder
 *
 * Scans a folder for tabular data files (CSV, TSV and JSON arrays) and
 * builds a single aggregated entity comprehension from every file found.
 *
 * Body: { "Folder": "/path/to/folder/", "Entity": "ForcedEntity", "MappingConfiguration": {...} }
 * Returns: Comprehension JSON aggregated from all tabular files in the folder
 */
const libFS = require('fs');
const libPath = require('path');

module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;

	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Entity/FromTabularFolder`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			// Guard: a string folder path is required.
			if (!tmpBody.Folder || (typeof(tmpBody.Folder) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid Folder path provided in request body.' });
				return fNext();
			}

			let tmpFolderPath = tmpPict.FilePersistence.resolvePath(tmpBody.Folder);

			if (!libFS.existsSync(tmpFolderPath))
			{
				pResponse.send(404, { Error: `Folder [${tmpFolderPath}] does not exist.` });
				return fNext();
			}

			if (!libFS.statSync(tmpFolderPath).isDirectory())
			{
				pResponse.send(400, { Error: `Path [${tmpFolderPath}] is not a directory.` });
				return fNext();
			}

			let tmpFolderEntries;
			try
			{
				tmpFolderEntries = libFS.readdirSync(tmpFolderPath);
			}
			catch (pError)
			{
				pResponse.send(500, { Error: `Error reading folder [${tmpFolderPath}]: ${pError.message}` });
				return fNext();
			}

			// Only CSV, TSV and JSON files participate in the comprehension.
			let tmpTabularFiles = tmpFolderEntries.filter(
				(pFile) => ['.csv', '.tsv', '.json'].includes(libPath.extname(pFile).toLowerCase()));

			if (tmpTabularFiles.length < 1)
			{
				pResponse.send(400, { Error: `No tabular files (CSV, TSV, JSON) found in folder [${tmpFolderPath}].` });
				return fNext();
			}

			let tmpComprehension = {};
			let tmpAnticipate = tmpPict.newAnticipate();

			// `let`/`const` give each iteration its own binding, so the task
			// closures each capture the correct file (no wrapper IIFE needed).
			for (const tmpFileName of tmpTabularFiles)
			{
				const tmpFilePath = libPath.join(tmpFolderPath, tmpFileName);
				const tmpExtension = libPath.extname(tmpFileName).toLowerCase();
				tmpAnticipate.anticipate(
					(fDone) =>
					{
						processTabularFile(tmpPict, tmpFilePath, tmpFileName, tmpExtension, tmpBody, tmpComprehension, fDone);
					});
			}

			tmpAnticipate.wait(
				(pError) =>
				{
					if (pError)
					{
						tmpPict.log.error(`Error processing tabular folder: ${pError}`, pError);
						pResponse.send(500, { Error: `Error processing tabular folder: ${pError.message || pError}` });
						return fNext();
					}

					let tmpEntityCount = Object.keys(tmpComprehension).length;
					tmpPict.log.info(`Entity From Tabular Folder: Generated comprehension with ${tmpEntityCount} entity(ies) from ${tmpTabularFiles.length} file(s).`);
					pResponse.send(200, tmpComprehension);
					return fNext();
				});
		});
};
|
|
105
|
+
|
|
106
|
+
/**
 * Process a single tabular file (JSON array, CSV or TSV) and merge the
 * resulting per-file comprehension into the shared folder comprehension.
 *
 * @param {object} pPict - Pict/Fable service container (provides FilePersistence, log, MeadowIntegrationTabularTransform)
 * @param {string} pFilePath - Full path of the file to process
 * @param {string} pFileName - Base name of the file (used for logs and dataset naming)
 * @param {string} pExt - Lowercased file extension ('.json', '.csv' or '.tsv')
 * @param {object} pOptions - Request body options (Entity, MappingConfiguration)
 * @param {object} pComprehension - Shared comprehension accumulated across files (mutated in place)
 * @param {function} fCallback - Completion callback; called exactly once, always without error
 *                               (per-file problems are logged and the file is skipped)
 */
function processTabularFile(pPict, pFilePath, pFileName, pExt, pOptions, pComprehension, fCallback)
{
	let tmpMappingOutcome = pPict.MeadowIntegrationTabularTransform.newMappingOutcomeObject();

	if (pOptions.Entity)
	{
		tmpMappingOutcome.UserConfiguration.Entity = pOptions.Entity;
	}
	if (pOptions.MappingConfiguration && (typeof(pOptions.MappingConfiguration) === 'object'))
	{
		tmpMappingOutcome.ExplicitConfiguration = pOptions.MappingConfiguration;
	}

	if (pExt === '.json')
	{
		let tmpRawContents;
		try
		{
			tmpRawContents = pPict.FilePersistence.readFileSync(pFilePath, { encoding: 'utf8' });
			let tmpRecords = JSON.parse(tmpRawContents);
			if (!Array.isArray(tmpRecords))
			{
				pPict.log.warn(`File [${pFileName}] is not a JSON array. Skipping.`);
				return fCallback();
			}

			for (let i = 0; i < tmpRecords.length; i++)
			{
				let tmpIncomingRecord = tmpRecords[i];
				tmpMappingOutcome.ParsedRowCount++;

				if (tmpIncomingRecord)
				{
					// Lazily derive the mapping configuration from the first truthy record.
					if (!tmpMappingOutcome.ImplicitConfiguration)
					{
						tmpMappingOutcome.ImplicitConfiguration = pPict.MeadowIntegrationTabularTransform.generateMappingConfigurationPrototype(libPath.basename(pFileName, pExt), tmpIncomingRecord);
						mergeConfiguration(tmpMappingOutcome);
					}
					pPict.MeadowIntegrationTabularTransform.addRecordToComprehension(tmpIncomingRecord, tmpMappingOutcome);
				}
			}

			mergeIntoMainComprehension(pComprehension, tmpMappingOutcome.Comprehension);
			return fCallback();
		}
		catch (pError)
		{
			pPict.log.error(`Error parsing JSON file [${pFileName}]: ${pError.message}`);
			return fCallback();
		}
	}
	else
	{
		let tmpCSVParser = pPict.instantiateServiceProviderWithoutRegistration('CSVParser');
		tmpCSVParser.Delimiter = (pExt === '.tsv') ? '\t' : ',';

		const libReadline = require('readline');
		const tmpReadStream = libFS.createReadStream(pFilePath);
		const tmpReadline = libReadline.createInterface(
			{
				input: tmpReadStream,
				crlfDelay: Infinity,
			});

		// FIX: readline.Interface does not emit 'error' events, so the original
		// handler attached to the interface was dead code and a failing read
		// stream would crash the process with an unhandled 'error'. Listen on
		// the stream itself instead, and guard so fCallback fires exactly once
		// even when both 'error' and 'close' occur.
		let tmpCompleted = false;
		const finishOnce = () =>
		{
			if (!tmpCompleted)
			{
				tmpCompleted = true;
				return fCallback();
			}
		};

		tmpReadline.on('line',
			(pLine) =>
			{
				const tmpIncomingRecord = tmpCSVParser.parseCSVLine(pLine);
				tmpMappingOutcome.ParsedRowCount++;

				if (tmpIncomingRecord)
				{
					// Lazily derive the mapping configuration from the first parsed record.
					if (!tmpMappingOutcome.ImplicitConfiguration)
					{
						tmpMappingOutcome.ImplicitConfiguration = pPict.MeadowIntegrationTabularTransform.generateMappingConfigurationPrototype(libPath.basename(pFileName, pExt), tmpIncomingRecord);
						mergeConfiguration(tmpMappingOutcome);
					}
					pPict.MeadowIntegrationTabularTransform.addRecordToComprehension(tmpIncomingRecord, tmpMappingOutcome);
				}
			});

		tmpReadline.on('close',
			() =>
			{
				mergeIntoMainComprehension(pComprehension, tmpMappingOutcome.Comprehension);
				return finishOnce();
			});

		tmpReadStream.on('error',
			(pError) =>
			{
				pPict.log.error(`Error reading file [${pFileName}]: ${pError.message}`);
				tmpReadline.close();
				return finishOnce();
			});
	}
}
|
|
209
|
+
|
|
210
|
+
/**
 * Compose the effective mapping configuration for an outcome object by
 * layering (in ascending precedence) the implicit configuration derived from
 * the data, the caller-supplied explicit configuration (when it is a real
 * object), and the per-request user configuration. Also guarantees a default
 * GUIDName of `GUID<Entity>` and an entity bucket in the comprehension.
 *
 * Mutates pMappingOutcome in place.
 */
function mergeConfiguration(pMappingOutcome)
{
	const tmpHasExplicitConfig = pMappingOutcome.ExplicitConfiguration && (typeof(pMappingOutcome.ExplicitConfiguration) == 'object');

	const tmpConfigurationLayers = tmpHasExplicitConfig
		? [pMappingOutcome.ImplicitConfiguration, pMappingOutcome.ExplicitConfiguration, pMappingOutcome.UserConfiguration]
		: [pMappingOutcome.ImplicitConfiguration, pMappingOutcome.UserConfiguration];

	// Later layers win on key collisions.
	pMappingOutcome.Configuration = Object.assign({}, ...tmpConfigurationLayers);

	if (!('GUIDName' in pMappingOutcome.Configuration))
	{
		pMappingOutcome.Configuration.GUIDName = `GUID${pMappingOutcome.Configuration.Entity}`;
	}

	if (!(pMappingOutcome.Configuration.Entity in pMappingOutcome.Comprehension))
	{
		pMappingOutcome.Comprehension[pMappingOutcome.Configuration.Entity] = {};
	}
}
|
|
231
|
+
|
|
232
|
+
/**
 * Merge every entity bucket from a per-file comprehension into the main
 * comprehension. Records from the file shallow-overwrite records with the
 * same key already present in the main comprehension.
 *
 * Mutates pMainComprehension in place.
 */
function mergeIntoMainComprehension(pMainComprehension, pFileComprehension)
{
	for (const [tmpEntity, tmpEntityRecords] of Object.entries(pFileComprehension))
	{
		if (!pMainComprehension[tmpEntity])
		{
			pMainComprehension[tmpEntity] = {};
		}
		Object.assign(pMainComprehension[tmpEntity], tmpEntityRecords);
	}
}
|
|
@@ -0,0 +1,213 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - JSON Array Transform
 *
 * POST /1.0/Retold/MeadowIntegration/JSONArray/Transform
 * Transform a JSON Array file into a comprehension.
 *
 * POST /1.0/Retold/MeadowIntegration/JSONArray/TransformRecords
 * Transform an in-memory JSON array into a comprehension (no file needed).
 *
 * Body (file-based): { "File": "/path/to/array.json", ...mapping options... }
 * Body (in-memory): { "Records": [{...}, ...], ...mapping options... }
 * Returns: Comprehension JSON (or extended state if Extended=true)
 */
const libPath = require('path');

module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;

	// Shared responder: full mapping state when Extended is truthy, otherwise
	// just the comprehension.
	const sendTransformOutcome = (pResponse, pBody, pResult) =>
	{
		if (pBody.Extended)
		{
			pResponse.send(200, pResult.MappingOutcome);
		}
		else
		{
			pResponse.send(200, pResult.MappingOutcome.Comprehension);
		}
	};

	// File-based transform
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/JSONArray/Transform`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.File || (typeof(tmpBody.File) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid File path provided in request body.' });
				return fNext();
			}

			let tmpInputFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.File);

			if (!tmpPict.FilePersistence.existsSync(tmpInputFilePath))
			{
				pResponse.send(404, { Error: `File [${tmpInputFilePath}] does not exist.` });
				return fNext();
			}

			let tmpJSONArrayRecords;
			try
			{
				tmpJSONArrayRecords = JSON.parse(tmpPict.FilePersistence.readFileSync(tmpInputFilePath, { encoding: 'utf8' }));
			}
			catch (pError)
			{
				pResponse.send(400, { Error: `Error parsing JSON file [${tmpInputFilePath}]: ${pError.message}` });
				return fNext();
			}

			if (!Array.isArray(tmpJSONArrayRecords))
			{
				pResponse.send(400, { Error: `File [${tmpInputFilePath}] does not contain a valid JSON array.` });
				return fNext();
			}

			let tmpResult = processJSONArrayTransform(tmpPict, tmpBody, tmpJSONArrayRecords, libPath.basename(tmpInputFilePath));
			if (tmpResult.Error)
			{
				pResponse.send(400, tmpResult);
				return fNext();
			}

			sendTransformOutcome(pResponse, tmpBody, tmpResult);
			return fNext();
		});

	// In-memory records transform
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/JSONArray/TransformRecords`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.Records || !Array.isArray(tmpBody.Records))
			{
				pResponse.send(400, { Error: 'No valid Records array provided in request body.' });
				return fNext();
			}

			if (tmpBody.Records.length < 1)
			{
				pResponse.send(400, { Error: 'Records array is empty.' });
				return fNext();
			}

			let tmpResult = processJSONArrayTransform(tmpPict, tmpBody, tmpBody.Records, tmpBody.Entity || 'Records');
			if (tmpResult.Error)
			{
				pResponse.send(400, tmpResult);
				return fNext();
			}

			sendTransformOutcome(pResponse, tmpBody, tmpResult);
			return fNext();
		});
};
|
|
113
|
+
|
|
114
|
+
/**
 * Transform an array of plain-object records into a Meadow comprehension.
 *
 * Configuration is layered (ascending precedence): the implicit configuration
 * derived from the first truthy record, the ExplicitConfiguration from the
 * request body (when it is a real object), then the per-request user overrides.
 *
 * @param {object} pPict - Pict/Fable service container (MeadowIntegrationTabularTransform, ExpressionParser, log)
 * @param {object} pOptions - Request body options (Entity, GUIDName, GUIDTemplate, Mappings, MappingConfiguration, IncomingComprehension)
 * @param {Array}  pRecords - Records to transform; falsy entries are counted but skipped
 * @param {string} pDatasetName - Dataset label used for implicit configuration and logging
 * @returns {object} { MappingOutcome } - note: never returns an Error key, so callers'
 *                   tmpResult.Error checks are currently dead paths
 */
function processJSONArrayTransform(pPict, pOptions, pRecords, pDatasetName)
{
	let tmpMappingOutcome = pPict.MeadowIntegrationTabularTransform.newMappingOutcomeObject();

	// Per-request user overrides; these win over implicit and explicit config.
	if (pOptions.Entity)
	{
		tmpMappingOutcome.UserConfiguration.Entity = pOptions.Entity;
	}
	if (pOptions.GUIDName)
	{
		tmpMappingOutcome.UserConfiguration.GUIDName = pOptions.GUIDName;
	}
	if (pOptions.GUIDTemplate)
	{
		tmpMappingOutcome.UserConfiguration.GUIDTemplate = pOptions.GUIDTemplate;
	}
	if (pOptions.Mappings && (typeof(pOptions.Mappings) === 'object'))
	{
		tmpMappingOutcome.UserConfiguration.Mappings = pOptions.Mappings;
	}
	if (pOptions.MappingConfiguration && (typeof(pOptions.MappingConfiguration) === 'object'))
	{
		tmpMappingOutcome.ExplicitConfiguration = pOptions.MappingConfiguration;
	}
	if (pOptions.IncomingComprehension && (typeof(pOptions.IncomingComprehension) === 'object'))
	{
		// Deep-copy so records are merged into a fresh comprehension rather
		// than mutating the caller's object.
		tmpMappingOutcome.ExistingComprehension = pOptions.IncomingComprehension;
		tmpMappingOutcome.Comprehension = JSON.parse(JSON.stringify(pOptions.IncomingComprehension));
	}

	for (let i = 0; i < pRecords.length; i++)
	{
		const tmpIncomingRecord = pRecords[i];
		// Counts every entry, including falsy ones that are skipped below.
		tmpMappingOutcome.ParsedRowCount++;

		if (tmpIncomingRecord)
		{
			// One-time lazy setup keyed off the first truthy record.
			// NOTE(review): if pRecords[0] is falsy this setup is simply deferred;
			// whether newMappingOutcomeObject() pre-seeds a usable Configuration
			// before setup runs is not visible here — confirm in the transform service.
			if (!tmpMappingOutcome.ImplicitConfiguration)
			{
				tmpMappingOutcome.ImplicitConfiguration = pPict.MeadowIntegrationTabularTransform.generateMappingConfigurationPrototype(pDatasetName, tmpIncomingRecord);

				if ((!tmpMappingOutcome.ExplicitConfiguration) || (typeof(tmpMappingOutcome.ExplicitConfiguration) != 'object'))
				{
					tmpMappingOutcome.Configuration = Object.assign({}, tmpMappingOutcome.ImplicitConfiguration, tmpMappingOutcome.UserConfiguration);
				}
				else
				{
					tmpMappingOutcome.Configuration = Object.assign({}, tmpMappingOutcome.ImplicitConfiguration, tmpMappingOutcome.ExplicitConfiguration, tmpMappingOutcome.UserConfiguration);
				}

				// Default GUID column name follows the `GUID<Entity>` convention.
				if (!('GUIDName' in tmpMappingOutcome.Configuration))
				{
					tmpMappingOutcome.Configuration.GUIDName = `GUID${tmpMappingOutcome.Configuration.Entity}`;
				}

				// Ensure the entity has a bucket in the comprehension.
				if (!(tmpMappingOutcome.Configuration.Entity in tmpMappingOutcome.Comprehension))
				{
					tmpMappingOutcome.Comprehension[tmpMappingOutcome.Configuration.Entity] = {};
				}
			}

			// Per-record solver context; solvers may write into
			// NewRecordPrototype / NewRecordsGUIDUniqueness below.
			let tmpMappingRecordSolution = (
			{
				IncomingRecord: tmpIncomingRecord,
				MappingConfiguration: tmpMappingOutcome.Configuration,
				MappingOutcome: tmpMappingOutcome,
				RowIndex: tmpMappingOutcome.ParsedRowCount,
				NewRecordsGUIDUniqueness: [],
				NewRecordPrototype: {},
				Fable: pPict,
				Pict: pPict,
				AppData: pPict.AppData
			});

			let tmpSolverResultsObject = {};
			if (tmpMappingOutcome.Configuration.Solvers && Array.isArray(tmpMappingOutcome.Configuration.Solvers))
			{
				// Run configured expression solvers in order against this record.
				for (let j = 0; j < tmpMappingOutcome.Configuration.Solvers.length; j++)
				{
					pPict.ExpressionParser.solve(tmpMappingOutcome.Configuration.Solvers[j], tmpMappingRecordSolution, tmpSolverResultsObject, pPict.manifest, tmpMappingRecordSolution);
				}
			}

			if (tmpMappingOutcome.Configuration.MultipleGUIDUniqueness && tmpMappingRecordSolution.NewRecordsGUIDUniqueness.length > 0)
			{
				// Fan the record out once per solver-produced GUID uniqueness key.
				for (let j = 0; j < tmpMappingRecordSolution.NewRecordsGUIDUniqueness.length; j++)
				{
					pPict.MeadowIntegrationTabularTransform.addRecordToComprehension(tmpIncomingRecord, tmpMappingOutcome, tmpMappingRecordSolution.NewRecordPrototype, tmpMappingRecordSolution.NewRecordsGUIDUniqueness[j]);
				}
			}
			else if (!tmpMappingOutcome.Configuration.MultipleGUIDUniqueness)
			{
				pPict.MeadowIntegrationTabularTransform.addRecordToComprehension(tmpIncomingRecord, tmpMappingOutcome, tmpMappingRecordSolution.NewRecordPrototype);
			}
			// else: MultipleGUIDUniqueness set but no uniqueness keys produced —
			// the record is intentionally dropped.
		}
	}

	pPict.log.info(`JSON Array Transform: Parsed ${tmpMappingOutcome.ParsedRowCount} records from [${pDatasetName}].`);
	return { MappingOutcome: tmpMappingOutcome };
}
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - TSV Check
 *
 * POST /1.0/Retold/MeadowIntegration/TSV/Check
 *
 * Analyze a TSV file for statistics.
 *
 * Body: { "File": "/path/to/file.tsv", "Records": false }
 * Returns: Statistics JSON (with full Records array when Records=true)
 */
const libFS = require('fs');
const libReadline = require('readline');

module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;

	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/TSV/Check`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			// Guard: a string file path is required.
			if (!tmpBody.File || (typeof(tmpBody.File) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid File path provided in request body.' });
				return fNext();
			}

			let tmpInputFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.File);

			if (!tmpPict.FilePersistence.existsSync(tmpInputFilePath))
			{
				pResponse.send(404, { Error: `File [${tmpInputFilePath}] does not exist.` });
				return fNext();
			}

			let tmpCSVParser = tmpPict.instantiateServiceProviderWithoutRegistration('CSVParser');
			tmpCSVParser.Delimiter = '\t';

			let tmpStatistics = tmpPict.MeadowIntegrationTabularCheck.newStatisticsObject(tmpInputFilePath);
			let tmpStoreFullRecord = (tmpBody.Records === true);

			if (tmpStoreFullRecord)
			{
				tmpStatistics.Records = [];
			}

			// FIX: readline.Interface does not emit 'error' events, so the original
			// handler attached to the interface was dead code and a failing read
			// stream would crash the process with an unhandled 'error'. Listen on
			// the stream itself, and guard so exactly one response is sent even if
			// both 'error' and 'close' fire.
			let tmpResponseSent = false;
			const tmpReadStream = libFS.createReadStream(tmpInputFilePath);
			const tmpReadline = libReadline.createInterface(
				{
					input: tmpReadStream,
					crlfDelay: Infinity,
				});

			tmpReadline.on('line',
				(pLine) =>
				{
					const tmpRecord = tmpCSVParser.parseCSVLine(pLine);
					if (tmpRecord)
					{
						tmpPict.MeadowIntegrationTabularCheck.collectStatistics(tmpRecord, tmpStatistics, tmpStoreFullRecord);
					}
				});

			tmpReadline.on('close',
				() =>
				{
					if (tmpResponseSent)
					{
						return;
					}
					tmpResponseSent = true;
					tmpPict.log.info(`TSV Check: ${tmpStatistics.RowCount} rows, ${tmpStatistics.ColumnCount} columns in [${tmpInputFilePath}].`);
					pResponse.send(200, tmpStatistics);
					return fNext();
				});

			tmpReadStream.on('error',
				(pError) =>
				{
					if (tmpResponseSent)
					{
						return;
					}
					tmpResponseSent = true;
					tmpPict.log.error(`TSV Check error reading file [${tmpInputFilePath}]: ${pError}`, pError);
					pResponse.send(500, { Error: `Error reading TSV file: ${pError.message}` });
					tmpReadline.close();
					return fNext();
				});
		});
};
|
|
@@ -0,0 +1,166 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - TSV Transform
 *
 * POST /1.0/Retold/MeadowIntegration/TSV/Transform
 *
 * Transform a TSV file into a comprehension using mapping configuration.
 *
 * Body: same as CSV Transform but for TSV files.
 * Returns: Comprehension JSON (or extended state if Extended=true)
 */
const libFS = require('fs');
const libPath = require('path');
const libReadline = require('readline');

module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;

	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/TSV/Transform`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			// Guard: a string file path is required.
			if (!tmpBody.File || (typeof(tmpBody.File) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid File path provided in request body.' });
				return fNext();
			}

			let tmpInputFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.File);

			if (!tmpPict.FilePersistence.existsSync(tmpInputFilePath))
			{
				pResponse.send(404, { Error: `File [${tmpInputFilePath}] does not exist.` });
				return fNext();
			}

			let tmpCSVParser = tmpPict.instantiateServiceProviderWithoutRegistration('CSVParser');
			tmpCSVParser.Delimiter = '\t';

			let tmpMappingOutcome = tmpPict.MeadowIntegrationTabularTransform.newMappingOutcomeObject();

			// Per-request user overrides; these win over implicit/explicit config.
			if (tmpBody.Entity)
			{
				tmpMappingOutcome.UserConfiguration.Entity = tmpBody.Entity;
			}
			if (tmpBody.GUIDName)
			{
				tmpMappingOutcome.UserConfiguration.GUIDName = tmpBody.GUIDName;
			}
			if (tmpBody.GUIDTemplate)
			{
				tmpMappingOutcome.UserConfiguration.GUIDTemplate = tmpBody.GUIDTemplate;
			}
			if (tmpBody.Mappings && (typeof(tmpBody.Mappings) === 'object'))
			{
				tmpMappingOutcome.UserConfiguration.Mappings = tmpBody.Mappings;
			}
			if (tmpBody.MappingConfiguration && (typeof(tmpBody.MappingConfiguration) === 'object'))
			{
				tmpMappingOutcome.ExplicitConfiguration = tmpBody.MappingConfiguration;
			}
			if (tmpBody.IncomingComprehension && (typeof(tmpBody.IncomingComprehension) === 'object'))
			{
				// Deep-copy so the caller's incoming comprehension is not mutated.
				tmpMappingOutcome.ExistingComprehension = tmpBody.IncomingComprehension;
				tmpMappingOutcome.Comprehension = JSON.parse(JSON.stringify(tmpBody.IncomingComprehension));
			}

			// FIX: readline.Interface does not emit 'error' events, so the original
			// handler attached to the interface was dead code and a failing read
			// stream would crash the process with an unhandled 'error'. Listen on
			// the stream itself, and guard so exactly one response is sent even if
			// both 'error' and 'close' fire.
			let tmpResponseSent = false;
			const tmpReadStream = libFS.createReadStream(tmpInputFilePath);
			const tmpReadline = libReadline.createInterface(
				{
					input: tmpReadStream,
					crlfDelay: Infinity,
				});

			tmpReadline.on('line',
				(pLine) =>
				{
					const tmpIncomingRecord = tmpCSVParser.parseCSVLine(pLine);
					tmpMappingOutcome.ParsedRowCount++;

					if (tmpIncomingRecord)
					{
						// One-time lazy configuration setup from the first parsed record.
						if (!tmpMappingOutcome.ImplicitConfiguration)
						{
							tmpMappingOutcome.ImplicitConfiguration = tmpPict.MeadowIntegrationTabularTransform.generateMappingConfigurationPrototype(libPath.basename(tmpInputFilePath), tmpIncomingRecord);

							if ((!tmpMappingOutcome.ExplicitConfiguration) || (typeof(tmpMappingOutcome.ExplicitConfiguration) != 'object'))
							{
								tmpMappingOutcome.Configuration = Object.assign({}, tmpMappingOutcome.ImplicitConfiguration, tmpMappingOutcome.UserConfiguration);
							}
							else
							{
								tmpMappingOutcome.Configuration = Object.assign({}, tmpMappingOutcome.ImplicitConfiguration, tmpMappingOutcome.ExplicitConfiguration, tmpMappingOutcome.UserConfiguration);
							}

							if (!('GUIDName' in tmpMappingOutcome.Configuration))
							{
								tmpMappingOutcome.Configuration.GUIDName = `GUID${tmpMappingOutcome.Configuration.Entity}`;
							}

							if (!(tmpMappingOutcome.Configuration.Entity in tmpMappingOutcome.Comprehension))
							{
								tmpMappingOutcome.Comprehension[tmpMappingOutcome.Configuration.Entity] = {};
							}
						}

						// Per-record solver context; solvers may populate
						// NewRecordPrototype / NewRecordsGUIDUniqueness.
						let tmpMappingRecordSolution = (
						{
							IncomingRecord: tmpIncomingRecord,
							MappingConfiguration: tmpMappingOutcome.Configuration,
							MappingOutcome: tmpMappingOutcome,
							RowIndex: tmpMappingOutcome.ParsedRowCount,
							NewRecordsGUIDUniqueness: [],
							NewRecordPrototype: {},
							Fable: tmpPict,
							Pict: tmpPict,
							AppData: tmpPict.AppData
						});

						let tmpSolverResultsObject = {};
						if (tmpMappingOutcome.Configuration.Solvers && Array.isArray(tmpMappingOutcome.Configuration.Solvers))
						{
							for (let i = 0; i < tmpMappingOutcome.Configuration.Solvers.length; i++)
							{
								tmpPict.ExpressionParser.solve(tmpMappingOutcome.Configuration.Solvers[i], tmpMappingRecordSolution, tmpSolverResultsObject, tmpPict.manifest, tmpMappingRecordSolution);
							}
						}

						if (tmpMappingOutcome.Configuration.MultipleGUIDUniqueness && tmpMappingRecordSolution.NewRecordsGUIDUniqueness.length > 0)
						{
							// Fan the record out once per solver-produced uniqueness key.
							for (let i = 0; i < tmpMappingRecordSolution.NewRecordsGUIDUniqueness.length; i++)
							{
								tmpPict.MeadowIntegrationTabularTransform.addRecordToComprehension(tmpIncomingRecord, tmpMappingOutcome, tmpMappingRecordSolution.NewRecordPrototype, tmpMappingRecordSolution.NewRecordsGUIDUniqueness[i]);
							}
						}
						else if (!tmpMappingOutcome.Configuration.MultipleGUIDUniqueness)
						{
							tmpPict.MeadowIntegrationTabularTransform.addRecordToComprehension(tmpIncomingRecord, tmpMappingOutcome, tmpMappingRecordSolution.NewRecordPrototype);
						}
					}
				});

			tmpReadline.on('close',
				() =>
				{
					if (tmpResponseSent)
					{
						return;
					}
					tmpResponseSent = true;
					tmpPict.log.info(`TSV Transform: Parsed ${tmpMappingOutcome.ParsedRowCount} rows from [${tmpInputFilePath}].`);
					if (tmpBody.Extended)
					{
						pResponse.send(200, tmpMappingOutcome);
					}
					else
					{
						pResponse.send(200, tmpMappingOutcome.Comprehension);
					}
					return fNext();
				});

			tmpReadStream.on('error',
				(pError) =>
				{
					if (tmpResponseSent)
					{
						return;
					}
					tmpResponseSent = true;
					tmpPict.log.error(`TSV Transform error reading file [${tmpInputFilePath}]: ${pError}`, pError);
					pResponse.send(500, { Error: `Error reading TSV file: ${pError.message}` });
					tmpReadline.close();
					return fNext();
				});
		});
};
|