retold-data-service 2.0.13 → 2.0.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/example_applications/data-cloner/data/cloned.sqlite +0 -0
- package/example_applications/data-cloner/data/cloned.sqlite-shm +0 -0
- package/example_applications/data-cloner/data/cloned.sqlite-wal +0 -0
- package/example_applications/data-cloner/data-cloner-web.html +935 -0
- package/example_applications/data-cloner/data-cloner.js +1047 -0
- package/example_applications/data-cloner/package.json +19 -0
- package/package.json +13 -9
- package/source/Retold-Data-Service.js +225 -73
- package/source/services/Retold-Data-Service-ConnectionManager.js +277 -0
- package/source/services/Retold-Data-Service-MeadowEndpoints.js +217 -0
- package/source/services/Retold-Data-Service-ModelManager.js +335 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-CSVCheck.js +85 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-CSVTransform.js +180 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionIntersect.js +153 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionPush.js +190 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionToArray.js +113 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionToCSV.js +211 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-EntityFromTabularFolder.js +244 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-JSONArrayTransform.js +213 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-TSVCheck.js +80 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-TSVTransform.js +166 -0
- package/source/services/meadow-integration/Retold-Data-Service-MeadowIntegration.js +113 -0
- package/source/services/migration-manager/MigrationManager-Command-Connections.js +220 -0
- package/source/services/migration-manager/MigrationManager-Command-DiffMigrate.js +169 -0
- package/source/services/migration-manager/MigrationManager-Command-Schemas.js +532 -0
- package/source/services/migration-manager/MigrationManager-Command-WebUI.js +123 -0
- package/source/services/migration-manager/Retold-Data-Service-MigrationManager.js +357 -0
- package/source/services/stricture/Retold-Data-Service-Stricture.js +303 -0
- package/source/services/stricture/Stricture-Command-Compile.js +39 -0
- package/source/services/stricture/Stricture-Command-Generate-AuthorizationChart.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-DictionaryCSV.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-LaTeX.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Markdown.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Meadow.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-ModelGraph.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-MySQL.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-MySQLMigrate.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Pict.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-TestObjectContainers.js +14 -0
- package/test/RetoldDataService_tests.js +161 -1
package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionIntersect.js
ADDED
|
@@ -0,0 +1,153 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - Comprehension Intersect
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/Intersect
 * Merge two in-memory comprehension objects.
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/IntersectFiles
 * Merge two comprehension files.
 *
 * Body (in-memory): { "PrimaryComprehension": {...}, "SecondaryComprehension": {...}, "Entity": "..." }
 * Body (file): { "File": "/path", "IntersectFile": "/path", "Entity": "..." }
 * Returns: Merged comprehension JSON
 */
module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;

	// In-memory intersection: both comprehensions arrive fully formed in the request body.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/Intersect`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			// Both comprehensions must be present and object-shaped before merging.
			if (!tmpBody.PrimaryComprehension || (typeof(tmpBody.PrimaryComprehension) !== 'object'))
			{
				pResponse.send(400, { Error: 'No valid PrimaryComprehension object provided in request body.' });
				return fNext();
			}
			if (!tmpBody.SecondaryComprehension || (typeof(tmpBody.SecondaryComprehension) !== 'object'))
			{
				pResponse.send(400, { Error: 'No valid SecondaryComprehension object provided in request body.' });
				return fNext();
			}

			let tmpResult = processComprehensionIntersect(tmpPict, tmpBody.PrimaryComprehension, tmpBody.SecondaryComprehension, tmpBody.Entity);
			if (tmpResult.Error)
			{
				pResponse.send(400, tmpResult);
				return fNext();
			}

			pResponse.send(200, tmpResult.Comprehension);
			return fNext();
		});

	// File-based intersection: comprehensions are loaded from JSON files on disk.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/IntersectFiles`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.File || (typeof(tmpBody.File) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid File path provided in request body.' });
				return fNext();
			}
			if (!tmpBody.IntersectFile || (typeof(tmpBody.IntersectFile) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid IntersectFile path provided in request body.' });
				return fNext();
			}

			let tmpPrimaryFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.File);
			let tmpSecondaryFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.IntersectFile);

			if (!tmpPict.FilePersistence.existsSync(tmpPrimaryFilePath))
			{
				pResponse.send(404, { Error: `Primary file [${tmpPrimaryFilePath}] does not exist.` });
				return fNext();
			}
			if (!tmpPict.FilePersistence.existsSync(tmpSecondaryFilePath))
			{
				pResponse.send(404, { Error: `Secondary file [${tmpSecondaryFilePath}] does not exist.` });
				return fNext();
			}

			let tmpPrimaryComprehension;
			let tmpSecondaryComprehension;

			// Parse each file in its own try/catch so the 400 identifies which file is malformed.
			try
			{
				tmpPrimaryComprehension = JSON.parse(tmpPict.FilePersistence.readFileSync(tmpPrimaryFilePath));
			}
			catch (pError)
			{
				pResponse.send(400, { Error: `Error parsing primary comprehension file: ${pError.message}` });
				return fNext();
			}

			try
			{
				tmpSecondaryComprehension = JSON.parse(tmpPict.FilePersistence.readFileSync(tmpSecondaryFilePath));
			}
			catch (pError)
			{
				pResponse.send(400, { Error: `Error parsing secondary comprehension file: ${pError.message}` });
				return fNext();
			}

			let tmpResult = processComprehensionIntersect(tmpPict, tmpPrimaryComprehension, tmpSecondaryComprehension, tmpBody.Entity);
			if (tmpResult.Error)
			{
				pResponse.send(400, tmpResult);
				return fNext();
			}

			pResponse.send(200, tmpResult.Comprehension);
			return fNext();
		});
};
|
|
111
|
+
|
|
112
|
+
/**
 * Merge one entity's records from a secondary comprehension into a deep copy
 * of a primary comprehension.
 *
 * @param {object} pPict - Pict instance (used for logging only).
 * @param {object} pPrimaryComprehension - Base comprehension; never mutated.
 * @param {object} pSecondaryComprehension - Comprehension whose records are merged in.
 * @param {string} [pEntity] - Entity to merge; inferred from the primary's first key when omitted.
 * @returns {object} { Comprehension: merged } on success, { Error: message } on failure.
 */
function processComprehensionIntersect(pPict, pPrimaryComprehension, pSecondaryComprehension, pEntity)
{
	let tmpEntity = pEntity;

	if (!tmpEntity)
	{
		const tmpCandidateEntities = Object.keys(pPrimaryComprehension);
		if (tmpCandidateEntities.length < 1)
		{
			return { Error: 'No entity specified and no entities found in the primary comprehension.' };
		}
		tmpEntity = tmpCandidateEntities[0];
		pPict.log.info(`No entity specified. Using [${tmpEntity}] as the inferred entity.`);
	}

	// Deep-copy the primary comprehension so the caller's object is never mutated.
	const tmpMergedComprehension = JSON.parse(JSON.stringify(pPrimaryComprehension));
	if (!tmpMergedComprehension[tmpEntity])
	{
		tmpMergedComprehension[tmpEntity] = {};
	}

	const tmpSecondaryRecords = pSecondaryComprehension[tmpEntity] || {};
	const tmpSecondaryGUIDs = Object.keys(tmpSecondaryRecords);
	for (const tmpRecordGUID of tmpSecondaryGUIDs)
	{
		const tmpExistingRecord = tmpMergedComprehension[tmpEntity][tmpRecordGUID];
		if (tmpExistingRecord)
		{
			// Secondary values win per-property for records present in both comprehensions.
			tmpMergedComprehension[tmpEntity][tmpRecordGUID] = Object.assign(tmpExistingRecord, tmpSecondaryRecords[tmpRecordGUID]);
		}
		else
		{
			tmpMergedComprehension[tmpEntity][tmpRecordGUID] = tmpSecondaryRecords[tmpRecordGUID];
		}
	}

	pPict.log.info(`Comprehension Intersect: Merged ${tmpSecondaryGUIDs.length} records for entity [${tmpEntity}].`);
	return { Comprehension: tmpMergedComprehension };
}
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - Comprehension Push
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/Push
 * Push an in-memory comprehension to Meadow REST APIs.
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/PushFile
 * Push a comprehension file to Meadow REST APIs.
 *
 * Body (in-memory): { "Comprehension": {...}, "GUIDPrefix": "INTG-", "EntityGUIDPrefix": "E-", "ServerURL": "..." }
 * Body (file): { "File": "/path", "GUIDPrefix": "INTG-", "ServerURL": "..." }
 * Returns: { "Success": true, "EntitiesPushed": [...], "Message": "..." }
 */
module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;
	let tmpIntegrationAdapter = pIntegrationService.IntegrationAdapter;

	// In-memory push: the comprehension arrives fully formed in the request body.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/Push`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.Comprehension || (typeof(tmpBody.Comprehension) !== 'object'))
			{
				pResponse.send(400, { Error: 'No valid Comprehension object provided in request body.' });
				return fNext();
			}

			// The whole request body doubles as the options object (ServerURL, GUIDPrefix, ...).
			pushComprehension(tmpPict, tmpIntegrationAdapter, tmpBody.Comprehension, tmpBody,
				(pError, pResult) =>
				{
					if (pError)
					{
						pResponse.send(500, { Error: `Error pushing comprehension: ${pError.message || pError}` });
						return fNext();
					}
					pResponse.send(200, pResult);
					return fNext();
				});
		});

	// File-based push: the comprehension is loaded from a JSON file on disk.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/PushFile`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.File || (typeof(tmpBody.File) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid File path provided in request body.' });
				return fNext();
			}

			let tmpFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.File);

			if (!tmpPict.FilePersistence.existsSync(tmpFilePath))
			{
				pResponse.send(404, { Error: `File [${tmpFilePath}] does not exist.` });
				return fNext();
			}

			let tmpComprehension;
			try
			{
				tmpComprehension = JSON.parse(tmpPict.FilePersistence.readFileSync(tmpFilePath));
			}
			catch (pError)
			{
				pResponse.send(400, { Error: `Error parsing comprehension file: ${pError.message}` });
				return fNext();
			}

			pushComprehension(tmpPict, tmpIntegrationAdapter, tmpComprehension, tmpBody,
				(pError, pResult) =>
				{
					if (pError)
					{
						pResponse.send(500, { Error: `Error pushing comprehension: ${pError.message || pError}` });
						return fNext();
					}
					pResponse.send(200, pResult);
					return fNext();
				});
		});
};
|
|
88
|
+
|
|
89
|
+
/**
 * Build an abbreviation from the capital letters of a string
 * (e.g. "BookAuthor" -> "BA"). Returns 'UNK' when the string
 * contains no ASCII capital letters.
 */
function getCapitalLettersAsString(pInputString)
{
	const tmpCapitals = [];
	for (const tmpCharacter of pInputString)
	{
		if ((tmpCharacter >= 'A') && (tmpCharacter <= 'Z'))
		{
			tmpCapitals.push(tmpCharacter);
		}
	}
	return (tmpCapitals.length > 0) ? tmpCapitals.join('') : 'UNK';
}
|
|
95
|
+
|
|
96
|
+
/**
 * Push every entity in a comprehension to Meadow REST APIs through
 * integration adapters.
 *
 * @param {object} pPict - Pict instance (service manager, anticipate factory, log).
 * @param {function} pIntegrationAdapterClass - Adapter class; registered below as the 'IntegrationAdapter' service type.
 * @param {object} pComprehension - Map of entity name -> (record GUID -> record object).
 * @param {object} pOptions - Optional ServerURL, GUIDPrefix and EntityGUIDPrefix overrides.
 * @param {function} fCallback - Node-style callback(error, { Success, EntitiesPushed, Message }).
 */
function pushComprehension(pPict, pIntegrationAdapterClass, pComprehension, pOptions, fCallback)
{
	// Register the adapter class so getAdapter() can instantiate per-entity adapter services.
	pPict.serviceManager.addServiceType('IntegrationAdapter', pIntegrationAdapterClass);

	let tmpAnticipate = pPict.newAnticipate();
	let tmpEntitiesPushed = [];

	// Each top-level comprehension key is an entity that gets its own adapter.
	let tmpIntegrationAdapterSet = Object.keys(pComprehension);

	pPict.log.info(`Pushing comprehension with ${tmpIntegrationAdapterSet.length} entity(ies) to Meadow APIs...`);

	// Wiring step: instantiate adapters and enqueue the per-entity work.
	// NOTE(review): the nested anticipate() calls below are added while this
	// outer anticipate task is running -- this relies on the anticipate
	// library supporting dynamic task addition mid-flight; confirm against
	// the pict anticipate documentation.
	tmpAnticipate.anticipate(
		(fDone) =>
		{
			try
			{
				for (let i = 0; i < tmpIntegrationAdapterSet.length; i++)
				{
					let tmpAdapterKey = tmpIntegrationAdapterSet[i];

					// Instantiate (or fetch) the adapter for this entity; the short hash is the
					// entity name's capital letters (e.g. "BookAuthor" -> "BA").
					pIntegrationAdapterClass.getAdapter(pPict, tmpAdapterKey, getCapitalLettersAsString(tmpAdapterKey), { SimpleMarshal: true, ForceMarshal: true });

					// presumably getAdapter registers the instance in servicesMap -- TODO confirm.
					let tmpAdapter = pPict.servicesMap.IntegrationAdapter[tmpAdapterKey];

					// Apply per-request overrides from the API body, when provided.
					if (pOptions.ServerURL)
					{
						tmpAdapter.options.ServerURL = pOptions.ServerURL;
					}
					if (pOptions.GUIDPrefix)
					{
						tmpAdapter.AdapterSetGUIDMarshalPrefix = pOptions.GUIDPrefix;
					}
					if (pOptions.EntityGUIDPrefix)
					{
						tmpAdapter.EntityGUIDMarshalPrefix = pOptions.EntityGUIDPrefix;
					}

					let tmpDataMap = pComprehension[tmpAdapterKey];
					if (!tmpDataMap)
					{
						pPict.log.info(`No records to push for [${tmpAdapterKey}].`);
						continue;
					}

					tmpEntitiesPushed.push(tmpAdapterKey);

					// Stage 1 (per entity): queue every record into the adapter.
					// The IIFE closures pin the adapter/data map for this loop iteration.
					tmpAnticipate.anticipate(
						(function(pAdapter, pDataMap)
						{
							return function(fRecordDone)
							{
								for (const tmpRecord in pDataMap)
								{
									pAdapter.addSourceRecord(pDataMap[tmpRecord]);
								}
								return fRecordDone();
							};
						})(tmpAdapter, tmpDataMap));

					// Stage 2 (per entity): flush the queued records to the Meadow API.
					tmpAnticipate.anticipate(
						(function(pAdapter)
						{
							return function(fIntegrateDone)
							{
								pAdapter.integrateRecords(fIntegrateDone);
							};
						})(tmpAdapter));
				}
			}
			catch (pError)
			{
				pPict.log.error(`Error wiring up integration adapters: ${pError}`, pError);
				return fDone(pError);
			}

			return fDone();
		});

	tmpAnticipate.wait(
		(pError) =>
		{
			if (pError)
			{
				pPict.log.error(`Error pushing comprehension.`, pError);
				return fCallback(pError);
			}
			pPict.log.info(`Finished pushing comprehension for entities: [${tmpEntitiesPushed.join(', ')}].`);
			return fCallback(null,
				{
					Success: true,
					EntitiesPushed: tmpEntitiesPushed,
					Message: `Pushed comprehension for ${tmpEntitiesPushed.length} entity(ies).`
				});
		});
}
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - Comprehension To Array
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/ToArray
 * Convert an in-memory comprehension to an array.
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/ToArrayFromFile
 * Convert a comprehension file to an array.
 *
 * Body (in-memory): { "Comprehension": {...}, "Entity": "..." }
 * Body (file): { "File": "/path", "Entity": "..." }
 * Returns: JSON array of records
 */
module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;

	// In-memory conversion: the comprehension arrives fully formed in the request body.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/ToArray`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.Comprehension || (typeof(tmpBody.Comprehension) !== 'object'))
			{
				pResponse.send(400, { Error: 'No valid Comprehension object provided in request body.' });
				return fNext();
			}

			let tmpResult = processComprehensionToArray(tmpPict, tmpBody.Comprehension, tmpBody.Entity);
			if (tmpResult.Error)
			{
				pResponse.send(400, tmpResult);
				return fNext();
			}

			pResponse.send(200, tmpResult.RecordArray);
			return fNext();
		});

	// File-based conversion: the comprehension is loaded from a JSON file on disk.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/ToArrayFromFile`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.File || (typeof(tmpBody.File) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid File path provided in request body.' });
				return fNext();
			}

			let tmpFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.File);

			if (!tmpPict.FilePersistence.existsSync(tmpFilePath))
			{
				pResponse.send(404, { Error: `File [${tmpFilePath}] does not exist.` });
				return fNext();
			}

			let tmpComprehension;
			try
			{
				tmpComprehension = JSON.parse(tmpPict.FilePersistence.readFileSync(tmpFilePath));
			}
			catch (pError)
			{
				pResponse.send(400, { Error: `Error parsing comprehension file: ${pError.message}` });
				return fNext();
			}

			let tmpResult = processComprehensionToArray(tmpPict, tmpComprehension, tmpBody.Entity);
			if (tmpResult.Error)
			{
				pResponse.send(400, tmpResult);
				return fNext();
			}

			pResponse.send(200, tmpResult.RecordArray);
			return fNext();
		});
};
|
|
83
|
+
|
|
84
|
+
/**
 * Convert one entity of a comprehension (GUID -> record map) into an array
 * of record objects.
 *
 * @param {object} pPict - Pict instance (used for logging only).
 * @param {object} pComprehension - Map of entity name -> (record GUID -> record).
 * @param {string} [pEntity] - Entity to convert; inferred from the first key when omitted.
 * @returns {object} { RecordArray: [...] } on success, { Error: message } on failure.
 */
function processComprehensionToArray(pPict, pComprehension, pEntity)
{
	let tmpEntity = pEntity;

	if (!tmpEntity)
	{
		const tmpAvailableEntities = Object.keys(pComprehension);
		if (tmpAvailableEntities.length < 1)
		{
			return { Error: 'No entity specified and no entities found in the comprehension.' };
		}
		tmpEntity = tmpAvailableEntities[0];
		pPict.log.info(`No entity specified. Using [${tmpEntity}] as the inferred entity.`);
	}

	// Object.values preserves the same key order the original index loop produced.
	const tmpRecordArray = Object.values(pComprehension[tmpEntity] || {});

	pPict.log.info(`Comprehension ToArray: Converted ${tmpRecordArray.length} records for entity [${tmpEntity}].`);
	return { RecordArray: tmpRecordArray };
}
|
|
@@ -0,0 +1,211 @@
|
|
|
1
|
+
/**
 * MeadowIntegration Command - Comprehension To CSV
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/ToCSV
 * Convert an in-memory comprehension or array to CSV.
 *
 * POST /1.0/Retold/MeadowIntegration/Comprehension/ToCSVFromFile
 * Convert a comprehension/array file to CSV.
 *
 * Body (in-memory): { "Records": [...] } or { "Comprehension": {...}, "Entity": "..." }
 * Body (file): { "File": "/path", "Entity": "..." }
 * Returns: CSV text (Content-Type: text/csv)
 */
module.exports = function(pIntegrationService, pOratorServiceServer)
{
	let tmpPict = pIntegrationService.pict;

	// In-memory conversion: records (or a comprehension) arrive in the request body.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/ToCSV`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			// extractRecordArray handles both the Records-array and Comprehension shapes.
			let tmpRecordArray = extractRecordArray(tmpPict, tmpBody);
			if (tmpRecordArray.Error)
			{
				pResponse.send(400, tmpRecordArray);
				return fNext();
			}

			// sendRaw ships the CSV text verbatim (no JSON serialization).
			let tmpCSV = generateCSV(tmpRecordArray.Records);
			pResponse.setHeader('Content-Type', 'text/csv');
			pResponse.setHeader('Content-Disposition', 'attachment; filename="export.csv"');
			pResponse.sendRaw(200, tmpCSV);
			return fNext();
		});

	// File-based conversion: the record set is loaded from a JSON file on disk.
	pOratorServiceServer.postWithBodyParser(`${pIntegrationService.routePrefix}/Comprehension/ToCSVFromFile`,
		(pRequest, pResponse, fNext) =>
		{
			let tmpBody = pRequest.body || {};

			if (!tmpBody.File || (typeof(tmpBody.File) !== 'string'))
			{
				pResponse.send(400, { Error: 'No valid File path provided in request body.' });
				return fNext();
			}

			let tmpFilePath = tmpPict.FilePersistence.resolvePath(tmpBody.File);

			if (!tmpPict.FilePersistence.existsSync(tmpFilePath))
			{
				pResponse.send(404, { Error: `File [${tmpFilePath}] does not exist.` });
				return fNext();
			}

			let tmpRawRecordSet;
			try
			{
				tmpRawRecordSet = JSON.parse(tmpPict.FilePersistence.readFileSync(tmpFilePath));
			}
			catch (pError)
			{
				pResponse.send(400, { Error: `Error parsing JSON file: ${pError.message}` });
				return fNext();
			}

			// Map the parsed file onto the shape extractRecordArray expects:
			// a top-level array becomes Records, an object becomes a Comprehension.
			let tmpExtractBody = { Entity: tmpBody.Entity };
			if (Array.isArray(tmpRawRecordSet))
			{
				tmpExtractBody.Records = tmpRawRecordSet;
			}
			else if (typeof(tmpRawRecordSet) === 'object')
			{
				tmpExtractBody.Comprehension = tmpRawRecordSet;
			}

			let tmpRecordArray = extractRecordArray(tmpPict, tmpExtractBody);
			if (tmpRecordArray.Error)
			{
				pResponse.send(400, tmpRecordArray);
				return fNext();
			}

			let tmpCSV = generateCSV(tmpRecordArray.Records);
			pResponse.setHeader('Content-Type', 'text/csv');
			pResponse.setHeader('Content-Disposition', 'attachment; filename="export.csv"');
			pResponse.sendRaw(200, tmpCSV);
			return fNext();
		});
};
|
|
93
|
+
|
|
94
|
+
/**
 * Extract a flat array of records from a request body that may carry either
 * an explicit Records array or a Comprehension object (entity name -> records).
 *
 * @param {object} pPict - Pict instance (used for logging only).
 * @param {object} pBody - { Records: [...] } or { Comprehension: {...}, Entity: "..." }.
 * @returns {object} { Records: [...] } on success, { Error: message } on failure.
 */
function extractRecordArray(pPict, pBody)
{
	// An explicit record array takes precedence over a comprehension.
	if (pBody.Records && Array.isArray(pBody.Records))
	{
		if (pBody.Records.length < 1)
		{
			return { Error: 'Records array is empty.' };
		}
		return { Records: pBody.Records };
	}

	if (pBody.Comprehension && (typeof(pBody.Comprehension) === 'object'))
	{
		let tmpEntity = pBody.Entity;

		if (tmpEntity)
		{
			if (!pBody.Comprehension[tmpEntity])
			{
				return { Error: `Entity [${tmpEntity}] not found in comprehension.` };
			}
			// Entity values may be a GUID->record map or already an array of records.
			let tmpRecords = Array.isArray(pBody.Comprehension[tmpEntity]) ? pBody.Comprehension[tmpEntity] : Object.values(pBody.Comprehension[tmpEntity]);
			if (tmpRecords.length < 1)
			{
				return { Error: 'No records found for the specified entity.' };
			}
			return { Records: tmpRecords };
		}

		let tmpKeys = Object.keys(pBody.Comprehension);
		if (tmpKeys.length > 1)
		{
			return { Error: `Multiple entities found [${tmpKeys.join(', ')}]. Please specify an Entity.` };
		}
		if (tmpKeys.length < 1)
		{
			return { Error: 'No entities found in the comprehension.' };
		}

		let tmpEntityValue = pBody.Comprehension[tmpKeys[0]];
		// FIX: previously a single entity whose value was an Array fell through to
		// "No entities found in the comprehension." even though arrays are accepted
		// when Entity is specified; auto-detection now accepts arrays as well.
		if (tmpEntityValue && (typeof(tmpEntityValue) === 'object'))
		{
			pPict.log.info(`Auto-detected entity [${tmpKeys[0]}] from comprehension.`);
			let tmpRecords = Array.isArray(tmpEntityValue) ? tmpEntityValue : Object.values(tmpEntityValue);
			if (tmpRecords.length < 1)
			{
				return { Error: 'No records found in the auto-detected entity.' };
			}
			return { Records: tmpRecords };
		}
		// Single key with a non-object value (e.g. null or a scalar) is not an entity.
		return { Error: 'No entities found in the comprehension.' };
	}

	return { Error: 'No valid Records array or Comprehension object provided.' };
}
|
|
159
|
+
|
|
160
|
+
/**
 * Recursively flatten a nested object into a single-level object with
 * dot-delimited keys ("a.b.c"). Arrays and null are treated as leaf values.
 *
 * @param {object} pObject - Object to flatten.
 * @param {string} [pAddressPrefix] - Prefix for all keys (used during recursion).
 * @returns {object} Flat map of dot-path -> leaf value.
 */
function flattenObject(pObject, pAddressPrefix)
{
	const tmpResult = {};
	const tmpBasePrefix = pAddressPrefix || '';
	const tmpEntries = Object.entries(pObject);
	for (let i = 0; i < tmpEntries.length; i++)
	{
		const tmpKey = tmpEntries[i][0];
		const tmpValue = tmpEntries[i][1];
		const tmpAddress = tmpBasePrefix ? `${tmpBasePrefix}.${tmpKey}` : tmpKey;
		// null is typeof 'object' but must stay a leaf, hence the explicit check.
		const tmpIsNestedObject = (tmpValue !== null) && (typeof(tmpValue) === 'object') && !Array.isArray(tmpValue);
		if (tmpIsNestedObject)
		{
			Object.assign(tmpResult, flattenObject(tmpValue, tmpAddress));
		}
		else
		{
			tmpResult[tmpAddress] = tmpValue;
		}
	}
	return tmpResult;
}
|
|
178
|
+
|
|
179
|
+
/**
 * Escape a single value for use as a CSV cell per RFC 4180.
 * null/undefined become the empty string; values containing a double quote,
 * comma, CR or LF are wrapped in quotes with embedded quotes doubled.
 */
function escapeCSVValue(pValue)
{
	if (pValue === null || pValue === undefined) return '';
	const str = String(pValue);
	// FIX: also quote values containing carriage returns -- a bare \r in an
	// unquoted cell corrupts row structure for CRLF-aware CSV consumers.
	return /[",\r\n]/.test(str) ? `"${str.replace(/"/g, '""')}"` : str;
}
|
|
185
|
+
|
|
186
|
+
/**
 * Render an array of (possibly nested) record objects as CSV text.
 * Columns are the union of all flattened record keys, sorted alphabetically;
 * each record is flattened with dot-delimited keys; missing cells are empty.
 */
function generateCSV(pRecords)
{
	// First pass: flatten every record and collect the union of column names.
	const tmpFlattenedRecords = pRecords.map((pRecord) => flattenObject(pRecord));
	const tmpColumnSet = new Set();
	tmpFlattenedRecords.forEach(
		(pFlatRecord) =>
		{
			Object.keys(pFlatRecord).forEach((pKey) => { tmpColumnSet.add(pKey); });
		});

	const tmpColumns = Array.from(tmpColumnSet).sort();

	// Second pass: header row followed by one escaped row per record.
	const tmpRows = [tmpColumns.join(',')];
	for (const tmpFlatRecord of tmpFlattenedRecords)
	{
		tmpRows.push(tmpColumns.map((pColumn) => escapeCSVValue(tmpFlatRecord[pColumn])).join(','));
	}

	return tmpRows.join('\n') + '\n';
}
|