retold-facto 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.claude/launch.json +11 -0
- package/.dockerignore +8 -0
- package/.quackage.json +19 -0
- package/Dockerfile +26 -0
- package/bin/retold-facto.js +909 -0
- package/examples/facto-government-data.sqlite +0 -0
- package/examples/government-data-catalog.json +137 -0
- package/examples/government-data-loader.js +1432 -0
- package/package.json +91 -0
- package/scripts/facto-download.js +425 -0
- package/source/Retold-Facto.js +1042 -0
- package/source/services/Retold-Facto-BeaconProvider.js +511 -0
- package/source/services/Retold-Facto-CatalogManager.js +1252 -0
- package/source/services/Retold-Facto-DataLakeService.js +1642 -0
- package/source/services/Retold-Facto-DatasetManager.js +417 -0
- package/source/services/Retold-Facto-IngestEngine.js +1315 -0
- package/source/services/Retold-Facto-ProjectionEngine.js +3960 -0
- package/source/services/Retold-Facto-RecordManager.js +360 -0
- package/source/services/Retold-Facto-SchemaManager.js +1110 -0
- package/source/services/Retold-Facto-SourceFolderScanner.js +2243 -0
- package/source/services/Retold-Facto-SourceManager.js +730 -0
- package/source/services/Retold-Facto-StoreConnectionManager.js +441 -0
- package/source/services/Retold-Facto-ThroughputMonitor.js +478 -0
- package/source/services/web-app/codemirror-entry.js +7 -0
- package/source/services/web-app/pict-app/Pict-Application-Facto-Configuration.json +9 -0
- package/source/services/web-app/pict-app/Pict-Application-Facto.js +70 -0
- package/source/services/web-app/pict-app/Pict-Facto-Bundle.js +11 -0
- package/source/services/web-app/pict-app/providers/Pict-Provider-Facto-UI.js +66 -0
- package/source/services/web-app/pict-app/providers/Pict-Provider-Facto.js +69 -0
- package/source/services/web-app/pict-app/providers/facto-api/Facto-API-Catalog.js +93 -0
- package/source/services/web-app/pict-app/providers/facto-api/Facto-API-Connections.js +42 -0
- package/source/services/web-app/pict-app/providers/facto-api/Facto-API-Datasets.js +605 -0
- package/source/services/web-app/pict-app/providers/facto-api/Facto-API-Projections.js +188 -0
- package/source/services/web-app/pict-app/providers/facto-api/Facto-API-Scanner.js +80 -0
- package/source/services/web-app/pict-app/providers/facto-api/Facto-API-Schema.js +116 -0
- package/source/services/web-app/pict-app/providers/facto-api/Facto-API-Sources.js +104 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Catalog.js +526 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Datasets.js +173 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Ingest.js +259 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Layout.js +191 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Projections.js +231 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Records.js +326 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Scanner.js +624 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Sources.js +201 -0
- package/source/services/web-app/pict-app/views/PictView-Facto-Throughput.js +456 -0
- package/source/services/web-app/pict-app-full/Pict-Application-Facto-Full-Configuration.json +14 -0
- package/source/services/web-app/pict-app-full/Pict-Application-Facto-Full.js +391 -0
- package/source/services/web-app/pict-app-full/providers/PictRouter-Facto-Configuration.json +56 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-BottomBar.js +68 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Connections.js +340 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Dashboard.js +149 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Dashboards.js +819 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Datasets.js +178 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-IngestJobs.js +99 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Layout.js +62 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-MappingEditor.js +158 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-ProjectionDetail.js +1120 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Projections.js +172 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-QueryPanel.js +119 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-RecordViewer.js +663 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Records.js +648 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Scanner.js +1017 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-SchemaDetail.js +1404 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-SchemaDocEditor.js +1036 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-SchemaEditor.js +636 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-SchemaResearch.js +357 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-SourceDetail.js +822 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-SourceEditor.js +1036 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-SourceResearch.js +487 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Sources.js +165 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-Throughput.js +439 -0
- package/source/services/web-app/pict-app-full/views/PictView-Facto-Full-TopBar.js +335 -0
- package/source/services/web-app/pict-app-full/views/projections/Facto-Projections-Constants.js +71 -0
- package/source/services/web-app/web/chart.min.js +20 -0
- package/source/services/web-app/web/codemirror-bundle.js +30099 -0
- package/source/services/web-app/web/css/facto-themes.css +467 -0
- package/source/services/web-app/web/css/facto.css +502 -0
- package/source/services/web-app/web/index.html +28 -0
- package/source/services/web-app/web/retold-facto.js +12138 -0
- package/source/services/web-app/web/retold-facto.js.map +1 -0
- package/source/services/web-app/web/retold-facto.min.js +2 -0
- package/source/services/web-app/web/retold-facto.min.js.map +1 -0
- package/source/services/web-app/web/simple/index.html +17 -0
- package/test/Facto_Browser_Integration_tests.js +798 -0
- package/test/RetoldFacto_tests.js +4117 -0
- package/test/fixtures/weather-readings.csv +17 -0
- package/test/fixtures/weather-stations.csv +9 -0
- package/test/model/MeadowModel-Extended.json +8497 -0
- package/test/model/MeadowModel-PICT.json +1 -0
- package/test/model/MeadowModel.json +1355 -0
- package/test/model/ddl/Facto.ddl +225 -0
- package/test/model/fable-configuration.json +14 -0
|
@@ -0,0 +1,3960 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Retold Facto - Projection Engine Service
|
|
3
|
+
*
|
|
4
|
+
* Creates flattened projections, temporary SQL tables, and materialized
|
|
5
|
+
* views from multidimensional dataset data for reporting and charting.
|
|
6
|
+
*
|
|
7
|
+
* @author Steven Velozo <steven@velozo.com>
|
|
8
|
+
*/
|
|
9
|
+
const libFableServiceProviderBase = require('fable-serviceproviderbase');
|
|
10
|
+
const libFable = require('fable');
|
|
11
|
+
const libFoxHound = require('foxhound');
|
|
12
|
+
const libFS = require('fs');
|
|
13
|
+
const libPath = require('path');
|
|
14
|
+
const libMeadow = require('meadow');
|
|
15
|
+
const libMeadowEndpoints = require('meadow-endpoints');
|
|
16
|
+
const libMeadowIntegration = require('meadow-integration');
|
|
17
|
+
|
|
18
|
+
/**
 * Default configuration for the projection engine service.
 * Consumers may override any of these via the options passed to the constructor.
 */
const defaultProjectionEngineOptions = {
	// All REST routes registered by this service hang off this prefix.
	RoutePrefix: '/facto'
};

// Dataset classifications the Facto engine recognizes.
const VALID_DATASET_TYPES = ['Raw', 'Compositional', 'Projection', 'Derived'];
|
|
24
|
+
|
|
25
|
+
/**
 * Merge strategy functions for multi-set projection pipelines.
 *
 * Each strategy receives:
 *   pIncoming     – the incoming record from the current step
 *   pCurrent      – the record already in the accumulated comprehension (or null)
 *   pMergeContext – { NewWeight, ExistingWeight, ConfidenceConfig, ConfidenceTracker }
 *
 * Each returns: { Action: string, Record: object }
 */
const MERGE_STRATEGIES =
{
	// Incoming record always lands; on collision, overlapping fields take the new values.
	WriteAll: (pIncoming, pCurrent, pMergeContext) =>
	{
		return pCurrent
			? { Action: 'Merged', Record: Object.assign({}, pCurrent, pIncoming) }
			: { Action: 'Created', Record: pIncoming };
	},

	// The first record written for a key is immutable thereafter.
	FirstWriteWins: (pIncoming, pCurrent, pMergeContext) =>
	{
		return pCurrent
			? { Action: 'Skipped_FirstWriteWins', Record: pCurrent }
			: { Action: 'Created', Record: pIncoming };
	},

	// Replace the stored record only when the new source carries a strictly higher weight.
	ReliabilityOverwrite: (pIncoming, pCurrent, pMergeContext) =>
	{
		if (!pCurrent)
		{
			return { Action: 'Created', Record: pIncoming };
		}
		return (pMergeContext.NewWeight > pMergeContext.ExistingWeight)
			? { Action: 'Overwritten_HigherReliability', Record: pIncoming }
			: { Action: 'Skipped_LowerReliability', Record: pCurrent };
	},

	// Merge fields (new values win) and report the record as reinforced.
	MergeAndReinforce: (pIncoming, pCurrent, pMergeContext) =>
	{
		return pCurrent
			? { Action: 'Merged_Reinforced', Record: Object.assign({}, pCurrent, pIncoming) }
			: { Action: 'Created', Record: pIncoming };
	},

	// Only fill fields the stored record is missing (undefined, null, or empty string).
	FieldFillOnly: (pIncoming, pCurrent, pMergeContext) =>
	{
		if (!pCurrent)
		{
			return { Action: 'Created', Record: pIncoming };
		}
		let tmpMerged = Object.assign({}, pCurrent);
		for (let tmpKey of Object.keys(pIncoming))
		{
			if (tmpMerged[tmpKey] === undefined || tmpMerged[tmpKey] === null || tmpMerged[tmpKey] === '')
			{
				tmpMerged[tmpKey] = pIncoming[tmpKey];
			}
		}
		return { Action: 'Merged_FieldsAdded', Record: tmpMerged };
	}
};
|
|
95
|
+
|
|
96
|
+
class RetoldFactoProjectionEngine extends libFableServiceProviderBase
|
|
97
|
+
{
|
|
98
|
+
constructor(pFable, pOptions, pServiceHash)
|
|
99
|
+
{
|
|
100
|
+
let tmpOptions = Object.assign({}, defaultProjectionEngineOptions, pOptions);
|
|
101
|
+
super(pFable, tmpOptions, pServiceHash);
|
|
102
|
+
|
|
103
|
+
this.serviceType = 'RetoldFactoProjectionEngine';
|
|
104
|
+
|
|
105
|
+
// Map of dynamically registered Meadow entities for projection tables.
|
|
106
|
+
// Keyed by TargetTableName (entity scope).
|
|
107
|
+
this._ProjectionEntities = {};
|
|
108
|
+
|
|
109
|
+
// Register meadow-integration service types so we can instantiate them
|
|
110
|
+
if (!this.fable.serviceManager.services.IntegrationAdapter)
|
|
111
|
+
{
|
|
112
|
+
this.fable.serviceManager.addServiceType('IntegrationAdapter', libMeadowIntegration.IntegrationAdapter);
|
|
113
|
+
}
|
|
114
|
+
if (!this.fable.serviceManager.services.MeadowCloneRestClient)
|
|
115
|
+
{
|
|
116
|
+
this.fable.serviceManager.addServiceType('MeadowCloneRestClient', libMeadowIntegration.CloneRestClient);
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
/**
|
|
121
|
+
* Connect REST API routes for projection operations.
|
|
122
|
+
*
|
|
123
|
+
* @param {object} pOratorServiceServer - The Orator service server instance
|
|
124
|
+
*/
|
|
125
|
+
connectRoutes(pOratorServiceServer)
|
|
126
|
+
{
|
|
127
|
+
let tmpRoutePrefix = this.options.RoutePrefix;
|
|
128
|
+
|
|
129
|
+
// GET /facto/projections -- list all projection datasets
|
|
130
|
+
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projections`,
|
|
131
|
+
(pRequest, pResponse, fNext) =>
|
|
132
|
+
{
|
|
133
|
+
if (!this.fable.DAL || !this.fable.DAL.Dataset)
|
|
134
|
+
{
|
|
135
|
+
pResponse.send({ Projections: [] });
|
|
136
|
+
return fNext();
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
let tmpQuery = this.fable.DAL.Dataset.query.clone()
|
|
140
|
+
.addFilter('Type', 'Projection')
|
|
141
|
+
.addFilter('Deleted', 0);
|
|
142
|
+
|
|
143
|
+
this.fable.DAL.Dataset.doReads(tmpQuery,
|
|
144
|
+
(pError, pQuery, pRecords) =>
|
|
145
|
+
{
|
|
146
|
+
if (pError)
|
|
147
|
+
{
|
|
148
|
+
this.fable.log.error(`ProjectionEngine error listing projections: ${pError}`);
|
|
149
|
+
pResponse.send({ Error: pError.message || pError, Projections: [] });
|
|
150
|
+
return fNext();
|
|
151
|
+
}
|
|
152
|
+
pResponse.send({ Count: pRecords.length, Projections: pRecords });
|
|
153
|
+
return fNext();
|
|
154
|
+
});
|
|
155
|
+
});
|
|
156
|
+
|
|
157
|
+
// GET /facto/datasets/by-type/:Type -- list datasets filtered by type
|
|
158
|
+
pOratorServiceServer.doGet(`${tmpRoutePrefix}/datasets/by-type/:Type`,
|
|
159
|
+
(pRequest, pResponse, fNext) =>
|
|
160
|
+
{
|
|
161
|
+
let tmpType = pRequest.params.Type;
|
|
162
|
+
|
|
163
|
+
if (!this.fable.DAL || !this.fable.DAL.Dataset)
|
|
164
|
+
{
|
|
165
|
+
pResponse.send({ Error: 'Dataset DAL not initialized', Datasets: [] });
|
|
166
|
+
return fNext();
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
let tmpQuery = this.fable.DAL.Dataset.query.clone()
|
|
170
|
+
.addFilter('Type', tmpType)
|
|
171
|
+
.addFilter('Deleted', 0);
|
|
172
|
+
|
|
173
|
+
this.fable.DAL.Dataset.doReads(tmpQuery,
|
|
174
|
+
(pError, pQuery, pRecords) =>
|
|
175
|
+
{
|
|
176
|
+
if (pError)
|
|
177
|
+
{
|
|
178
|
+
pResponse.send({ Error: pError.message || pError, Datasets: [] });
|
|
179
|
+
return fNext();
|
|
180
|
+
}
|
|
181
|
+
pResponse.send({ Type: tmpType, Count: pRecords.length, Datasets: pRecords });
|
|
182
|
+
return fNext();
|
|
183
|
+
});
|
|
184
|
+
});
|
|
185
|
+
|
|
186
|
+
// POST /facto/projections/query -- cross-dataset record query
|
|
187
|
+
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projections/query`,
|
|
188
|
+
(pRequest, pResponse, fNext) =>
|
|
189
|
+
{
|
|
190
|
+
if (!this.fable.DAL || !this.fable.DAL.Record)
|
|
191
|
+
{
|
|
192
|
+
pResponse.send({ Error: 'Record DAL not initialized', Records: [] });
|
|
193
|
+
return fNext();
|
|
194
|
+
}
|
|
195
|
+
|
|
196
|
+
let tmpBody = pRequest.body || {};
|
|
197
|
+
let tmpDatasetIDs = tmpBody.DatasetIDs || [];
|
|
198
|
+
let tmpType = tmpBody.Type || '';
|
|
199
|
+
let tmpIDSource = parseInt(tmpBody.IDSource, 10) || 0;
|
|
200
|
+
let tmpCertaintyThreshold = parseFloat(tmpBody.CertaintyThreshold) || 0;
|
|
201
|
+
let tmpTimeRangeStart = parseInt(tmpBody.TimeRangeStart, 10) || 0;
|
|
202
|
+
let tmpTimeRangeStop = parseInt(tmpBody.TimeRangeStop, 10) || 0;
|
|
203
|
+
let tmpBegin = parseInt(tmpBody.Begin, 10) || 0;
|
|
204
|
+
let tmpCap = parseInt(tmpBody.Cap, 10) || 100;
|
|
205
|
+
|
|
206
|
+
if (!Array.isArray(tmpDatasetIDs) || tmpDatasetIDs.length === 0)
|
|
207
|
+
{
|
|
208
|
+
pResponse.send({ Error: 'DatasetIDs array is required', Records: [], Count: 0 });
|
|
209
|
+
return fNext();
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
// Query records per dataset using Anticipate
|
|
213
|
+
let tmpAnticipate = this.fable.newAnticipate();
|
|
214
|
+
let tmpAllRecords = [];
|
|
215
|
+
|
|
216
|
+
for (let i = 0; i < tmpDatasetIDs.length; i++)
|
|
217
|
+
{
|
|
218
|
+
let tmpDatasetID = parseInt(tmpDatasetIDs[i], 10);
|
|
219
|
+
if (!tmpDatasetID) continue;
|
|
220
|
+
|
|
221
|
+
tmpAnticipate.anticipate(
|
|
222
|
+
(fStepCallback) =>
|
|
223
|
+
{
|
|
224
|
+
let tmpQuery = this.fable.DAL.Record.query.clone()
|
|
225
|
+
.addFilter('IDDataset', tmpDatasetID)
|
|
226
|
+
.addFilter('Deleted', 0);
|
|
227
|
+
|
|
228
|
+
if (tmpType)
|
|
229
|
+
{
|
|
230
|
+
tmpQuery.addFilter('Type', tmpType);
|
|
231
|
+
}
|
|
232
|
+
if (tmpIDSource > 0)
|
|
233
|
+
{
|
|
234
|
+
tmpQuery.addFilter('IDSource', tmpIDSource);
|
|
235
|
+
}
|
|
236
|
+
if (tmpTimeRangeStart > 0)
|
|
237
|
+
{
|
|
238
|
+
tmpQuery.addFilter('RepresentedTimeStampStart', tmpTimeRangeStart, '>=');
|
|
239
|
+
}
|
|
240
|
+
if (tmpTimeRangeStop > 0)
|
|
241
|
+
{
|
|
242
|
+
tmpQuery.addFilter('RepresentedTimeStampStop', tmpTimeRangeStop, '<=');
|
|
243
|
+
}
|
|
244
|
+
|
|
245
|
+
tmpQuery.setCap(tmpCap);
|
|
246
|
+
|
|
247
|
+
this.fable.DAL.Record.doReads(tmpQuery,
|
|
248
|
+
(pError, pQuery, pRecords) =>
|
|
249
|
+
{
|
|
250
|
+
if (!pError && pRecords)
|
|
251
|
+
{
|
|
252
|
+
for (let j = 0; j < pRecords.length; j++)
|
|
253
|
+
{
|
|
254
|
+
tmpAllRecords.push(pRecords[j]);
|
|
255
|
+
}
|
|
256
|
+
}
|
|
257
|
+
return fStepCallback();
|
|
258
|
+
});
|
|
259
|
+
});
|
|
260
|
+
}
|
|
261
|
+
|
|
262
|
+
tmpAnticipate.wait(
|
|
263
|
+
() =>
|
|
264
|
+
{
|
|
265
|
+
// If certainty threshold is set, filter records
|
|
266
|
+
if (tmpCertaintyThreshold > 0 && this.fable.DAL.CertaintyIndex)
|
|
267
|
+
{
|
|
268
|
+
let tmpFilterAnticipate = this.fable.newAnticipate();
|
|
269
|
+
let tmpFilteredRecords = [];
|
|
270
|
+
|
|
271
|
+
for (let i = 0; i < tmpAllRecords.length; i++)
|
|
272
|
+
{
|
|
273
|
+
let tmpRecord = tmpAllRecords[i];
|
|
274
|
+
|
|
275
|
+
tmpFilterAnticipate.anticipate(
|
|
276
|
+
(fFilterCallback) =>
|
|
277
|
+
{
|
|
278
|
+
let tmpCIQuery = this.fable.DAL.CertaintyIndex.query.clone()
|
|
279
|
+
.addFilter('IDRecord', tmpRecord.IDRecord)
|
|
280
|
+
.addFilter('Dimension', 'overall')
|
|
281
|
+
.addFilter('Deleted', 0)
|
|
282
|
+
.setCap(1);
|
|
283
|
+
|
|
284
|
+
this.fable.DAL.CertaintyIndex.doReads(tmpCIQuery,
|
|
285
|
+
(pCIError, pCIQuery, pCIRecords) =>
|
|
286
|
+
{
|
|
287
|
+
if (!pCIError && pCIRecords && pCIRecords.length > 0)
|
|
288
|
+
{
|
|
289
|
+
if (pCIRecords[0].CertaintyValue >= tmpCertaintyThreshold)
|
|
290
|
+
{
|
|
291
|
+
tmpRecord.CertaintyValue = pCIRecords[0].CertaintyValue;
|
|
292
|
+
tmpFilteredRecords.push(tmpRecord);
|
|
293
|
+
}
|
|
294
|
+
}
|
|
295
|
+
return fFilterCallback();
|
|
296
|
+
});
|
|
297
|
+
});
|
|
298
|
+
}
|
|
299
|
+
|
|
300
|
+
tmpFilterAnticipate.wait(
|
|
301
|
+
() =>
|
|
302
|
+
{
|
|
303
|
+
// Apply pagination to filtered results
|
|
304
|
+
let tmpPaginated = tmpFilteredRecords.slice(tmpBegin, tmpBegin + tmpCap);
|
|
305
|
+
pResponse.send(
|
|
306
|
+
{
|
|
307
|
+
Query: { DatasetIDs: tmpDatasetIDs, Type: tmpType, CertaintyThreshold: tmpCertaintyThreshold },
|
|
308
|
+
Count: tmpPaginated.length,
|
|
309
|
+
TotalMatched: tmpFilteredRecords.length,
|
|
310
|
+
Records: tmpPaginated
|
|
311
|
+
});
|
|
312
|
+
return fNext();
|
|
313
|
+
});
|
|
314
|
+
}
|
|
315
|
+
else
|
|
316
|
+
{
|
|
317
|
+
// Apply pagination
|
|
318
|
+
let tmpPaginated = tmpAllRecords.slice(tmpBegin, tmpBegin + tmpCap);
|
|
319
|
+
pResponse.send(
|
|
320
|
+
{
|
|
321
|
+
Query: { DatasetIDs: tmpDatasetIDs, Type: tmpType },
|
|
322
|
+
Count: tmpPaginated.length,
|
|
323
|
+
TotalMatched: tmpAllRecords.length,
|
|
324
|
+
Records: tmpPaginated
|
|
325
|
+
});
|
|
326
|
+
return fNext();
|
|
327
|
+
}
|
|
328
|
+
});
|
|
329
|
+
});
|
|
330
|
+
|
|
331
|
+
// POST /facto/projections/aggregate -- count records grouped by a dimension
|
|
332
|
+
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projections/aggregate`,
|
|
333
|
+
(pRequest, pResponse, fNext) =>
|
|
334
|
+
{
|
|
335
|
+
if (!this.fable.DAL || !this.fable.DAL.Record)
|
|
336
|
+
{
|
|
337
|
+
pResponse.send({ Error: 'Record DAL not initialized', Aggregation: [] });
|
|
338
|
+
return fNext();
|
|
339
|
+
}
|
|
340
|
+
|
|
341
|
+
let tmpBody = pRequest.body || {};
|
|
342
|
+
let tmpDatasetIDs = tmpBody.DatasetIDs || [];
|
|
343
|
+
let tmpGroupBy = tmpBody.GroupBy || 'IDDataset';
|
|
344
|
+
|
|
345
|
+
if (!Array.isArray(tmpDatasetIDs) || tmpDatasetIDs.length === 0)
|
|
346
|
+
{
|
|
347
|
+
pResponse.send({ Error: 'DatasetIDs array is required', Aggregation: [] });
|
|
348
|
+
return fNext();
|
|
349
|
+
}
|
|
350
|
+
|
|
351
|
+
let tmpAnticipate = this.fable.newAnticipate();
|
|
352
|
+
let tmpAggregation = [];
|
|
353
|
+
let tmpTotal = 0;
|
|
354
|
+
|
|
355
|
+
if (tmpGroupBy === 'IDDataset')
|
|
356
|
+
{
|
|
357
|
+
// Count records per dataset
|
|
358
|
+
for (let i = 0; i < tmpDatasetIDs.length; i++)
|
|
359
|
+
{
|
|
360
|
+
let tmpDatasetID = parseInt(tmpDatasetIDs[i], 10);
|
|
361
|
+
if (!tmpDatasetID) continue;
|
|
362
|
+
|
|
363
|
+
tmpAnticipate.anticipate(
|
|
364
|
+
(fStepCallback) =>
|
|
365
|
+
{
|
|
366
|
+
let tmpCountQuery = this.fable.DAL.Record.query.clone()
|
|
367
|
+
.addFilter('IDDataset', tmpDatasetID)
|
|
368
|
+
.addFilter('Deleted', 0);
|
|
369
|
+
|
|
370
|
+
this.fable.DAL.Record.doCount(tmpCountQuery,
|
|
371
|
+
(pError, pQuery, pCount) =>
|
|
372
|
+
{
|
|
373
|
+
let tmpCount = (typeof pCount === 'number') ? pCount : parseInt(pCount, 10) || 0;
|
|
374
|
+
tmpAggregation.push({ Group: 'IDDataset', Key: tmpDatasetID, RecordCount: tmpCount });
|
|
375
|
+
tmpTotal += tmpCount;
|
|
376
|
+
return fStepCallback();
|
|
377
|
+
});
|
|
378
|
+
});
|
|
379
|
+
}
|
|
380
|
+
}
|
|
381
|
+
else if (tmpGroupBy === 'IDSource')
|
|
382
|
+
{
|
|
383
|
+
// First get distinct sources linked to these datasets via DatasetSource
|
|
384
|
+
tmpAnticipate.anticipate(
|
|
385
|
+
(fStepCallback) =>
|
|
386
|
+
{
|
|
387
|
+
let tmpInnerAnticipate = this.fable.newAnticipate();
|
|
388
|
+
let tmpSourceIDs = {};
|
|
389
|
+
|
|
390
|
+
for (let i = 0; i < tmpDatasetIDs.length; i++)
|
|
391
|
+
{
|
|
392
|
+
let tmpDatasetID = parseInt(tmpDatasetIDs[i], 10);
|
|
393
|
+
if (!tmpDatasetID) continue;
|
|
394
|
+
|
|
395
|
+
tmpInnerAnticipate.anticipate(
|
|
396
|
+
(fInnerCallback) =>
|
|
397
|
+
{
|
|
398
|
+
let tmpDSQuery = this.fable.DAL.DatasetSource.query.clone()
|
|
399
|
+
.addFilter('IDDataset', tmpDatasetID)
|
|
400
|
+
.addFilter('Deleted', 0);
|
|
401
|
+
|
|
402
|
+
this.fable.DAL.DatasetSource.doReads(tmpDSQuery,
|
|
403
|
+
(pError, pQuery, pRecords) =>
|
|
404
|
+
{
|
|
405
|
+
if (!pError && pRecords)
|
|
406
|
+
{
|
|
407
|
+
for (let j = 0; j < pRecords.length; j++)
|
|
408
|
+
{
|
|
409
|
+
tmpSourceIDs[pRecords[j].IDSource] = true;
|
|
410
|
+
}
|
|
411
|
+
}
|
|
412
|
+
return fInnerCallback();
|
|
413
|
+
});
|
|
414
|
+
});
|
|
415
|
+
}
|
|
416
|
+
|
|
417
|
+
tmpInnerAnticipate.wait(
|
|
418
|
+
() =>
|
|
419
|
+
{
|
|
420
|
+
// Now count records per source
|
|
421
|
+
let tmpSourceKeys = Object.keys(tmpSourceIDs);
|
|
422
|
+
let tmpSourceAnticipate = this.fable.newAnticipate();
|
|
423
|
+
|
|
424
|
+
for (let i = 0; i < tmpSourceKeys.length; i++)
|
|
425
|
+
{
|
|
426
|
+
let tmpSourceID = parseInt(tmpSourceKeys[i], 10);
|
|
427
|
+
|
|
428
|
+
tmpSourceAnticipate.anticipate(
|
|
429
|
+
(fSourceCallback) =>
|
|
430
|
+
{
|
|
431
|
+
let tmpCountQuery = this.fable.DAL.Record.query.clone()
|
|
432
|
+
.addFilter('IDSource', tmpSourceID)
|
|
433
|
+
.addFilter('Deleted', 0);
|
|
434
|
+
|
|
435
|
+
this.fable.DAL.Record.doCount(tmpCountQuery,
|
|
436
|
+
(pError, pQuery, pCount) =>
|
|
437
|
+
{
|
|
438
|
+
let tmpCount = (typeof pCount === 'number') ? pCount : parseInt(pCount, 10) || 0;
|
|
439
|
+
tmpAggregation.push({ Group: 'IDSource', Key: tmpSourceID, RecordCount: tmpCount });
|
|
440
|
+
tmpTotal += tmpCount;
|
|
441
|
+
return fSourceCallback();
|
|
442
|
+
});
|
|
443
|
+
});
|
|
444
|
+
}
|
|
445
|
+
|
|
446
|
+
tmpSourceAnticipate.wait(() => { return fStepCallback(); });
|
|
447
|
+
});
|
|
448
|
+
});
|
|
449
|
+
}
|
|
450
|
+
else if (tmpGroupBy === 'Type')
|
|
451
|
+
{
|
|
452
|
+
// Get distinct record types from the specified datasets
|
|
453
|
+
tmpAnticipate.anticipate(
|
|
454
|
+
(fStepCallback) =>
|
|
455
|
+
{
|
|
456
|
+
let tmpTypeMap = {};
|
|
457
|
+
let tmpTypeAnticipate = this.fable.newAnticipate();
|
|
458
|
+
|
|
459
|
+
for (let i = 0; i < tmpDatasetIDs.length; i++)
|
|
460
|
+
{
|
|
461
|
+
let tmpDatasetID = parseInt(tmpDatasetIDs[i], 10);
|
|
462
|
+
if (!tmpDatasetID) continue;
|
|
463
|
+
|
|
464
|
+
tmpTypeAnticipate.anticipate(
|
|
465
|
+
(fTypeCallback) =>
|
|
466
|
+
{
|
|
467
|
+
let tmpReadQuery = this.fable.DAL.Record.query.clone()
|
|
468
|
+
.addFilter('IDDataset', tmpDatasetID)
|
|
469
|
+
.addFilter('Deleted', 0)
|
|
470
|
+
.setCap(1000);
|
|
471
|
+
|
|
472
|
+
this.fable.DAL.Record.doReads(tmpReadQuery,
|
|
473
|
+
(pError, pQuery, pRecords) =>
|
|
474
|
+
{
|
|
475
|
+
if (!pError && pRecords)
|
|
476
|
+
{
|
|
477
|
+
for (let j = 0; j < pRecords.length; j++)
|
|
478
|
+
{
|
|
479
|
+
let tmpType = pRecords[j].Type || '(none)';
|
|
480
|
+
tmpTypeMap[tmpType] = (tmpTypeMap[tmpType] || 0) + 1;
|
|
481
|
+
}
|
|
482
|
+
}
|
|
483
|
+
return fTypeCallback();
|
|
484
|
+
});
|
|
485
|
+
});
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
tmpTypeAnticipate.wait(
|
|
489
|
+
() =>
|
|
490
|
+
{
|
|
491
|
+
let tmpTypes = Object.keys(tmpTypeMap);
|
|
492
|
+
for (let i = 0; i < tmpTypes.length; i++)
|
|
493
|
+
{
|
|
494
|
+
let tmpCount = tmpTypeMap[tmpTypes[i]];
|
|
495
|
+
tmpAggregation.push({ Group: 'Type', Key: tmpTypes[i], RecordCount: tmpCount });
|
|
496
|
+
tmpTotal += tmpCount;
|
|
497
|
+
}
|
|
498
|
+
return fStepCallback();
|
|
499
|
+
});
|
|
500
|
+
});
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
tmpAnticipate.wait(
|
|
504
|
+
() =>
|
|
505
|
+
{
|
|
506
|
+
pResponse.send(
|
|
507
|
+
{
|
|
508
|
+
GroupBy: tmpGroupBy,
|
|
509
|
+
Aggregation: tmpAggregation,
|
|
510
|
+
Total: tmpTotal
|
|
511
|
+
});
|
|
512
|
+
return fNext();
|
|
513
|
+
});
|
|
514
|
+
});
|
|
515
|
+
|
|
516
|
+
// POST /facto/projections/certainty -- certainty-weighted record lookup
|
|
517
|
+
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projections/certainty`,
|
|
518
|
+
(pRequest, pResponse, fNext) =>
|
|
519
|
+
{
|
|
520
|
+
if (!this.fable.DAL || !this.fable.DAL.CertaintyIndex || !this.fable.DAL.Record)
|
|
521
|
+
{
|
|
522
|
+
pResponse.send({ Error: 'DAL not initialized', Records: [] });
|
|
523
|
+
return fNext();
|
|
524
|
+
}
|
|
525
|
+
|
|
526
|
+
let tmpBody = pRequest.body || {};
|
|
527
|
+
let tmpDatasetIDs = tmpBody.DatasetIDs || [];
|
|
528
|
+
let tmpMinCertainty = parseFloat(tmpBody.MinCertainty);
|
|
529
|
+
let tmpMaxCertainty = parseFloat(tmpBody.MaxCertainty);
|
|
530
|
+
let tmpDimension = tmpBody.Dimension || 'overall';
|
|
531
|
+
let tmpSortDirection = (tmpBody.SortByCertainty === 'asc') ? 'asc' : 'desc';
|
|
532
|
+
let tmpBegin = parseInt(tmpBody.Begin, 10) || 0;
|
|
533
|
+
let tmpCap = parseInt(tmpBody.Cap, 10) || 50;
|
|
534
|
+
|
|
535
|
+
if (isNaN(tmpMinCertainty)) tmpMinCertainty = 0;
|
|
536
|
+
if (isNaN(tmpMaxCertainty)) tmpMaxCertainty = 1;
|
|
537
|
+
|
|
538
|
+
// Query certainty indices within range
|
|
539
|
+
let tmpCIQuery = this.fable.DAL.CertaintyIndex.query.clone()
|
|
540
|
+
.addFilter('Dimension', tmpDimension)
|
|
541
|
+
.addFilter('CertaintyValue', tmpMinCertainty, '>=')
|
|
542
|
+
.addFilter('CertaintyValue', tmpMaxCertainty, '<=')
|
|
543
|
+
.addFilter('Deleted', 0)
|
|
544
|
+
.setCap(500);
|
|
545
|
+
|
|
546
|
+
this.fable.DAL.CertaintyIndex.doReads(tmpCIQuery,
|
|
547
|
+
(pCIError, pCIQuery, pCIRecords) =>
|
|
548
|
+
{
|
|
549
|
+
if (pCIError)
|
|
550
|
+
{
|
|
551
|
+
pResponse.send({ Error: pCIError.message || pCIError, Records: [] });
|
|
552
|
+
return fNext();
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
if (!pCIRecords || pCIRecords.length === 0)
|
|
556
|
+
{
|
|
557
|
+
pResponse.send({ Count: 0, Records: [] });
|
|
558
|
+
return fNext();
|
|
559
|
+
}
|
|
560
|
+
|
|
561
|
+
// Sort by certainty value
|
|
562
|
+
pCIRecords.sort(
|
|
563
|
+
(a, b) =>
|
|
564
|
+
{
|
|
565
|
+
return tmpSortDirection === 'asc'
|
|
566
|
+
? a.CertaintyValue - b.CertaintyValue
|
|
567
|
+
: b.CertaintyValue - a.CertaintyValue;
|
|
568
|
+
});
|
|
569
|
+
|
|
570
|
+
// Collect IDRecord values and certainty map
|
|
571
|
+
let tmpCertaintyMap = {};
|
|
572
|
+
let tmpRecordIDs = [];
|
|
573
|
+
for (let i = 0; i < pCIRecords.length; i++)
|
|
574
|
+
{
|
|
575
|
+
let tmpIDRecord = pCIRecords[i].IDRecord;
|
|
576
|
+
if (!tmpCertaintyMap[tmpIDRecord])
|
|
577
|
+
{
|
|
578
|
+
tmpCertaintyMap[tmpIDRecord] = pCIRecords[i].CertaintyValue;
|
|
579
|
+
tmpRecordIDs.push(tmpIDRecord);
|
|
580
|
+
}
|
|
581
|
+
}
|
|
582
|
+
|
|
583
|
+
// Now fetch the actual records
|
|
584
|
+
let tmpAnticipate = this.fable.newAnticipate();
|
|
585
|
+
let tmpMatchedRecords = [];
|
|
586
|
+
|
|
587
|
+
for (let i = 0; i < tmpRecordIDs.length; i++)
|
|
588
|
+
{
|
|
589
|
+
let tmpIDRecord = tmpRecordIDs[i];
|
|
590
|
+
|
|
591
|
+
tmpAnticipate.anticipate(
|
|
592
|
+
(fStepCallback) =>
|
|
593
|
+
{
|
|
594
|
+
let tmpRecQuery = this.fable.DAL.Record.query.clone()
|
|
595
|
+
.addFilter('IDRecord', tmpIDRecord)
|
|
596
|
+
.addFilter('Deleted', 0);
|
|
597
|
+
|
|
598
|
+
this.fable.DAL.Record.doRead(tmpRecQuery,
|
|
599
|
+
(pRecError, pRecQuery, pRecord) =>
|
|
600
|
+
{
|
|
601
|
+
if (!pRecError && pRecord && pRecord.IDRecord)
|
|
602
|
+
{
|
|
603
|
+
// Filter by DatasetIDs if provided
|
|
604
|
+
if (tmpDatasetIDs.length === 0 || tmpDatasetIDs.indexOf(pRecord.IDDataset) >= 0)
|
|
605
|
+
{
|
|
606
|
+
pRecord.CertaintyValue = tmpCertaintyMap[tmpIDRecord];
|
|
607
|
+
pRecord.CertaintyDimension = tmpDimension;
|
|
608
|
+
tmpMatchedRecords.push(pRecord);
|
|
609
|
+
}
|
|
610
|
+
}
|
|
611
|
+
return fStepCallback();
|
|
612
|
+
});
|
|
613
|
+
});
|
|
614
|
+
}
|
|
615
|
+
|
|
616
|
+
tmpAnticipate.wait(
|
|
617
|
+
() =>
|
|
618
|
+
{
|
|
619
|
+
// Re-sort the matched records by certainty
|
|
620
|
+
tmpMatchedRecords.sort(
|
|
621
|
+
(a, b) =>
|
|
622
|
+
{
|
|
623
|
+
return tmpSortDirection === 'asc'
|
|
624
|
+
? a.CertaintyValue - b.CertaintyValue
|
|
625
|
+
: b.CertaintyValue - a.CertaintyValue;
|
|
626
|
+
});
|
|
627
|
+
|
|
628
|
+
let tmpPaginated = tmpMatchedRecords.slice(tmpBegin, tmpBegin + tmpCap);
|
|
629
|
+
pResponse.send(
|
|
630
|
+
{
|
|
631
|
+
Dimension: tmpDimension,
|
|
632
|
+
MinCertainty: tmpMinCertainty,
|
|
633
|
+
MaxCertainty: tmpMaxCertainty,
|
|
634
|
+
Count: tmpPaginated.length,
|
|
635
|
+
TotalMatched: tmpMatchedRecords.length,
|
|
636
|
+
Records: tmpPaginated
|
|
637
|
+
});
|
|
638
|
+
return fNext();
|
|
639
|
+
});
|
|
640
|
+
});
|
|
641
|
+
});
|
|
642
|
+
|
|
643
|
+
// POST /facto/projections/compare -- compare datasets
// Accepts { DatasetIDs: [...] } and responds with per-dataset metadata,
// record counts, and linked-source counts for side-by-side comparison.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projections/compare`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.Dataset || !this.fable.DAL.Record)
		{
			pResponse.send({ Error: 'DAL not initialized', Datasets: [] });
			return fNext();
		}

		let tmpBody = pRequest.body || {};
		let tmpDatasetIDs = tmpBody.DatasetIDs || [];

		if (!Array.isArray(tmpDatasetIDs) || tmpDatasetIDs.length === 0)
		{
			pResponse.send({ Error: 'DatasetIDs array is required', Datasets: [] });
			return fNext();
		}

		let tmpAnticipate = this.fable.newAnticipate();
		let tmpDatasets = [];

		for (let i = 0; i < tmpDatasetIDs.length; i++)
		{
			let tmpDatasetID = parseInt(tmpDatasetIDs[i], 10);
			// Skip entries that are not positive integers (NaN and 0 are both falsy)
			if (!tmpDatasetID) continue;

			tmpAnticipate.anticipate(
				(fStepCallback) =>
				{
					let tmpDatasetInfo = { IDDataset: tmpDatasetID, Name: '', Type: '', RecordCount: 0, SourceCount: 0 };
					let tmpInner = this.fable.newAnticipate();

					// Load dataset metadata
					tmpInner.anticipate(
						(fInnerCallback) =>
						{
							let tmpDSQuery = this.fable.DAL.Dataset.query.clone()
								.addFilter('IDDataset', tmpDatasetID);

							this.fable.DAL.Dataset.doRead(tmpDSQuery,
								(pError, pQuery, pRecord) =>
								{
									if (!pError && pRecord)
									{
										tmpDatasetInfo.Name = pRecord.Name || '';
										tmpDatasetInfo.Type = pRecord.Type || '';
									}
									return fInnerCallback();
								});
						});

					// Count records
					tmpInner.anticipate(
						(fInnerCallback) =>
						{
							let tmpCountQuery = this.fable.DAL.Record.query.clone()
								.addFilter('IDDataset', tmpDatasetID)
								.addFilter('Deleted', 0);

							this.fable.DAL.Record.doCount(tmpCountQuery,
								(pError, pQuery, pCount) =>
								{
									tmpDatasetInfo.RecordCount = (typeof pCount === 'number') ? pCount : parseInt(pCount, 10) || 0;
									return fInnerCallback();
								});
						});

					// Count linked sources
					tmpInner.anticipate(
						(fInnerCallback) =>
						{
							// FIX: the route-level guard only checks DAL.Dataset and DAL.Record,
							// so dereferencing DAL.DatasetSource unconditionally could throw here.
							// Skip this step (SourceCount stays 0) when the entity is not registered,
							// matching how the summary route guards each optional entity.
							if (!this.fable.DAL.DatasetSource) return fInnerCallback();

							let tmpDSSourceQuery = this.fable.DAL.DatasetSource.query.clone()
								.addFilter('IDDataset', tmpDatasetID)
								.addFilter('Deleted', 0);

							this.fable.DAL.DatasetSource.doCount(tmpDSSourceQuery,
								(pError, pQuery, pCount) =>
								{
									tmpDatasetInfo.SourceCount = (typeof pCount === 'number') ? pCount : parseInt(pCount, 10) || 0;
									return fInnerCallback();
								});
						});

					tmpInner.wait(
						() =>
						{
							tmpDatasets.push(tmpDatasetInfo);
							return fStepCallback();
						});
				});
		}

		tmpAnticipate.wait(
			() =>
			{
				pResponse.send({ Datasets: tmpDatasets });
				return fNext();
			});
	});
|
|
743
|
+
|
|
744
|
+
// GET /facto/projections/summary -- global warehouse statistics
// Responds with counts of the major warehouse entities plus a per-type
// dataset breakdown.  All counts filter out soft-deleted rows.
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projections/summary`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpAnticipate = this.fable.newAnticipate();
		let tmpSummary =
		{
			Sources: 0,
			Datasets: 0,
			Records: 0,
			CertaintyIndices: 0,
			IngestJobs: 0,
			DatasetsByType: { Raw: 0, Compositional: 0, Projection: 0, Derived: 0 }
		};

		// Queue a soft-delete-aware doCount for one DAL entity; the step is a
		// silent no-op when the entity is not registered.  This deduplicates
		// the six near-identical count steps this route previously inlined.
		const addCountStep = (pEntityName, fExtendQuery, fAssignCount) =>
		{
			tmpAnticipate.anticipate(
				(fStepCallback) =>
				{
					let tmpEntity = this.fable.DAL[pEntityName];
					if (!tmpEntity) return fStepCallback();

					let tmpQuery = tmpEntity.query.clone().addFilter('Deleted', 0);
					if (fExtendQuery) fExtendQuery(tmpQuery);

					tmpEntity.doCount(tmpQuery,
						(pError, pQuery, pCount) =>
						{
							// doCount may return a number or a numeric string depending on the store
							fAssignCount((typeof pCount === 'number') ? pCount : parseInt(pCount, 10) || 0);
							return fStepCallback();
						});
				});
		};

		addCountStep('Source', null, (pCount) => { tmpSummary.Sources = pCount; });
		addCountStep('Dataset', null, (pCount) => { tmpSummary.Datasets = pCount; });
		addCountStep('Record', null, (pCount) => { tmpSummary.Records = pCount; });
		addCountStep('CertaintyIndex', null, (pCount) => { tmpSummary.CertaintyIndices = pCount; });
		addCountStep('IngestJob', null, (pCount) => { tmpSummary.IngestJobs = pCount; });

		// Count Datasets by Type
		for (let t = 0; t < VALID_DATASET_TYPES.length; t++)
		{
			let tmpType = VALID_DATASET_TYPES[t];
			addCountStep('Dataset',
				(pQuery) => { pQuery.addFilter('Type', tmpType); },
				(pCount) => { tmpSummary.DatasetsByType[tmpType] = pCount; });
		}

		tmpAnticipate.wait(
			() =>
			{
				pResponse.send(tmpSummary);
				return fNext();
			});
	});
|
|
861
|
+
|
|
862
|
+
// POST /facto/projection/compile -- compile MicroDDL text to schema JSON
// Accepts { DDL: '...' } and responds with { Success, Schema } or { Error, Schema: null }.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/compile`,
	(pRequest, pResponse, fNext) =>
	{
		let tmpBody = pRequest.body || {};
		// FIX: only accept string payloads -- the previous `tmpBody.DDL || ''`
		// let a non-string truthy value (number, object) through and then
		// threw on `.trim()` below.  Non-strings now fall into the
		// 'DDL text is required' error path instead of crashing the handler.
		let tmpDDL = (typeof tmpBody.DDL === 'string') ? tmpBody.DDL : '';

		if (!tmpDDL.trim())
		{
			pResponse.send({ Error: 'DDL text is required', Schema: null });
			return fNext();
		}

		// Client-side parse of MicroDDL into a schema object.
		// This mirrors the Stricture symbol set without requiring
		// a full Stricture compiler invocation.
		try
		{
			let tmpSchema = this._parseMicroDDL(tmpDDL);
			pResponse.send({ Success: true, Schema: tmpSchema });
		}
		catch (pError)
		{
			pResponse.send({ Error: pError.message || pError, Schema: null });
		}
		return fNext();
	});
|
|
889
|
+
|
|
890
|
+
// GET /facto/projection/:IDDataset/schema -- get schema definition for a projection
// Returns only the schema-related columns of the dataset, not the whole record.
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/:IDDataset/schema`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.Dataset)
		{
			pResponse.send({ Error: 'Dataset DAL not initialized' });
			return fNext();
		}

		let tmpDatasetID = parseInt(pRequest.params.IDDataset, 10);

		let tmpReadQuery = this.fable.DAL.Dataset.query.clone()
			.addFilter('IDDataset', tmpDatasetID);

		this.fable.DAL.Dataset.doRead(tmpReadQuery,
			(pError, pQuery, pRecord) =>
			{
				// Surface read errors and missing records as distinct messages
				if (pError)
				{
					pResponse.send({ Error: pError.message || pError });
					return fNext();
				}
				if (!pRecord || !pRecord.IDDataset)
				{
					pResponse.send({ Error: 'Dataset not found' });
					return fNext();
				}

				pResponse.send(
					{
						IDDataset: pRecord.IDDataset,
						Name: pRecord.Name,
						SchemaDefinition: pRecord.SchemaDefinition || '',
						SchemaVersion: pRecord.SchemaVersion || 0,
						SchemaHash: pRecord.SchemaHash || ''
					});
				return fNext();
			});
	});

// POST /facto/projection/:IDDataset/save-schema -- save schema definition
// Persists the DDL text, bumps SchemaVersion, and stores a hash of the text.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/:IDDataset/save-schema`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.Dataset)
		{
			pResponse.send({ Error: 'Dataset DAL not initialized' });
			return fNext();
		}

		let tmpDatasetID = parseInt(pRequest.params.IDDataset, 10);
		let tmpRequestBody = pRequest.body || {};
		let tmpSchemaDefinition = tmpRequestBody.SchemaDefinition || '';

		let tmpReadQuery = this.fable.DAL.Dataset.query.clone()
			.addFilter('IDDataset', tmpDatasetID);

		this.fable.DAL.Dataset.doRead(tmpReadQuery,
			(pError, pQuery, pRecord) =>
			{
				if (pError || !pRecord || !pRecord.IDDataset)
				{
					pResponse.send({ Error: 'Dataset not found' });
					return fNext();
				}

				pRecord.SchemaDefinition = tmpSchemaDefinition;
				pRecord.SchemaVersion = (pRecord.SchemaVersion || 0) + 1;

				// Simple hash of the DDL text (32-bit rolling string hash, hex-encoded)
				let tmpHashValue = 0;
				for (let tmpIndex = 0; tmpIndex < tmpSchemaDefinition.length; tmpIndex++)
				{
					tmpHashValue = ((tmpHashValue << 5) - tmpHashValue + tmpSchemaDefinition.charCodeAt(tmpIndex)) | 0;
				}
				pRecord.SchemaHash = 'ddl-' + Math.abs(tmpHashValue).toString(16);

				let tmpUpdateQuery = this.fable.DAL.Dataset.query.clone()
					.addRecord(pRecord);

				this.fable.DAL.Dataset.doUpdate(tmpUpdateQuery,
					(pUpdateError, pUpdateQuery, pUpdated) =>
					{
						if (pUpdateError)
						{
							pResponse.send({ Error: pUpdateError.message || pUpdateError });
							return fNext();
						}

						pResponse.send(
							{
								Success: true,
								IDDataset: pUpdated.IDDataset,
								SchemaVersion: pUpdated.SchemaVersion,
								SchemaHash: pUpdated.SchemaHash
							});
						return fNext();
					});
			});
	});
|
|
991
|
+
|
|
992
|
+
// GET /facto/projection/:IDDataset/stores -- list projection store entries
// Returns the non-deleted ProjectionStore rows for the given dataset.
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/:IDDataset/stores`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.ProjectionStore)
		{
			// No ProjectionStore entity -- report an empty list rather than an error
			pResponse.send({ Stores: [] });
			return fNext();
		}

		let tmpDatasetID = parseInt(pRequest.params.IDDataset, 10);

		let tmpStoreListQuery = this.fable.DAL.ProjectionStore.query.clone()
			.addFilter('IDDataset', tmpDatasetID)
			.addFilter('Deleted', 0);

		this.fable.DAL.ProjectionStore.doReads(tmpStoreListQuery,
			(pError, pQuery, pRecords) =>
			{
				if (pError)
				{
					pResponse.send({ Error: pError.message || pError, Stores: [] });
					return fNext();
				}

				pResponse.send({ Count: pRecords.length, Stores: pRecords });
				return fNext();
			});
	});

// POST /facto/projection/:IDDataset/deploy -- deploy schema to a target store
// Validates the request then delegates the actual work to this.deploySchema.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/:IDDataset/deploy`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.Dataset ||
			!this.fable.DAL.StoreConnection || !this.fable.DAL.ProjectionStore)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpIDDataset = parseInt(pRequest.params.IDDataset, 10);
		let tmpRequestBody = pRequest.body || {};
		let tmpIDStoreConnection = parseInt(tmpRequestBody.IDStoreConnection, 10);
		let tmpTargetTableName = tmpRequestBody.TargetTableName || '';

		if (!tmpIDStoreConnection)
		{
			pResponse.send({ Error: 'IDStoreConnection is required' });
			return fNext();
		}

		this.deploySchema(tmpIDDataset, tmpIDStoreConnection, tmpTargetTableName,
			(pError, pResult) =>
			{
				if (pError)
				{
					// Pass along any partial deploy log the engine produced
					pResponse.send({ Error: pError.message || pError, Log: pResult ? pResult.Log : '' });
					return fNext();
				}
				pResponse.send(pResult);
				return fNext();
			});
	});
|
|
1056
|
+
|
|
1057
|
+
// DELETE /facto/projection/store/:IDProjectionStore -- drop table and remove store
// Loads the store and its connection, attempts DROP TABLE on the target
// database, then soft-deletes the ProjectionStore row (appending the run log)
// and evicts any cached projection entity for the table.
pOratorServiceServer.doDel(`${tmpRoutePrefix}/projection/store/:IDProjectionStore`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.ProjectionStore || !this.fable.DAL.StoreConnection)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpIDStore = parseInt(pRequest.params.IDProjectionStore, 10);
		if (!tmpIDStore)
		{
			pResponse.send({ Error: 'IDProjectionStore is required' });
			return fNext();
		}

		// Load the ProjectionStore record
		let tmpStoreQuery = this.fable.DAL.ProjectionStore.query.clone()
			.addFilter('IDProjectionStore', tmpIDStore)
			.addFilter('Deleted', 0);

		this.fable.DAL.ProjectionStore.doReads(tmpStoreQuery,
			(pStoreError, pStoreQuery, pStoreRecords) =>
			{
				if (pStoreError || !pStoreRecords || pStoreRecords.length === 0)
				{
					pResponse.send({ Error: 'Projection store not found' });
					return fNext();
				}

				let tmpStore = pStoreRecords[0];
				let tmpTableName = tmpStore.TargetTableName;
				let tmpIDConnection = tmpStore.IDStoreConnection;

				// Load the connection
				let tmpConnQuery = this.fable.DAL.StoreConnection.query.clone()
					.addFilter('IDStoreConnection', tmpIDConnection);

				this.fable.DAL.StoreConnection.doRead(tmpConnQuery,
					(pConnError, pConnQuery, pConnRecord) =>
					{
						if (pConnError || !pConnRecord)
						{
							pResponse.send({ Error: 'Store connection not found' });
							return fNext();
						}

						let tmpConnection = pConnRecord;
						// FIX: removed the dead `tmpConfig = JSON.parse(tmpConnection.Configuration)`
						// block -- the parsed value was never read, and _getOrCreateConnection
						// receives the raw connection record anyway.

						let tmpLog = [];
						tmpLog.push(`Deleting deployed store: ${tmpTableName} (ID ${tmpIDStore})`);
						tmpLog.push(`Connection: ${tmpConnection.Name} (${tmpConnection.Type})`);

						this._getOrCreateConnection(tmpConnection,
							(pConnectError, tmpConnector) =>
							{
								if (pConnectError)
								{
									tmpLog.push(`Error connecting: ${pConnectError.message || pConnectError}`);
									pResponse.send({ Error: 'Could not connect to target database', Log: tmpLog.join('\n') });
									return fNext();
								}

								// Generate and execute DROP TABLE
								let tmpDropSQL = tmpConnector.generateDropTableStatement(tmpTableName);
								tmpLog.push(`Executing: ${tmpDropSQL}`);

								try
								{
									tmpConnector.db.exec(tmpDropSQL);
									tmpLog.push(`Table ${tmpTableName} dropped successfully.`);
								}
								catch (pDropError)
								{
									tmpLog.push(`Warning: DROP TABLE error: ${pDropError.message}`);
									// Continue with soft-delete even if drop fails
								}

								// Soft-delete the ProjectionStore record, appending this run's log
								let tmpDeleteQuery = this.fable.DAL.ProjectionStore.query.clone()
									.addRecord(
									{
										IDProjectionStore: tmpIDStore,
										Deleted: 1,
										Status: 'Deleted',
										DeployLog: (tmpStore.DeployLog || '') + '\n---\n' + tmpLog.join('\n')
									});

								this.fable.DAL.ProjectionStore.doUpdate(tmpDeleteQuery,
									(pDeleteError) =>
									{
										if (pDeleteError)
										{
											// Best-effort: the table may already be dropped, so log and continue
											tmpLog.push(`Error updating store record: ${pDeleteError.message}`);
										}

										// Unregister the projection entity if cached
										if (this._ProjectionEntities && this._ProjectionEntities[tmpTableName])
										{
											delete this._ProjectionEntities[tmpTableName];
											tmpLog.push(`Unregistered projection entity: ${tmpTableName}`);
										}

										tmpLog.push(`Store deletion complete.`);
										this.fable.log.info(`ProjectionEngine: Deleted store ${tmpTableName} (ID ${tmpIDStore})`);
										pResponse.send({ Success: true, Log: tmpLog.join('\n') });
										return fNext();
									});
							});
					});
			});
	});
|
|
1173
|
+
|
|
1174
|
+
// ======================================================================
// Projection Mapping CRUD routes
// ======================================================================

// GET /facto/projection/:IDDataset/mappings -- list mappings for a projection
// Returns the non-deleted ProjectionMapping rows for the given dataset.
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/:IDDataset/mappings`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.ProjectionMapping)
		{
			// No mapping entity registered -- report an empty list rather than an error
			pResponse.send({ Mappings: [] });
			return fNext();
		}

		let tmpIDDataset = parseInt(pRequest.params.IDDataset, 10);

		let tmpListQuery = this.fable.DAL.ProjectionMapping.query.clone()
			.addFilter('IDDataset', tmpIDDataset)
			.addFilter('Deleted', 0);

		this.fable.DAL.ProjectionMapping.doReads(tmpListQuery,
			(pError, pQuery, pRecords) =>
			{
				if (pError)
				{
					this.fable.log.error(`ProjectionEngine error listing mappings: ${pError}`);
					pResponse.send({ Error: pError.message || pError, Mappings: [] });
					return fNext();
				}
				pResponse.send({ Count: pRecords.length, Mappings: pRecords });
				return fNext();
			});
	});

// GET /facto/projection/mapping/:ID -- get single mapping
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/mapping/:ID`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.ProjectionMapping)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpMappingID = parseInt(pRequest.params.ID, 10);

		let tmpReadQuery = this.fable.DAL.ProjectionMapping.query.clone()
			.addFilter('IDProjectionMapping', tmpMappingID);

		this.fable.DAL.ProjectionMapping.doRead(tmpReadQuery,
			(pError, pQuery, pRecord) =>
			{
				// Read errors and missing records share one 'not found' response
				if (pError || !pRecord || !pRecord.IDProjectionMapping)
				{
					pResponse.send({ Error: 'Mapping not found' });
					return fNext();
				}
				pResponse.send({ Mapping: pRecord });
				return fNext();
			});
	});
|
|
1235
|
+
|
|
1236
|
+
// POST /facto/projection/:IDDataset/mapping -- create mapping
// Builds a new ProjectionMapping row from the request body; JSON-valued
// fields may arrive as strings or objects and are normalized to strings.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/:IDDataset/mapping`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.ProjectionMapping)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpIDDataset = parseInt(pRequest.params.IDDataset, 10);
		let tmpRequestBody = pRequest.body || {};

		let tmpNewRecord =
		{
			IDDataset: tmpIDDataset,
			IDSource: parseInt(tmpRequestBody.IDSource, 10) || 0,
			IDProjectionStore: parseInt(tmpRequestBody.IDProjectionStore, 10) || 0,
			Name: tmpRequestBody.Name || 'New Mapping',
			SchemaVersion: parseInt(tmpRequestBody.SchemaVersion, 10) || 0,
			MappingConfiguration: (typeof tmpRequestBody.MappingConfiguration === 'string')
				? tmpRequestBody.MappingConfiguration
				: JSON.stringify(tmpRequestBody.MappingConfiguration || {}),
			FlowDiagramState: (typeof tmpRequestBody.FlowDiagramState === 'string')
				? tmpRequestBody.FlowDiagramState
				: JSON.stringify(tmpRequestBody.FlowDiagramState || {}),
			// Mappings default to active unless the caller says otherwise
			Active: (tmpRequestBody.Active !== undefined) ? (tmpRequestBody.Active ? 1 : 0) : 1
		};

		let tmpCreateQuery = this.fable.DAL.ProjectionMapping.query.clone()
			.setIDUser(0)
			.addRecord(tmpNewRecord);

		this.fable.DAL.ProjectionMapping.doCreate(tmpCreateQuery,
			(pError, pQuery, pRecord) =>
			{
				if (pError)
				{
					this.fable.log.error(`ProjectionEngine error creating mapping: ${pError}`);
					pResponse.send({ Error: pError.message || pError });
					return fNext();
				}
				pResponse.send({ Success: true, Mapping: pRecord });
				return fNext();
			});
	});

// POST /facto/projection/mapping/:ID/update -- update mapping
// Read-modify-write: only fields present in the body overwrite the record.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/mapping/:ID/update`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.ProjectionMapping)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpMappingID = parseInt(pRequest.params.ID, 10);
		let tmpRequestBody = pRequest.body || {};

		let tmpReadQuery = this.fable.DAL.ProjectionMapping.query.clone()
			.addFilter('IDProjectionMapping', tmpMappingID);

		this.fable.DAL.ProjectionMapping.doRead(tmpReadQuery,
			(pReadError, pReadQuery, pExisting) =>
			{
				if (pReadError || !pExisting || !pExisting.IDProjectionMapping)
				{
					pResponse.send({ Error: 'Mapping not found' });
					return fNext();
				}

				// Update fields that were provided
				if (tmpRequestBody.Name !== undefined) pExisting.Name = tmpRequestBody.Name;
				if (tmpRequestBody.IDSource !== undefined) pExisting.IDSource = parseInt(tmpRequestBody.IDSource, 10) || 0;
				if (tmpRequestBody.IDProjectionStore !== undefined) pExisting.IDProjectionStore = parseInt(tmpRequestBody.IDProjectionStore, 10) || 0;
				if (tmpRequestBody.SchemaVersion !== undefined) pExisting.SchemaVersion = parseInt(tmpRequestBody.SchemaVersion, 10) || 0;
				if (tmpRequestBody.Active !== undefined) pExisting.Active = tmpRequestBody.Active ? 1 : 0;
				if (tmpRequestBody.MappingConfiguration !== undefined)
				{
					pExisting.MappingConfiguration = (typeof tmpRequestBody.MappingConfiguration === 'string')
						? tmpRequestBody.MappingConfiguration
						: JSON.stringify(tmpRequestBody.MappingConfiguration);
				}
				if (tmpRequestBody.FlowDiagramState !== undefined)
				{
					pExisting.FlowDiagramState = (typeof tmpRequestBody.FlowDiagramState === 'string')
						? tmpRequestBody.FlowDiagramState
						: JSON.stringify(tmpRequestBody.FlowDiagramState);
				}

				let tmpUpdateQuery = this.fable.DAL.ProjectionMapping.query.clone()
					.setIDUser(0)
					.addRecord(pExisting);

				this.fable.DAL.ProjectionMapping.doUpdate(tmpUpdateQuery,
					(pUpdateError, pUpdateQuery, pUpdated) =>
					{
						if (pUpdateError)
						{
							this.fable.log.error(`ProjectionEngine error updating mapping: ${pUpdateError}`);
							pResponse.send({ Error: pUpdateError.message || pUpdateError });
							return fNext();
						}
						pResponse.send({ Success: true, Mapping: pUpdated });
						return fNext();
					});
			});
	});
|
|
1345
|
+
|
|
1346
|
+
// DELETE /facto/projection/mapping/:ID -- soft-delete mapping
// Marks the row Deleted with a timestamp rather than removing it.
pOratorServiceServer.doDel(`${tmpRoutePrefix}/projection/mapping/:ID`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.ProjectionMapping)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpMappingID = parseInt(pRequest.params.ID, 10);

		let tmpReadQuery = this.fable.DAL.ProjectionMapping.query.clone()
			.addFilter('IDProjectionMapping', tmpMappingID);

		this.fable.DAL.ProjectionMapping.doRead(tmpReadQuery,
			(pReadError, pReadQuery, pExisting) =>
			{
				if (pReadError || !pExisting || !pExisting.IDProjectionMapping)
				{
					pResponse.send({ Error: 'Mapping not found' });
					return fNext();
				}

				// Soft-delete: flag the record and stamp the deletion time
				pExisting.Deleted = 1;
				pExisting.DeleteDate = new Date().toISOString();

				let tmpUpdateQuery = this.fable.DAL.ProjectionMapping.query.clone()
					.setIDUser(0)
					.addRecord(pExisting);

				this.fable.DAL.ProjectionMapping.doUpdate(tmpUpdateQuery,
					(pUpdateError) =>
					{
						if (pUpdateError)
						{
							pResponse.send({ Error: pUpdateError.message || pUpdateError });
							return fNext();
						}
						pResponse.send({ Success: true });
						return fNext();
					});
			});
	});
|
|
1390
|
+
|
|
1391
|
+
// ======================================================================
// Field Discovery route
// ======================================================================

// POST /facto/projection/:IDDataset/discover-fields
// Samples up to SampleSize records (filtered by IDSource when provided,
// otherwise by the dataset), parses each record's JSON Content, and
// aggregates column statistics through the TabularCheck service.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/:IDDataset/discover-fields`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.Record)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpIDDataset = parseInt(pRequest.params.IDDataset, 10);
		let tmpBody = pRequest.body || {};
		let tmpIDSource = parseInt(tmpBody.IDSource, 10) || 0;
		let tmpSampleSize = parseInt(tmpBody.SampleSize, 10) || 50;

		// FIX: validate the TabularCheck service up front -- it was previously
		// dereferenced without a guard inside the doReads callback, so a missing
		// service registration threw an unhandled TypeError instead of returning
		// an error response.
		let tmpTabularCheck = this.fable.services.TabularCheck;
		if (!tmpTabularCheck)
		{
			pResponse.send({ Error: 'TabularCheck service not available' });
			return fNext();
		}

		let tmpRecordQuery = this.fable.DAL.Record.query.clone()
			.addFilter('Deleted', 0)
			.setCap(tmpSampleSize);

		if (tmpIDSource > 0)
		{
			tmpRecordQuery.addFilter('IDSource', tmpIDSource);
		}
		else
		{
			// Fallback: query by dataset sources
			tmpRecordQuery.addFilter('IDDataset', tmpIDDataset);
		}

		this.fable.DAL.Record.doReads(tmpRecordQuery,
			(pError, pQuery, pRecords) =>
			{
				if (pError)
				{
					this.fable.log.error(`ProjectionEngine error discovering fields: ${pError}`);
					pResponse.send({ Error: pError.message || pError });
					return fNext();
				}

				let tmpStatistics = tmpTabularCheck.newStatisticsObject('FieldDiscovery');

				// Fold each parseable record into the statistics; count only rows
				// that actually contributed (unparseable Content is logged and skipped).
				let tmpRowCount = 0;
				for (let i = 0; i < pRecords.length; i++)
				{
					try
					{
						let tmpParsed = JSON.parse(pRecords[i].Content);
						tmpTabularCheck.collectStatistics(tmpParsed, tmpStatistics);
						tmpRowCount++;
					}
					catch (pParseError)
					{
						this.fable.log.warn(`ProjectionEngine: Could not parse record ${pRecords[i].IDRecord} content: ${pParseError.message}`);
					}
				}

				pResponse.send(
					{
						Headers: tmpStatistics.Headers || [],
						ColumnStatistics: tmpStatistics.ColumnStatistics || {},
						SampleSize: tmpRowCount
					});
				return fNext();
			});
	});
|
|
1461
|
+
|
|
1462
|
+
// ======================================================================
// Import Execution route
// ======================================================================

// POST /facto/projection/:IDDataset/import
//
// Executes a projection import in two phases:
//   Phase 1: read source records from the internal DAL and run them
//            through TabularTransform to build a GUID-keyed
//            "comprehension" of unique output records.
//   Phase 2: upsert the comprehension records into the target
//            projection store via an IntegrationAdapter pointed at this
//            server's own REST API, lazily registering the Meadow
//            entity first when necessary.
//
// Body parameters:
//   IDProjectionMapping (required) — mapping configuration to apply
//   IDProjectionStore   (required) — target projection store
//   Cap                 (optional) — max source records to read (0 = all)
//   StageComprehension  (optional) — also write the comprehension to disk
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/:IDDataset/import`,
	(pRequest, pResponse, fNext) =>
	{
		// Every one of these DAL entities is touched during the import.
		if (!this.fable.DAL || !this.fable.DAL.ProjectionMapping ||
			!this.fable.DAL.ProjectionStore || !this.fable.DAL.StoreConnection ||
			!this.fable.DAL.Record || !this.fable.DAL.Dataset)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpIDDataset = parseInt(pRequest.params.IDDataset, 10);
		let tmpBody = pRequest.body || {};
		let tmpIDProjectionMapping = parseInt(tmpBody.IDProjectionMapping, 10);
		let tmpIDProjectionStore = parseInt(tmpBody.IDProjectionStore, 10);
		// NOTE(review): tmpBody.BatchSize was previously parsed here into an
		// unused local; removed as dead code (bulk upserts are disabled below
		// via RecordThresholdForBulkUpsert, so batching never applies).
		let tmpCap = parseInt(tmpBody.Cap, 10) || 0;
		let tmpStageComprehension = !!tmpBody.StageComprehension;

		if (!tmpIDProjectionMapping)
		{
			pResponse.send({ Error: 'IDProjectionMapping is required' });
			return fNext();
		}
		if (!tmpIDProjectionStore)
		{
			pResponse.send({ Error: 'IDProjectionStore is required' });
			return fNext();
		}

		let tmpAnticipate = this.fable.newAnticipate();
		let tmpMapping = null;
		let tmpMappingConfig = null;
		let tmpProjectionStore = null;
		let tmpConnection = null;
		let tmpDataset = null;
		// Running, timestamped log returned to the caller for diagnostics.
		let tmpLog = [];

		// Load mapping
		tmpAnticipate.anticipate(
			(fStepCallback) =>
			{
				let tmpQuery = this.fable.DAL.ProjectionMapping.query.clone()
					.addFilter('IDProjectionMapping', tmpIDProjectionMapping);

				this.fable.DAL.ProjectionMapping.doRead(tmpQuery,
					(pError, pQuery, pRecord) =>
					{
						if (pError || !pRecord || !pRecord.IDProjectionMapping)
						{
							return fStepCallback(new Error('Mapping not found'));
						}
						tmpMapping = pRecord;
						try
						{
							tmpMappingConfig = JSON.parse(tmpMapping.MappingConfiguration || '{}');
						}
						catch (e)
						{
							return fStepCallback(new Error('Invalid MappingConfiguration JSON'));
						}
						return fStepCallback();
					});
			});

		// Load projection store (must not be soft-deleted)
		tmpAnticipate.anticipate(
			(fStepCallback) =>
			{
				let tmpQuery = this.fable.DAL.ProjectionStore.query.clone()
					.addFilter('IDProjectionStore', tmpIDProjectionStore)
					.addFilter('Deleted', 0);

				this.fable.DAL.ProjectionStore.doRead(tmpQuery,
					(pError, pQuery, pRecord) =>
					{
						if (pError || !pRecord || !pRecord.IDProjectionStore)
						{
							return fStepCallback(new Error('ProjectionStore not found (it may have been deleted)'));
						}
						tmpProjectionStore = pRecord;
						return fStepCallback();
					});
			});

		// Load dataset
		tmpAnticipate.anticipate(
			(fStepCallback) =>
			{
				let tmpQuery = this.fable.DAL.Dataset.query.clone()
					.addFilter('IDDataset', tmpIDDataset);

				this.fable.DAL.Dataset.doRead(tmpQuery,
					(pError, pQuery, pRecord) =>
					{
						if (pError || !pRecord || !pRecord.IDDataset)
						{
							return fStepCallback(new Error('Dataset not found'));
						}
						tmpDataset = pRecord;
						return fStepCallback();
					});
			});

		tmpAnticipate.wait(
			(pError) =>
			{
				if (pError)
				{
					pResponse.send({ Error: pError.message || pError });
					return fNext();
				}

				if (!tmpProjectionStore)
				{
					pResponse.send({ Error: 'ProjectionStore not found (it may have been deleted). Please select a valid store.' });
					return fNext();
				}

				// Load the store connection
				let tmpConnQuery = this.fable.DAL.StoreConnection.query.clone()
					.addFilter('IDStoreConnection', tmpProjectionStore.IDStoreConnection);

				this.fable.DAL.StoreConnection.doRead(tmpConnQuery,
					(pConnError, pConnQuery, pConnRecord) =>
					{
						if (pConnError || !pConnRecord || !pConnRecord.IDStoreConnection)
						{
							pResponse.send({ Error: 'StoreConnection not found for this ProjectionStore' });
							return fNext();
						}
						tmpConnection = pConnRecord;

						// Parse the dataset's MicroDDL schema; it is handed to
						// _registerProjectionEntity below when lazy registration runs.
						let tmpParsedSchema = null;
						try
						{
							tmpParsedSchema = this._parseMicroDDL(tmpDataset.SchemaDefinition || '');
						}
						catch (e)
						{
							pResponse.send({ Error: `Schema parse error: ${e.message}` });
							return fNext();
						}

						// NOTE(review): the first table's Columns array was previously
						// copied into an unused local here; removed as dead code.

						tmpLog.push(`[${new Date().toISOString()}] Starting import for mapping "${tmpMapping.Name}" -> store "${tmpProjectionStore.TargetTableName}"`);

						// Query source records from internal DAL
						let tmpRecordQuery = this.fable.DAL.Record.query.clone()
							.addFilter('Deleted', 0)
							.addFilter('IDSource', tmpMapping.IDSource);

						if (tmpCap > 0)
						{
							tmpRecordQuery.setCap(tmpCap);
						}

						this.fable.DAL.Record.doReads(tmpRecordQuery,
							(pRecordError, pRecordQuery, pRecords) =>
							{
								if (pRecordError)
								{
									tmpLog.push(`[${new Date().toISOString()}] Record query error: ${pRecordError.message}`);
									pResponse.send({ Error: pRecordError.message, Log: tmpLog.join('\n') });
									return fNext();
								}

								tmpLog.push(`[${new Date().toISOString()}] Found ${pRecords.length} source records`);

								// Phase 1: Build comprehension via TabularTransform
								let tmpTabularTransform = this.fable.services.TabularTransform;
								let tmpMappingOutcome = tmpTabularTransform.newMappingOutcomeObject();
								tmpMappingOutcome.ExplicitConfiguration = tmpMappingConfig;
								// Pre-seed ImplicitConfiguration so initializeMappingOutcomeObject()
								// does not attempt to auto-generate one (which references an
								// out-of-scope variable in the meadow-integration source).
								tmpMappingOutcome.ImplicitConfiguration = tmpMappingConfig;

								let tmpParseErrorCount = 0;

								for (let i = 0; i < pRecords.length; i++)
								{
									let tmpRecord = pRecords[i];
									let tmpParsedContent;
									try
									{
										tmpParsedContent = JSON.parse(tmpRecord.Content);
									}
									catch (e)
									{
										tmpParseErrorCount++;
										tmpLog.push(`[${new Date().toISOString()}] Parse error on record ${tmpRecord.IDRecord}: ${e.message}`);
										continue;
									}

									// Pass flat content — Pict's resolveStateFromAddress wraps this
									// as rootDataObject.Record = pRecord, so {~D:Record.field~}
									// resolves to pRecord.field automatically.
									// Only include GUID (not IDRecord) — IDs are generated by the
									// target storage layer; GUIDs handle deduplication on upsert.
									let tmpWrapped = Object.assign({ GUIDRecord: tmpRecord.GUIDRecord }, tmpParsedContent);

									try
									{
										tmpTabularTransform.transformRecord(tmpWrapped, tmpMappingOutcome);
									}
									catch (e)
									{
										tmpLog.push(`[${new Date().toISOString()}] Transform error on record ${tmpRecord.IDRecord}: ${e.message}`);
									}
								}

								// Extract the comprehension records for the configured entity
								// (falling back to the first entity the transform produced).
								let tmpEntityName = tmpMappingConfig.Entity || Object.keys(tmpMappingOutcome.Comprehension)[0];
								let tmpComprehensionRecords = tmpMappingOutcome.Comprehension[tmpEntityName] || {};
								let tmpRecordGUIDs = Object.keys(tmpComprehensionRecords);

								tmpLog.push(`[${new Date().toISOString()}] Comprehension built: ${tmpMappingOutcome.ParsedRowCount} transformed, ${tmpRecordGUIDs.length} unique records (${tmpMappingOutcome.BadRecords.length} bad, ${tmpParseErrorCount} parse errors)`);

								// Stage comprehension to disk if requested (best-effort;
								// a staging failure is logged but does not abort the import).
								let tmpStagingFile = false;
								if (tmpStageComprehension)
								{
									try
									{
										let tmpStagingDir = libPath.join(process.cwd(), 'data', 'staging');
										if (!libFS.existsSync(tmpStagingDir))
										{
											libFS.mkdirSync(tmpStagingDir, { recursive: true });
										}
										let tmpTimestamp = new Date().toISOString().replace(/[:.]/g, '-');
										let tmpFilename = `comprehension-${tmpIDDataset}-${tmpIDProjectionMapping}-${tmpTimestamp}.json`;
										let tmpFilePath = libPath.join(tmpStagingDir, tmpFilename);
										let tmpStagingData = {
											IDDataset: tmpIDDataset,
											IDProjectionMapping: tmpIDProjectionMapping,
											IDProjectionStore: tmpIDProjectionStore,
											MappingName: tmpMapping.Name,
											Entity: tmpEntityName,
											Timestamp: new Date().toISOString(),
											RecordCount: tmpRecordGUIDs.length,
											BadRecordCount: tmpMappingOutcome.BadRecords.length,
											Comprehension: tmpMappingOutcome.Comprehension,
											BadRecords: tmpMappingOutcome.BadRecords
										};
										libFS.writeFileSync(tmpFilePath, JSON.stringify(tmpStagingData, null, '\t'));
										tmpStagingFile = tmpFilename;
										tmpLog.push(`[${new Date().toISOString()}] Comprehension staged to: ${tmpFilename}`);
									}
									catch (pStagingError)
									{
										tmpLog.push(`[${new Date().toISOString()}] Staging error: ${pStagingError.message}`);
									}
								}

								if (tmpRecordGUIDs.length === 0)
								{
									tmpLog.push(`[${new Date().toISOString()}] No records to insert`);
									pResponse.send(
										{
											Success: true,
											RecordsProcessed: pRecords.length,
											RecordsTransformed: 0,
											RecordsCreated: 0,
											RecordsErrored: tmpParseErrorCount + tmpMappingOutcome.BadRecords.length,
											StagingFile: tmpStagingFile,
											Log: tmpLog.join('\n')
										});
									return fNext();
								}

								// Phase 2: Upsert comprehension records via IntegrationAdapter + REST to self
								let tmpTargetEntityName = tmpProjectionStore.TargetTableName;

								// Lazy-register the projection entity if not already registered
								let tmpDoImport = () =>
								{
									let tmpPort = this.fable.settings.APIServerPort || 8080;
									let tmpServerURL = `http://localhost:${tmpPort}/1.0/`;

									// Create a RestClient that points at our own Orator server
									let tmpRestClient = this.fable.serviceManager.instantiateServiceProviderWithoutRegistration(
										'MeadowCloneRestClient',
										{
											ServerURL: tmpServerURL
										});

									// Create an IntegrationAdapter for this entity
									let tmpAdapter = this.fable.serviceManager.instantiateServiceProviderWithoutRegistration(
										'IntegrationAdapter',
										{
											Entity: tmpTargetEntityName,
											Client: tmpRestClient,
											AdapterSetGUIDMarshalPrefix: `PROJ-${tmpIDDataset}`,
											PerformUpserts: true,
											PerformDeletes: false,
											SimpleMarshal: true,
											// The bulk upsert URL appends an extra 's' to the entity
											// name (e.g. Countriess/Upserts) which breaks entities
											// whose names are already plural. Use single-record
											// upserts to avoid the mismatch.
											RecordThresholdForBulkUpsert: 999999
										});

									// Feed comprehension records to the adapter
									let tmpGUIDField = `GUID${tmpTargetEntityName}`;
									for (let i = 0; i < tmpRecordGUIDs.length; i++)
									{
										let tmpGUID = tmpRecordGUIDs[i];
										let tmpCompRecord = tmpComprehensionRecords[tmpGUID];
										// Ensure the GUID field exists for IntegrationAdapter
										if (!tmpCompRecord[tmpGUIDField])
										{
											tmpCompRecord[tmpGUIDField] = tmpGUID;
										}
										tmpAdapter.addSourceRecord(tmpCompRecord);
									}

									tmpLog.push(`[${new Date().toISOString()}] Pushing ${tmpRecordGUIDs.length} records via IntegrationAdapter to ${tmpServerURL}${tmpTargetEntityName}/Upsert`);

									// Marshal and push records through the REST API
									tmpAdapter.marshalSourceRecords(
										(pMarshalError) =>
										{
											if (pMarshalError)
											{
												tmpLog.push(`[${new Date().toISOString()}] Marshal error: ${pMarshalError.message}`);
												pResponse.send(
													{
														Error: `Marshal error: ${pMarshalError.message}`,
														RecordsProcessed: pRecords.length,
														RecordsTransformed: tmpRecordGUIDs.length,
														StagingFile: tmpStagingFile,
														Log: tmpLog.join('\n')
													});
												return fNext();
											}

											// NOTE(review): reaches into the adapter's private
											// _MarshaledRecords map for a count — confirm against
											// the IntegrationAdapter API for a public accessor.
											let tmpMarshaledCount = Object.keys(tmpAdapter._MarshaledRecords).length;
											tmpLog.push(`[${new Date().toISOString()}] Marshaled ${tmpMarshaledCount} records; pushing to server...`);

											tmpAdapter.pushRecordsToServer(
												(pPushError) =>
												{
													// A push error is reported via Success:false and
													// the log rather than an early error return.
													if (pPushError)
													{
														tmpLog.push(`[${new Date().toISOString()}] Push error: ${pPushError.message}`);
													}

													tmpLog.push(`[${new Date().toISOString()}] Import complete: ${pRecords.length} source records, ${tmpRecordGUIDs.length} unique, ${tmpMarshaledCount} upserted`);

													pResponse.send(
														{
															Success: !pPushError,
															RecordsProcessed: pRecords.length,
															RecordsTransformed: tmpRecordGUIDs.length,
															RecordsDeduplicated: tmpMappingOutcome.ParsedRowCount - tmpRecordGUIDs.length,
															BadRecords: tmpMappingOutcome.BadRecords.length,
															RecordsUpserted: tmpMarshaledCount,
															StagingFile: tmpStagingFile,
															Log: tmpLog.join('\n')
														});
													return fNext();
												});
										});
								};

								if (!this._ProjectionEntities[tmpTargetEntityName])
								{
									// Entity not yet registered — register lazily
									tmpLog.push(`[${new Date().toISOString()}] Registering Meadow entity [${tmpTargetEntityName}] for REST upserts...`);
									this._registerProjectionEntity(tmpProjectionStore, tmpConnection, tmpParsedSchema, null,
										(pRegError) =>
										{
											if (pRegError)
											{
												tmpLog.push(`[${new Date().toISOString()}] Entity registration failed: ${pRegError.message}`);
												pResponse.send(
													{
														Error: `Entity registration failed: ${pRegError.message}`,
														Log: tmpLog.join('\n')
													});
												return fNext();
											}
											tmpLog.push(`[${new Date().toISOString()}] Entity [${tmpTargetEntityName}] registered successfully`);
											return tmpDoImport();
										});
								}
								else
								{
									return tmpDoImport();
								}
							});
					});
			});
	});
|
|
1873
|
+
|
|
1874
|
+
// ======================================================================
// Comprehension Staging routes
// ======================================================================

// GET /facto/projection/:IDDataset/comprehensions — list staged comprehension files
// Answers with the staged comprehension filenames for the dataset, in
// reverse lexicographic order (filenames embed a timestamp, so this is
// effectively newest-first).
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/:IDDataset/comprehensions`,
	(pRequest, pResponse, fNext) =>
	{
		const tmpDatasetID = parseInt(pRequest.params.IDDataset, 10);
		const tmpStagingFolder = libPath.join(process.cwd(), 'data', 'staging');

		// No staging folder yet simply means nothing has been staged.
		if (!libFS.existsSync(tmpStagingFolder))
		{
			pResponse.send({ Files: [] });
			return fNext();
		}

		try
		{
			const tmpExpectedPrefix = `comprehension-${tmpDatasetID}-`;
			const tmpMatchingFiles = libFS.readdirSync(tmpStagingFolder)
				.filter((pEntry) => pEntry.startsWith(tmpExpectedPrefix) && pEntry.endsWith('.json'))
				.sort()
				.reverse();

			pResponse.send({ Files: tmpMatchingFiles });
		}
		catch (pListError)
		{
			pResponse.send({ Error: pListError.message, Files: [] });
		}
		return fNext();
	});
|
|
1908
|
+
|
|
1909
|
+
// GET /facto/projection/:IDDataset/comprehension/:Filename — download a staged comprehension file
// Reads a staged comprehension JSON file back to the caller after
// rejecting any filename that could escape the staging directory.
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/:IDDataset/comprehension/:Filename`,
	(pRequest, pResponse, fNext) =>
	{
		const tmpRequestedFilename = pRequest.params.Filename;

		// Sanitize filename to prevent path traversal
		const tmpLooksUnsafe = !tmpRequestedFilename
			|| tmpRequestedFilename.includes('..')
			|| tmpRequestedFilename.includes('/')
			|| tmpRequestedFilename.includes('\\');
		if (tmpLooksUnsafe)
		{
			pResponse.send({ Error: 'Invalid filename' });
			return fNext();
		}

		const tmpStagedFilePath = libPath.join(process.cwd(), 'data', 'staging', tmpRequestedFilename);

		if (!libFS.existsSync(tmpStagedFilePath))
		{
			pResponse.send({ Error: 'File not found' });
			return fNext();
		}

		try
		{
			// Parse before sending so a corrupt file surfaces as an Error payload.
			const tmpStagedComprehension = JSON.parse(libFS.readFileSync(tmpStagedFilePath, 'utf8'));
			pResponse.send(tmpStagedComprehension);
		}
		catch (pFileError)
		{
			pResponse.send({ Error: pFileError.message });
		}
		return fNext();
	});
|
|
1942
|
+
|
|
1943
|
+
// ======================================================================
// Multi-Set Projection CRUD routes
// ======================================================================

// GET /facto/projection/:IDDataset/multi-set-projections
// Lists every non-deleted MultiSetProjection record for a dataset.
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/:IDDataset/multi-set-projections`,
	(pRequest, pResponse, fNext) =>
	{
		// Without a DAL there is nothing to list; answer with an empty set.
		if (!this.fable.DAL || !this.fable.DAL.MultiSetProjection)
		{
			pResponse.send({ MultiSetProjections: [] });
			return fNext();
		}

		const tmpDatasetID = parseInt(pRequest.params.IDDataset, 10);

		const tmpListQuery = this.fable.DAL.MultiSetProjection.query.clone()
			.addFilter('IDDataset', tmpDatasetID)
			.addFilter('Deleted', 0);

		this.fable.DAL.MultiSetProjection.doReads(tmpListQuery,
			(pReadError, pReadQuery, pProjectionRecords) =>
			{
				if (pReadError)
				{
					pResponse.send({ Error: pReadError.message || pReadError, MultiSetProjections: [] });
					return fNext();
				}
				pResponse.send({ Count: pProjectionRecords.length, MultiSetProjections: pProjectionRecords });
				return fNext();
			});
	});
|
|
1975
|
+
|
|
1976
|
+
// GET /facto/projection/multi-set-projection/:ID
// Fetches a single MultiSetProjection by its primary key.
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/multi-set-projection/:ID`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.MultiSetProjection)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		const tmpProjectionID = parseInt(pRequest.params.ID, 10);

		const tmpReadQuery = this.fable.DAL.MultiSetProjection.query.clone()
			.addFilter('IDMultiSetProjection', tmpProjectionID);

		this.fable.DAL.MultiSetProjection.doRead(tmpReadQuery,
			(pReadError, pReadQueryResult, pRawRecord) =>
			{
				// Meadow can hand back the query chain instead of the bare
				// record; unwrap result.value when that shape is detected.
				let tmpProjectionRecord = pRawRecord;
				if (pRawRecord && pRawRecord.result && Array.isArray(pRawRecord.result.value) && pRawRecord.result.value.length > 0)
				{
					tmpProjectionRecord = pRawRecord.result.value[0];
				}

				if (pReadError || !tmpProjectionRecord || !tmpProjectionRecord.IDMultiSetProjection)
				{
					pResponse.send({ Error: 'MultiSetProjection not found' });
					return fNext();
				}
				pResponse.send({ MultiSetProjection: tmpProjectionRecord });
				return fNext();
			});
	});
|
|
2010
|
+
|
|
2011
|
+
// POST /facto/projection/:IDDataset/multi-set-projection -- create
// Creates a MultiSetProjection for the dataset, applying defaults for
// any field the caller omitted.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/:IDDataset/multi-set-projection`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.MultiSetProjection)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		const tmpDatasetID = parseInt(pRequest.params.IDDataset, 10);
		const tmpRequestBody = pRequest.body || {};

		// The pipeline configuration may arrive as a string or an object;
		// either way it is persisted as a JSON string.
		let tmpPipelineConfigurationJSON;
		if (typeof tmpRequestBody.PipelineConfiguration === 'string')
		{
			tmpPipelineConfigurationJSON = tmpRequestBody.PipelineConfiguration;
		}
		else
		{
			tmpPipelineConfigurationJSON = JSON.stringify(tmpRequestBody.PipelineConfiguration || { Steps: [], ConfidenceReinforcement: { Enabled: false } });
		}

		const tmpProjectionSeed =
			{
				IDDataset: tmpDatasetID,
				IDProjectionStore: parseInt(tmpRequestBody.IDProjectionStore, 10) || 0,
				Name: tmpRequestBody.Name || 'New Multi-Set Projection',
				Description: tmpRequestBody.Description || '',
				PipelineConfiguration: tmpPipelineConfigurationJSON,
				// Active defaults to 1 when not supplied.
				Active: (tmpRequestBody.Active !== undefined) ? (tmpRequestBody.Active ? 1 : 0) : 1
			};

		const tmpCreateQuery = this.fable.DAL.MultiSetProjection.query.clone()
			.setIDUser(0)
			.addRecord(tmpProjectionSeed);

		this.fable.DAL.MultiSetProjection.doCreate(tmpCreateQuery,
			(pCreateError, pCreateQuery, pRawRecord) =>
			{
				if (pCreateError)
				{
					this.fable.log.error(`ProjectionEngine error creating MultiSetProjection: ${pCreateError}`);
					pResponse.send({ Error: pCreateError.message || pCreateError });
					return fNext();
				}
				// Meadow may return the query chain; extract the record from result.value
				let tmpCreatedProjection = pRawRecord;
				if (pRawRecord && pRawRecord.result && Array.isArray(pRawRecord.result.value) && pRawRecord.result.value.length > 0)
				{
					tmpCreatedProjection = pRawRecord.result.value[0];
				}
				pResponse.send({ Success: true, MultiSetProjection: tmpCreatedProjection });
				return fNext();
			});
	});
|
|
2059
|
+
|
|
2060
|
+
// POST /facto/projection/multi-set-projection/:ID/update
// Partially updates a MultiSetProjection: the record is read fresh,
// only fields present in the body are overwritten, then it is written
// back through Meadow.
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/multi-set-projection/:ID/update`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.MultiSetProjection)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		const tmpProjectionID = parseInt(pRequest.params.ID, 10);
		const tmpRequestBody = pRequest.body || {};

		const tmpReadQuery = this.fable.DAL.MultiSetProjection.query.clone()
			.addFilter('IDMultiSetProjection', tmpProjectionID);

		this.fable.DAL.MultiSetProjection.doRead(tmpReadQuery,
			(pReadError, pReadQueryResult, pExistingRaw) =>
			{
				// Meadow can hand back the query chain; unwrap result.value if so.
				let tmpProjectionRecord = pExistingRaw;
				if (pExistingRaw && pExistingRaw.result && Array.isArray(pExistingRaw.result.value) && pExistingRaw.result.value.length > 0)
				{
					tmpProjectionRecord = pExistingRaw.result.value[0];
				}

				if (pReadError || !tmpProjectionRecord || !tmpProjectionRecord.IDMultiSetProjection)
				{
					pResponse.send({ Error: 'MultiSetProjection not found' });
					return fNext();
				}

				// Apply only the fields the caller actually supplied.
				if (tmpRequestBody.Name !== undefined)
				{
					tmpProjectionRecord.Name = tmpRequestBody.Name;
				}
				if (tmpRequestBody.Description !== undefined)
				{
					tmpProjectionRecord.Description = tmpRequestBody.Description;
				}
				if (tmpRequestBody.IDProjectionStore !== undefined)
				{
					tmpProjectionRecord.IDProjectionStore = parseInt(tmpRequestBody.IDProjectionStore, 10) || 0;
				}
				if (tmpRequestBody.Active !== undefined)
				{
					tmpProjectionRecord.Active = tmpRequestBody.Active ? 1 : 0;
				}
				if (tmpRequestBody.PipelineConfiguration !== undefined)
				{
					tmpProjectionRecord.PipelineConfiguration = (typeof tmpRequestBody.PipelineConfiguration === 'string')
						? tmpRequestBody.PipelineConfiguration
						: JSON.stringify(tmpRequestBody.PipelineConfiguration);
				}

				const tmpUpdateQuery = this.fable.DAL.MultiSetProjection.query.clone()
					.setIDUser(0)
					.addRecord(tmpProjectionRecord);

				this.fable.DAL.MultiSetProjection.doUpdate(tmpUpdateQuery,
					(pUpdateError, pUpdateQueryResult, pUpdatedRaw) =>
					{
						if (pUpdateError)
						{
							pResponse.send({ Error: pUpdateError.message || pUpdateError });
							return fNext();
						}
						// Meadow may return the query chain; extract the record
						let tmpUpdatedProjection = pUpdatedRaw;
						if (pUpdatedRaw && pUpdatedRaw.result && Array.isArray(pUpdatedRaw.result.value) && pUpdatedRaw.result.value.length > 0)
						{
							tmpUpdatedProjection = pUpdatedRaw.result.value[0];
						}
						pResponse.send({ Success: true, MultiSetProjection: tmpUpdatedProjection });
						return fNext();
					});
			});
	});
|
|
2126
|
+
|
|
2127
|
+
// DELETE /facto/projection/multi-set-projection/:ID
// Soft-deletes a MultiSetProjection via Meadow's built-in doDelete
// (which flips the Deleted column rather than removing the row).
pOratorServiceServer.doDel(`${tmpRoutePrefix}/projection/multi-set-projection/:ID`,
	(pRequest, pResponse, fNext) =>
	{
		if (!this.fable.DAL || !this.fable.DAL.MultiSetProjection)
		{
			pResponse.send({ Error: 'DAL not initialized' });
			return fNext();
		}

		let tmpID = parseInt(pRequest.params.ID, 10);

		// NOTE(review): a second, identical read query was previously built
		// here and never used; removed as dead code.

		// Use Meadow's built-in delete (soft-delete via the Deleted column)
		let tmpDeleteQuery = this.fable.DAL.MultiSetProjection.query.clone()
			.addFilter('IDMultiSetProjection', tmpID);

		this.fable.DAL.MultiSetProjection.doDelete(tmpDeleteQuery,
			(pDeleteError, pDeleteQuery, pResult) =>
			{
				if (pDeleteError)
				{
					pResponse.send({ Error: pDeleteError.message || pDeleteError });
					return fNext();
				}
				pResponse.send({ Success: true });
				return fNext();
			});
	});
|
|
2158
|
+
|
|
2159
|
+
// ======================================================================
|
|
2160
|
+
// Multi-Set Import Execution route
|
|
2161
|
+
// ======================================================================
|
|
2162
|
+
|
|
2163
|
+
// POST /facto/projection/:IDDataset/multi-import
|
|
2164
|
+
pOratorServiceServer.doPost(`${tmpRoutePrefix}/projection/:IDDataset/multi-import`,
|
|
2165
|
+
(pRequest, pResponse, fNext) =>
|
|
2166
|
+
{
|
|
2167
|
+
if (!this.fable.DAL || !this.fable.DAL.MultiSetProjection ||
|
|
2168
|
+
!this.fable.DAL.ProjectionMapping || !this.fable.DAL.ProjectionStore ||
|
|
2169
|
+
!this.fable.DAL.StoreConnection || !this.fable.DAL.Record ||
|
|
2170
|
+
!this.fable.DAL.Dataset)
|
|
2171
|
+
{
|
|
2172
|
+
pResponse.send({ Error: 'DAL not initialized' });
|
|
2173
|
+
return fNext();
|
|
2174
|
+
}
|
|
2175
|
+
|
|
2176
|
+
let tmpIDDataset = parseInt(pRequest.params.IDDataset, 10);
|
|
2177
|
+
let tmpBody = pRequest.body || {};
|
|
2178
|
+
let tmpIDMultiSetProjection = parseInt(tmpBody.IDMultiSetProjection, 10);
|
|
2179
|
+
let tmpIDProjectionStore = parseInt(tmpBody.IDProjectionStore, 10);
|
|
2180
|
+
let tmpCap = parseInt(tmpBody.Cap, 10) || 0;
|
|
2181
|
+
let tmpStageComprehension = !!tmpBody.StageComprehension;
|
|
2182
|
+
|
|
2183
|
+
if (!tmpIDMultiSetProjection)
|
|
2184
|
+
{
|
|
2185
|
+
pResponse.send({ Error: 'IDMultiSetProjection is required' });
|
|
2186
|
+
return fNext();
|
|
2187
|
+
}
|
|
2188
|
+
|
|
2189
|
+
let tmpAnticipate = this.fable.newAnticipate();
|
|
2190
|
+
let tmpMultiSetProjection = null;
|
|
2191
|
+
let tmpPipelineConfig = null;
|
|
2192
|
+
let tmpProjectionStore = null;
|
|
2193
|
+
let tmpConnection = null;
|
|
2194
|
+
let tmpDataset = null;
|
|
2195
|
+
|
|
2196
|
+
// Load MultiSetProjection
|
|
2197
|
+
tmpAnticipate.anticipate(
|
|
2198
|
+
(fStepCallback) =>
|
|
2199
|
+
{
|
|
2200
|
+
let tmpQuery = this.fable.DAL.MultiSetProjection.query.clone()
|
|
2201
|
+
.addFilter('IDMultiSetProjection', tmpIDMultiSetProjection);
|
|
2202
|
+
|
|
2203
|
+
this.fable.DAL.MultiSetProjection.doRead(tmpQuery,
|
|
2204
|
+
(pError, pQuery, pRecord) =>
|
|
2205
|
+
{
|
|
2206
|
+
if (pError || !pRecord || !pRecord.IDMultiSetProjection)
|
|
2207
|
+
{
|
|
2208
|
+
return fStepCallback(new Error('MultiSetProjection not found'));
|
|
2209
|
+
}
|
|
2210
|
+
tmpMultiSetProjection = pRecord;
|
|
2211
|
+
try
|
|
2212
|
+
{
|
|
2213
|
+
tmpPipelineConfig = JSON.parse(tmpMultiSetProjection.PipelineConfiguration || '{}');
|
|
2214
|
+
}
|
|
2215
|
+
catch (e)
|
|
2216
|
+
{
|
|
2217
|
+
return fStepCallback(new Error('Invalid PipelineConfiguration JSON'));
|
|
2218
|
+
}
|
|
2219
|
+
// Use the store from the body or fall back to the entity's store
|
|
2220
|
+
if (!tmpIDProjectionStore)
|
|
2221
|
+
{
|
|
2222
|
+
tmpIDProjectionStore = tmpMultiSetProjection.IDProjectionStore;
|
|
2223
|
+
}
|
|
2224
|
+
return fStepCallback();
|
|
2225
|
+
});
|
|
2226
|
+
});
|
|
2227
|
+
|
|
2228
|
+
// Load ProjectionStore (after MultiSetProjection so we have the fallback ID)
|
|
2229
|
+
tmpAnticipate.anticipate(
|
|
2230
|
+
(fStepCallback) =>
|
|
2231
|
+
{
|
|
2232
|
+
if (!tmpIDProjectionStore)
|
|
2233
|
+
{
|
|
2234
|
+
return fStepCallback(new Error('IDProjectionStore is required'));
|
|
2235
|
+
}
|
|
2236
|
+
|
|
2237
|
+
let tmpQuery = this.fable.DAL.ProjectionStore.query.clone()
|
|
2238
|
+
.addFilter('IDProjectionStore', tmpIDProjectionStore);
|
|
2239
|
+
|
|
2240
|
+
this.fable.DAL.ProjectionStore.doRead(tmpQuery,
|
|
2241
|
+
(pError, pQuery, pRecord) =>
|
|
2242
|
+
{
|
|
2243
|
+
if (pError || !pRecord || !pRecord.IDProjectionStore)
|
|
2244
|
+
{
|
|
2245
|
+
return fStepCallback(new Error('ProjectionStore not found'));
|
|
2246
|
+
}
|
|
2247
|
+
tmpProjectionStore = pRecord;
|
|
2248
|
+
return fStepCallback();
|
|
2249
|
+
});
|
|
2250
|
+
});
|
|
2251
|
+
|
|
2252
|
+
// Load Dataset
|
|
2253
|
+
tmpAnticipate.anticipate(
|
|
2254
|
+
(fStepCallback) =>
|
|
2255
|
+
{
|
|
2256
|
+
let tmpQuery = this.fable.DAL.Dataset.query.clone()
|
|
2257
|
+
.addFilter('IDDataset', tmpIDDataset);
|
|
2258
|
+
|
|
2259
|
+
this.fable.DAL.Dataset.doRead(tmpQuery,
|
|
2260
|
+
(pError, pQuery, pRecord) =>
|
|
2261
|
+
{
|
|
2262
|
+
if (pError || !pRecord || !pRecord.IDDataset)
|
|
2263
|
+
{
|
|
2264
|
+
return fStepCallback(new Error('Dataset not found'));
|
|
2265
|
+
}
|
|
2266
|
+
tmpDataset = pRecord;
|
|
2267
|
+
return fStepCallback();
|
|
2268
|
+
});
|
|
2269
|
+
});
|
|
2270
|
+
|
|
2271
|
+
tmpAnticipate.wait(
|
|
2272
|
+
(pError) =>
|
|
2273
|
+
{
|
|
2274
|
+
if (pError)
|
|
2275
|
+
{
|
|
2276
|
+
pResponse.send({ Error: pError.message || pError });
|
|
2277
|
+
return fNext();
|
|
2278
|
+
}
|
|
2279
|
+
|
|
2280
|
+
// Load the StoreConnection
|
|
2281
|
+
let tmpConnQuery = this.fable.DAL.StoreConnection.query.clone()
|
|
2282
|
+
.addFilter('IDStoreConnection', tmpProjectionStore.IDStoreConnection);
|
|
2283
|
+
|
|
2284
|
+
this.fable.DAL.StoreConnection.doRead(tmpConnQuery,
|
|
2285
|
+
(pConnError, pConnQuery, pConnRecord) =>
|
|
2286
|
+
{
|
|
2287
|
+
if (pConnError || !pConnRecord || !pConnRecord.IDStoreConnection)
|
|
2288
|
+
{
|
|
2289
|
+
pResponse.send({ Error: 'StoreConnection not found for this ProjectionStore' });
|
|
2290
|
+
return fNext();
|
|
2291
|
+
}
|
|
2292
|
+
tmpConnection = pConnRecord;
|
|
2293
|
+
|
|
2294
|
+
// Parse schema
|
|
2295
|
+
let tmpParsedSchema = null;
|
|
2296
|
+
try
|
|
2297
|
+
{
|
|
2298
|
+
tmpParsedSchema = this._parseMicroDDL(tmpDataset.SchemaDefinition || '');
|
|
2299
|
+
}
|
|
2300
|
+
catch (e)
|
|
2301
|
+
{
|
|
2302
|
+
pResponse.send({ Error: `Schema parse error: ${e.message}` });
|
|
2303
|
+
return fNext();
|
|
2304
|
+
}
|
|
2305
|
+
|
|
2306
|
+
// Execute the multi-set pipeline
|
|
2307
|
+
this._executeMultiSetImport(
|
|
2308
|
+
{
|
|
2309
|
+
MultiSetProjection: tmpMultiSetProjection,
|
|
2310
|
+
PipelineConfig: tmpPipelineConfig,
|
|
2311
|
+
ProjectionStore: tmpProjectionStore,
|
|
2312
|
+
Connection: tmpConnection,
|
|
2313
|
+
Dataset: tmpDataset,
|
|
2314
|
+
ParsedSchema: tmpParsedSchema,
|
|
2315
|
+
IDDataset: tmpIDDataset,
|
|
2316
|
+
Cap: tmpCap,
|
|
2317
|
+
StageComprehension: tmpStageComprehension
|
|
2318
|
+
},
|
|
2319
|
+
(pImportError, pResult) =>
|
|
2320
|
+
{
|
|
2321
|
+
if (pImportError)
|
|
2322
|
+
{
|
|
2323
|
+
pResponse.send({ Error: pImportError.message || pImportError, Log: (pResult && pResult.Log) || '' });
|
|
2324
|
+
return fNext();
|
|
2325
|
+
}
|
|
2326
|
+
pResponse.send(pResult);
|
|
2327
|
+
return fNext();
|
|
2328
|
+
});
|
|
2329
|
+
});
|
|
2330
|
+
});
|
|
2331
|
+
});
|
|
2332
|
+
|
|
2333
|
+
// ======================================================================
|
|
2334
|
+
// Multi-Set Certainty Log route
|
|
2335
|
+
// ======================================================================
|
|
2336
|
+
|
|
2337
|
+
// GET /facto/projection/multi-set-projection/:ID/certainty-log
|
|
2338
|
+
pOratorServiceServer.doGet(`${tmpRoutePrefix}/projection/multi-set-projection/:ID/certainty-log`,
|
|
2339
|
+
(pRequest, pResponse, fNext) =>
|
|
2340
|
+
{
|
|
2341
|
+
if (!this.fable.DAL || !this.fable.DAL.ProjectionCertaintyLog)
|
|
2342
|
+
{
|
|
2343
|
+
pResponse.send({ CertaintyLog: [] });
|
|
2344
|
+
return fNext();
|
|
2345
|
+
}
|
|
2346
|
+
|
|
2347
|
+
let tmpID = parseInt(pRequest.params.ID, 10);
|
|
2348
|
+
let tmpBegin = parseInt(pRequest.query.Begin, 10) || 0;
|
|
2349
|
+
let tmpCap = parseInt(pRequest.query.Cap, 10) || 200;
|
|
2350
|
+
|
|
2351
|
+
let tmpQuery = this.fable.DAL.ProjectionCertaintyLog.query.clone()
|
|
2352
|
+
.addFilter('IDMultiSetProjection', tmpID)
|
|
2353
|
+
.addFilter('Deleted', 0)
|
|
2354
|
+
.setBegin(tmpBegin)
|
|
2355
|
+
.setCap(tmpCap);
|
|
2356
|
+
|
|
2357
|
+
this.fable.DAL.ProjectionCertaintyLog.doReads(tmpQuery,
|
|
2358
|
+
(pError, pQuery, pRecords) =>
|
|
2359
|
+
{
|
|
2360
|
+
if (pError)
|
|
2361
|
+
{
|
|
2362
|
+
pResponse.send({ Error: pError.message || pError, CertaintyLog: [] });
|
|
2363
|
+
return fNext();
|
|
2364
|
+
}
|
|
2365
|
+
pResponse.send({ Count: pRecords.length, CertaintyLog: pRecords });
|
|
2366
|
+
return fNext();
|
|
2367
|
+
});
|
|
2368
|
+
});
|
|
2369
|
+
|
|
2370
|
+
this.fable.log.info(`ProjectionEngine routes connected at ${tmpRoutePrefix}/projections/*`);
|
|
2371
|
+
}
|
|
2372
|
+
|
|
2373
|
+
/**
|
|
2374
|
+
* Parse MicroDDL text into a schema object with Tables.
|
|
2375
|
+
*
|
|
2376
|
+
* Supports the core MicroDDL symbols:
|
|
2377
|
+
* ! = Table declaration
|
|
2378
|
+
* @ = Auto-identity column
|
|
2379
|
+
* % = GUID column
|
|
2380
|
+
* $ = String column (with size)
|
|
2381
|
+
* * = Text column
|
|
2382
|
+
* # = Numeric column
|
|
2383
|
+
* . = Decimal column (with precision)
|
|
2384
|
+
* & = DateTime column
|
|
2385
|
+
* ^ = Boolean column
|
|
2386
|
+
* -> = Join reference (ignored for schema generation)
|
|
2387
|
+
*
|
|
2388
|
+
* @param {string} pDDL - MicroDDL text
|
|
2389
|
+
* @returns {object} Schema object with Tables hash
|
|
2390
|
+
*/
|
|
2391
|
+
|
|
2392
|
+
/**
|
|
2393
|
+
* Ensure an array of column definitions includes the standard Meadow
|
|
2394
|
+
* tracking columns (CreateDate, UpdateDate, Deleted, etc.).
|
|
2395
|
+
* Mutates the array in place.
|
|
2396
|
+
*/
|
|
2397
|
+
_ensureTrackingColumns(pColumns)
|
|
2398
|
+
{
|
|
2399
|
+
let tmpExisting = {};
|
|
2400
|
+
for (let i = 0; i < pColumns.length; i++)
|
|
2401
|
+
{
|
|
2402
|
+
tmpExisting[pColumns[i].Column] = true;
|
|
2403
|
+
}
|
|
2404
|
+
|
|
2405
|
+
let tmpTrackingColumns =
|
|
2406
|
+
[
|
|
2407
|
+
{ Column: 'CreateDate', DataType: 'DateTime' },
|
|
2408
|
+
{ Column: 'CreatingIDUser', DataType: 'Numeric', Size: 'int' },
|
|
2409
|
+
{ Column: 'UpdateDate', DataType: 'DateTime' },
|
|
2410
|
+
{ Column: 'UpdatingIDUser', DataType: 'Numeric', Size: 'int' },
|
|
2411
|
+
{ Column: 'Deleted', DataType: 'Boolean' },
|
|
2412
|
+
{ Column: 'DeleteDate', DataType: 'DateTime' },
|
|
2413
|
+
{ Column: 'DeletingIDUser', DataType: 'Numeric', Size: 'int' }
|
|
2414
|
+
];
|
|
2415
|
+
|
|
2416
|
+
for (let i = 0; i < tmpTrackingColumns.length; i++)
|
|
2417
|
+
{
|
|
2418
|
+
if (!tmpExisting[tmpTrackingColumns[i].Column])
|
|
2419
|
+
{
|
|
2420
|
+
pColumns.push(tmpTrackingColumns[i]);
|
|
2421
|
+
}
|
|
2422
|
+
}
|
|
2423
|
+
}
|
|
2424
|
+
|
|
2425
|
+
/**
|
|
2426
|
+
* Build a complete Meadow package object from parsed MicroDDL columns,
|
|
2427
|
+
* following the same pattern Stricture uses to generate Meadow models.
|
|
2428
|
+
*
|
|
2429
|
+
* Produces: Scope, DefaultIdentifier, Schema, DefaultObject, JsonSchema
|
|
2430
|
+
*
|
|
2431
|
+
* @param {string} pEntityName - The entity/table name (e.g. 'Countries')
|
|
2432
|
+
* @param {Array} pColumns - Column definitions from _parseMicroDDL()
|
|
2433
|
+
* @returns {object} Complete Meadow package object
|
|
2434
|
+
*/
|
|
2435
|
+
_buildMeadowPackageObject(pEntityName, pColumns)
|
|
2436
|
+
{
|
|
2437
|
+
// Ensure tracking columns are present
|
|
2438
|
+
this._ensureTrackingColumns(pColumns);
|
|
2439
|
+
|
|
2440
|
+
let tmpPrimaryKey = `ID${pEntityName}`;
|
|
2441
|
+
|
|
2442
|
+
let tmpPackage =
|
|
2443
|
+
{
|
|
2444
|
+
Scope: pEntityName,
|
|
2445
|
+
DefaultIdentifier: tmpPrimaryKey,
|
|
2446
|
+
Schema: [],
|
|
2447
|
+
DefaultObject: {},
|
|
2448
|
+
JsonSchema:
|
|
2449
|
+
{
|
|
2450
|
+
title: pEntityName,
|
|
2451
|
+
type: 'object',
|
|
2452
|
+
properties: {},
|
|
2453
|
+
required: []
|
|
2454
|
+
}
|
|
2455
|
+
};
|
|
2456
|
+
|
|
2457
|
+
for (let i = 0; i < pColumns.length; i++)
|
|
2458
|
+
{
|
|
2459
|
+
let tmpCol = pColumns[i];
|
|
2460
|
+
let tmpColumnName = tmpCol.Column;
|
|
2461
|
+
let tmpColumnSize = tmpCol.Size || 'Default';
|
|
2462
|
+
let tmpSchemaEntry = { Column: tmpColumnName, Size: tmpColumnSize };
|
|
2463
|
+
|
|
2464
|
+
let tmpDataType = tmpCol.DataType || '';
|
|
2465
|
+
|
|
2466
|
+
switch (tmpDataType)
|
|
2467
|
+
{
|
|
2468
|
+
case 'ID':
|
|
2469
|
+
tmpSchemaEntry.Type = 'AutoIdentity';
|
|
2470
|
+
tmpPackage.DefaultObject[tmpColumnName] = 0;
|
|
2471
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'integer', size: tmpColumnSize };
|
|
2472
|
+
tmpPackage.JsonSchema.required.push(tmpColumnName);
|
|
2473
|
+
break;
|
|
2474
|
+
case 'GUID':
|
|
2475
|
+
tmpSchemaEntry.Type = 'AutoGUID';
|
|
2476
|
+
tmpPackage.DefaultObject[tmpColumnName] = '0x0000000000000000';
|
|
2477
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'string', size: tmpColumnSize };
|
|
2478
|
+
break;
|
|
2479
|
+
case 'Numeric':
|
|
2480
|
+
tmpSchemaEntry.Type = 'Integer';
|
|
2481
|
+
tmpPackage.DefaultObject[tmpColumnName] = 0;
|
|
2482
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'integer', size: tmpColumnSize };
|
|
2483
|
+
break;
|
|
2484
|
+
case 'Decimal':
|
|
2485
|
+
tmpSchemaEntry.Type = 'Decimal';
|
|
2486
|
+
tmpPackage.DefaultObject[tmpColumnName] = 0.0;
|
|
2487
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'number', size: tmpColumnSize };
|
|
2488
|
+
break;
|
|
2489
|
+
case 'String':
|
|
2490
|
+
case 'Text':
|
|
2491
|
+
tmpSchemaEntry.Type = 'String';
|
|
2492
|
+
tmpPackage.DefaultObject[tmpColumnName] = '';
|
|
2493
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'string', size: tmpColumnSize };
|
|
2494
|
+
break;
|
|
2495
|
+
case 'DateTime':
|
|
2496
|
+
tmpSchemaEntry.Type = 'DateTime';
|
|
2497
|
+
tmpPackage.DefaultObject[tmpColumnName] = null;
|
|
2498
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'string', size: tmpColumnSize };
|
|
2499
|
+
break;
|
|
2500
|
+
case 'Boolean':
|
|
2501
|
+
tmpSchemaEntry.Type = 'Boolean';
|
|
2502
|
+
tmpPackage.DefaultObject[tmpColumnName] = false;
|
|
2503
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'boolean', size: tmpColumnSize };
|
|
2504
|
+
break;
|
|
2505
|
+
default:
|
|
2506
|
+
tmpSchemaEntry.Type = tmpCol.MeadowType || 'Default';
|
|
2507
|
+
tmpPackage.DefaultObject[tmpColumnName] = '';
|
|
2508
|
+
tmpPackage.JsonSchema.properties[tmpColumnName] = { type: 'string', size: tmpColumnSize };
|
|
2509
|
+
break;
|
|
2510
|
+
}
|
|
2511
|
+
|
|
2512
|
+
// Mark magic change-tracking columns by name
|
|
2513
|
+
switch (tmpColumnName)
|
|
2514
|
+
{
|
|
2515
|
+
case 'CreateDate':
|
|
2516
|
+
tmpSchemaEntry.Type = 'CreateDate';
|
|
2517
|
+
break;
|
|
2518
|
+
case 'CreatingIDUser':
|
|
2519
|
+
tmpSchemaEntry.Type = 'CreateIDUser';
|
|
2520
|
+
break;
|
|
2521
|
+
case 'UpdateDate':
|
|
2522
|
+
tmpSchemaEntry.Type = 'UpdateDate';
|
|
2523
|
+
break;
|
|
2524
|
+
case 'UpdatingIDUser':
|
|
2525
|
+
tmpSchemaEntry.Type = 'UpdateIDUser';
|
|
2526
|
+
break;
|
|
2527
|
+
case 'Deleted':
|
|
2528
|
+
tmpSchemaEntry.Type = 'Deleted';
|
|
2529
|
+
break;
|
|
2530
|
+
case 'DeleteDate':
|
|
2531
|
+
tmpSchemaEntry.Type = 'DeleteDate';
|
|
2532
|
+
break;
|
|
2533
|
+
case 'DeletingIDUser':
|
|
2534
|
+
tmpSchemaEntry.Type = 'DeleteIDUser';
|
|
2535
|
+
break;
|
|
2536
|
+
}
|
|
2537
|
+
|
|
2538
|
+
tmpPackage.Schema.push(tmpSchemaEntry);
|
|
2539
|
+
}
|
|
2540
|
+
|
|
2541
|
+
return tmpPackage;
|
|
2542
|
+
}
|
|
2543
|
+
|
|
2544
|
+
/**
|
|
2545
|
+
* Build a Meadow-compatible schema array from parsed MicroDDL columns.
|
|
2546
|
+
*
|
|
2547
|
+
* FoxHound uses this schema to determine how to handle each column in
|
|
2548
|
+
* generated INSERT queries (e.g. AutoIdentity → NULL, CreateDate → NOW()).
|
|
2549
|
+
*
|
|
2550
|
+
* @param {Array} pColumns - Column definitions from _parseMicroDDL()
|
|
2551
|
+
* @returns {Array} Meadow schema array for FoxHound
|
|
2552
|
+
*/
|
|
2553
|
+
_buildMeadowSchemaFromColumns(pColumns)
|
|
2554
|
+
{
|
|
2555
|
+
let tmpSchema = [];
|
|
2556
|
+
for (let i = 0; i < pColumns.length; i++)
|
|
2557
|
+
{
|
|
2558
|
+
let tmpCol = pColumns[i];
|
|
2559
|
+
let tmpType = 'Default';
|
|
2560
|
+
|
|
2561
|
+
// Map MicroDDL types to FoxHound schema types
|
|
2562
|
+
if (tmpCol.MeadowType)
|
|
2563
|
+
{
|
|
2564
|
+
tmpType = tmpCol.MeadowType;
|
|
2565
|
+
}
|
|
2566
|
+
else if (tmpCol.DataType === 'ID')
|
|
2567
|
+
{
|
|
2568
|
+
tmpType = 'AutoIdentity';
|
|
2569
|
+
}
|
|
2570
|
+
else if (tmpCol.DataType === 'GUID')
|
|
2571
|
+
{
|
|
2572
|
+
tmpType = 'AutoGUID';
|
|
2573
|
+
}
|
|
2574
|
+
|
|
2575
|
+
tmpSchema.push(
|
|
2576
|
+
{
|
|
2577
|
+
Column: tmpCol.Column,
|
|
2578
|
+
Type: tmpType,
|
|
2579
|
+
Size: tmpCol.Size || 'Default'
|
|
2580
|
+
});
|
|
2581
|
+
}
|
|
2582
|
+
return tmpSchema;
|
|
2583
|
+
}
|
|
2584
|
+
|
|
2585
|
+
/**
|
|
2586
|
+
* Coerce parameter values for SQLite compatibility.
|
|
2587
|
+
*
|
|
2588
|
+
* better-sqlite3 does not accept JavaScript booleans; they must be
|
|
2589
|
+
* converted to 0/1. Single-element arrays are unwrapped to scalars.
|
|
2590
|
+
* This mirrors the coercion in Meadow-Provider-SQLite.
|
|
2591
|
+
*
|
|
2592
|
+
* @param {object} pParameters - FoxHound query parameters (mutated in place)
|
|
2593
|
+
*/
|
|
2594
|
+
_coerceSQLiteParameters(pParameters)
|
|
2595
|
+
{
|
|
2596
|
+
let tmpKeys = Object.keys(pParameters);
|
|
2597
|
+
for (let i = 0; i < tmpKeys.length; i++)
|
|
2598
|
+
{
|
|
2599
|
+
let tmpKey = tmpKeys[i];
|
|
2600
|
+
let tmpValue = pParameters[tmpKey];
|
|
2601
|
+
|
|
2602
|
+
if (typeof(tmpValue) === 'boolean')
|
|
2603
|
+
{
|
|
2604
|
+
pParameters[tmpKey] = tmpValue ? 1 : 0;
|
|
2605
|
+
}
|
|
2606
|
+
else if (Array.isArray(tmpValue) && tmpValue.length === 1)
|
|
2607
|
+
{
|
|
2608
|
+
pParameters[tmpKey] = tmpValue[0];
|
|
2609
|
+
}
|
|
2610
|
+
}
|
|
2611
|
+
}
|
|
2612
|
+
|
|
2613
|
+
	/**
	 * Deploy a projection schema to a target store.
	 *
	 * Flow: load the Dataset and StoreConnection (via Anticipate), parse the
	 * dataset's MicroDDL SchemaDefinition, connect to the target database,
	 * create the projection table, save a ProjectionStore record (Status
	 * 'Deployed' or 'Failed', with a timestamped log), and finally register
	 * a dynamic Meadow entity so REST upserts work against the new table.
	 *
	 * Only the FIRST table in the parsed schema is deployed.
	 *
	 * @param {number} pIDDataset - The dataset containing the SchemaDefinition.
	 * @param {number} pIDStoreConnection - The store connection to deploy to.
	 * @param {string} [pTargetTableName] - Override table name (derived from dataset name if empty).
	 * @param {function} fCallback - function(pError, pResult) where pResult has Success, TargetTableName, Log, ProjectionStore.
	 */
	deploySchema(pIDDataset, pIDStoreConnection, pTargetTableName, fCallback)
	{
		// Guard: every DAL entity touched below must already be initialized.
		if (!this.fable.DAL || !this.fable.DAL.Dataset ||
			!this.fable.DAL.StoreConnection || !this.fable.DAL.ProjectionStore)
		{
			return fCallback(new Error('DAL not initialized'));
		}

		let tmpAnticipate = this.fable.newAnticipate();
		let tmpDataset = null;      // populated by the first anticipate step
		let tmpConnection = null;   // populated by the second anticipate step

		// Load dataset
		tmpAnticipate.anticipate(
			(fStepCallback) =>
			{
				let tmpQuery = this.fable.DAL.Dataset.query.clone()
					.addFilter('IDDataset', pIDDataset);

				this.fable.DAL.Dataset.doRead(tmpQuery,
					(pError, pQuery, pRecord) =>
					{
						// Treat a read error or an empty/ID-less record the same way.
						if (pError || !pRecord || !pRecord.IDDataset)
						{
							return fStepCallback(new Error('Dataset not found'));
						}
						tmpDataset = pRecord;
						return fStepCallback();
					});
			});

		// Load store connection
		tmpAnticipate.anticipate(
			(fStepCallback) =>
			{
				let tmpQuery = this.fable.DAL.StoreConnection.query.clone()
					.addFilter('IDStoreConnection', pIDStoreConnection);

				this.fable.DAL.StoreConnection.doRead(tmpQuery,
					(pError, pQuery, pRecord) =>
					{
						if (pError || !pRecord || !pRecord.IDStoreConnection)
						{
							return fStepCallback(new Error('StoreConnection not found'));
						}
						tmpConnection = pRecord;
						return fStepCallback();
					});
			});

		tmpAnticipate.wait(
			(pError) =>
			{
				if (pError)
				{
					return fCallback(pError);
				}

				if (!tmpDataset.SchemaDefinition)
				{
					return fCallback(new Error('Dataset has no schema definition'));
				}

				// Derive the table name from the dataset name (non-alphanumerics
				// collapse to underscores) unless an override was passed in.
				let tmpTableName = pTargetTableName || tmpDataset.Name.replace(/[^a-zA-Z0-9_]/g, '_');

				let tmpParsedSchema;
				try
				{
					tmpParsedSchema = this._parseMicroDDL(tmpDataset.SchemaDefinition);
				}
				catch (pParseError)
				{
					return fCallback(new Error(`Schema parse error: ${pParseError.message}`));
				}

				if (!tmpParsedSchema || !tmpParsedSchema.Tables || Object.keys(tmpParsedSchema.Tables).length === 0)
				{
					return fCallback(new Error('Schema contains no tables'));
				}

				// Human-readable, timestamped deployment log; persisted on the
				// ProjectionStore record for both success and failure paths.
				let tmpLog = [];
				tmpLog.push(`[${new Date().toISOString()}] Starting deployment to ${tmpConnection.Type} connection "${tmpConnection.Name}"`);
				tmpLog.push(`[${new Date().toISOString()}] Target table: ${tmpTableName}`);

				this._getOrCreateConnection(tmpConnection,
					(pConnectError, tmpConnector) =>
					{
						if (pConnectError)
						{
							// Record the failure (Status 'Failed') before reporting it.
							tmpLog.push(`[${new Date().toISOString()}] Connection failed: ${pConnectError.message || pConnectError}`);
							this._saveProjectionStore(pIDDataset, pIDStoreConnection, tmpTableName, 'Failed', tmpLog.join('\n'),
								() =>
								{
									return fCallback(new Error(`Connection failed: ${pConnectError.message}`), { Log: tmpLog.join('\n') });
								});
							return;
						}

						tmpLog.push(`[${new Date().toISOString()}] Connected successfully`);

						// Only the first table in the parsed schema is deployed.
						let tmpFirstTableKey = Object.keys(tmpParsedSchema.Tables)[0];
						let tmpTableSchema = tmpParsedSchema.Tables[tmpFirstTableKey];
						tmpTableSchema.TableName = tmpTableName;

						// Add standard Meadow tracking columns if not present
						this._ensureTrackingColumns(tmpTableSchema.Columns);

						tmpLog.push(`[${new Date().toISOString()}] Creating table with ${tmpTableSchema.Columns.length} columns...`);

						tmpConnector.createTable(tmpTableSchema,
							(pCreateError) =>
							{
								if (pCreateError)
								{
									// As with connection failure: persist 'Failed' first, then call back.
									tmpLog.push(`[${new Date().toISOString()}] Table creation failed: ${pCreateError.message || pCreateError}`);
									this._saveProjectionStore(pIDDataset, pIDStoreConnection, tmpTableName, 'Failed', tmpLog.join('\n'),
										() =>
										{
											return fCallback(new Error(`Table creation failed: ${pCreateError.message}`), { Log: tmpLog.join('\n') });
										});
									return;
								}

								tmpLog.push(`[${new Date().toISOString()}] Table "${tmpTableName}" created successfully`);

								this._saveProjectionStore(pIDDataset, pIDStoreConnection, tmpTableName, 'Deployed', tmpLog.join('\n'),
									(pSaveError, pProjectionStore) =>
									{
										if (pProjectionStore && pProjectionStore.IDProjectionStore)
										{
											// Registration failure is a warning only — deployment still succeeds.
											this._registerProjectionEntity(pProjectionStore, tmpConnection, tmpParsedSchema, tmpConnector,
												(pRegError) =>
												{
													if (pRegError)
													{
														tmpLog.push(`[${new Date().toISOString()}] Entity registration warning: ${pRegError.message}`);
													}
													else
													{
														tmpLog.push(`[${new Date().toISOString()}] Meadow entity [${tmpTableName}] registered for REST upserts`);
													}
													return fCallback(null,
														{
															Success: true,
															TargetTableName: tmpTableName,
															ConnectionType: tmpConnection.Type,
															ConnectionName: tmpConnection.Name,
															Log: tmpLog.join('\n'),
															ProjectionStore: pProjectionStore
														});
												});
										}
										else
										{
											// No usable ProjectionStore record came back (e.g. save failed),
											// so skip entity registration but still report success.
											return fCallback(null,
												{
													Success: true,
													TargetTableName: tmpTableName,
													ConnectionType: tmpConnection.Type,
													ConnectionName: tmpConnection.Name,
													Log: tmpLog.join('\n'),
													ProjectionStore: pProjectionStore
												});
										}
									});
							});
					});

			});
	}
|
|
2795
|
+
|
|
2796
|
+
_parseMicroDDL(pDDL)
|
|
2797
|
+
{
|
|
2798
|
+
let tmpLines = pDDL.split('\n');
|
|
2799
|
+
let tmpTables = {};
|
|
2800
|
+
let tmpCurrentTable = null;
|
|
2801
|
+
|
|
2802
|
+
let tmpSymbolMap =
|
|
2803
|
+
{
|
|
2804
|
+
'@': { DataType: 'ID', MeadowType: 'AutoIdentity' },
|
|
2805
|
+
'%': { DataType: 'GUID', MeadowType: 'AutoGUID' },
|
|
2806
|
+
'$': { DataType: 'String', MeadowType: 'String' },
|
|
2807
|
+
'*': { DataType: 'Text', MeadowType: 'String' },
|
|
2808
|
+
'#': { DataType: 'Numeric', MeadowType: 'Numeric' },
|
|
2809
|
+
'.': { DataType: 'Decimal', MeadowType: 'Numeric' },
|
|
2810
|
+
'&': { DataType: 'DateTime', MeadowType: 'String' },
|
|
2811
|
+
'^': { DataType: 'Boolean', MeadowType: 'Deleted' }
|
|
2812
|
+
};
|
|
2813
|
+
|
|
2814
|
+
for (let i = 0; i < tmpLines.length; i++)
|
|
2815
|
+
{
|
|
2816
|
+
let tmpLine = tmpLines[i].trim();
|
|
2817
|
+
|
|
2818
|
+
// Skip blank lines and comments
|
|
2819
|
+
if (!tmpLine || tmpLine.startsWith('//') || tmpLine.startsWith('--'))
|
|
2820
|
+
{
|
|
2821
|
+
continue;
|
|
2822
|
+
}
|
|
2823
|
+
|
|
2824
|
+
// Skip join references
|
|
2825
|
+
if (tmpLine.startsWith('->'))
|
|
2826
|
+
{
|
|
2827
|
+
continue;
|
|
2828
|
+
}
|
|
2829
|
+
|
|
2830
|
+
// Table declaration
|
|
2831
|
+
if (tmpLine.startsWith('!'))
|
|
2832
|
+
{
|
|
2833
|
+
let tmpTableName = tmpLine.substring(1).trim();
|
|
2834
|
+
tmpCurrentTable =
|
|
2835
|
+
{
|
|
2836
|
+
TableName: tmpTableName,
|
|
2837
|
+
Columns: []
|
|
2838
|
+
};
|
|
2839
|
+
tmpTables[tmpTableName] = tmpCurrentTable;
|
|
2840
|
+
continue;
|
|
2841
|
+
}
|
|
2842
|
+
|
|
2843
|
+
// Column definition
|
|
2844
|
+
let tmpSymbol = tmpLine.charAt(0);
|
|
2845
|
+
if (tmpSymbolMap.hasOwnProperty(tmpSymbol) && tmpCurrentTable)
|
|
2846
|
+
{
|
|
2847
|
+
let tmpRest = tmpLine.substring(1).trim();
|
|
2848
|
+
let tmpParts = tmpRest.split(/\s+/);
|
|
2849
|
+
let tmpColumnName = tmpParts[0] || '';
|
|
2850
|
+
let tmpSize = tmpParts[1] || 'Default';
|
|
2851
|
+
|
|
2852
|
+
tmpCurrentTable.Columns.push(
|
|
2853
|
+
{
|
|
2854
|
+
Column: tmpColumnName,
|
|
2855
|
+
DataType: tmpSymbolMap[tmpSymbol].DataType,
|
|
2856
|
+
Size: tmpSize
|
|
2857
|
+
});
|
|
2858
|
+
}
|
|
2859
|
+
}
|
|
2860
|
+
|
|
2861
|
+
return { Tables: tmpTables };
|
|
2862
|
+
}
|
|
2863
|
+
|
|
2864
|
+
/**
|
|
2865
|
+
* Save or update a ProjectionStore record.
|
|
2866
|
+
*
|
|
2867
|
+
* @param {number} pIDDataset - The dataset ID
|
|
2868
|
+
* @param {number} pIDStoreConnection - The store connection ID
|
|
2869
|
+
* @param {string} pTargetTableName - Target table name
|
|
2870
|
+
* @param {string} pStatus - Status string (Pending, Deployed, Failed)
|
|
2871
|
+
* @param {string} pLog - Deployment log text
|
|
2872
|
+
* @param {function} fCallback - Callback(pError, pRecord)
|
|
2873
|
+
*/
|
|
2874
|
+
_saveProjectionStore(pIDDataset, pIDStoreConnection, pTargetTableName, pStatus, pLog, fCallback)
|
|
2875
|
+
{
|
|
2876
|
+
if (!this.fable.DAL || !this.fable.DAL.ProjectionStore)
|
|
2877
|
+
{
|
|
2878
|
+
return fCallback(new Error('ProjectionStore DAL not initialized'));
|
|
2879
|
+
}
|
|
2880
|
+
|
|
2881
|
+
// Check if a ProjectionStore record already exists for this dataset + connection
|
|
2882
|
+
let tmpQuery = this.fable.DAL.ProjectionStore.query.clone()
|
|
2883
|
+
.addFilter('IDDataset', pIDDataset)
|
|
2884
|
+
.addFilter('IDStoreConnection', pIDStoreConnection)
|
|
2885
|
+
.addFilter('Deleted', 0);
|
|
2886
|
+
|
|
2887
|
+
this.fable.DAL.ProjectionStore.doReads(tmpQuery,
|
|
2888
|
+
(pError, pQuery, pRecords) =>
|
|
2889
|
+
{
|
|
2890
|
+
if (!pError && pRecords && pRecords.length > 0)
|
|
2891
|
+
{
|
|
2892
|
+
// Update existing record
|
|
2893
|
+
let tmpExisting = pRecords[0];
|
|
2894
|
+
tmpExisting.TargetTableName = pTargetTableName;
|
|
2895
|
+
tmpExisting.Status = pStatus;
|
|
2896
|
+
tmpExisting.DeployLog = pLog;
|
|
2897
|
+
if (pStatus === 'Deployed')
|
|
2898
|
+
{
|
|
2899
|
+
tmpExisting.DeployedAt = new Date().toISOString();
|
|
2900
|
+
}
|
|
2901
|
+
|
|
2902
|
+
let tmpUpdateQuery = this.fable.DAL.ProjectionStore.query.clone()
|
|
2903
|
+
.addRecord(tmpExisting);
|
|
2904
|
+
|
|
2905
|
+
this.fable.DAL.ProjectionStore.doUpdate(tmpUpdateQuery,
|
|
2906
|
+
(pUpdateError, pUpdateQuery, pUpdateReadQuery, pUpdatedRecord) =>
|
|
2907
|
+
{
|
|
2908
|
+
return fCallback(pUpdateError, pUpdatedRecord);
|
|
2909
|
+
});
|
|
2910
|
+
}
|
|
2911
|
+
else
|
|
2912
|
+
{
|
|
2913
|
+
// Create new record
|
|
2914
|
+
let tmpNewRecord =
|
|
2915
|
+
{
|
|
2916
|
+
IDDataset: pIDDataset,
|
|
2917
|
+
IDStoreConnection: pIDStoreConnection,
|
|
2918
|
+
TargetTableName: pTargetTableName,
|
|
2919
|
+
Status: pStatus,
|
|
2920
|
+
DeployLog: pLog
|
|
2921
|
+
};
|
|
2922
|
+
|
|
2923
|
+
if (pStatus === 'Deployed')
|
|
2924
|
+
{
|
|
2925
|
+
tmpNewRecord.DeployedAt = new Date().toISOString();
|
|
2926
|
+
}
|
|
2927
|
+
|
|
2928
|
+
let tmpCreateQuery = this.fable.DAL.ProjectionStore.query.clone()
|
|
2929
|
+
.setIDUser(0)
|
|
2930
|
+
.addRecord(tmpNewRecord);
|
|
2931
|
+
|
|
2932
|
+
this.fable.DAL.ProjectionStore.doCreate(tmpCreateQuery,
|
|
2933
|
+
(pCreateError, pCreateQuery, pCreateReadQuery, pCreatedRecord) =>
|
|
2934
|
+
{
|
|
2935
|
+
return fCallback(pCreateError, pCreatedRecord);
|
|
2936
|
+
});
|
|
2937
|
+
}
|
|
2938
|
+
});
|
|
2939
|
+
}
|
|
2940
|
+
|
|
2941
|
+
// ======================================================================
|
|
2942
|
+
// Dynamic Meadow Entity Registration for Projection Tables
|
|
2943
|
+
// ======================================================================
|
|
2944
|
+
|
|
2945
|
+
/**
|
|
2946
|
+
* Get or create a named connection for a StoreConnection record via the connection manager.
|
|
2947
|
+
*
|
|
2948
|
+
* @param {object} pStoreConnection - The StoreConnection record (must have IDStoreConnection, Config, Type)
|
|
2949
|
+
* @param {function} fCallback - Callback(pError, pConnectorInstance)
|
|
2950
|
+
*/
|
|
2951
|
+
_getOrCreateConnection(pStoreConnection, fCallback)
|
|
2952
|
+
{
|
|
2953
|
+
let tmpConnectionName = `store-${pStoreConnection.IDStoreConnection}`;
|
|
2954
|
+
|
|
2955
|
+
let tmpExisting = this.fable.MeadowConnectionManager.getConnection(tmpConnectionName);
|
|
2956
|
+
if (tmpExisting && tmpExisting.instance)
|
|
2957
|
+
{
|
|
2958
|
+
return fCallback(null, tmpExisting.instance);
|
|
2959
|
+
}
|
|
2960
|
+
|
|
2961
|
+
let tmpConfig = {};
|
|
2962
|
+
try { tmpConfig = JSON.parse(pStoreConnection.Config || '{}'); }
|
|
2963
|
+
catch (e) { /* ignore */ }
|
|
2964
|
+
|
|
2965
|
+
let tmpConnConfig = Object.assign({}, tmpConfig, { Type: pStoreConnection.Type });
|
|
2966
|
+
|
|
2967
|
+
this.fable.MeadowConnectionManager.connect(tmpConnectionName, tmpConnConfig,
|
|
2968
|
+
(pError, pConnection) =>
|
|
2969
|
+
{
|
|
2970
|
+
if (pError)
|
|
2971
|
+
{
|
|
2972
|
+
return fCallback(new Error(`Connection failed for "${pStoreConnection.Name}": ${pError.message}`));
|
|
2973
|
+
}
|
|
2974
|
+
return fCallback(null, pConnection.instance);
|
|
2975
|
+
});
|
|
2976
|
+
}
|
|
2977
|
+
|
|
2978
|
+
/**
|
|
2979
|
+
* Map connection type to the Fable singleton property name used by Meadow providers.
|
|
2980
|
+
*/
|
|
2981
|
+
_getProviderPropertyName(pConnectionType)
|
|
2982
|
+
{
|
|
2983
|
+
let tmpMap =
|
|
2984
|
+
{
|
|
2985
|
+
'SQLite': 'MeadowSQLiteProvider',
|
|
2986
|
+
'MySQL': 'MeadowMySQLProvider',
|
|
2987
|
+
'MSSQL': 'MeadowMSSQLProvider',
|
|
2988
|
+
'PostgreSQL': 'MeadowPostgreSQLProvider'
|
|
2989
|
+
};
|
|
2990
|
+
return tmpMap[pConnectionType] || false;
|
|
2991
|
+
}
|
|
2992
|
+
|
|
2993
|
+
/**
|
|
2994
|
+
* Dynamically register a Meadow entity + endpoints for a projection table.
|
|
2995
|
+
*
|
|
2996
|
+
* Creates a proxy Fable that routes DB queries to the external connection,
|
|
2997
|
+
* then creates DAL + MeadowEndpoints so the IntegrationAdapter can upsert
|
|
2998
|
+
* records through our own REST API.
|
|
2999
|
+
*
|
|
3000
|
+
* @param {object} pProjectionStore - The ProjectionStore record
|
|
3001
|
+
* @param {object} pConnection - The StoreConnection record
|
|
3002
|
+
* @param {object} pParsedSchema - Output of _parseMicroDDL()
|
|
3003
|
+
* @param {object} pConnector - An already-connected meadow-connection instance (optional)
|
|
3004
|
+
* @param {function} fCallback - Callback(pError)
|
|
3005
|
+
*/
|
|
3006
|
+
_registerProjectionEntity(pProjectionStore, pConnection, pParsedSchema, pConnector, fCallback)
|
|
3007
|
+
{
|
|
3008
|
+
let tmpEntityName = pProjectionStore.TargetTableName;
|
|
3009
|
+
|
|
3010
|
+
// If already registered, skip
|
|
3011
|
+
if (this._ProjectionEntities[tmpEntityName])
|
|
3012
|
+
{
|
|
3013
|
+
this.fable.log.info(`Projection entity [${tmpEntityName}] already registered; skipping.`);
|
|
3014
|
+
return fCallback();
|
|
3015
|
+
}
|
|
3016
|
+
|
|
3017
|
+
// Build a complete Meadow package object from the parsed schema,
|
|
3018
|
+
// following the same pattern Stricture uses to generate Meadow models.
|
|
3019
|
+
let tmpFirstTableKey = Object.keys(pParsedSchema.Tables)[0];
|
|
3020
|
+
let tmpColumns = pParsedSchema.Tables[tmpFirstTableKey].Columns;
|
|
3021
|
+
let tmpPackageObject = this._buildMeadowPackageObject(tmpEntityName, tmpColumns);
|
|
3022
|
+
|
|
3023
|
+
// Parse connection configuration for the DB file path
|
|
3024
|
+
let tmpConnectionConfig = {};
|
|
3025
|
+
try { tmpConnectionConfig = JSON.parse(pConnection.Config || '{}'); }
|
|
3026
|
+
catch (e) { /* ignore */ }
|
|
3027
|
+
|
|
3028
|
+
let tmpProviderProperty = this._getProviderPropertyName(pConnection.Type);
|
|
3029
|
+
if (!tmpProviderProperty)
|
|
3030
|
+
{
|
|
3031
|
+
return fCallback(new Error(`Unsupported connection type for entity registration: ${pConnection.Type}`));
|
|
3032
|
+
}
|
|
3033
|
+
|
|
3034
|
+
// Create an isolated Fable instance for this projection entity.
|
|
3035
|
+
// Each projection gets its own Fable+Meadow+Provider stack so the
|
|
3036
|
+
// DB provider points at the correct external database file.
|
|
3037
|
+
let tmpFable = new libFable(
|
|
3038
|
+
{
|
|
3039
|
+
LogLevel: this.fable.settings.LogLevel,
|
|
3040
|
+
Product: `Projection-${tmpEntityName}`,
|
|
3041
|
+
SQLite: { SQLiteFilePath: tmpConnectionConfig.SQLiteFilePath }
|
|
3042
|
+
});
|
|
3043
|
+
|
|
3044
|
+
let tmpFinishRegistration = (pConnectorInstance) =>
|
|
3045
|
+
{
|
|
3046
|
+
// Register the connector as this Fable's provider
|
|
3047
|
+
tmpFable[tmpProviderProperty] = pConnectorInstance;
|
|
3048
|
+
|
|
3049
|
+
// Create Meadow DAL on the isolated Fable
|
|
3050
|
+
let tmpMeadow = libMeadow.new(tmpFable);
|
|
3051
|
+
let tmpDAL = tmpMeadow.loadFromPackageObject(tmpPackageObject);
|
|
3052
|
+
tmpDAL.setProvider(pConnection.Type);
|
|
3053
|
+
|
|
3054
|
+
// Create MeadowEndpoints and wire to the SHARED OratorServiceServer
|
|
3055
|
+
let tmpEndpoints = libMeadowEndpoints.new(tmpDAL);
|
|
3056
|
+
tmpEndpoints.connectRoutes(this.fable.OratorServiceServer);
|
|
3057
|
+
|
|
3058
|
+
// Store the registration
|
|
3059
|
+
this._ProjectionEntities[tmpEntityName] =
|
|
3060
|
+
{
|
|
3061
|
+
Fable: tmpFable,
|
|
3062
|
+
DAL: tmpDAL,
|
|
3063
|
+
Endpoints: tmpEndpoints,
|
|
3064
|
+
Connector: pConnectorInstance,
|
|
3065
|
+
IDProjectionStore: pProjectionStore.IDProjectionStore
|
|
3066
|
+
};
|
|
3067
|
+
|
|
3068
|
+
// Also publish to main fable DAL/MeadowEndpoints maps
|
|
3069
|
+
if (this.fable.DAL)
|
|
3070
|
+
{
|
|
3071
|
+
this.fable.DAL[tmpEntityName] = tmpDAL;
|
|
3072
|
+
}
|
|
3073
|
+
if (this.fable.MeadowEndpoints)
|
|
3074
|
+
{
|
|
3075
|
+
this.fable.MeadowEndpoints[tmpEntityName] = tmpEndpoints;
|
|
3076
|
+
}
|
|
3077
|
+
|
|
3078
|
+
this.fable.log.info(`Projection entity [${tmpEntityName}] registered (${pConnection.Type}, isolated Fable -> ${tmpConnectionConfig.SQLiteFilePath || 'default'}).`);
|
|
3079
|
+
return fCallback();
|
|
3080
|
+
};
|
|
3081
|
+
|
|
3082
|
+
if (pConnector)
|
|
3083
|
+
{
|
|
3084
|
+
// Reuse an already-connected connector
|
|
3085
|
+
return tmpFinishRegistration(pConnector);
|
|
3086
|
+
}
|
|
3087
|
+
|
|
3088
|
+
// Get or create a connection via the connection manager
|
|
3089
|
+
this._getOrCreateConnection(pConnection,
|
|
3090
|
+
(pConnectError, tmpConnectorInstance) =>
|
|
3091
|
+
{
|
|
3092
|
+
if (pConnectError)
|
|
3093
|
+
{
|
|
3094
|
+
return fCallback(new Error(`Connection failed for entity [${tmpEntityName}]: ${pConnectError.message}`));
|
|
3095
|
+
}
|
|
3096
|
+
return tmpFinishRegistration(tmpConnectorInstance);
|
|
3097
|
+
});
|
|
3098
|
+
}
|
|
3099
|
+
|
|
3100
|
+
/**
|
|
3101
|
+
* Re-register Meadow entities for all deployed ProjectionStores on startup.
|
|
3102
|
+
*
|
|
3103
|
+
* This ensures projection entities survive server restarts.
|
|
3104
|
+
*
|
|
3105
|
+
* @param {function} fCallback - Callback(pError)
|
|
3106
|
+
*/
|
|
3107
|
+
_warmUpProjectionEntities(fCallback)
|
|
3108
|
+
{
|
|
3109
|
+
if (!this.fable.DAL || !this.fable.DAL.ProjectionStore)
|
|
3110
|
+
{
|
|
3111
|
+
this.fable.log.warn('ProjectionStore DAL not available; skipping projection entity warm-up.');
|
|
3112
|
+
return fCallback();
|
|
3113
|
+
}
|
|
3114
|
+
|
|
3115
|
+
let tmpQuery = this.fable.DAL.ProjectionStore.query.clone()
|
|
3116
|
+
.addFilter('Status', 'Deployed')
|
|
3117
|
+
.addFilter('Deleted', 0);
|
|
3118
|
+
|
|
3119
|
+
this.fable.DAL.ProjectionStore.doReads(tmpQuery,
|
|
3120
|
+
(pError, pQuery, pRecords) =>
|
|
3121
|
+
{
|
|
3122
|
+
if (pError || !pRecords || pRecords.length === 0)
|
|
3123
|
+
{
|
|
3124
|
+
if (pError)
|
|
3125
|
+
{
|
|
3126
|
+
this.fable.log.warn(`Projection entity warm-up query error: ${pError.message}`);
|
|
3127
|
+
}
|
|
3128
|
+
else
|
|
3129
|
+
{
|
|
3130
|
+
this.fable.log.info('No deployed ProjectionStores found; warm-up complete.');
|
|
3131
|
+
}
|
|
3132
|
+
return fCallback();
|
|
3133
|
+
}
|
|
3134
|
+
|
|
3135
|
+
this.fable.log.info(`Warming up ${pRecords.length} deployed projection entit${pRecords.length === 1 ? 'y' : 'ies'}...`);
|
|
3136
|
+
|
|
3137
|
+
let tmpAnticipate = this.fable.newAnticipate();
|
|
3138
|
+
|
|
3139
|
+
for (let i = 0; i < pRecords.length; i++)
|
|
3140
|
+
{
|
|
3141
|
+
let tmpStore = pRecords[i];
|
|
3142
|
+
|
|
3143
|
+
tmpAnticipate.anticipate(
|
|
3144
|
+
(fStepCallback) =>
|
|
3145
|
+
{
|
|
3146
|
+
// Load the Dataset for the schema
|
|
3147
|
+
let tmpDatasetQuery = this.fable.DAL.Dataset.query.clone()
|
|
3148
|
+
.addFilter('IDDataset', tmpStore.IDDataset);
|
|
3149
|
+
|
|
3150
|
+
this.fable.DAL.Dataset.doRead(tmpDatasetQuery,
|
|
3151
|
+
(pDatasetError, pDatasetQuery, pDataset) =>
|
|
3152
|
+
{
|
|
3153
|
+
if (pDatasetError || !pDataset || !pDataset.IDDataset || !pDataset.SchemaDefinition)
|
|
3154
|
+
{
|
|
3155
|
+
this.fable.log.warn(`Warm-up: skipping store ${tmpStore.IDProjectionStore} — dataset not found or no schema.`);
|
|
3156
|
+
return fStepCallback();
|
|
3157
|
+
}
|
|
3158
|
+
|
|
3159
|
+
// Load the StoreConnection
|
|
3160
|
+
let tmpConnQuery = this.fable.DAL.StoreConnection.query.clone()
|
|
3161
|
+
.addFilter('IDStoreConnection', tmpStore.IDStoreConnection);
|
|
3162
|
+
|
|
3163
|
+
this.fable.DAL.StoreConnection.doRead(tmpConnQuery,
|
|
3164
|
+
(pConnError, pConnQuery, pConnection) =>
|
|
3165
|
+
{
|
|
3166
|
+
if (pConnError || !pConnection || !pConnection.IDStoreConnection)
|
|
3167
|
+
{
|
|
3168
|
+
this.fable.log.warn(`Warm-up: skipping store ${tmpStore.IDProjectionStore} — connection not found.`);
|
|
3169
|
+
return fStepCallback();
|
|
3170
|
+
}
|
|
3171
|
+
|
|
3172
|
+
let tmpParsedSchema;
|
|
3173
|
+
try
|
|
3174
|
+
{
|
|
3175
|
+
tmpParsedSchema = this._parseMicroDDL(pDataset.SchemaDefinition);
|
|
3176
|
+
}
|
|
3177
|
+
catch (pParseError)
|
|
3178
|
+
{
|
|
3179
|
+
this.fable.log.warn(`Warm-up: skipping store ${tmpStore.IDProjectionStore} — schema parse error: ${pParseError.message}`);
|
|
3180
|
+
return fStepCallback();
|
|
3181
|
+
}
|
|
3182
|
+
|
|
3183
|
+
if (!tmpParsedSchema || !tmpParsedSchema.Tables || Object.keys(tmpParsedSchema.Tables).length === 0)
|
|
3184
|
+
{
|
|
3185
|
+
this.fable.log.warn(`Warm-up: skipping store ${tmpStore.IDProjectionStore} — empty schema.`);
|
|
3186
|
+
return fStepCallback();
|
|
3187
|
+
}
|
|
3188
|
+
|
|
3189
|
+
this._registerProjectionEntity(tmpStore, pConnection, tmpParsedSchema, null,
|
|
3190
|
+
(pRegError) =>
|
|
3191
|
+
{
|
|
3192
|
+
if (pRegError)
|
|
3193
|
+
{
|
|
3194
|
+
this.fable.log.warn(`Warm-up: failed to register entity for store ${tmpStore.IDProjectionStore}: ${pRegError.message}`);
|
|
3195
|
+
}
|
|
3196
|
+
return fStepCallback();
|
|
3197
|
+
});
|
|
3198
|
+
});
|
|
3199
|
+
});
|
|
3200
|
+
});
|
|
3201
|
+
}
|
|
3202
|
+
|
|
3203
|
+
tmpAnticipate.wait(
|
|
3204
|
+
(pWaitError) =>
|
|
3205
|
+
{
|
|
3206
|
+
this.fable.log.info('Projection entity warm-up complete.');
|
|
3207
|
+
return fCallback();
|
|
3208
|
+
});
|
|
3209
|
+
});
|
|
3210
|
+
}
|
|
3211
|
+
|
|
3212
|
+
// ======================================================================
|
|
3213
|
+
// Multi-Set Projection Pipeline Execution
|
|
3214
|
+
// ======================================================================
|
|
3215
|
+
|
|
3216
|
+
/**
|
|
3217
|
+
* Read records from an already-deployed projection store.
|
|
3218
|
+
*
|
|
3219
|
+
* Queries the projection entity's DAL and wraps each row as a
|
|
3220
|
+
* pseudo-source record with JSON Content for TabularTransform.
|
|
3221
|
+
*
|
|
3222
|
+
* @param {number} pIDProjectionStore - The ProjectionStore to read from
|
|
3223
|
+
* @param {number} pCap - Maximum records to read (0 = unlimited)
|
|
3224
|
+
* @param {function} fCallback - Callback(pError, pRecords)
|
|
3225
|
+
*/
|
|
3226
|
+
_readRecordsFromProjectionStore(pIDProjectionStore, pCap, fCallback)
|
|
3227
|
+
{
|
|
3228
|
+
if (!this.fable.DAL || !this.fable.DAL.ProjectionStore)
|
|
3229
|
+
{
|
|
3230
|
+
return fCallback(new Error('ProjectionStore DAL not initialized'));
|
|
3231
|
+
}
|
|
3232
|
+
|
|
3233
|
+
// Look up the ProjectionStore to find its TargetTableName
|
|
3234
|
+
let tmpQuery = this.fable.DAL.ProjectionStore.query.clone()
|
|
3235
|
+
.addFilter('IDProjectionStore', pIDProjectionStore);
|
|
3236
|
+
|
|
3237
|
+
this.fable.DAL.ProjectionStore.doRead(tmpQuery,
|
|
3238
|
+
(pError, pQuery, pStore) =>
|
|
3239
|
+
{
|
|
3240
|
+
if (pError || !pStore || !pStore.IDProjectionStore)
|
|
3241
|
+
{
|
|
3242
|
+
return fCallback(new Error(`ProjectionStore ${pIDProjectionStore} not found`));
|
|
3243
|
+
}
|
|
3244
|
+
|
|
3245
|
+
let tmpEntityName = pStore.TargetTableName;
|
|
3246
|
+
let tmpEntity = this._ProjectionEntities[tmpEntityName];
|
|
3247
|
+
|
|
3248
|
+
if (!tmpEntity || !tmpEntity.DAL)
|
|
3249
|
+
{
|
|
3250
|
+
return fCallback(new Error(`Projection entity [${tmpEntityName}] is not registered. Deploy the projection first.`));
|
|
3251
|
+
}
|
|
3252
|
+
|
|
3253
|
+
let tmpReadQuery = tmpEntity.DAL.query.clone()
|
|
3254
|
+
.addFilter('Deleted', 0);
|
|
3255
|
+
|
|
3256
|
+
if (pCap > 0)
|
|
3257
|
+
{
|
|
3258
|
+
tmpReadQuery.setCap(pCap);
|
|
3259
|
+
}
|
|
3260
|
+
|
|
3261
|
+
tmpEntity.DAL.doReads(tmpReadQuery,
|
|
3262
|
+
(pReadError, pReadQuery, pRows) =>
|
|
3263
|
+
{
|
|
3264
|
+
if (pReadError)
|
|
3265
|
+
{
|
|
3266
|
+
return fCallback(pReadError);
|
|
3267
|
+
}
|
|
3268
|
+
|
|
3269
|
+
// Wrap each row as a pseudo-record with Content JSON
|
|
3270
|
+
let tmpRecords = [];
|
|
3271
|
+
for (let i = 0; i < pRows.length; i++)
|
|
3272
|
+
{
|
|
3273
|
+
tmpRecords.push(
|
|
3274
|
+
{
|
|
3275
|
+
IDRecord: pRows[i][`ID${tmpEntityName}`] || i,
|
|
3276
|
+
GUIDRecord: pRows[i][`GUID${tmpEntityName}`] || `projstore-${pIDProjectionStore}-${i}`,
|
|
3277
|
+
Content: JSON.stringify(pRows[i])
|
|
3278
|
+
});
|
|
3279
|
+
}
|
|
3280
|
+
|
|
3281
|
+
return fCallback(null, tmpRecords);
|
|
3282
|
+
});
|
|
3283
|
+
});
|
|
3284
|
+
}
|
|
3285
|
+
|
|
3286
|
+
/**
 * Execute a single step in a multi-set projection pipeline.
 *
 * Queries source records (from Records table or a ProjectionStore),
 * builds a per-step comprehension via TabularTransform, then applies
 * the step's merge strategy against the accumulated comprehension.
 *
 * Error semantics: all failures (missing mapping, read errors, parse
 * errors, transform errors) are appended to pLog and the step degrades
 * gracefully; fCallback is always invoked without an error argument.
 * The trackers and pAccumulatedComprehension are mutated in place.
 *
 * @param {object} pStep - The pipeline step configuration
 * @param {object} pAccumulatedComprehension - The running comprehension dict { Entity: { GUID: record } }
 * @param {object} pReliabilityTracker - { GUID: { Weight, IDProjectionMapping } }
 * @param {object} pConfidenceTracker - { GUID: { Value, Confirmations } }
 * @param {Array} pCertaintyLogs - Array to push log entries into
 * @param {Array} pLog - Text log lines
 * @param {object} pConfig - Full pipeline config from _executeMultiSetImport
 * @param {object|null} pCertaintyContext - CertaintyAccumulator context (null if service unavailable)
 * @param {function} fCallback - Callback(pError)
 */
_executeMultiSetStep(pStep, pAccumulatedComprehension, pReliabilityTracker, pConfidenceTracker, pCertaintyLogs, pLog, pConfig, pCertaintyContext, fCallback)
{
	// parseInt yields NaN when IDProjectionMapping is absent/non-numeric;
	// NaN is falsy, so the mapping-load step below is simply skipped.
	let tmpIDProjectionMapping = parseInt(pStep.IDProjectionMapping, 10);
	let tmpMergeStrategy = pStep.MergeStrategy || 'WriteAll';
	let tmpLabel = pStep.Label || `Step-${pStep.Ordinal}`;
	let tmpInputType = pStep.InputType || 'Records';

	// Unknown strategies degrade to WriteAll rather than failing the step
	if (!MERGE_STRATEGIES.hasOwnProperty(tmpMergeStrategy))
	{
		pLog.push(`[${new Date().toISOString()}] Unknown merge strategy "${tmpMergeStrategy}" in step "${tmpLabel}"; defaulting to WriteAll`);
		tmpMergeStrategy = 'WriteAll';
	}

	pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}": strategy=${tmpMergeStrategy}, input=${tmpInputType}`);

	// These are populated by the sequential anticipate stages below and
	// consumed in the wait() closure.
	let tmpAnticipate = this.fable.newAnticipate();
	let tmpMapping = null;
	let tmpMappingConfig = null;
	let tmpSourceRecords = null;
	let tmpReliabilityWeight = 0;

	// Load the ProjectionMapping
	if (tmpIDProjectionMapping)
	{
		tmpAnticipate.anticipate(
			(fStepCallback) =>
			{
				let tmpQuery = this.fable.DAL.ProjectionMapping.query.clone()
					.addFilter('IDProjectionMapping', tmpIDProjectionMapping);

				this.fable.DAL.ProjectionMapping.doRead(tmpQuery,
					(pError, pQuery, pRecord) =>
					{
						if (pError || !pRecord || !pRecord.IDProjectionMapping)
						{
							pLog.push(`[${new Date().toISOString()}] Warning: Mapping ${tmpIDProjectionMapping} not found for step "${tmpLabel}"`);
							return fStepCallback();
						}
						tmpMapping = pRecord;
						try
						{
							tmpMappingConfig = JSON.parse(tmpMapping.MappingConfiguration || '{}');
						}
						catch (e)
						{
							// tmpMappingConfig stays null; the wait() closure will
							// skip the transform for this step.
							pLog.push(`[${new Date().toISOString()}] Warning: Invalid MappingConfiguration in mapping ${tmpIDProjectionMapping}`);
						}
						return fStepCallback();
					});
			});
	}

	// Look up ReliabilityWeight for this source (drives merge precedence)
	tmpAnticipate.anticipate(
		(fStepCallback) =>
		{
			if (!tmpMapping || !this.fable.DAL.DatasetSource)
			{
				return fStepCallback();
			}

			let tmpDSQuery = this.fable.DAL.DatasetSource.query.clone()
				.addFilter('IDDataset', tmpMapping.IDDataset)
				.addFilter('IDSource', tmpMapping.IDSource)
				.addFilter('Deleted', 0)
				.setCap(1);

			this.fable.DAL.DatasetSource.doReads(tmpDSQuery,
				(pError, pQuery, pRecords) =>
				{
					// Read failures are non-fatal: the weight simply stays 0.
					if (!pError && pRecords && pRecords.length > 0)
					{
						tmpReliabilityWeight = parseFloat(pRecords[0].ReliabilityWeight) || 0;
					}
					return fStepCallback();
				});
		});

	// Load source records
	tmpAnticipate.anticipate(
		(fStepCallback) =>
		{
			if (tmpInputType === 'ProjectionStore')
			{
				// Chained-pipeline input: read from a previously deployed store
				let tmpInputStoreID = parseInt(pStep.IDProjectionStore, 10) || 0;
				if (!tmpInputStoreID)
				{
					pLog.push(`[${new Date().toISOString()}] Warning: No IDProjectionStore for ProjectionStore input step "${tmpLabel}"`);
					tmpSourceRecords = [];
					return fStepCallback();
				}

				this._readRecordsFromProjectionStore(tmpInputStoreID, pConfig.Cap,
					(pError, pRecords) =>
					{
						if (pError)
						{
							pLog.push(`[${new Date().toISOString()}] Error reading from ProjectionStore ${tmpInputStoreID}: ${pError.message}`);
							tmpSourceRecords = [];
						}
						else
						{
							tmpSourceRecords = pRecords;
						}
						return fStepCallback();
					});
			}
			else
			{
				// Default: query Records by IDSource
				if (!tmpMapping)
				{
					tmpSourceRecords = [];
					return fStepCallback();
				}

				let tmpRecordQuery = this.fable.DAL.Record.query.clone()
					.addFilter('Deleted', 0)
					.addFilter('IDSource', tmpMapping.IDSource);

				if (pConfig.Cap > 0)
				{
					tmpRecordQuery.setCap(pConfig.Cap);
				}

				this.fable.DAL.Record.doReads(tmpRecordQuery,
					(pError, pQuery, pRecords) =>
					{
						if (pError)
						{
							pLog.push(`[${new Date().toISOString()}] Error querying records for step "${tmpLabel}": ${pError.message}`);
							tmpSourceRecords = [];
						}
						else
						{
							tmpSourceRecords = pRecords;
						}
						return fStepCallback();
					});
			}
		});

	tmpAnticipate.wait(
		(pError) =>
		{
			// All exits below call fCallback() with no error (best-effort step)
			if (pError)
			{
				pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}" load error: ${pError.message}`);
				return fCallback();
			}

			if (!tmpSourceRecords || tmpSourceRecords.length === 0)
			{
				pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}": 0 source records, skipping`);
				return fCallback();
			}

			pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}": ${tmpSourceRecords.length} source records loaded`);

			if (!tmpMappingConfig)
			{
				pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}": No mapping configuration, skipping transform`);
				return fCallback();
			}

			// Build per-step comprehension via TabularTransform
			let tmpTabularTransform = this.fable.services.TabularTransform;
			let tmpMappingOutcome = tmpTabularTransform.newMappingOutcomeObject();
			tmpMappingOutcome.ExplicitConfiguration = tmpMappingConfig;
			tmpMappingOutcome.ImplicitConfiguration = tmpMappingConfig;

			let tmpParseErrorCount = 0;

			// Transform every source record into the outcome's Comprehension;
			// unparseable Content is counted and skipped, transform errors are
			// logged but do not abort the loop.
			for (let i = 0; i < tmpSourceRecords.length; i++)
			{
				let tmpRecord = tmpSourceRecords[i];
				let tmpParsedContent;
				try
				{
					tmpParsedContent = JSON.parse(tmpRecord.Content);
				}
				catch (e)
				{
					tmpParseErrorCount++;
					continue;
				}

				// IDRecord is injected so the transform can reference it;
				// a Content key named IDRecord would override it (Object.assign order).
				let tmpWrapped = Object.assign({ IDRecord: tmpRecord.IDRecord }, tmpParsedContent);

				try
				{
					tmpTabularTransform.transformRecord(tmpWrapped, tmpMappingOutcome);
				}
				catch (e)
				{
					pLog.push(`[${new Date().toISOString()}] Transform error on record ${tmpRecord.IDRecord}: ${e.message}`);
				}
			}

			// Entity name: explicit config wins, else the first entity produced
			let tmpEntityName = tmpMappingConfig.Entity || Object.keys(tmpMappingOutcome.Comprehension)[0];
			if (!tmpEntityName)
			{
				pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}": No entity in comprehension, skipping merge`);
				return fCallback();
			}

			let tmpStepComprehension = tmpMappingOutcome.Comprehension[tmpEntityName] || {};
			let tmpStepGUIDs = Object.keys(tmpStepComprehension);

			pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}": ${tmpStepGUIDs.length} unique records (${tmpParseErrorCount} parse errors)`);

			// Ensure the entity key exists in the accumulated comprehension
			if (!pAccumulatedComprehension[tmpEntityName])
			{
				pAccumulatedComprehension[tmpEntityName] = {};
			}

			let tmpConfidenceConfig = (pConfig.PipelineConfig && pConfig.PipelineConfig.ConfidenceReinforcement) || {};
			let tmpStrategyFn = MERGE_STRATEGIES[tmpMergeStrategy];
			let tmpCreated = 0;
			let tmpSkipped = 0;
			let tmpMerged = 0;

			// Apply merge strategy for each GUID in this step's comprehension
			for (let i = 0; i < tmpStepGUIDs.length; i++)
			{
				let tmpGUID = tmpStepGUIDs[i];
				let tmpNewRecord = tmpStepComprehension[tmpGUID];
				let tmpExistingRecord = pAccumulatedComprehension[tmpEntityName][tmpGUID] || null;

				// Context handed to the merge strategy function so it can
				// compare reliability weights and consult confidence state.
				let tmpContext =
				{
					NewWeight: tmpReliabilityWeight,
					ExistingWeight: (pReliabilityTracker[tmpGUID] && pReliabilityTracker[tmpGUID].Weight) || 0,
					ConfidenceConfig: tmpConfidenceConfig,
					ConfidenceTracker: pConfidenceTracker
				};

				let tmpResult = tmpStrategyFn(tmpNewRecord, tmpExistingRecord, tmpContext);

				// Apply the result (strategy always returns the record to keep)
				pAccumulatedComprehension[tmpEntityName][tmpGUID] = tmpResult.Record;

				// Update reliability tracker only when this step's record won
				if (tmpResult.Action === 'Created' || tmpResult.Action === 'Overwritten_HigherReliability' || tmpResult.Action === 'Merged')
				{
					pReliabilityTracker[tmpGUID] = { Weight: tmpReliabilityWeight, IDProjectionMapping: tmpIDProjectionMapping };
				}

				// Accumulate field-level certainty evidence
				if (this.fable.CertaintyAccumulator && pCertaintyContext)
				{
					this.fable.CertaintyAccumulator.accumulateEvidence(
						pCertaintyContext, tmpGUID,
						tmpNewRecord, tmpExistingRecord,
						tmpResult.Action,
						{
							ReliabilityWeight: tmpReliabilityWeight,
							DatasetSize: tmpStepGUIDs.length,
							RecordCountInDataset: 1,
							StepLabel: tmpLabel,
							StepOrdinal: pStep.Ordinal,
						});
				}

				// Legacy confidence tracker (used when CertaintyAccumulator is not available)
				if (tmpConfidenceConfig.Enabled && !pCertaintyContext)
				{
					if (!pConfidenceTracker[tmpGUID])
					{
						pConfidenceTracker[tmpGUID] = { Value: tmpConfidenceConfig.BaseValue || 0.5, Confirmations: 0 };
					}
					if (tmpResult.Action === 'Merged_Reinforced' || tmpResult.Action === 'Merged')
					{
						// Each confirming merge nudges the value up, clamped at MaxValue
						pConfidenceTracker[tmpGUID].Confirmations++;
						let tmpIncrement = tmpConfidenceConfig.IncrementPerConfirmation || 0.1;
						let tmpMaxValue = tmpConfidenceConfig.MaxValue || 1.0;
						pConfidenceTracker[tmpGUID].Value = Math.min(
							pConfidenceTracker[tmpGUID].Value + tmpIncrement,
							tmpMaxValue);
					}
				}

				// Track action for the step summary line
				if (tmpResult.Action.startsWith('Skipped'))
				{
					tmpSkipped++;
				}
				else if (tmpResult.Action === 'Created')
				{
					tmpCreated++;
				}
				else
				{
					tmpMerged++;
				}

				// Record certainty log entry (0.5 is the fallback when neither
				// tracker has a value for this GUID)
				let tmpCertaintyValue = 0.5;
				if (pCertaintyContext && this.fable.CertaintyAccumulator)
				{
					// NOTE(review): assumes computeCertainty always returns an object
					// with a recordComposite number — confirm against the service.
					let tmpCert = this.fable.CertaintyAccumulator.computeCertainty(pCertaintyContext, tmpGUID);
					tmpCertaintyValue = tmpCert.recordComposite;
				}
				else if (pConfidenceTracker[tmpGUID])
				{
					tmpCertaintyValue = pConfidenceTracker[tmpGUID].Value;
				}

				pCertaintyLogs.push(
					{
						IDMultiSetProjection: pConfig.MultiSetProjection.IDMultiSetProjection,
						RecordGUID: tmpGUID,
						CertaintyValue: tmpCertaintyValue,
						SourceMappingLabel: tmpLabel,
						IDProjectionMapping: tmpIDProjectionMapping || 0,
						Action: tmpResult.Action,
						Details: JSON.stringify({ ReliabilityWeight: tmpReliabilityWeight, StepOrdinal: pStep.Ordinal })
					});
			}

			pLog.push(`[${new Date().toISOString()}] Step "${tmpLabel}" complete: ${tmpCreated} created, ${tmpMerged} merged, ${tmpSkipped} skipped`);
			return fCallback();
		});
}
|
|
3628
|
+
|
|
3629
|
+
/**
|
|
3630
|
+
* Execute a complete multi-set projection import pipeline.
|
|
3631
|
+
*
|
|
3632
|
+
* Processes pipeline steps sequentially, accumulating a comprehension,
|
|
3633
|
+
* then upserts the final result to the target store.
|
|
3634
|
+
*
|
|
3635
|
+
* @param {object} pConfig - Pipeline configuration object
|
|
3636
|
+
* @param {function} fCallback - Callback(pError, pResult)
|
|
3637
|
+
*/
|
|
3638
|
+
_executeMultiSetImport(pConfig, fCallback)
|
|
3639
|
+
{
|
|
3640
|
+
let tmpAccumulatedComprehension = {};
|
|
3641
|
+
let tmpReliabilityTracker = {};
|
|
3642
|
+
let tmpConfidenceTracker = {};
|
|
3643
|
+
let tmpCertaintyLogs = [];
|
|
3644
|
+
|
|
3645
|
+
// Initialize CertaintyAccumulator context if the service is available
|
|
3646
|
+
let tmpCertaintyContext = null;
|
|
3647
|
+
if (this.fable.CertaintyAccumulator)
|
|
3648
|
+
{
|
|
3649
|
+
let tmpCertaintyConfig = (pConfig.PipelineConfig && pConfig.PipelineConfig.CertaintyWeights) || {};
|
|
3650
|
+
tmpCertaintyContext = this.fable.CertaintyAccumulator.newAccumulationContext(tmpCertaintyConfig);
|
|
3651
|
+
}
|
|
3652
|
+
let tmpLog = [];
|
|
3653
|
+
|
|
3654
|
+
let tmpPipelineConfig = pConfig.PipelineConfig || {};
|
|
3655
|
+
let tmpSteps = tmpPipelineConfig.Steps || [];
|
|
3656
|
+
|
|
3657
|
+
// Sort steps by Ordinal
|
|
3658
|
+
tmpSteps.sort(
|
|
3659
|
+
(a, b) =>
|
|
3660
|
+
{
|
|
3661
|
+
return (a.Ordinal || 0) - (b.Ordinal || 0);
|
|
3662
|
+
});
|
|
3663
|
+
|
|
3664
|
+
if (tmpSteps.length === 0)
|
|
3665
|
+
{
|
|
3666
|
+
return fCallback(new Error('Pipeline has no steps'));
|
|
3667
|
+
}
|
|
3668
|
+
|
|
3669
|
+
tmpLog.push(`[${new Date().toISOString()}] Starting multi-set pipeline "${pConfig.MultiSetProjection.Name}" with ${tmpSteps.length} step(s)`);
|
|
3670
|
+
|
|
3671
|
+
// Process steps sequentially
|
|
3672
|
+
let tmpStepAnticipate = this.fable.newAnticipate();
|
|
3673
|
+
let tmpStepResults = [];
|
|
3674
|
+
|
|
3675
|
+
for (let i = 0; i < tmpSteps.length; i++)
|
|
3676
|
+
{
|
|
3677
|
+
let tmpStep = tmpSteps[i];
|
|
3678
|
+
|
|
3679
|
+
tmpStepAnticipate.anticipate(
|
|
3680
|
+
(fStepCallback) =>
|
|
3681
|
+
{
|
|
3682
|
+
let tmpLogLengthBefore = tmpLog.length;
|
|
3683
|
+
|
|
3684
|
+
this._executeMultiSetStep(
|
|
3685
|
+
tmpStep, tmpAccumulatedComprehension,
|
|
3686
|
+
tmpReliabilityTracker, tmpConfidenceTracker,
|
|
3687
|
+
tmpCertaintyLogs, tmpLog, pConfig, tmpCertaintyContext,
|
|
3688
|
+
(pStepError) =>
|
|
3689
|
+
{
|
|
3690
|
+
tmpStepResults.push(
|
|
3691
|
+
{
|
|
3692
|
+
Label: tmpStep.Label || `Step-${tmpStep.Ordinal}`,
|
|
3693
|
+
MergeStrategy: tmpStep.MergeStrategy || 'WriteAll',
|
|
3694
|
+
Error: pStepError ? pStepError.message : null,
|
|
3695
|
+
LogLines: tmpLog.slice(tmpLogLengthBefore)
|
|
3696
|
+
});
|
|
3697
|
+
return fStepCallback();
|
|
3698
|
+
});
|
|
3699
|
+
});
|
|
3700
|
+
}
|
|
3701
|
+
|
|
3702
|
+
tmpStepAnticipate.wait(
|
|
3703
|
+
(pStepError) =>
|
|
3704
|
+
{
|
|
3705
|
+
if (pStepError)
|
|
3706
|
+
{
|
|
3707
|
+
tmpLog.push(`[${new Date().toISOString()}] Pipeline step error: ${pStepError.message}`);
|
|
3708
|
+
}
|
|
3709
|
+
|
|
3710
|
+
// Count total unique records across all entities
|
|
3711
|
+
let tmpEntityNames = Object.keys(tmpAccumulatedComprehension);
|
|
3712
|
+
let tmpTotalRecords = 0;
|
|
3713
|
+
for (let i = 0; i < tmpEntityNames.length; i++)
|
|
3714
|
+
{
|
|
3715
|
+
tmpTotalRecords += Object.keys(tmpAccumulatedComprehension[tmpEntityNames[i]]).length;
|
|
3716
|
+
}
|
|
3717
|
+
|
|
3718
|
+
tmpLog.push(`[${new Date().toISOString()}] Pipeline comprehension complete: ${tmpTotalRecords} total records across ${tmpEntityNames.length} entity/entities`);
|
|
3719
|
+
|
|
3720
|
+
if (tmpTotalRecords === 0)
|
|
3721
|
+
{
|
|
3722
|
+
tmpLog.push(`[${new Date().toISOString()}] No records to upsert`);
|
|
3723
|
+
|
|
3724
|
+
// Still write certainty logs if any
|
|
3725
|
+
this._writeCertaintyLogs(tmpCertaintyLogs, tmpLog,
|
|
3726
|
+
() =>
|
|
3727
|
+
{
|
|
3728
|
+
return fCallback(null,
|
|
3729
|
+
{
|
|
3730
|
+
Success: true,
|
|
3731
|
+
RecordsTotal: 0,
|
|
3732
|
+
RecordsUpserted: 0,
|
|
3733
|
+
PipelineStepResults: tmpStepResults,
|
|
3734
|
+
Log: tmpLog.join('\n')
|
|
3735
|
+
});
|
|
3736
|
+
});
|
|
3737
|
+
return;
|
|
3738
|
+
}
|
|
3739
|
+
|
|
3740
|
+
// Stage comprehension if requested
|
|
3741
|
+
let tmpStagingFile = false;
|
|
3742
|
+
if (pConfig.StageComprehension)
|
|
3743
|
+
{
|
|
3744
|
+
try
|
|
3745
|
+
{
|
|
3746
|
+
let tmpStagingDir = libPath.join(process.cwd(), 'data', 'staging');
|
|
3747
|
+
if (!libFS.existsSync(tmpStagingDir))
|
|
3748
|
+
{
|
|
3749
|
+
libFS.mkdirSync(tmpStagingDir, { recursive: true });
|
|
3750
|
+
}
|
|
3751
|
+
let tmpTimestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
3752
|
+
let tmpFilename = `multi-comprehension-${pConfig.IDDataset}-${pConfig.MultiSetProjection.IDMultiSetProjection}-${tmpTimestamp}.json`;
|
|
3753
|
+
let tmpFilePath = libPath.join(tmpStagingDir, tmpFilename);
|
|
3754
|
+
let tmpStagingData =
|
|
3755
|
+
{
|
|
3756
|
+
IDDataset: pConfig.IDDataset,
|
|
3757
|
+
IDMultiSetProjection: pConfig.MultiSetProjection.IDMultiSetProjection,
|
|
3758
|
+
Timestamp: new Date().toISOString(),
|
|
3759
|
+
TotalRecords: tmpTotalRecords,
|
|
3760
|
+
StepCount: tmpSteps.length,
|
|
3761
|
+
ReliabilityTracker: tmpReliabilityTracker,
|
|
3762
|
+
ConfidenceTracker: tmpConfidenceTracker,
|
|
3763
|
+
Comprehension: tmpAccumulatedComprehension
|
|
3764
|
+
};
|
|
3765
|
+
libFS.writeFileSync(tmpFilePath, JSON.stringify(tmpStagingData, null, '\t'));
|
|
3766
|
+
tmpStagingFile = tmpFilename;
|
|
3767
|
+
tmpLog.push(`[${new Date().toISOString()}] Comprehension staged to: ${tmpFilename}`);
|
|
3768
|
+
}
|
|
3769
|
+
catch (pStagingError)
|
|
3770
|
+
{
|
|
3771
|
+
tmpLog.push(`[${new Date().toISOString()}] Staging error: ${pStagingError.message}`);
|
|
3772
|
+
}
|
|
3773
|
+
}
|
|
3774
|
+
|
|
3775
|
+
// Phase 3: Upsert to target store via IntegrationAdapter
|
|
3776
|
+
let tmpTargetEntityName = pConfig.ProjectionStore.TargetTableName;
|
|
3777
|
+
|
|
3778
|
+
let tmpDoImport = () =>
|
|
3779
|
+
{
|
|
3780
|
+
let tmpPort = this.fable.settings.APIServerPort || 8080;
|
|
3781
|
+
let tmpServerURL = `http://localhost:${tmpPort}/1.0/`;
|
|
3782
|
+
|
|
3783
|
+
let tmpRestClient = this.fable.serviceManager.instantiateServiceProviderWithoutRegistration(
|
|
3784
|
+
'MeadowCloneRestClient',
|
|
3785
|
+
{
|
|
3786
|
+
ServerURL: tmpServerURL
|
|
3787
|
+
});
|
|
3788
|
+
|
|
3789
|
+
let tmpAdapter = this.fable.serviceManager.instantiateServiceProviderWithoutRegistration(
|
|
3790
|
+
'IntegrationAdapter',
|
|
3791
|
+
{
|
|
3792
|
+
Entity: tmpTargetEntityName,
|
|
3793
|
+
Client: tmpRestClient,
|
|
3794
|
+
AdapterSetGUIDMarshalPrefix: `MSET-${pConfig.IDDataset}`,
|
|
3795
|
+
PerformUpserts: true,
|
|
3796
|
+
PerformDeletes: false,
|
|
3797
|
+
SimpleMarshal: true
|
|
3798
|
+
});
|
|
3799
|
+
|
|
3800
|
+
// Feed accumulated comprehension records to the adapter
|
|
3801
|
+
let tmpGUIDField = `GUID${tmpTargetEntityName}`;
|
|
3802
|
+
let tmpEntityComp = tmpAccumulatedComprehension[tmpEntityNames[0]] || {};
|
|
3803
|
+
let tmpRecordGUIDs = Object.keys(tmpEntityComp);
|
|
3804
|
+
|
|
3805
|
+
for (let i = 0; i < tmpRecordGUIDs.length; i++)
|
|
3806
|
+
{
|
|
3807
|
+
let tmpGUID = tmpRecordGUIDs[i];
|
|
3808
|
+
let tmpCompRecord = tmpEntityComp[tmpGUID];
|
|
3809
|
+
if (!tmpCompRecord[tmpGUIDField])
|
|
3810
|
+
{
|
|
3811
|
+
tmpCompRecord[tmpGUIDField] = tmpGUID;
|
|
3812
|
+
}
|
|
3813
|
+
tmpAdapter.addSourceRecord(tmpCompRecord);
|
|
3814
|
+
}
|
|
3815
|
+
|
|
3816
|
+
tmpLog.push(`[${new Date().toISOString()}] Pushing ${tmpRecordGUIDs.length} records via IntegrationAdapter to ${tmpServerURL}${tmpTargetEntityName}/Upsert`);
|
|
3817
|
+
|
|
3818
|
+
tmpAdapter.marshalSourceRecords(
|
|
3819
|
+
(pMarshalError) =>
|
|
3820
|
+
{
|
|
3821
|
+
if (pMarshalError)
|
|
3822
|
+
{
|
|
3823
|
+
tmpLog.push(`[${new Date().toISOString()}] Marshal error: ${pMarshalError.message}`);
|
|
3824
|
+
return fCallback(pMarshalError,
|
|
3825
|
+
{
|
|
3826
|
+
Error: `Marshal error: ${pMarshalError.message}`,
|
|
3827
|
+
PipelineStepResults: tmpStepResults,
|
|
3828
|
+
StagingFile: tmpStagingFile,
|
|
3829
|
+
Log: tmpLog.join('\n')
|
|
3830
|
+
});
|
|
3831
|
+
}
|
|
3832
|
+
|
|
3833
|
+
let tmpMarshaledCount = Object.keys(tmpAdapter._MarshaledRecords).length;
|
|
3834
|
+
tmpLog.push(`[${new Date().toISOString()}] Marshaled ${tmpMarshaledCount} records; pushing to server...`);
|
|
3835
|
+
|
|
3836
|
+
tmpAdapter.pushRecordsToServer(
|
|
3837
|
+
(pPushError) =>
|
|
3838
|
+
{
|
|
3839
|
+
if (pPushError)
|
|
3840
|
+
{
|
|
3841
|
+
tmpLog.push(`[${new Date().toISOString()}] Push error: ${pPushError.message}`);
|
|
3842
|
+
}
|
|
3843
|
+
|
|
3844
|
+
tmpLog.push(`[${new Date().toISOString()}] Multi-set import complete: ${tmpTotalRecords} unique, ${tmpMarshaledCount} upserted`);
|
|
3845
|
+
|
|
3846
|
+
// Write certainty logs
|
|
3847
|
+
this._writeCertaintyLogs(tmpCertaintyLogs, tmpLog,
|
|
3848
|
+
() =>
|
|
3849
|
+
{
|
|
3850
|
+
return fCallback(pPushError ? pPushError : null,
|
|
3851
|
+
{
|
|
3852
|
+
Success: !pPushError,
|
|
3853
|
+
RecordsTotal: tmpTotalRecords,
|
|
3854
|
+
RecordsUpserted: tmpMarshaledCount,
|
|
3855
|
+
PipelineStepResults: tmpStepResults,
|
|
3856
|
+
StagingFile: tmpStagingFile,
|
|
3857
|
+
CertaintyLogCount: tmpCertaintyLogs.length,
|
|
3858
|
+
Log: tmpLog.join('\n')
|
|
3859
|
+
});
|
|
3860
|
+
});
|
|
3861
|
+
});
|
|
3862
|
+
});
|
|
3863
|
+
};
|
|
3864
|
+
|
|
3865
|
+
// Ensure the target entity is registered
|
|
3866
|
+
if (!this._ProjectionEntities[tmpTargetEntityName])
|
|
3867
|
+
{
|
|
3868
|
+
tmpLog.push(`[${new Date().toISOString()}] Registering Meadow entity [${tmpTargetEntityName}] for REST upserts...`);
|
|
3869
|
+
|
|
3870
|
+
let tmpParsedSchema = pConfig.ParsedSchema;
|
|
3871
|
+
if (!tmpParsedSchema || !tmpParsedSchema.Tables || Object.keys(tmpParsedSchema.Tables).length === 0)
|
|
3872
|
+
{
|
|
3873
|
+
tmpLog.push(`[${new Date().toISOString()}] No schema available for entity registration; attempting import anyway`);
|
|
3874
|
+
return tmpDoImport();
|
|
3875
|
+
}
|
|
3876
|
+
|
|
3877
|
+
this._registerProjectionEntity(pConfig.ProjectionStore, pConfig.Connection, tmpParsedSchema, null,
|
|
3878
|
+
(pRegError) =>
|
|
3879
|
+
{
|
|
3880
|
+
if (pRegError)
|
|
3881
|
+
{
|
|
3882
|
+
tmpLog.push(`[${new Date().toISOString()}] Entity registration failed: ${pRegError.message}`);
|
|
3883
|
+
return fCallback(pRegError,
|
|
3884
|
+
{
|
|
3885
|
+
Error: `Entity registration failed: ${pRegError.message}`,
|
|
3886
|
+
PipelineStepResults: tmpStepResults,
|
|
3887
|
+
Log: tmpLog.join('\n')
|
|
3888
|
+
});
|
|
3889
|
+
}
|
|
3890
|
+
tmpLog.push(`[${new Date().toISOString()}] Entity [${tmpTargetEntityName}] registered successfully`);
|
|
3891
|
+
return tmpDoImport();
|
|
3892
|
+
});
|
|
3893
|
+
}
|
|
3894
|
+
else
|
|
3895
|
+
{
|
|
3896
|
+
return tmpDoImport();
|
|
3897
|
+
}
|
|
3898
|
+
});
|
|
3899
|
+
}
|
|
3900
|
+
|
|
3901
|
+
/**
|
|
3902
|
+
* Write accumulated certainty log entries to the ProjectionCertaintyLog table.
|
|
3903
|
+
*
|
|
3904
|
+
* @param {Array} pCertaintyLogs - Array of log entry objects
|
|
3905
|
+
* @param {Array} pLog - Text log lines
|
|
3906
|
+
* @param {function} fCallback - Callback()
|
|
3907
|
+
*/
|
|
3908
|
+
_writeCertaintyLogs(pCertaintyLogs, pLog, fCallback)
|
|
3909
|
+
{
|
|
3910
|
+
if (!pCertaintyLogs || pCertaintyLogs.length === 0 ||
|
|
3911
|
+
!this.fable.DAL || !this.fable.DAL.ProjectionCertaintyLog)
|
|
3912
|
+
{
|
|
3913
|
+
return fCallback();
|
|
3914
|
+
}
|
|
3915
|
+
|
|
3916
|
+
pLog.push(`[${new Date().toISOString()}] Writing ${pCertaintyLogs.length} certainty log entries...`);
|
|
3917
|
+
|
|
3918
|
+
let tmpAnticipate = this.fable.newAnticipate();
|
|
3919
|
+
let tmpWritten = 0;
|
|
3920
|
+
let tmpErrored = 0;
|
|
3921
|
+
|
|
3922
|
+
for (let i = 0; i < pCertaintyLogs.length; i++)
|
|
3923
|
+
{
|
|
3924
|
+
let tmpEntry = pCertaintyLogs[i];
|
|
3925
|
+
|
|
3926
|
+
tmpAnticipate.anticipate(
|
|
3927
|
+
(fStepCallback) =>
|
|
3928
|
+
{
|
|
3929
|
+
let tmpCreateQuery = this.fable.DAL.ProjectionCertaintyLog.query.clone()
|
|
3930
|
+
.setIDUser(0)
|
|
3931
|
+
.addRecord(tmpEntry);
|
|
3932
|
+
|
|
3933
|
+
this.fable.DAL.ProjectionCertaintyLog.doCreate(tmpCreateQuery,
|
|
3934
|
+
(pError) =>
|
|
3935
|
+
{
|
|
3936
|
+
if (pError)
|
|
3937
|
+
{
|
|
3938
|
+
tmpErrored++;
|
|
3939
|
+
}
|
|
3940
|
+
else
|
|
3941
|
+
{
|
|
3942
|
+
tmpWritten++;
|
|
3943
|
+
}
|
|
3944
|
+
return fStepCallback();
|
|
3945
|
+
});
|
|
3946
|
+
});
|
|
3947
|
+
}
|
|
3948
|
+
|
|
3949
|
+
tmpAnticipate.wait(
|
|
3950
|
+
() =>
|
|
3951
|
+
{
|
|
3952
|
+
pLog.push(`[${new Date().toISOString()}] Certainty log: ${tmpWritten} written, ${tmpErrored} errored`);
|
|
3953
|
+
return fCallback();
|
|
3954
|
+
});
|
|
3955
|
+
}
|
|
3956
|
+
}
|
|
3957
|
+
|
|
3958
|
+
// Export the projection engine service class, decorated with the Fable
// service metadata (service type identifier and default configuration)
// that the service manager reads at registration time.
const _ExportedServiceClass = RetoldFactoProjectionEngine;
_ExportedServiceClass.serviceType = 'RetoldFactoProjectionEngine';
_ExportedServiceClass.default_configuration = defaultProjectionEngineOptions;
module.exports = _ExportedServiceClass;
|