retold-data-service 2.0.13 → 2.0.16
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between the listed package versions.
- package/.claude/launch.json +11 -0
- package/bin/retold-data-service-clone.js +286 -0
- package/package.json +18 -9
- package/source/Retold-Data-Service.js +275 -73
- package/source/services/Retold-Data-Service-ConnectionManager.js +277 -0
- package/source/services/Retold-Data-Service-MeadowEndpoints.js +217 -0
- package/source/services/Retold-Data-Service-ModelManager.js +335 -0
- package/source/services/data-cloner/DataCloner-Command-Connection.js +138 -0
- package/source/services/data-cloner/DataCloner-Command-Headless.js +357 -0
- package/source/services/data-cloner/DataCloner-Command-Schema.js +367 -0
- package/source/services/data-cloner/DataCloner-Command-Session.js +229 -0
- package/source/services/data-cloner/DataCloner-Command-Sync.js +491 -0
- package/source/services/data-cloner/DataCloner-Command-WebUI.js +40 -0
- package/source/services/data-cloner/DataCloner-ProviderRegistry.js +20 -0
- package/source/services/data-cloner/Retold-Data-Service-DataCloner.js +751 -0
- package/source/services/data-cloner/data-cloner-web.html +2706 -0
- package/source/services/integration-telemetry/IntegrationTelemetry-Command-Dashboard.js +60 -0
- package/source/services/integration-telemetry/IntegrationTelemetry-Command-Integrations.js +132 -0
- package/source/services/integration-telemetry/IntegrationTelemetry-Command-Runs.js +93 -0
- package/source/services/integration-telemetry/IntegrationTelemetry-StorageProvider-Base.js +116 -0
- package/source/services/integration-telemetry/IntegrationTelemetry-StorageProvider-Bibliograph.js +495 -0
- package/source/services/integration-telemetry/Retold-Data-Service-IntegrationTelemetry.js +224 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-CSVCheck.js +85 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-CSVTransform.js +180 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionIntersect.js +153 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionPush.js +190 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionToArray.js +113 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-ComprehensionToCSV.js +211 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-EntityFromTabularFolder.js +244 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-JSONArrayTransform.js +213 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-TSVCheck.js +80 -0
- package/source/services/meadow-integration/MeadowIntegration-Command-TSVTransform.js +166 -0
- package/source/services/meadow-integration/Retold-Data-Service-MeadowIntegration.js +113 -0
- package/source/services/migration-manager/MigrationManager-Command-Connections.js +220 -0
- package/source/services/migration-manager/MigrationManager-Command-DiffMigrate.js +169 -0
- package/source/services/migration-manager/MigrationManager-Command-Schemas.js +532 -0
- package/source/services/migration-manager/MigrationManager-Command-WebUI.js +123 -0
- package/source/services/migration-manager/Retold-Data-Service-MigrationManager.js +357 -0
- package/source/services/stricture/Retold-Data-Service-Stricture.js +303 -0
- package/source/services/stricture/Stricture-Command-Compile.js +39 -0
- package/source/services/stricture/Stricture-Command-Generate-AuthorizationChart.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-DictionaryCSV.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-LaTeX.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Markdown.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Meadow.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-ModelGraph.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-MySQL.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-MySQLMigrate.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-Pict.js +14 -0
- package/source/services/stricture/Stricture-Command-Generate-TestObjectContainers.js +14 -0
- package/test/RetoldDataService_tests.js +161 -1
- package/debug/data/books.csv +0 -10001
|
@@ -0,0 +1,357 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* DataCloner Headless Pipeline
|
|
3
|
+
*
|
|
4
|
+
* Runs the full clone pipeline non-interactively from a config object:
|
|
5
|
+
* connect DB → configure session → authenticate → fetch schema → deploy → sync.
|
|
6
|
+
*
|
|
7
|
+
* @param {Object} pDataClonerService - The RetoldDataServiceDataCloner instance
|
|
8
|
+
* @param {Object} pConfig - Parsed config from the JSON file
|
|
9
|
+
* @param {Object} pCLIOptions - CLI options { logPath, maxRecords, schemaPath, serverPort }
|
|
10
|
+
* @param {function} fCallback - (pError)
|
|
11
|
+
*/
|
|
12
|
+
module.exports = (pDataClonerService, pConfig, pCLIOptions, fCallback) =>
|
|
13
|
+
{
|
|
14
|
+
let tmpFable = pDataClonerService.fable;
|
|
15
|
+
let tmpCloneState = pDataClonerService.cloneState;
|
|
16
|
+
|
|
17
|
+
let libFs = require('fs');
|
|
18
|
+
let libPath = require('path');
|
|
19
|
+
|
|
20
|
+
pConfig = pDataClonerService.normalizeConfig(pConfig);
|
|
21
|
+
|
|
22
|
+
// CLI --schema overrides config SchemaPath
|
|
23
|
+
if (pCLIOptions && pCLIOptions.schemaPath)
|
|
24
|
+
{
|
|
25
|
+
pConfig.SchemaPath = pCLIOptions.schemaPath;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
let tmpMaxRecords = (pCLIOptions && pCLIOptions.maxRecords) || 0;
|
|
29
|
+
|
|
30
|
+
tmpFable.log.info('Data Cloner: Starting headless pipeline...');
|
|
31
|
+
|
|
32
|
+
let tmpHttp = require('http');
|
|
33
|
+
let tmpServerPort = (pCLIOptions && pCLIOptions.serverPort) || tmpFable.settings.APIServerPort || 8095;
|
|
34
|
+
let tmpBaseURL = `http://localhost:${tmpServerPort}`;
|
|
35
|
+
|
|
36
|
+
// Simple helper to POST JSON to our own server
|
|
37
|
+
let fPost = (pPath, pBody, fPostCallback) =>
|
|
38
|
+
{
|
|
39
|
+
let tmpPayload = JSON.stringify(pBody);
|
|
40
|
+
let tmpURL = new URL(tmpBaseURL + pPath);
|
|
41
|
+
let tmpOpts = (
|
|
42
|
+
{
|
|
43
|
+
hostname: tmpURL.hostname,
|
|
44
|
+
port: tmpURL.port,
|
|
45
|
+
path: tmpURL.pathname,
|
|
46
|
+
method: 'POST',
|
|
47
|
+
headers:
|
|
48
|
+
{
|
|
49
|
+
'Content-Type': 'application/json',
|
|
50
|
+
'Content-Length': Buffer.byteLength(tmpPayload)
|
|
51
|
+
}
|
|
52
|
+
});
|
|
53
|
+
|
|
54
|
+
let tmpReq = tmpHttp.request(tmpOpts,
|
|
55
|
+
(pRes) =>
|
|
56
|
+
{
|
|
57
|
+
let tmpChunks = [];
|
|
58
|
+
pRes.on('data', (pChunk) => tmpChunks.push(pChunk));
|
|
59
|
+
pRes.on('end', () =>
|
|
60
|
+
{
|
|
61
|
+
try
|
|
62
|
+
{
|
|
63
|
+
let tmpData = JSON.parse(Buffer.concat(tmpChunks).toString());
|
|
64
|
+
return fPostCallback(null, tmpData);
|
|
65
|
+
}
|
|
66
|
+
catch (pParseError)
|
|
67
|
+
{
|
|
68
|
+
return fPostCallback(pParseError);
|
|
69
|
+
}
|
|
70
|
+
});
|
|
71
|
+
});
|
|
72
|
+
tmpReq.on('error', fPostCallback);
|
|
73
|
+
tmpReq.write(tmpPayload);
|
|
74
|
+
tmpReq.end();
|
|
75
|
+
};
|
|
76
|
+
|
|
77
|
+
// Simple helper to GET JSON from our own server
|
|
78
|
+
let fGet = (pPath, fGetCallback) =>
|
|
79
|
+
{
|
|
80
|
+
tmpHttp.get(tmpBaseURL + pPath,
|
|
81
|
+
(pRes) =>
|
|
82
|
+
{
|
|
83
|
+
let tmpChunks = [];
|
|
84
|
+
pRes.on('data', (pChunk) => tmpChunks.push(pChunk));
|
|
85
|
+
pRes.on('end', () =>
|
|
86
|
+
{
|
|
87
|
+
try
|
|
88
|
+
{
|
|
89
|
+
let tmpData = JSON.parse(Buffer.concat(tmpChunks).toString());
|
|
90
|
+
return fGetCallback(null, tmpData);
|
|
91
|
+
}
|
|
92
|
+
catch (pParseError)
|
|
93
|
+
{
|
|
94
|
+
return fGetCallback(pParseError);
|
|
95
|
+
}
|
|
96
|
+
});
|
|
97
|
+
}).on('error', fGetCallback);
|
|
98
|
+
};
|
|
99
|
+
|
|
100
|
+
let tmpPrefix = pDataClonerService.routePrefix;
|
|
101
|
+
|
|
102
|
+
// Step 1: Connect local database
|
|
103
|
+
let fStep1_ConnectDB = (fNext) =>
|
|
104
|
+
{
|
|
105
|
+
let tmpDB = pConfig.LocalDatabase;
|
|
106
|
+
if (!tmpDB || !tmpDB.Provider || tmpDB.Provider === 'SQLite')
|
|
107
|
+
{
|
|
108
|
+
tmpFable.log.info('Headless: Using default SQLite connection.');
|
|
109
|
+
return fNext();
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
tmpFable.log.info(`Headless: Connecting to ${tmpDB.Provider}...`);
|
|
113
|
+
fPost(`${tmpPrefix}/connection/configure`, { Provider: tmpDB.Provider, Config: tmpDB.Config },
|
|
114
|
+
(pError, pData) =>
|
|
115
|
+
{
|
|
116
|
+
if (pError || !pData || !pData.Success)
|
|
117
|
+
{
|
|
118
|
+
tmpFable.log.error(`Headless: DB connection failed: ${pError || (pData && pData.Error) || 'Unknown error'}`);
|
|
119
|
+
return fCallback(new Error('DB connection failed'));
|
|
120
|
+
}
|
|
121
|
+
tmpFable.log.info(`Headless: ${tmpDB.Provider} connected.`);
|
|
122
|
+
return fNext();
|
|
123
|
+
});
|
|
124
|
+
};
|
|
125
|
+
|
|
126
|
+
// Step 2: Configure remote session
|
|
127
|
+
let fStep2_ConfigureSession = (fNext) =>
|
|
128
|
+
{
|
|
129
|
+
let tmpSession = pConfig.RemoteSession;
|
|
130
|
+
if (!tmpSession || !tmpSession.ServerURL)
|
|
131
|
+
{
|
|
132
|
+
tmpFable.log.error('Headless: RemoteSession.ServerURL is required in config.');
|
|
133
|
+
return fCallback(new Error('RemoteSession.ServerURL is required'));
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
tmpFable.log.info(`Headless: Configuring session for ${tmpSession.ServerURL}...`);
|
|
137
|
+
fPost(`${tmpPrefix}/session/configure`, tmpSession,
|
|
138
|
+
(pError, pData) =>
|
|
139
|
+
{
|
|
140
|
+
if (pError || !pData || !pData.Success)
|
|
141
|
+
{
|
|
142
|
+
tmpFable.log.error(`Headless: Session configure failed: ${pError || (pData && pData.Error) || 'Unknown error'}`);
|
|
143
|
+
return fCallback(new Error('Session configure failed'));
|
|
144
|
+
}
|
|
145
|
+
tmpFable.log.info(`Headless: Session configured (domain: ${pData.DomainMatch}).`);
|
|
146
|
+
return fNext();
|
|
147
|
+
});
|
|
148
|
+
};
|
|
149
|
+
|
|
150
|
+
// Step 3: Authenticate
|
|
151
|
+
let fStep3_Authenticate = (fNext) =>
|
|
152
|
+
{
|
|
153
|
+
let tmpCreds = pConfig.Credentials;
|
|
154
|
+
if (!tmpCreds || !tmpCreds.UserName || !tmpCreds.Password)
|
|
155
|
+
{
|
|
156
|
+
tmpFable.log.error('Headless: Credentials.UserName and Credentials.Password are required in config.');
|
|
157
|
+
return fCallback(new Error('Credentials are required'));
|
|
158
|
+
}
|
|
159
|
+
|
|
160
|
+
tmpFable.log.info(`Headless: Authenticating as ${tmpCreds.UserName}...`);
|
|
161
|
+
fPost(`${tmpPrefix}/session/authenticate`, { UserName: tmpCreds.UserName, Password: tmpCreds.Password },
|
|
162
|
+
(pError, pData) =>
|
|
163
|
+
{
|
|
164
|
+
if (pError || !pData || !pData.Authenticated)
|
|
165
|
+
{
|
|
166
|
+
tmpFable.log.error(`Headless: Authentication failed: ${pError || (pData && pData.Error) || 'Not authenticated'}`);
|
|
167
|
+
return fCallback(new Error('Authentication failed'));
|
|
168
|
+
}
|
|
169
|
+
tmpFable.log.info('Headless: Authenticated.');
|
|
170
|
+
return fNext();
|
|
171
|
+
});
|
|
172
|
+
};
|
|
173
|
+
|
|
174
|
+
// Step 4: Fetch schema (from local file, custom URL, or default remote endpoint)
|
|
175
|
+
let fStep4_FetchSchema = (fNext) =>
|
|
176
|
+
{
|
|
177
|
+
let tmpSchemaBody = {};
|
|
178
|
+
|
|
179
|
+
// If a local SchemaPath is provided, read the file and send the object directly
|
|
180
|
+
if (pConfig.SchemaPath)
|
|
181
|
+
{
|
|
182
|
+
let tmpSchemaPath = pConfig.SchemaPath;
|
|
183
|
+
if (!libPath.isAbsolute(tmpSchemaPath))
|
|
184
|
+
{
|
|
185
|
+
tmpSchemaPath = libPath.resolve(process.cwd(), tmpSchemaPath);
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
tmpFable.log.info(`Headless: Loading schema from local file ${tmpSchemaPath}...`);
|
|
189
|
+
|
|
190
|
+
try
|
|
191
|
+
{
|
|
192
|
+
let tmpSchemaRaw = libFs.readFileSync(tmpSchemaPath, 'utf8');
|
|
193
|
+
let tmpSchema = JSON.parse(tmpSchemaRaw);
|
|
194
|
+
tmpSchemaBody.Schema = tmpSchema;
|
|
195
|
+
}
|
|
196
|
+
catch (pReadError)
|
|
197
|
+
{
|
|
198
|
+
tmpFable.log.error(`Headless: Failed to read schema file ${tmpSchemaPath}: ${pReadError.message || pReadError}`);
|
|
199
|
+
return fCallback(new Error('Schema file read failed'));
|
|
200
|
+
}
|
|
201
|
+
}
|
|
202
|
+
else if (pConfig.SchemaURL)
|
|
203
|
+
{
|
|
204
|
+
tmpSchemaBody.SchemaURL = pConfig.SchemaURL;
|
|
205
|
+
tmpFable.log.info(`Headless: Fetching remote schema from ${pConfig.SchemaURL}...`);
|
|
206
|
+
}
|
|
207
|
+
else
|
|
208
|
+
{
|
|
209
|
+
tmpFable.log.info('Headless: Fetching remote schema from default endpoint...');
|
|
210
|
+
}
|
|
211
|
+
|
|
212
|
+
fPost(`${tmpPrefix}/schema/fetch`, tmpSchemaBody,
|
|
213
|
+
(pError, pData) =>
|
|
214
|
+
{
|
|
215
|
+
if (pError || !pData || !pData.Success)
|
|
216
|
+
{
|
|
217
|
+
tmpFable.log.error(`Headless: Schema fetch failed: ${pError || (pData && pData.Error) || 'Unknown error'}`);
|
|
218
|
+
return fCallback(new Error('Schema fetch failed'));
|
|
219
|
+
}
|
|
220
|
+
tmpFable.log.info(`Headless: Fetched ${pData.TableCount} tables.`);
|
|
221
|
+
return fNext();
|
|
222
|
+
});
|
|
223
|
+
};
|
|
224
|
+
|
|
225
|
+
// Step 5: Deploy tables
|
|
226
|
+
let fStep5_Deploy = (fNext) =>
|
|
227
|
+
{
|
|
228
|
+
let tmpTables = pConfig.Tables || [];
|
|
229
|
+
tmpFable.log.info(`Headless: Deploying ${tmpTables.length > 0 ? tmpTables.length + ' selected' : 'all'} tables...`);
|
|
230
|
+
fPost(`${tmpPrefix}/schema/deploy`, { Tables: tmpTables },
|
|
231
|
+
(pError, pData) =>
|
|
232
|
+
{
|
|
233
|
+
if (pError || !pData || !pData.Success)
|
|
234
|
+
{
|
|
235
|
+
tmpFable.log.error(`Headless: Deploy failed: ${pError || (pData && pData.Error) || 'Unknown error'}`);
|
|
236
|
+
return fCallback(new Error('Deploy failed'));
|
|
237
|
+
}
|
|
238
|
+
tmpFable.log.info(`Headless: ${pData.Message}`);
|
|
239
|
+
return fNext();
|
|
240
|
+
});
|
|
241
|
+
};
|
|
242
|
+
|
|
243
|
+
// Step 6: Start sync
|
|
244
|
+
let fStep6_Sync = (fNext) =>
|
|
245
|
+
{
|
|
246
|
+
let tmpSync = pConfig.Sync || {};
|
|
247
|
+
let tmpSyncBody = (
|
|
248
|
+
{
|
|
249
|
+
Tables: pConfig.Tables || [],
|
|
250
|
+
SyncMode: tmpSync.Mode || 'Initial',
|
|
251
|
+
PageSize: tmpSync.PageSize || 100,
|
|
252
|
+
SyncDeletedRecords: !!tmpSync.SyncDeletedRecords,
|
|
253
|
+
MaxRecordsPerEntity: tmpMaxRecords || tmpSync.MaxRecordsPerEntity || 0,
|
|
254
|
+
DateTimePrecisionMS: tmpSync.DateTimePrecisionMS
|
|
255
|
+
});
|
|
256
|
+
|
|
257
|
+
tmpFable.log.info(`Headless: Starting ${tmpSyncBody.SyncMode} sync...`);
|
|
258
|
+
fPost(`${tmpPrefix}/sync/start`, tmpSyncBody,
|
|
259
|
+
(pError, pData) =>
|
|
260
|
+
{
|
|
261
|
+
if (pError || !pData || !pData.Success)
|
|
262
|
+
{
|
|
263
|
+
tmpFable.log.error(`Headless: Sync start failed: ${pError || (pData && pData.Error) || 'Unknown error'}`);
|
|
264
|
+
return fCallback(new Error('Sync start failed'));
|
|
265
|
+
}
|
|
266
|
+
tmpFable.log.info(`Headless: ${pData.Message}`);
|
|
267
|
+
|
|
268
|
+
// Poll for completion
|
|
269
|
+
let fPoll = () =>
|
|
270
|
+
{
|
|
271
|
+
fGet(`${tmpPrefix}/sync/status`,
|
|
272
|
+
(pPollError, pStatus) =>
|
|
273
|
+
{
|
|
274
|
+
if (pPollError)
|
|
275
|
+
{
|
|
276
|
+
tmpFable.log.error(`Headless: Poll error: ${pPollError}`);
|
|
277
|
+
return setTimeout(fPoll, 5000);
|
|
278
|
+
}
|
|
279
|
+
|
|
280
|
+
if (pStatus.Running)
|
|
281
|
+
{
|
|
282
|
+
let tmpTables = pStatus.Tables || {};
|
|
283
|
+
let tmpNames = Object.keys(tmpTables);
|
|
284
|
+
let tmpActive = tmpNames.filter((n) => tmpTables[n].Status === 'Syncing');
|
|
285
|
+
let tmpDone = tmpNames.filter((n) => tmpTables[n].Status === 'Complete' || tmpTables[n].Status === 'Error' || tmpTables[n].Status === 'Partial');
|
|
286
|
+
if (tmpActive.length > 0)
|
|
287
|
+
{
|
|
288
|
+
let tmpA = tmpTables[tmpActive[0]];
|
|
289
|
+
tmpFable.log.info(`Headless: [${tmpDone.length}/${tmpNames.length}] Syncing ${tmpActive[0]}: ${tmpA.Synced}/${tmpA.Total}`);
|
|
290
|
+
}
|
|
291
|
+
return setTimeout(fPoll, 5000);
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
// Sync finished — fetch and write structured report
|
|
295
|
+
fGet(`${tmpPrefix}/sync/report`,
|
|
296
|
+
(pReportError, pReport) =>
|
|
297
|
+
{
|
|
298
|
+
if (!pReportError && pReport && pReport.ReportVersion)
|
|
299
|
+
{
|
|
300
|
+
// Write report JSON file
|
|
301
|
+
let tmpReportPath = (pCLIOptions && pCLIOptions.reportPath) || null;
|
|
302
|
+
|
|
303
|
+
// Auto-derive from log path if not explicitly set
|
|
304
|
+
if (!tmpReportPath && pCLIOptions && pCLIOptions.logPath)
|
|
305
|
+
{
|
|
306
|
+
let tmpLogBase = pCLIOptions.logPath.replace(/\.log$/, '');
|
|
307
|
+
tmpReportPath = `${tmpLogBase}-report.json`;
|
|
308
|
+
}
|
|
309
|
+
|
|
310
|
+
if (tmpReportPath)
|
|
311
|
+
{
|
|
312
|
+
try
|
|
313
|
+
{
|
|
314
|
+
libFs.writeFileSync(tmpReportPath, JSON.stringify(pReport, null, '\t'), 'utf8');
|
|
315
|
+
tmpFable.log.info(`Headless: Report written to ${tmpReportPath}`);
|
|
316
|
+
}
|
|
317
|
+
catch (pWriteError)
|
|
318
|
+
{
|
|
319
|
+
tmpFable.log.error(`Headless: Failed to write report: ${pWriteError.message}`);
|
|
320
|
+
}
|
|
321
|
+
}
|
|
322
|
+
}
|
|
323
|
+
else
|
|
324
|
+
{
|
|
325
|
+
tmpFable.log.warn(`Headless: Could not fetch sync report: ${pReportError || 'No report available'}`);
|
|
326
|
+
}
|
|
327
|
+
|
|
328
|
+
return fNext();
|
|
329
|
+
});
|
|
330
|
+
});
|
|
331
|
+
};
|
|
332
|
+
setTimeout(fPoll, 3000);
|
|
333
|
+
});
|
|
334
|
+
};
|
|
335
|
+
|
|
336
|
+
// Execute pipeline
|
|
337
|
+
fStep1_ConnectDB(() =>
|
|
338
|
+
{
|
|
339
|
+
fStep2_ConfigureSession(() =>
|
|
340
|
+
{
|
|
341
|
+
fStep3_Authenticate(() =>
|
|
342
|
+
{
|
|
343
|
+
fStep4_FetchSchema(() =>
|
|
344
|
+
{
|
|
345
|
+
fStep5_Deploy(() =>
|
|
346
|
+
{
|
|
347
|
+
fStep6_Sync(() =>
|
|
348
|
+
{
|
|
349
|
+
tmpFable.log.info('Headless: Pipeline complete.');
|
|
350
|
+
return fCallback();
|
|
351
|
+
});
|
|
352
|
+
});
|
|
353
|
+
});
|
|
354
|
+
});
|
|
355
|
+
});
|
|
356
|
+
});
|
|
357
|
+
};
|