s3db.js 7.3.8 → 7.3.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/s3db.cjs.js +99 -31
- package/dist/s3db.cjs.min.js +1 -1
- package/dist/s3db.es.js +99 -31
- package/dist/s3db.es.min.js +1 -1
- package/dist/s3db.iife.js +99 -31
- package/dist/s3db.iife.min.js +1 -1
- package/package.json +1 -1
- package/src/plugins/replicators/bigquery-replicator.class.js +98 -55
- package/src/plugins/replicators/postgres-replicator.class.js +23 -4
- package/src/plugins/replicators/s3db-replicator.class.js +29 -11
- package/src/plugins/replicators/sqs-replicator.class.js +23 -5
package/package.json
CHANGED
+1 -1 (version bump: 7.3.8 → 7.3.10)

package/src/plugins/replicators/bigquery-replicator.class.js
CHANGED
@@ -41,14 +41,14 @@ class BigqueryReplicator extends BaseReplicator {
     this.credentials = config.credentials;
     this.location = config.location || 'US';
     this.logTable = config.logTable;
-
+
     // Parse resources configuration
     this.resources = this.parseResourcesConfig(resources);
   }

   parseResourcesConfig(resources) {
     const parsed = {};
-
+
     for (const [resourceName, config] of Object.entries(resources)) {
       if (typeof config === 'string') {
         // Short form: just table name
@@ -78,7 +78,7 @@ class BigqueryReplicator extends BaseReplicator {
         }];
       }
     }
-
+
     return parsed;
   }

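For orientation, here is a sketch of the per-resource configuration this parser appears to accept, inferred from the visible branches (a plain string as the short form, or an object form carrying `table`, `actions`, and an optional `transform`). The constructor signature and the defaults applied by `parseResourcesConfig` are not visible in this diff, so treat the exact shape below as an assumption.

// Hypothetical resources map for BigqueryReplicator (shape inferred from parseResourcesConfig, not confirmed by this diff)
const resources = {
  users: 'users_table', // short form: just the BigQuery table name
  orders: [{
    table: 'orders_table',
    actions: ['insert', 'update', 'delete'], // operations this table should receive
    transform: (record) => ({ ...record, replicated_at: new Date().toISOString() }) // optional per-table transform
  }]
};
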
@@ -87,7 +87,7 @@ class BigqueryReplicator extends BaseReplicator {
     if (!this.projectId) errors.push('projectId is required');
     if (!this.datasetId) errors.push('datasetId is required');
     if (Object.keys(this.resources).length === 0) errors.push('At least one resource must be configured');
-
+
     // Validate resource configurations
     for (const [resourceName, tables] of Object.entries(this.resources)) {
       for (const tableConfig of tables) {
@@ -107,7 +107,7 @@ class BigqueryReplicator extends BaseReplicator {
         }
       }
     }
-
+
     return { isValid: errors.length === 0, errors };
   }

@@ -141,15 +141,15 @@ class BigqueryReplicator extends BaseReplicator {

   shouldReplicateAction(resourceName, operation) {
     if (!this.resources[resourceName]) return false;
-
-    return this.resources[resourceName].some(tableConfig =>
+
+    return this.resources[resourceName].some(tableConfig =>
       tableConfig.actions.includes(operation)
     );
   }

   getTablesForResource(resourceName, operation) {
     if (!this.resources[resourceName]) return [];
-
+
     return this.resources[resourceName]
       .filter(tableConfig => tableConfig.actions.includes(operation))
       .map(tableConfig => ({
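Both helpers above reduce to a `some`/`filter` over the per-resource table configs keyed by `actions`; a minimal standalone sketch of the same pattern (illustrative names, not the package's API):

// Standalone sketch of the actions filtering used by shouldReplicateAction / getTablesForResource
const tableConfigs = [
  { table: 'orders_table', actions: ['insert', 'update'] },
  { table: 'orders_audit', actions: ['insert'] }
];

const shouldReplicate = (operation) => tableConfigs.some(cfg => cfg.actions.includes(operation));
const tablesFor = (operation) =>
  tableConfigs.filter(cfg => cfg.actions.includes(operation)).map(cfg => cfg.table);

console.log(shouldReplicate('update')); // true
console.log(tablesFor('insert'));       // ['orders_table', 'orders_audit']
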
@@ -159,16 +159,32 @@ class BigqueryReplicator extends BaseReplicator {
   }

   applyTransform(data, transformFn) {
-
-
-
-    if (
-
+    // First, clean internal fields that shouldn't go to BigQuery
+    let cleanData = this._cleanInternalFields(data);
+
+    if (!transformFn) return cleanData;
+
+    let transformedData = JSON.parse(JSON.stringify(cleanData));
     return transformFn(transformedData);
   }

+  _cleanInternalFields(data) {
+    if (!data || typeof data !== 'object') return data;
+
+    const cleanData = { ...data };
+
+    // Remove internal fields that start with $ or _
+    Object.keys(cleanData).forEach(key => {
+      if (key.startsWith('$') || key.startsWith('_')) {
+        delete cleanData[key];
+      }
+    });
+
+    return cleanData;
+  }
+
   async replicate(resourceName, operation, data, id, beforeData = null) {
-
+
     if (!this.enabled || !this.shouldReplicateResource(resourceName)) {
       return { skipped: true, reason: 'resource_not_included' };
     }
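The `_cleanInternalFields` helper added here (and duplicated in the Postgres, S3DB, and SQS replicators below) is a shallow strip of `$`- and `_`-prefixed keys applied before any transform runs. A runnable sketch of its observable behavior:

// Behavior sketch of the _cleanInternalFields helper introduced in this release
function cleanInternalFields(data) {
  if (!data || typeof data !== 'object') return data;
  const cleanData = { ...data };
  Object.keys(cleanData).forEach(key => {
    if (key.startsWith('$') || key.startsWith('_')) delete cleanData[key];
  });
  return cleanData;
}

const record = { id: 'u1', name: 'Ada', _partition: 'p0', $overflow: true };
console.log(cleanInternalFields(record)); // { id: 'u1', name: 'Ada' }
// Only top-level keys are filtered; nested objects pass through unchanged (shallow copy).
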
@@ -187,27 +203,38 @@ class BigqueryReplicator extends BaseReplicator {

     const [ok, err, result] = await tryFn(async () => {
       const dataset = this.bigqueryClient.dataset(this.datasetId);
-
+
       // Replicate to all applicable tables
       for (const tableConfig of tableConfigs) {
         const [okTable, errTable] = await tryFn(async () => {
           const table = dataset.table(tableConfig.table);
           let job;
-
+
           if (operation === 'insert') {
             const transformedData = this.applyTransform(data, tableConfig.transform);
-
+            try {
+              job = await table.insert([transformedData]);
+            } catch (error) {
+              // Extract detailed BigQuery error information
+              const { errors, response } = error;
+              if (this.config.verbose) {
+                console.error('[BigqueryReplicator] BigQuery insert error details:');
+                if (errors) console.error(JSON.stringify(errors, null, 2));
+                if (response) console.error(JSON.stringify(response, null, 2));
+              }
+              throw error;
+            }
           } else if (operation === 'update') {
             const transformedData = this.applyTransform(data, tableConfig.transform);
             const keys = Object.keys(transformedData).filter(k => k !== 'id');
             const setClause = keys.map(k => `${k} = @${k}`).join(', ');
             const params = { id, ...transformedData };
             const query = `UPDATE \`${this.projectId}.${this.datasetId}.${tableConfig.table}\` SET ${setClause} WHERE id = @id`;
-
+
             // Retry logic for streaming buffer issues
             const maxRetries = 2;
             let lastError = null;
-
+
             for (let attempt = 1; attempt <= maxRetries; attempt++) {
               const [ok, error] = await tryFn(async () => {
                 const [updateJob] = await this.bigqueryClient.createQueryJob({
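The new try/catch around `table.insert` surfaces the extra detail BigQuery's Node.js client attaches to streaming-insert failures (a per-row `errors` array and the raw `response`), but only when `verbose` is enabled. An isolated sketch of the same pattern, assuming `@google-cloud/bigquery`:

// Isolated sketch of the verbose insert-error logging added above (assumes @google-cloud/bigquery)
import { BigQuery } from '@google-cloud/bigquery';

async function insertWithVerboseErrors({ projectId, datasetId, tableId, rows, verbose = true }) {
  const table = new BigQuery({ projectId }).dataset(datasetId).table(tableId);
  try {
    return await table.insert(rows);
  } catch (error) {
    // Partial-failure errors from the client carry per-row errors and the raw API response
    const { errors, response } = error;
    if (verbose) {
      console.error('BigQuery insert error details:');
      if (errors) console.error(JSON.stringify(errors, null, 2));
      if (response) console.error(JSON.stringify(response, null, 2));
    }
    throw error;
  }
}
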
@@ -218,17 +245,22 @@ class BigqueryReplicator extends BaseReplicator {
                 await updateJob.getQueryResults();
                 return [updateJob];
               });
-
+
               if (ok) {
                 job = ok;
                 break;
               } else {
                 lastError = error;
-
+
+                // Enhanced error logging for BigQuery update operations
                 if (this.config.verbose) {
                   console.warn(`[BigqueryReplicator] Update attempt ${attempt} failed: ${error.message}`);
+                  if (error.errors) {
+                    console.error('[BigqueryReplicator] BigQuery update error details:');
+                    console.error('Errors:', JSON.stringify(error.errors, null, 2));
+                  }
                 }
-
+
                 // If it's streaming buffer error and not the last attempt
                 if (error?.message?.includes('streaming buffer') && attempt < maxRetries) {
                   const delaySeconds = 30;
@@ -238,32 +270,43 @@ class BigqueryReplicator extends BaseReplicator {
                   await new Promise(resolve => setTimeout(resolve, delaySeconds * 1000));
                   continue;
                 }
-
+
                 throw error;
               }
             }
-
+
             if (!job) throw lastError;
           } else if (operation === 'delete') {
             const query = `DELETE FROM \`${this.projectId}.${this.datasetId}.${tableConfig.table}\` WHERE id = @id`;
-
-
-
-
-
-
-
+            try {
+              const [deleteJob] = await this.bigqueryClient.createQueryJob({
+                query,
+                params: { id },
+                location: this.location
+              });
+              await deleteJob.getQueryResults();
+              job = [deleteJob];
+            } catch (error) {
+              // Enhanced error logging for BigQuery delete operations
+              if (this.config.verbose) {
+                console.error('[BigqueryReplicator] BigQuery delete error details:');
+                console.error('Query:', query);
+                if (error.errors) console.error('Errors:', JSON.stringify(error.errors, null, 2));
+                if (error.response) console.error('Response:', JSON.stringify(error.response, null, 2));
+              }
+              throw error;
+            }
           } else {
             throw new Error(`Unsupported operation: ${operation}`);
           }
-
+
           results.push({
             table: tableConfig.table,
             success: true,
             jobId: job[0]?.id
           });
         });
-
+
         if (!okTable) {
           errors.push({
             table: tableConfig.table,
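The update path retries because rows that were just streamed into BigQuery sit in the streaming buffer and cannot be updated or deleted by DML until the buffer is flushed; the code above detects this by matching "streaming buffer" in the error message and waits 30 seconds between at most two attempts. A generic sketch of that retry shape:

// Generic sketch of the retry-on-streaming-buffer pattern used by the update branch above
async function retryOnStreamingBuffer(run, { maxRetries = 2, delaySeconds = 30 } = {}) {
  let lastError = null;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await run(attempt);
    } catch (error) {
      lastError = error;
      const retriable = error?.message?.includes('streaming buffer') && attempt < maxRetries;
      if (!retriable) throw error; // non-retriable error, or last attempt: give up immediately
      await new Promise(resolve => setTimeout(resolve, delaySeconds * 1000));
    }
  }
  throw lastError; // defensive; mirrors the `if (!job) throw lastError` guard in the diff
}
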
@@ -271,7 +314,7 @@ class BigqueryReplicator extends BaseReplicator {
           });
         }
       }
-
+
       // Log operation if logTable is configured
       if (this.logTable) {
         const [okLog, errLog] = await tryFn(async () => {
@@ -289,14 +332,14 @@ class BigqueryReplicator extends BaseReplicator {
          // Don't fail the main operation if logging fails
        }
      }
-
+
      const success = errors.length === 0;
-
+
      // Log errors if any occurred
      if (errors.length > 0) {
        console.warn(`[BigqueryReplicator] Replication completed with errors for ${resourceName}:`, errors);
      }
-
+
      this.emit('replicated', {
        replicator: this.name,
        resourceName,
@@ -307,17 +350,17 @@ class BigqueryReplicator extends BaseReplicator {
        errors,
        success
      });
-
-      return {
-        success,
-        results,
+
+      return {
+        success,
+        results,
        errors,
        tables: tableConfigs.map(t => t.table)
      };
    });
-
+
    if (ok) return result;
-
+
    if (this.config.verbose) {
      console.warn(`[BigqueryReplicator] Replication failed for ${resourceName}: ${err.message}`);
    }
@@ -328,20 +371,20 @@ class BigqueryReplicator extends BaseReplicator {
      id,
      error: err.message
    });
-
+
    return { success: false, error: err.message };
  }

  async replicateBatch(resourceName, records) {
    const results = [];
    const errors = [];
-
+
    for (const record of records) {
      const [ok, err, res] = await tryFn(() => this.replicate(
-        resourceName,
-        record.operation,
-        record.data,
-        record.id,
+        resourceName,
+        record.operation,
+        record.data,
+        record.id,
        record.beforeData
      ));
      if (ok) {
@@ -353,16 +396,16 @@ class BigqueryReplicator extends BaseReplicator {
        errors.push({ id: record.id, error: err.message });
      }
    }
-
+
    // Log errors if any occurred during batch processing
    if (errors.length > 0) {
      console.warn(`[BigqueryReplicator] Batch replication completed with ${errors.length} error(s) for ${resourceName}:`, errors);
    }
-
-    return {
-      success: errors.length === 0,
-      results,
-      errors
+
+    return {
+      success: errors.length === 0,
+      results,
+      errors
    };
  }

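`replicateBatch` feeds each record through `replicate` sequentially and aggregates per-record errors instead of aborting. The record fields it reads are visible in the loop above (`operation`, `data`, `id`, `beforeData`); the call below is a hypothetical usage sketch, not taken from the package docs.

// Hypothetical batch shape for replicateBatch (field names taken from the loop above)
const records = [
  { operation: 'insert', id: 'u1', data: { id: 'u1', name: 'Ada' }, beforeData: null },
  { operation: 'update', id: 'u1', data: { id: 'u1', name: 'Ada Lovelace' }, beforeData: { id: 'u1', name: 'Ada' } }
];

// const { success, results, errors } = await replicator.replicateBatch('users', records);
// if (!success) console.warn('Replication errors:', errors);
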
package/src/plugins/replicators/postgres-replicator.class.js
CHANGED

@@ -211,18 +211,22 @@ class PostgresReplicator extends BaseReplicator {
    let result;

    if (operation === 'insert') {
+      // Clean internal fields before processing
+      const cleanData = this._cleanInternalFields(data);
      // INSERT INTO table (col1, col2, ...) VALUES (...)
-      const keys = Object.keys(data);
-      const values = keys.map(k => data[k]);
+      const keys = Object.keys(cleanData);
+      const values = keys.map(k => cleanData[k]);
      const columns = keys.map(k => `"${k}"`).join(', ');
      const params = keys.map((_, i) => `$${i + 1}`).join(', ');
      const sql = `INSERT INTO ${table} (${columns}) VALUES (${params}) ON CONFLICT (id) DO NOTHING RETURNING *`;
      result = await this.client.query(sql, values);
    } else if (operation === 'update') {
+      // Clean internal fields before processing
+      const cleanData = this._cleanInternalFields(data);
      // UPDATE table SET col1=$1, col2=$2 ... WHERE id=$N
-      const keys = Object.keys(data).filter(k => k !== 'id');
+      const keys = Object.keys(cleanData).filter(k => k !== 'id');
      const setClause = keys.map((k, i) => `"${k}"=$${i + 1}`).join(', ');
-      const values = keys.map(k => data[k]);
+      const values = keys.map(k => cleanData[k]);
      values.push(id);
      const sql = `UPDATE ${table} SET ${setClause} WHERE id=$${keys.length + 1} RETURNING *`;
      result = await this.client.query(sql, values);
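On the Postgres side the cleaned record drives parameterized SQL: quoted column names with `$n` placeholders plus `ON CONFLICT (id) DO NOTHING` for inserts, and a generated `SET` clause with the id appended as the final parameter for updates. A standalone sketch of the same SQL construction, assuming node-postgres (`pg`) and a trusted table name:

// Standalone sketch of the parameterized SQL built above (assumes node-postgres; `table` must be a trusted identifier)
import { Client } from 'pg';

async function replicateRow(client, table, cleanData, id, operation) {
  const keys = Object.keys(cleanData).filter(k => operation === 'insert' || k !== 'id');
  const values = keys.map(k => cleanData[k]);

  if (operation === 'insert') {
    const columns = keys.map(k => `"${k}"`).join(', ');
    const placeholders = keys.map((_, i) => `$${i + 1}`).join(', ');
    const sql = `INSERT INTO ${table} (${columns}) VALUES (${placeholders}) ON CONFLICT (id) DO NOTHING RETURNING *`;
    return client.query(sql, values);
  }

  // update: SET "col"=$1, ... WHERE id=$N, with id pushed as the last parameter
  const setClause = keys.map((k, i) => `"${k}"=$${i + 1}`).join(', ');
  const sql = `UPDATE ${table} SET ${setClause} WHERE id=$${keys.length + 1} RETURNING *`;
  return client.query(sql, [...values, id]);
}

// Usage: const client = new Client({ connectionString: process.env.DATABASE_URL }); await client.connect();
// await replicateRow(client, 'users', { id: 'u1', name: 'Ada' }, 'u1', 'insert');
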
@@ -346,6 +350,21 @@ class PostgresReplicator extends BaseReplicator {
    return false;
  }

+  _cleanInternalFields(data) {
+    if (!data || typeof data !== 'object') return data;
+
+    const cleanData = { ...data };
+
+    // Remove internal fields that start with $ or _
+    Object.keys(cleanData).forEach(key => {
+      if (key.startsWith('$') || key.startsWith('_')) {
+        delete cleanData[key];
+      }
+    });
+
+    return cleanData;
+  }
+
  async cleanup() {
    if (this.client) await this.client.end();
  }
package/src/plugins/replicators/s3db-replicator.class.js
CHANGED

@@ -249,44 +249,62 @@ class S3dbReplicator extends BaseReplicator {
  }

  _applyTransformer(resource, data) {
+    // First, clean internal fields that shouldn't go to target S3DB
+    let cleanData = this._cleanInternalFields(data);
+
    const normResource = normalizeResourceName(resource);
    const entry = this.resourcesMap[normResource];
    let result;
-    if (!entry) return data;
+    if (!entry) return cleanData;

    // Array of multiple destinations - use first transform found
    if (Array.isArray(entry)) {
      for (const item of entry) {
        if (typeof item === 'object' && item.transform && typeof item.transform === 'function') {
-          result = item.transform(data);
+          result = item.transform(cleanData);
          break;
        } else if (typeof item === 'object' && item.transformer && typeof item.transformer === 'function') {
-          result = item.transformer(data);
+          result = item.transformer(cleanData);
          break;
        }
      }
-      if (!result) result = data;
+      if (!result) result = cleanData;
    } else if (typeof entry === 'object') {
      // Prefer transform, fallback to transformer for backwards compatibility
      if (typeof entry.transform === 'function') {
-        result = entry.transform(data);
+        result = entry.transform(cleanData);
      } else if (typeof entry.transformer === 'function') {
-        result = entry.transformer(data);
+        result = entry.transformer(cleanData);
      }
    } else if (typeof entry === 'function') {
      // Function directly as transformer
-      result = entry(data);
+      result = entry(cleanData);
    } else {
-      result = data;
+      result = cleanData;
    }

    // Ensure that id is always present
-    if (result && data && data.id && !result.id) result.id = data.id;
-    // Fallback: if transformer returns undefined/null, use original data
-    if (!result && data) result = data;
+    if (result && cleanData && cleanData.id && !result.id) result.id = cleanData.id;
+    // Fallback: if transformer returns undefined/null, use original clean data
+    if (!result && cleanData) result = cleanData;
    return result;
  }

+  _cleanInternalFields(data) {
+    if (!data || typeof data !== 'object') return data;
+
+    const cleanData = { ...data };
+
+    // Remove internal fields that start with $ or _
+    Object.keys(cleanData).forEach(key => {
+      if (key.startsWith('$') || key.startsWith('_')) {
+        delete cleanData[key];
+      }
+    });
+
+    return cleanData;
+  }
+
  _resolveDestResource(resource, data) {
    const normResource = normalizeResourceName(resource);
    const entry = this.resourcesMap[normResource];
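The S3DB replicator accepts several shapes for a destination entry — an array of destinations, an object exposing `transform` (or the older `transformer`), or a bare function — and uses the first usable transform it finds, always on the cleaned data. A standalone sketch of that resolution order (illustrative, not the package's exported API):

// Standalone sketch of the transform-resolution order used by _applyTransformer above
function resolveTransform(entry) {
  if (Array.isArray(entry)) {
    for (const item of entry) {
      if (item && typeof item.transform === 'function') return item.transform;
      if (item && typeof item.transformer === 'function') return item.transformer; // legacy name
    }
    return null;
  }
  if (entry && typeof entry.transform === 'function') return entry.transform;
  if (entry && typeof entry.transformer === 'function') return entry.transformer;
  if (typeof entry === 'function') return entry;
  return null;
}

const entry = { transformer: (record) => ({ ...record, source: 's3db' }) };
const fn = resolveTransform(entry);
console.log(fn({ id: 'u1' })); // { id: 'u1', source: 's3db' }
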
package/src/plugins/replicators/sqs-replicator.class.js
CHANGED

@@ -90,19 +90,37 @@ class SqsReplicator extends BaseReplicator {
  }

  _applyTransformer(resource, data) {
+    // First, clean internal fields that shouldn't go to SQS
+    let cleanData = this._cleanInternalFields(data);
+
    const entry = this.resources[resource];
-    let result = data;
+    let result = cleanData;

-    if (!entry) return data;
+    if (!entry) return cleanData;

    // Support both transform and transformer (backwards compatibility)
    if (typeof entry.transform === 'function') {
-      result = entry.transform(data);
+      result = entry.transform(cleanData);
    } else if (typeof entry.transformer === 'function') {
-      result = entry.transformer(data);
+      result = entry.transformer(cleanData);
    }

-    return result || data;
+    return result || cleanData;
+  }
+
+  _cleanInternalFields(data) {
+    if (!data || typeof data !== 'object') return data;
+
+    const cleanData = { ...data };
+
+    // Remove internal fields that start with $ or _
+    Object.keys(cleanData).forEach(key => {
+      if (key.startsWith('$') || key.startsWith('_')) {
+        delete cleanData[key];
+      }
+    });
+
+    return cleanData;
  }

  /**
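The SQS replicator now strips the same internal fields before building the message payload. The actual send path is not part of this diff; the sketch below only illustrates, under that assumption, how a cleaned and transformed record could end up on a queue using AWS SDK v3 (queue URL and message envelope are hypothetical).

// Hedged sketch: sending a cleaned/transformed payload to SQS with AWS SDK v3 (not the package's actual send path)
import { SQSClient, SendMessageCommand } from '@aws-sdk/client-sqs';

async function sendReplicationMessage(queueUrl, resource, operation, payload) {
  const client = new SQSClient({});
  return client.send(new SendMessageCommand({
    QueueUrl: queueUrl,
    MessageBody: JSON.stringify({ resource, operation, data: payload })
  }));
}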