@memberjunction/metadata-sync 2.54.0 → 2.56.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. package/README.md +92 -51
  2. package/dist/index.d.ts +21 -1
  3. package/dist/index.js +41 -3
  4. package/dist/index.js.map +1 -1
  5. package/dist/lib/file-backup-manager.js +2 -2
  6. package/dist/lib/file-backup-manager.js.map +1 -1
  7. package/dist/lib/sql-logger.d.ts +44 -0
  8. package/dist/lib/sql-logger.js +140 -0
  9. package/dist/lib/sql-logger.js.map +1 -0
  10. package/dist/lib/sync-engine.js +2 -2
  11. package/dist/lib/sync-engine.js.map +1 -1
  12. package/dist/lib/transaction-manager.d.ts +36 -0
  13. package/dist/lib/transaction-manager.js +117 -0
  14. package/dist/lib/transaction-manager.js.map +1 -0
  15. package/dist/services/FileResetService.d.ts +30 -0
  16. package/dist/services/FileResetService.js +182 -0
  17. package/dist/services/FileResetService.js.map +1 -0
  18. package/dist/services/InitService.d.ts +17 -0
  19. package/dist/services/InitService.js +118 -0
  20. package/dist/services/InitService.js.map +1 -0
  21. package/dist/services/PullService.d.ts +45 -0
  22. package/dist/services/PullService.js +564 -0
  23. package/dist/services/PullService.js.map +1 -0
  24. package/dist/services/PushService.d.ts +45 -0
  25. package/dist/services/PushService.js +394 -0
  26. package/dist/services/PushService.js.map +1 -0
  27. package/dist/services/StatusService.d.ts +32 -0
  28. package/dist/services/StatusService.js +138 -0
  29. package/dist/services/StatusService.js.map +1 -0
  30. package/dist/services/WatchService.d.ts +32 -0
  31. package/dist/services/WatchService.js +242 -0
  32. package/dist/services/WatchService.js.map +1 -0
  33. package/dist/services/index.d.ts +16 -0
  34. package/dist/services/index.js +28 -0
  35. package/dist/services/index.js.map +1 -0
  36. package/package.json +14 -45
  37. package/bin/debug.js +0 -7
  38. package/bin/run +0 -17
  39. package/bin/run.js +0 -6
  40. package/dist/commands/file-reset/index.d.ts +0 -15
  41. package/dist/commands/file-reset/index.js +0 -221
  42. package/dist/commands/file-reset/index.js.map +0 -1
  43. package/dist/commands/init/index.d.ts +0 -7
  44. package/dist/commands/init/index.js +0 -155
  45. package/dist/commands/init/index.js.map +0 -1
  46. package/dist/commands/pull/index.d.ts +0 -246
  47. package/dist/commands/pull/index.js +0 -1448
  48. package/dist/commands/pull/index.js.map +0 -1
  49. package/dist/commands/push/index.d.ts +0 -41
  50. package/dist/commands/push/index.js +0 -1131
  51. package/dist/commands/push/index.js.map +0 -1
  52. package/dist/commands/status/index.d.ts +0 -10
  53. package/dist/commands/status/index.js +0 -199
  54. package/dist/commands/status/index.js.map +0 -1
  55. package/dist/commands/validate/index.d.ts +0 -15
  56. package/dist/commands/validate/index.js +0 -149
  57. package/dist/commands/validate/index.js.map +0 -1
  58. package/dist/commands/watch/index.d.ts +0 -15
  59. package/dist/commands/watch/index.js +0 -300
  60. package/dist/commands/watch/index.js.map +0 -1
  61. package/dist/hooks/init.d.ts +0 -3
  62. package/dist/hooks/init.js +0 -59
  63. package/dist/hooks/init.js.map +0 -1
  64. package/oclif.manifest.json +0 -376
@@ -0,0 +1,564 @@
1
"use strict";
// TypeScript-emitted CommonJS interop helper: wraps a non-ES module so its
// export can be accessed uniformly via `.default`.
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
// Placeholder assignment; the real class is attached at the bottom of the file.
exports.PullService = void 0;
// Third-party / framework dependencies
const fs_extra_1 = __importDefault(require("fs-extra"));
const path_1 = __importDefault(require("path"));
const core_1 = require("@memberjunction/core");
// Package-local configuration helpers
const config_1 = require("../config");
const config_manager_1 = require("../lib/config-manager");
12
+ class PullService {
13
    // Sync engine providing getEntityInfo() and calculateChecksum() (see usages below).
    syncEngine;
    // User context forwarded to RunView queries; may be null/undefined.
    contextUser;
    /**
     * @param {object} syncEngine - engine used for entity metadata and checksums
     * @param {object} contextUser - user context passed to data-access calls
     */
    constructor(syncEngine, contextUser) {
        this.syncEngine = syncEngine;
        this.contextUser = contextUser;
    }
19
    /**
     * Pulls records of options.entity from the database into local JSON files.
     *
     * Resolves the target directory (explicit options.targetDir, or a search
     * from the original working directory), validates the directory's
     * .mj-sync.json entity config, runs a RunView query (options.filter takes
     * precedence over the config's pull.filter), then hands the results to
     * processRecords().
     *
     * NOTE(review): when options.targetDir is given this calls process.chdir(),
     * a process-wide side effect that is not restored afterwards — confirm
     * callers expect the cwd change to persist.
     *
     * @param {object} options - { entity, targetDir?, filter?, dryRun?, multiFile?, verbose? }
     * @param {object} [callbacks] - optional { onLog, onProgress, onSuccess, onWarn } hooks
     * @returns {Promise<object>} { processed, created, updated, skipped, targetDir }
     * @throws {Error} when no (or more than one) entity directory is found, the
     *         directory's config names a different entity, or the query fails
     */
    async pull(options, callbacks) {
        let targetDir;
        let entityConfig;
        // Check if we should use a specific target directory
        if (options.targetDir) {
            if (options.verbose) {
                callbacks?.onLog?.(`Using specified target directory: ${options.targetDir}`);
            }
            process.chdir(options.targetDir);
            targetDir = process.cwd();
            // Load entity config from the current directory
            entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
            if (!entityConfig) {
                throw new Error(`No .mj-sync.json found in ${targetDir}`);
            }
            if (entityConfig.entity !== options.entity) {
                throw new Error(`Directory ${targetDir} is configured for entity "${entityConfig.entity}", not "${options.entity}"`);
            }
        }
        else {
            // Original behavior - find entity directory
            const entityDirs = await this.findEntityDirectories(options.entity);
            if (entityDirs.length === 0) {
                throw new Error(`No directory found for entity "${options.entity}". Run "mj sync init" first.`);
            }
            if (entityDirs.length === 1) {
                targetDir = entityDirs[0];
            }
            else {
                // Ambiguity is not resolved here: the service throws and lets the
                // CLI prompt the user for an explicit target directory.
                throw new Error(`Multiple directories found for entity "${options.entity}". Please specify target directory.`);
            }
            entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
            if (!entityConfig) {
                throw new Error(`Invalid entity configuration in ${targetDir}`);
            }
        }
        // Show configuration notice only if relevant and in verbose mode
        if (options.verbose && entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
            const targetFile = path_1.default.join(targetDir, entityConfig.pull.newFileName.endsWith('.json')
                ? entityConfig.pull.newFileName
                : `${entityConfig.pull.newFileName}.json`);
            if (await fs_extra_1.default.pathExists(targetFile)) {
                callbacks?.onLog?.(`\n📝 Configuration: New records will be appended to existing file '${path_1.default.basename(targetFile)}'`);
            }
        }
        // Pull records
        callbacks?.onProgress?.(`Pulling ${options.entity} records`);
        const rv = new core_1.RunView();
        // Explicit CLI filter wins over the configured pull.filter.
        let filter = '';
        if (options.filter) {
            filter = options.filter;
        }
        else if (entityConfig.pull?.filter) {
            filter = entityConfig.pull.filter;
        }
        const result = await rv.RunView({
            EntityName: options.entity,
            ExtraFilter: filter,
            ResultType: 'entity_object'
        }, this.contextUser);
        if (!result.Success) {
            throw new Error(`Failed to pull records: ${result.ErrorMessage}`);
        }
        callbacks?.onSuccess?.(`Found ${result.Results.length} records`);
        if (options.dryRun) {
            // Dry run reports what would happen and writes nothing.
            callbacks?.onLog?.(`\nDry run mode - would pull ${result.Results.length} records to ${targetDir}`);
            return {
                processed: 0,
                created: 0,
                updated: 0,
                skipped: 0,
                targetDir
            };
        }
        // Check if we need to wait for async property loading
        if (entityConfig.pull?.externalizeFields && result.Results.length > 0) {
            await this.handleAsyncPropertyLoading(options.entity, entityConfig, options.verbose, callbacks);
        }
        // Process records
        const pullResult = await this.processRecords(result.Results, options, targetDir, entityConfig, callbacks);
        return {
            ...pullResult,
            targetDir
        };
    }
106
+ async handleAsyncPropertyLoading(entityName, entityConfig, verbose, callbacks) {
107
+ const metadata = new core_1.Metadata();
108
+ const entityInfo = metadata.EntityByName(entityName);
109
+ if (!entityInfo)
110
+ return;
111
+ const externalizeConfig = entityConfig.pull.externalizeFields;
112
+ let fieldsToExternalize = [];
113
+ if (Array.isArray(externalizeConfig)) {
114
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
115
+ fieldsToExternalize = externalizeConfig;
116
+ }
117
+ else {
118
+ fieldsToExternalize = externalizeConfig
119
+ .map(item => item.field);
120
+ }
121
+ }
122
+ else {
123
+ fieldsToExternalize = Object.keys(externalizeConfig);
124
+ }
125
+ // Get all field names from entity metadata
126
+ const metadataFieldNames = entityInfo.Fields.map(f => f.Name);
127
+ // Check if any externalized fields are NOT in metadata (likely computed properties)
128
+ const computedFields = fieldsToExternalize.filter(f => !metadataFieldNames.includes(f));
129
+ if (computedFields.length > 0) {
130
+ if (verbose) {
131
+ callbacks?.onProgress?.(`Waiting 5 seconds for async property loading in ${entityName} (${computedFields.join(', ')})...`);
132
+ }
133
+ await new Promise(resolve => setTimeout(resolve, 5000));
134
+ if (verbose) {
135
+ callbacks?.onSuccess?.('Async property loading wait complete');
136
+ }
137
+ }
138
+ }
139
    /**
     * Routes pulled records to one of two storage layouts:
     * - options.multiFile set: all records are written as a single JSON array
     *   file named by options.multiFile (created/updated/skipped stay 0);
     * - otherwise: delegates to processIndividualRecords() for per-record
     *   create/update/skip handling.
     *
     * Individual record failures are reported via callbacks.onWarn and do not
     * abort the batch.
     *
     * @returns {Promise<{processed: number, created: number, updated: number, skipped: number}>}
     * @throws {Error} when syncEngine has no metadata for options.entity
     */
    async processRecords(records, options, targetDir, entityConfig, callbacks) {
        const entityInfo = this.syncEngine.getEntityInfo(options.entity);
        if (!entityInfo) {
            throw new Error(`Entity information not found for: ${options.entity}`);
        }
        callbacks?.onProgress?.('Processing records');
        let processed = 0;
        let updated = 0;
        let created = 0;
        let skipped = 0;
        // If multi-file flag is set, collect all records
        if (options.multiFile) {
            const allRecords = [];
            for (const record of records) {
                try {
                    // Build primary key
                    const primaryKey = {};
                    for (const pk of entityInfo.PrimaryKeys) {
                        primaryKey[pk.Name] = record[pk.Name];
                    }
                    // Process record for multi-file
                    const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, options.verbose, true);
                    allRecords.push(recordData);
                    processed++;
                    if (options.verbose) {
                        callbacks?.onProgress?.(`Processing records (${processed}/${records.length})`);
                    }
                }
                catch (error) {
                    callbacks?.onWarn?.(`Failed to process record: ${error.message || error}`);
                }
            }
            // Write all records to single file
            if (allRecords.length > 0) {
                // '.json' is appended unless the caller already included it.
                const fileName = options.multiFile.endsWith('.json') ? options.multiFile : `${options.multiFile}.json`;
                const filePath = path_1.default.join(targetDir, fileName);
                await fs_extra_1.default.writeJson(filePath, allRecords, { spaces: 2 });
                callbacks?.onSuccess?.(`Pulled ${processed} records to ${path_1.default.basename(filePath)}`);
            }
        }
        else {
            // Smart update logic for single-file-per-record
            const result = await this.processIndividualRecords(records, options, targetDir, entityConfig, entityInfo, callbacks);
            processed = result.processed;
            updated = result.updated;
            created = result.created;
            skipped = result.skipped;
            // Final status
            const statusParts = [`Processed ${processed} records`];
            if (updated > 0)
                statusParts.push(`updated ${updated}`);
            if (created > 0)
                statusParts.push(`created ${created}`);
            if (skipped > 0)
                statusParts.push(`skipped ${skipped}`);
            callbacks?.onSuccess?.(statusParts.join(', '));
        }
        return { processed, created, updated, skipped };
    }
198
    /**
     * Per-record-file pull flow:
     * 1. Scans targetDir for files matching pull.filePattern (falling back to
     *    entityConfig.filePattern, then '*.json') and indexes their records by
     *    primary key.
     * 2. Partitions the pulled records into "update existing" vs "create new",
     *    honoring pull.updateExistingRecords and pull.createNewFileIfNotFound
     *    (both treated as enabled unless explicitly false).
     * 3. Updates existing files — backing each file up at most once when
     *    pull.backupBeforeUpdate is set — using pull.mergeStrategy (default
     *    'merge') and pull.preserveFields.
     * 4. Writes new records either appended to pull.newFileName (when
     *    pull.appendRecordsToExistingFile is set) or one file per record.
     *
     * Per-record failures are reported through callbacks.onWarn and skipped;
     * the batch continues.
     *
     * @returns {Promise<{processed: number, updated: number, created: number, skipped: number}>}
     */
    async processIndividualRecords(records, options, targetDir, entityConfig, entityInfo, callbacks) {
        let processed = 0;
        let updated = 0;
        let created = 0;
        let skipped = 0;
        // Find existing files
        const filePattern = entityConfig.pull?.filePattern || entityConfig.filePattern || '*.json';
        const existingFiles = await this.findExistingFiles(targetDir, filePattern);
        if (options.verbose) {
            callbacks?.onLog?.(`Found ${existingFiles.length} existing files matching pattern '${filePattern}'`);
            existingFiles.forEach(f => callbacks?.onLog?.(` - ${path_1.default.basename(f)}`));
        }
        // Load existing records and build lookup map
        const existingRecordsMap = await this.loadExistingRecords(existingFiles, entityInfo);
        if (options.verbose) {
            callbacks?.onLog?.(`Loaded ${existingRecordsMap.size} existing records from files`);
        }
        // Separate records into new and existing
        const newRecords = [];
        const existingRecordsToUpdate = [];
        for (const record of records) {
            // Build primary key
            const primaryKey = {};
            for (const pk of entityInfo.PrimaryKeys) {
                primaryKey[pk.Name] = record[pk.Name];
            }
            // Create lookup key
            const lookupKey = this.createPrimaryKeyLookup(primaryKey);
            const existingFileInfo = existingRecordsMap.get(lookupKey);
            if (existingFileInfo) {
                // Record exists locally
                if (entityConfig.pull?.updateExistingRecords !== false) {
                    existingRecordsToUpdate.push({ record, primaryKey, filePath: existingFileInfo.filePath });
                }
                else {
                    skipped++;
                    if (options.verbose) {
                        callbacks?.onLog?.(`Skipping existing record: ${lookupKey}`);
                    }
                }
            }
            else {
                // Record doesn't exist locally
                if (entityConfig.pull?.createNewFileIfNotFound !== false) {
                    newRecords.push({ record, primaryKey });
                }
                else {
                    skipped++;
                    if (options.verbose) {
                        callbacks?.onLog?.(`Skipping new record (createNewFileIfNotFound=false): ${lookupKey}`);
                    }
                }
            }
        }
        // Track which files have been backed up to avoid duplicates
        const backedUpFiles = new Set();
        // Process existing records updates
        for (const { record, primaryKey, filePath } of existingRecordsToUpdate) {
            try {
                // NOTE(review): progress uses `updated + 1`, so the counter stalls
                // if a preceding record failed (updated is only bumped on success).
                callbacks?.onProgress?.(`Updating existing records (${updated + 1}/${existingRecordsToUpdate.length})`);
                // Create backup if configured (only once per file)
                if (entityConfig.pull?.backupBeforeUpdate && !backedUpFiles.has(filePath)) {
                    await this.createBackup(filePath, entityConfig.pull?.backupDirectory);
                    backedUpFiles.add(filePath);
                }
                // Load existing file data
                const existingData = await fs_extra_1.default.readJson(filePath);
                // Find the specific existing record that matches this primary key
                let existingRecordData;
                if (Array.isArray(existingData)) {
                    // Find the matching record in the array
                    const matchingRecord = existingData.find(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
                    existingRecordData = matchingRecord || existingData[0]; // Fallback to first if not found
                }
                else {
                    existingRecordData = existingData;
                }
                // Process the new record data (isNewRecord = false for updates)
                const newRecordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, options.verbose, false, existingRecordData);
                // Apply merge strategy
                const mergedData = await this.mergeRecords(existingRecordData, newRecordData, entityConfig.pull?.mergeStrategy || 'merge', entityConfig.pull?.preserveFields || []);
                // Write updated data
                if (Array.isArray(existingData)) {
                    // Update the record in the array
                    const index = existingData.findIndex(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
                    if (index >= 0) {
                        existingData[index] = mergedData;
                        await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
                    }
                }
                else {
                    await fs_extra_1.default.writeJson(filePath, mergedData, { spaces: 2 });
                }
                updated++;
                processed++;
                if (options.verbose) {
                    callbacks?.onLog?.(`Updated: ${filePath}`);
                }
            }
            catch (error) {
                callbacks?.onWarn?.(`Failed to update record: ${error.message || error}`);
            }
        }
        // Process new records
        if (newRecords.length > 0) {
            callbacks?.onProgress?.(`Creating new records (0/${newRecords.length})`);
            if (entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
                // Append all new records to a single file
                const fileName = entityConfig.pull.newFileName.endsWith('.json')
                    ? entityConfig.pull.newFileName
                    : `${entityConfig.pull.newFileName}.json`;
                const filePath = path_1.default.join(targetDir, fileName);
                // Load existing file if it exists
                let existingData = [];
                if (await fs_extra_1.default.pathExists(filePath)) {
                    const fileData = await fs_extra_1.default.readJson(filePath);
                    // A single-object file is promoted to an array before appending.
                    existingData = Array.isArray(fileData) ? fileData : [fileData];
                }
                // Process and append all new records
                for (const { record, primaryKey } of newRecords) {
                    try {
                        // For new records, pass isNewRecord = true (default)
                        const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, options.verbose, true);
                        existingData.push(recordData);
                        created++;
                        processed++;
                        if (options.verbose) {
                            callbacks?.onProgress?.(`Creating new records (${created}/${newRecords.length})`);
                        }
                    }
                    catch (error) {
                        callbacks?.onWarn?.(`Failed to process new record: ${error.message || error}`);
                    }
                }
                // Write the combined data
                await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
                if (options.verbose) {
                    callbacks?.onLog?.(`Appended ${created} new records to: ${filePath}`);
                }
            }
            else {
                // Create individual files for each new record
                for (const { record, primaryKey } of newRecords) {
                    try {
                        await this.processRecord(record, primaryKey, targetDir, entityConfig, options.verbose);
                        created++;
                        processed++;
                        if (options.verbose) {
                            callbacks?.onProgress?.(`Creating new records (${created}/${newRecords.length})`);
                        }
                    }
                    catch (error) {
                        callbacks?.onWarn?.(`Failed to process new record: ${error.message || error}`);
                    }
                }
            }
        }
        return { processed, updated, created, skipped };
    }
357
+ async processRecord(record, primaryKey, targetDir, entityConfig, verbose) {
358
+ const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, verbose, true);
359
+ // Determine file path
360
+ const fileName = this.buildFileName(primaryKey, entityConfig);
361
+ const filePath = path_1.default.join(targetDir, fileName);
362
+ // Write JSON file
363
+ await fs_extra_1.default.writeJson(filePath, recordData, { spaces: 2 });
364
+ }
365
    /**
     * Converts one pulled entity record into the on-disk JSON shape:
     * { fields, primaryKey, sync: { lastModified, checksum } }.
     *
     * Field values come from record.GetAll() when available (entity objects),
     * otherwise from the record's own enumerable properties. Primary-key
     * fields, internal '__mj_'-prefixed fields, and pull.excludeFields entries
     * are omitted; the checksum covers the remaining fields only.
     *
     * NOTE(review): this is an acknowledged simplified port of the pull
     * command — existingRecordData, currentDepth and ancestryPath are accepted
     * for interface compatibility but unused here, and relatedEntities is
     * never populated in this version.
     *
     * @returns {Promise<object>} the serializable record payload
     */
    async processRecordData(record, primaryKey, targetDir, entityConfig, verbose, isNewRecord = true, existingRecordData, currentDepth = 0, ancestryPath = new Set()) {
        // This is a simplified version - the full implementation would need to be extracted
        // from the pull command. For now, we'll delegate to a method that would be
        // implemented in the full service
        // Build record data
        const fields = {};
        const relatedEntities = {};
        // Get the underlying data from the entity object
        let dataToProcess = record;
        if (typeof record.GetAll === 'function') {
            dataToProcess = record.GetAll();
        }
        // Process fields (simplified - full implementation needed)
        for (const [fieldName, fieldValue] of Object.entries(dataToProcess)) {
            // Skip primary key fields
            if (primaryKey[fieldName] !== undefined) {
                continue;
            }
            // Skip internal fields
            if (fieldName.startsWith('__mj_')) {
                continue;
            }
            // Skip excluded fields
            if (entityConfig.pull?.excludeFields?.includes(fieldName)) {
                continue;
            }
            fields[fieldName] = fieldValue;
        }
        // Calculate checksum
        const checksum = this.syncEngine.calculateChecksum(fields);
        // Build the final record data
        const recordData = {
            fields,
            primaryKey,
            sync: {
                lastModified: new Date().toISOString(),
                checksum: checksum
            }
        };
        // relatedEntities is always empty in this simplified version, so this
        // branch currently never fires; kept for forward compatibility.
        if (Object.keys(relatedEntities).length > 0) {
            recordData.relatedEntities = relatedEntities;
        }
        return recordData;
    }
409
+ async findEntityDirectories(entityName) {
410
+ const dirs = [];
411
+ // Search for directories with matching entity config
412
+ const searchDirs = async (dir) => {
413
+ const entries = await fs_extra_1.default.readdir(dir, { withFileTypes: true });
414
+ for (const entry of entries) {
415
+ if (entry.isDirectory()) {
416
+ const fullPath = path_1.default.join(dir, entry.name);
417
+ const config = await (0, config_1.loadEntityConfig)(fullPath);
418
+ if (config && config.entity === entityName) {
419
+ dirs.push(fullPath);
420
+ }
421
+ else {
422
+ // Recurse
423
+ await searchDirs(fullPath);
424
+ }
425
+ }
426
+ }
427
+ };
428
+ await searchDirs(config_manager_1.configManager.getOriginalCwd());
429
+ return dirs;
430
+ }
431
+ buildFileName(primaryKey, entityConfig) {
432
+ // Use primary key values to build filename
433
+ const keys = Object.values(primaryKey);
434
+ if (keys.length === 1 && typeof keys[0] === 'string') {
435
+ // Single string key - use as base if it's a guid
436
+ const key = keys[0];
437
+ if (key.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i)) {
438
+ // It's a GUID, use first 8 chars, prefixed with dot, lowercase
439
+ return `.${key.substring(0, 8).toLowerCase()}.json`;
440
+ }
441
+ // Use the whole key if not too long, prefixed with dot
442
+ if (key.length <= 50) {
443
+ return `.${key.replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()}.json`;
444
+ }
445
+ }
446
+ // Multiple keys or numeric - create composite name, prefixed with dot
447
+ return '.' + keys.map(k => String(k).replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()).join('-') + '.json';
448
+ }
449
+ async findExistingFiles(dir, pattern) {
450
+ const files = [];
451
+ try {
452
+ const entries = await fs_extra_1.default.readdir(dir, { withFileTypes: true });
453
+ for (const entry of entries) {
454
+ if (entry.isFile()) {
455
+ const fileName = entry.name;
456
+ // Simple pattern matching
457
+ if (pattern === '*.json' && fileName.endsWith('.json')) {
458
+ files.push(path_1.default.join(dir, fileName));
459
+ }
460
+ else if (pattern === '.*.json' && fileName.startsWith('.') && fileName.endsWith('.json')) {
461
+ files.push(path_1.default.join(dir, fileName));
462
+ }
463
+ else if (pattern === fileName) {
464
+ files.push(path_1.default.join(dir, fileName));
465
+ }
466
+ }
467
+ }
468
+ }
469
+ catch (error) {
470
+ // Directory might not exist yet
471
+ if (error.code !== 'ENOENT') {
472
+ throw error;
473
+ }
474
+ }
475
+ return files;
476
+ }
477
+ async loadExistingRecords(files, entityInfo) {
478
+ const recordsMap = new Map();
479
+ for (const filePath of files) {
480
+ try {
481
+ const fileData = await fs_extra_1.default.readJson(filePath);
482
+ const records = Array.isArray(fileData) ? fileData : [fileData];
483
+ for (const record of records) {
484
+ if (record.primaryKey) {
485
+ const lookupKey = this.createPrimaryKeyLookup(record.primaryKey);
486
+ recordsMap.set(lookupKey, { filePath, recordData: record });
487
+ }
488
+ }
489
+ }
490
+ catch (error) {
491
+ // Skip files that can't be parsed
492
+ }
493
+ }
494
+ return recordsMap;
495
+ }
496
+ createPrimaryKeyLookup(primaryKey) {
497
+ const keys = Object.keys(primaryKey).sort();
498
+ return keys.map(k => `${k}:${primaryKey[k]}`).join('|');
499
+ }
500
+ async mergeRecords(existing, newData, strategy, preserveFields) {
501
+ if (strategy === 'skip') {
502
+ return existing;
503
+ }
504
+ if (strategy === 'overwrite') {
505
+ const result = {
506
+ fields: { ...newData.fields },
507
+ primaryKey: newData.primaryKey,
508
+ sync: newData.sync
509
+ };
510
+ // Restore preserved fields from existing
511
+ if (preserveFields.length > 0 && existing.fields) {
512
+ for (const field of preserveFields) {
513
+ if (field in existing.fields) {
514
+ result.fields[field] = existing.fields[field];
515
+ }
516
+ }
517
+ }
518
+ if (newData.relatedEntities) {
519
+ result.relatedEntities = newData.relatedEntities;
520
+ }
521
+ return result;
522
+ }
523
+ // Default 'merge' strategy
524
+ const result = {
525
+ fields: { ...existing.fields, ...newData.fields },
526
+ primaryKey: newData.primaryKey || existing.primaryKey,
527
+ sync: newData.sync
528
+ };
529
+ // Restore preserved fields
530
+ if (preserveFields.length > 0 && existing.fields) {
531
+ for (const field of preserveFields) {
532
+ if (field in existing.fields) {
533
+ result.fields[field] = existing.fields[field];
534
+ }
535
+ }
536
+ }
537
+ if (existing.relatedEntities || newData.relatedEntities) {
538
+ result.relatedEntities = {
539
+ ...existing.relatedEntities,
540
+ ...newData.relatedEntities
541
+ };
542
+ }
543
+ return result;
544
+ }
545
+ async createBackup(filePath, backupDirName) {
546
+ const dir = path_1.default.dirname(filePath);
547
+ const fileName = path_1.default.basename(filePath);
548
+ const backupDir = path_1.default.join(dir, backupDirName || '.backups');
549
+ // Ensure backup directory exists
550
+ await fs_extra_1.default.ensureDir(backupDir);
551
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
552
+ // Remove .json extension, add timestamp, then add .backup extension
553
+ const backupFileName = fileName.replace(/\.json$/, `.${timestamp}.backup`);
554
+ const backupPath = path_1.default.join(backupDir, backupFileName);
555
+ try {
556
+ await fs_extra_1.default.copy(filePath, backupPath);
557
+ }
558
+ catch (error) {
559
+ // Log error but don't throw
560
+ }
561
+ }
562
+ }
563
+ exports.PullService = PullService;
564
+ //# sourceMappingURL=PullService.js.map