@memberjunction/metadata-sync 2.47.0 → 2.49.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
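For context, the new pull behavior in this release is driven by the `pull` block of each directory's .mj-sync.json file. Below is a minimal sketch of such a block, assembled only from the option names this version's code reads (filePattern, updateExistingRecords, createNewFileIfNotFound, mergeStrategy, preserveFields, backupBeforeUpdate, backupDirectory, appendRecordsToExistingFile, newFileName, excludeFields, lookupFields, externalizeFields, relatedEntities). The entity, field, and file names shown are hypothetical placeholders, not values shipped with the package.

{
  "entity": "Templates",
  "pull": {
    "filePattern": ".*.json",
    "updateExistingRecords": true,
    "createNewFileIfNotFound": true,
    "mergeStrategy": "merge",
    "preserveFields": ["Description"],
    "backupBeforeUpdate": true,
    "backupDirectory": ".backups",
    "appendRecordsToExistingFile": true,
    "newFileName": "new-records",
    "excludeFields": ["UpdatedAt"],
    "lookupFields": {
      "CategoryID": { "entity": "Template Categories", "field": "Name" }
    },
    "externalizeFields": [
      { "field": "TemplateText", "pattern": "@file:templates/{Name}.md" }
    ],
    "relatedEntities": {
      "TemplateContents": {
        "entity": "Template Contents",
        "foreignKey": "TemplateID"
      }
    }
  }
}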
@@ -21,9 +21,10 @@ const path_1 = __importDefault(require("path"));
  const prompts_1 = require("@inquirer/prompts");
  const ora_classic_1 = __importDefault(require("ora-classic"));
  const config_1 = require("../../config");
- const sync_engine_1 = require("../../lib/sync-engine");
  const core_2 = require("@memberjunction/core");
  const provider_utils_1 = require("../../lib/provider-utils");
+ const config_manager_1 = require("../../lib/config-manager");
+ const singleton_manager_1 = require("../../lib/singleton-manager");
  /**
  * Pull metadata records from database to local files
  *
@@ -53,42 +54,76 @@ class Pull extends core_1.Command {
  filter: core_1.Flags.string({ description: 'Additional filter for pulling specific records' }),
  'dry-run': core_1.Flags.boolean({ description: 'Show what would be pulled without actually pulling' }),
  'multi-file': core_1.Flags.string({ description: 'Create a single file with multiple records (provide filename)' }),
+ verbose: core_1.Flags.boolean({ char: 'v', description: 'Show detailed output' }),
  };
  async run() {
  const { flags } = await this.parse(Pull);
  const spinner = (0, ora_classic_1.default)();
  try {
- // Load MJ config
+ // Load MJ config first (before changing directory)
  spinner.start('Loading configuration');
  const mjConfig = (0, config_1.loadMJConfig)();
  if (!mjConfig) {
  this.error('No mj.config.cjs found in current directory or parent directories');
  }
+ // Stop spinner before provider initialization (which logs to console)
+ spinner.stop();
  // Initialize data provider
  const provider = await (0, provider_utils_1.initializeProvider)(mjConfig);
- // Initialize sync engine
- const syncEngine = new sync_engine_1.SyncEngine((0, provider_utils_1.getSystemUser)());
- await syncEngine.initialize();
- spinner.succeed('Configuration loaded');
- // Find entity directory
- const entityDirs = await this.findEntityDirectories(flags.entity);
- if (entityDirs.length === 0) {
- this.error(`No directory found for entity "${flags.entity}". Run "mj-sync init" first.`);
- }
+ // Get singleton sync engine
+ const syncEngine = await (0, singleton_manager_1.getSyncEngine)((0, provider_utils_1.getSystemUser)());
+ // Show success after all initialization is complete
+ spinner.succeed('Configuration and metadata loaded');
  let targetDir;
- if (entityDirs.length === 1) {
- targetDir = entityDirs[0];
+ let entityConfig;
+ // Check if we should use a specific target directory
+ const envTargetDir = process.env.METADATA_SYNC_TARGET_DIR;
+ if (envTargetDir) {
+ if (flags.verbose) {
+ console.log(`Using specified target directory: ${envTargetDir}`);
+ }
+ process.chdir(envTargetDir);
+ targetDir = process.cwd();
+ // Load entity config from the current directory
+ entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
+ if (!entityConfig) {
+ this.error(`No .mj-sync.json found in ${targetDir}`);
+ }
+ if (entityConfig.entity !== flags.entity) {
+ this.error(`Directory ${targetDir} is configured for entity "${entityConfig.entity}", not "${flags.entity}"`);
+ }
  }
  else {
- // Multiple directories found, ask user
- targetDir = await (0, prompts_1.select)({
- message: `Multiple directories found for entity "${flags.entity}". Which one to use?`,
- choices: entityDirs.map(dir => ({ name: dir, value: dir }))
- });
+ // Original behavior - find entity directory
+ const entityDirs = await this.findEntityDirectories(flags.entity);
+ if (entityDirs.length === 0) {
+ this.error(`No directory found for entity "${flags.entity}". Run "mj-sync init" first.`);
+ }
+ if (entityDirs.length === 1) {
+ targetDir = entityDirs[0];
+ }
+ else {
+ // Multiple directories found, ask user
+ targetDir = await (0, prompts_1.select)({
+ message: `Multiple directories found for entity "${flags.entity}". Which one to use?`,
+ choices: entityDirs.map(dir => ({ name: dir, value: dir }))
+ });
+ }
+ entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
+ if (!entityConfig) {
+ this.error(`Invalid entity configuration in ${targetDir}`);
+ }
  }
- const entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
- if (!entityConfig) {
- this.error(`Invalid entity configuration in ${targetDir}`);
+ // Show configuration notice only if relevant
+ if (entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
+ const targetFile = path_1.default.join(targetDir, entityConfig.pull.newFileName.endsWith('.json')
+ ? entityConfig.pull.newFileName
+ : `${entityConfig.pull.newFileName}.json`);
+ if (await fs_extra_1.default.pathExists(targetFile)) {
+ // File exists - inform about append behavior
+ this.log(`\n📝 Configuration: New records will be appended to existing file '${path_1.default.basename(targetFile)}'`);
+ }
+ // If file doesn't exist, no need to mention anything special - we're just creating it
  }
  // Pull records
  spinner.start(`Pulling ${flags.entity} records`);
@@ -102,7 +137,8 @@ class Pull extends core_1.Command {
  }
  const result = await rv.RunView({
  EntityName: flags.entity,
- ExtraFilter: filter
+ ExtraFilter: filter,
+ ResultType: 'entity_object'
  }, (0, provider_utils_1.getSystemUser)());
  if (!result.Success) {
  this.error(`Failed to pull records: ${result.ErrorMessage}`);
@@ -112,6 +148,39 @@ class Pull extends core_1.Command {
  this.log(`\nDry run mode - would pull ${result.Results.length} records to ${targetDir}`);
  return;
  }
+ // Check if we need to wait for async property loading
+ if (entityConfig.pull?.externalizeFields && result.Results.length > 0) {
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(flags.entity);
+ if (entityInfo) {
+ const externalizeConfig = entityConfig.pull.externalizeFields;
+ let fieldsToExternalize = [];
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ fieldsToExternalize = externalizeConfig;
+ }
+ else {
+ // New pattern format
+ fieldsToExternalize = externalizeConfig
+ .map(item => item.field);
+ }
+ }
+ else {
+ // Object format
+ fieldsToExternalize = Object.keys(externalizeConfig);
+ }
+ // Get all field names from entity metadata
+ const metadataFieldNames = entityInfo.Fields.map(f => f.Name);
+ // Check if any externalized fields are NOT in metadata (likely computed properties)
+ const computedFields = fieldsToExternalize.filter(f => !metadataFieldNames.includes(f));
+ if (computedFields.length > 0) {
+ spinner.start(`Waiting 5 seconds for async property loading in ${flags.entity} (${computedFields.join(', ')})...`);
+ await new Promise(resolve => setTimeout(resolve, 5000));
+ spinner.succeed('Async property loading wait complete');
+ }
+ }
+ }
  // Process each record
  const entityInfo = syncEngine.getEntityInfo(flags.entity);
  if (!entityInfo) {
@@ -119,6 +188,9 @@ class Pull extends core_1.Command {
  }
  spinner.start('Processing records');
  let processed = 0;
+ let updated = 0;
+ let created = 0;
+ let skipped = 0;
  // If multi-file flag is set, collect all records
  if (flags['multi-file']) {
  const allRecords = [];
@@ -130,10 +202,12 @@ class Pull extends core_1.Command {
  primaryKey[pk.Name] = record[pk.Name];
  }
  // Process record for multi-file
- const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine);
+ const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
  allRecords.push(recordData);
  processed++;
- spinner.text = `Processing records (${processed}/${result.Results.length})`;
+ if (flags.verbose) {
+ spinner.text = `Processing records (${processed}/${result.Results.length})`;
+ }
  }
  catch (error) {
  this.warn(`Failed to process record: ${error.message || error}`);
@@ -148,33 +222,213 @@ class Pull extends core_1.Command {
  }
  }
  else {
- // Original single-file-per-record logic
+ // Smart update logic for single-file-per-record
+ spinner.text = 'Scanning for existing files...';
+ // Find existing files
+ const filePattern = entityConfig.pull?.filePattern || entityConfig.filePattern || '*.json';
+ const existingFiles = await this.findExistingFiles(targetDir, filePattern);
+ if (flags.verbose) {
+ this.log(`Found ${existingFiles.length} existing files matching pattern '${filePattern}'`);
+ existingFiles.forEach(f => this.log(` - ${path_1.default.basename(f)}`));
+ }
+ // Load existing records and build lookup map
+ const existingRecordsMap = await this.loadExistingRecords(existingFiles, entityInfo);
+ if (flags.verbose) {
+ this.log(`Loaded ${existingRecordsMap.size} existing records from files`);
+ }
+ // Separate records into new and existing
+ const newRecords = [];
+ const existingRecordsToUpdate = [];
  for (const record of result.Results) {
+ // Build primary key
+ const primaryKey = {};
+ for (const pk of entityInfo.PrimaryKeys) {
+ primaryKey[pk.Name] = record[pk.Name];
+ }
+ // Create lookup key
+ const lookupKey = this.createPrimaryKeyLookup(primaryKey);
+ const existingFileInfo = existingRecordsMap.get(lookupKey);
+ if (existingFileInfo) {
+ // Record exists locally
+ if (entityConfig.pull?.updateExistingRecords !== false) {
+ existingRecordsToUpdate.push({ record, primaryKey, filePath: existingFileInfo.filePath });
+ }
+ else {
+ skipped++;
+ if (flags.verbose) {
+ this.log(`Skipping existing record: ${lookupKey}`);
+ }
+ }
+ }
+ else {
+ // Record doesn't exist locally
+ if (entityConfig.pull?.createNewFileIfNotFound !== false) {
+ newRecords.push({ record, primaryKey });
+ }
+ else {
+ skipped++;
+ if (flags.verbose) {
+ this.log(`Skipping new record (createNewFileIfNotFound=false): ${lookupKey}`);
+ }
+ }
+ }
+ }
+ // Track which files have been backed up to avoid duplicates
+ const backedUpFiles = new Set();
+ // Process existing records updates
+ for (const { record, primaryKey, filePath } of existingRecordsToUpdate) {
  try {
- // Build primary key
- const primaryKey = {};
- for (const pk of entityInfo.PrimaryKeys) {
- primaryKey[pk.Name] = record[pk.Name];
+ spinner.text = `Updating existing records (${updated + 1}/${existingRecordsToUpdate.length})`;
+ // Create backup if configured (only once per file)
+ if (entityConfig.pull?.backupBeforeUpdate && !backedUpFiles.has(filePath)) {
+ await this.createBackup(filePath, entityConfig.pull?.backupDirectory);
+ backedUpFiles.add(filePath);
+ }
+ // Load existing file data
+ const existingData = await fs_extra_1.default.readJson(filePath);
+ // Find the specific existing record that matches this primary key
+ let existingRecordData;
+ if (Array.isArray(existingData)) {
+ // Find the matching record in the array
+ const matchingRecord = existingData.find(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
+ existingRecordData = matchingRecord || existingData[0]; // Fallback to first if not found
+ }
+ else {
+ existingRecordData = existingData;
  }
- // Process record
- await this.processRecord(record, primaryKey, targetDir, entityConfig, syncEngine);
+ // Process the new record data (isNewRecord = false for updates)
+ const newRecordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, false, existingRecordData);
+ // Apply merge strategy
+ const mergedData = await this.mergeRecords(existingRecordData, newRecordData, entityConfig.pull?.mergeStrategy || 'merge', entityConfig.pull?.preserveFields || []);
+ // Write updated data
+ if (Array.isArray(existingData)) {
+ // Update the record in the array
+ const index = existingData.findIndex(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
+ if (index >= 0) {
+ existingData[index] = mergedData;
+ await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
+ }
+ }
+ else {
+ await fs_extra_1.default.writeJson(filePath, mergedData, { spaces: 2 });
+ }
+ updated++;
  processed++;
- spinner.text = `Processing records (${processed}/${result.Results.length})`;
+ if (flags.verbose) {
+ this.log(`Updated: ${filePath}`);
+ }
  }
  catch (error) {
- this.warn(`Failed to process record: ${error.message || error}`);
+ this.warn(`Failed to update record: ${error.message || error}`);
  }
  }
- spinner.succeed(`Pulled ${processed} records to ${targetDir}`);
+ // Process new records
+ if (newRecords.length > 0) {
+ spinner.text = `Creating new records (0/${newRecords.length})`;
+ if (entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
+ // Append all new records to a single file
+ const fileName = entityConfig.pull.newFileName.endsWith('.json')
+ ? entityConfig.pull.newFileName
+ : `${entityConfig.pull.newFileName}.json`;
+ const filePath = path_1.default.join(targetDir, fileName);
+ // Load existing file if it exists
+ let existingData = [];
+ if (await fs_extra_1.default.pathExists(filePath)) {
+ const fileData = await fs_extra_1.default.readJson(filePath);
+ existingData = Array.isArray(fileData) ? fileData : [fileData];
+ }
+ // Process and append all new records
+ for (const { record, primaryKey } of newRecords) {
+ try {
+ // For new records, pass isNewRecord = true (default)
+ const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
+ existingData.push(recordData);
+ created++;
+ processed++;
+ if (flags.verbose) {
+ spinner.text = `Creating new records (${created}/${newRecords.length})`;
+ }
+ }
+ catch (error) {
+ this.warn(`Failed to process new record: ${error.message || error}`);
+ }
+ }
+ // Write the combined data
+ await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
+ if (flags.verbose) {
+ this.log(`Appended ${created} new records to: ${filePath}`);
+ }
+ }
+ else {
+ // Create individual files for each new record
+ for (const { record, primaryKey } of newRecords) {
+ try {
+ await this.processRecord(record, primaryKey, targetDir, entityConfig, syncEngine, flags);
+ created++;
+ processed++;
+ if (flags.verbose) {
+ spinner.text = `Creating new records (${created}/${newRecords.length})`;
+ }
+ }
+ catch (error) {
+ this.warn(`Failed to process new record: ${error.message || error}`);
+ }
+ }
+ }
+ }
+ // Final status
+ const statusParts = [`Processed ${processed} records`];
+ if (updated > 0)
+ statusParts.push(`updated ${updated}`);
+ if (created > 0)
+ statusParts.push(`created ${created}`);
+ if (skipped > 0)
+ statusParts.push(`skipped ${skipped}`);
+ spinner.succeed(statusParts.join(', '));
  }
  }
  catch (error) {
  spinner.fail('Pull failed');
+ // Enhanced error logging for debugging
+ this.log('\n=== Pull Error Details ===');
+ this.log(`Error type: ${error?.constructor?.name || 'Unknown'}`);
+ this.log(`Error message: ${error instanceof Error ? error.message : String(error)}`);
+ if (error instanceof Error && error.stack) {
+ this.log(`\nStack trace:`);
+ this.log(error.stack);
+ }
+ // Log context information
+ this.log(`\nContext:`);
+ this.log(`- Working directory: ${config_manager_1.configManager.getOriginalCwd()}`);
+ this.log(`- Entity: ${flags.entity || 'not specified'}`);
+ this.log(`- Filter: ${flags.filter || 'none'}`);
+ this.log(`- Flags: ${JSON.stringify(flags, null, 2)}`);
+ // Check if error is related to common issues
+ const errorMessage = error instanceof Error ? error.message : String(error);
+ if (errorMessage.includes('No directory found for entity')) {
+ this.log(`\nHint: This appears to be an entity directory configuration issue.`);
+ this.log(`Run "mj-sync init" to create directories or ensure .mj-sync.json files exist.`);
+ }
+ else if (errorMessage.includes('database') || errorMessage.includes('connection')) {
+ this.log(`\nHint: This appears to be a database connectivity issue.`);
+ this.log(`Check your mj.config.cjs configuration and database connectivity.`);
+ }
+ else if (errorMessage.includes('Failed to pull records')) {
+ this.log(`\nHint: This appears to be a database query issue.`);
+ this.log(`Check if the entity name "${flags.entity}" is correct and exists in the database.`);
+ }
+ else if (errorMessage.includes('Entity information not found')) {
+ this.log(`\nHint: The entity "${flags.entity}" was not found in metadata.`);
+ this.log(`Check the entity name spelling and ensure it exists in the database.`);
+ }
  this.error(error);
  }
  finally {
- // Clean up database connection
+ // Clean up database connection and reset singletons
  await (0, provider_utils_1.cleanupProvider)();
+ (0, singleton_manager_1.resetSyncEngine)();
+ // Exit process to prevent background MJ tasks from throwing errors
+ process.exit(0);
  }
  }
  /**
@@ -207,7 +461,7 @@ class Pull extends core_1.Command {
  }
  }
  };
- await searchDirs(process.cwd());
+ await searchDirs(config_manager_1.configManager.getOriginalCwd());
  return dirs;
  }
  /**
@@ -224,8 +478,8 @@ class Pull extends core_1.Command {
  * @returns Promise that resolves when file is written
  * @private
  */
- async processRecord(record, primaryKey, targetDir, entityConfig, syncEngine) {
- const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine);
+ async processRecord(record, primaryKey, targetDir, entityConfig, syncEngine, flags) {
+ const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
  // Determine file path
  const fileName = this.buildFileName(primaryKey, entityConfig);
  const filePath = path_1.default.join(targetDir, fileName);
@@ -243,21 +497,56 @@ class Pull extends core_1.Command {
  * @param targetDir - Directory where files will be saved
  * @param entityConfig - Entity configuration with defaults and settings
  * @param syncEngine - Sync engine for checksum calculation
+ * @param flags - Command flags
+ * @param isNewRecord - Whether this is a new record
+ * @param existingRecordData - Existing record data to preserve field selection
+ * @param currentDepth - Current recursion depth for recursive entities
+ * @param ancestryPath - Set of IDs in current ancestry chain to prevent circular references
  * @returns Promise resolving to formatted RecordData
  * @private
  */
- async processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine) {
- // Build record data
- const recordData = {
- primaryKey: primaryKey,
- fields: {},
- sync: {
- lastModified: new Date().toISOString(),
- checksum: ''
+ async processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, isNewRecord = true, existingRecordData, currentDepth = 0, ancestryPath = new Set()) {
+ // Build record data - we'll restructure at the end for proper ordering
+ const fields = {};
+ const relatedEntities = {};
+ // Debug: Log all fields in first record (only in verbose mode)
+ if (flags?.verbose) {
+ const recordKeys = Object.keys(record);
+ console.log('\n=== DEBUG: Processing record ===');
+ console.log('Entity:', entityConfig.entity);
+ console.log('Total fields:', recordKeys.length);
+ console.log('Field names:', recordKeys.filter(k => !k.startsWith('__mj_')).join(', '));
+ console.log('Has TemplateText?:', recordKeys.includes('TemplateText'));
+ console.log('externalizeFields config:', entityConfig.pull?.externalizeFields);
+ }
+ // Get the underlying data from the entity object
+ // If it's an entity object, it will have a GetAll() method
+ let dataToProcess = record;
+ if (typeof record.GetAll === 'function') {
+ // It's an entity object, get the underlying data
+ dataToProcess = record.GetAll();
+ }
+ // Get externalize configuration for pattern lookup
+ const externalizeConfig = entityConfig.pull?.externalizeFields;
+ let externalizeMap = new Map();
+ if (externalizeConfig) {
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ externalizeConfig.forEach(f => externalizeMap.set(f, undefined));
+ }
+ else {
+ // New pattern format
+ externalizeConfig.forEach(item => externalizeMap.set(item.field, item.pattern));
+ }
  }
- };
- // Process fields
- for (const [fieldName, fieldValue] of Object.entries(record)) {
+ else {
+ // Object format
+ Object.keys(externalizeConfig).forEach(f => externalizeMap.set(f, undefined));
+ }
+ }
+ // Process regular fields from the underlying data
+ for (const [fieldName, fieldValue] of Object.entries(dataToProcess)) {
  // Skip primary key fields
  if (primaryKey[fieldName] !== undefined) {
  continue;
@@ -266,49 +555,312 @@ class Pull extends core_1.Command {
  if (fieldName.startsWith('__mj_')) {
  continue;
  }
+ // Skip excluded fields
+ if (entityConfig.pull?.excludeFields?.includes(fieldName)) {
+ continue;
+ }
+ // Skip fields already externalized
+ if (fields[fieldName]) {
+ continue;
+ }
+ // Skip virtual/computed fields - check entity metadata
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ if (entityInfo) {
+ const fieldInfo = entityInfo.Fields.find(f => f.Name === fieldName);
+ if (fieldInfo && !fieldInfo.IsVirtual) {
+ // Field exists in metadata and is not virtual, keep it
+ }
+ else if (fieldInfo && fieldInfo.IsVirtual) {
+ // Skip virtual fields
+ continue;
+ }
+ else if (!fieldInfo) {
+ // Field not in metadata at all
+ // Check if it's explicitly configured for externalization, lookup, or exclusion
+ const isConfiguredField = entityConfig.pull?.externalizeFields?.includes(fieldName) ||
+ entityConfig.pull?.lookupFields?.[fieldName] ||
+ entityConfig.pull?.excludeFields?.includes(fieldName);
+ if (!isConfiguredField) {
+ // Skip fields not in metadata and not explicitly configured
+ continue;
+ }
+ // Otherwise, allow the field to be processed since it's explicitly configured
+ }
+ }
+ // Check if this field should be converted to a lookup
+ const lookupConfig = entityConfig.pull?.lookupFields?.[fieldName];
+ if (lookupConfig && fieldValue) {
+ // Convert foreign key to @lookup reference
+ const lookupValue = await this.convertToLookup(fieldValue, lookupConfig.entity, lookupConfig.field, syncEngine);
+ if (lookupValue) {
+ fields[fieldName] = lookupValue;
+ continue;
+ }
+ }
  // Check if this is an external file field
  if (await this.shouldExternalizeField(fieldName, fieldValue, entityConfig)) {
- const fileName = await this.createExternalFile(targetDir, primaryKey, fieldName, String(fieldValue));
- recordData.fields[fieldName] = `@file:${fileName}`;
+ // Check if this field is preserved and already has a @file: reference
+ const isPreservedField = entityConfig.pull?.preserveFields?.includes(fieldName);
+ const existingFieldValue = existingRecordData?.fields?.[fieldName];
+ if (isPreservedField && existingFieldValue && typeof existingFieldValue === 'string' && existingFieldValue.startsWith('@file:')) {
+ // Field is preserved and has existing @file: reference - update the existing file
+ const existingFilePath = existingFieldValue.replace('@file:', '');
+ const fullPath = path_1.default.join(targetDir, existingFilePath);
+ // Ensure directory exists
+ await fs_extra_1.default.ensureDir(path_1.default.dirname(fullPath));
+ // Write the content to the existing file path
+ await fs_extra_1.default.writeFile(fullPath, String(fieldValue), 'utf-8');
+ // Keep the existing @file: reference
+ fields[fieldName] = existingFieldValue;
+ }
+ else {
+ // Normal externalization - create new file
+ const pattern = externalizeMap.get(fieldName);
+ const fileName = await this.createExternalFile(targetDir, record, primaryKey, fieldName, String(fieldValue), entityConfig, pattern);
+ fields[fieldName] = fileName; // fileName already includes @file: prefix if pattern-based
+ }
  }
  else {
- recordData.fields[fieldName] = fieldValue;
+ fields[fieldName] = fieldValue;
+ }
+ }
+ // Now check for externalized fields that might be computed properties
+ // We process ALL externalized fields, including those not in the data
+ if (entityConfig.pull?.externalizeFields && typeof record.GetAll === 'function') {
+ const externalizeConfig = entityConfig.pull.externalizeFields;
+ // Normalize configuration to array format
+ let externalizeItems = [];
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ externalizeItems = externalizeConfig.map(f => ({ field: f }));
+ }
+ else {
+ // Already in the new format
+ externalizeItems = externalizeConfig;
+ }
+ }
+ else {
+ // Object format
+ externalizeItems = Object.entries(externalizeConfig).map(([field, config]) => ({
+ field,
+ pattern: undefined // Will use default pattern
+ }));
+ }
+ // Get the keys from the underlying data to identify computed properties
+ const dataKeys = Object.keys(dataToProcess);
+ for (const externalItem of externalizeItems) {
+ const externalField = externalItem.field;
+ // Only process fields that are NOT in the underlying data
+ // (these are likely computed properties)
+ if (dataKeys.includes(externalField)) {
+ continue; // This was already processed in the main loop
+ }
+ try {
+ // Use bracket notation to access properties (including getters)
+ const fieldValue = record[externalField];
+ if (fieldValue !== undefined && fieldValue !== null && fieldValue !== '') {
+ if (await this.shouldExternalizeField(externalField, fieldValue, entityConfig)) {
+ // Check if this field is preserved and already has a @file: reference
+ const isPreservedField = entityConfig.pull?.preserveFields?.includes(externalField);
+ const existingFieldValue = existingRecordData?.fields?.[externalField];
+ if (isPreservedField && existingFieldValue && typeof existingFieldValue === 'string' && existingFieldValue.startsWith('@file:')) {
+ // Field is preserved and has existing @file: reference - update the existing file
+ const existingFilePath = existingFieldValue.replace('@file:', '');
+ const fullPath = path_1.default.join(targetDir, existingFilePath);
+ // Ensure directory exists
+ await fs_extra_1.default.ensureDir(path_1.default.dirname(fullPath));
+ // Write the content to the existing file path
+ await fs_extra_1.default.writeFile(fullPath, String(fieldValue), 'utf-8');
+ // Keep the existing @file: reference
+ fields[externalField] = existingFieldValue;
+ }
+ else {
+ // Normal externalization - create new file
+ const fileName = await this.createExternalFile(targetDir, record, primaryKey, externalField, String(fieldValue), entityConfig, externalItem.pattern);
+ fields[externalField] = fileName; // fileName already includes @file: prefix if pattern-based
+ }
+ }
+ else {
+ // Include the field value if not externalized
+ fields[externalField] = fieldValue;
+ }
+ }
+ }
+ catch (error) {
+ // Property might not exist, that's okay
+ if (flags?.verbose) {
+ console.log(`Could not get property ${externalField}: ${error}`);
+ }
+ }
  }
  }
  // Pull related entities if configured
  if (entityConfig.pull?.relatedEntities) {
- recordData.relatedEntities = await this.pullRelatedEntities(record, entityConfig.pull.relatedEntities, syncEngine);
+ const related = await this.pullRelatedEntities(record, entityConfig.pull.relatedEntities, syncEngine, entityConfig, flags, currentDepth, ancestryPath);
+ Object.assign(relatedEntities, related);
+ }
+ // Get entity metadata to check defaults
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ // Filter out null values and fields matching their defaults
+ const cleanedFields = {};
+ // Get the set of fields that existed in the original record (if updating)
+ const existingFieldNames = existingRecordData?.fields ? new Set(Object.keys(existingRecordData.fields)) : new Set();
+ for (const [fieldName, fieldValue] of Object.entries(fields)) {
+ let includeField = false;
+ if (!isNewRecord && existingFieldNames.has(fieldName)) {
+ // For updates: Always preserve fields that existed in the original record
+ includeField = true;
+ }
+ else {
+ // For new records or new fields in existing records:
+ // Skip null/undefined/empty string values
+ if (fieldValue === null || fieldValue === undefined || fieldValue === '') {
+ includeField = false;
+ }
+ else if (entityInfo) {
+ // Check if value matches the field's default
+ const fieldInfo = entityInfo.Fields.find(f => f.Name === fieldName);
+ if (fieldInfo && fieldInfo.DefaultValue !== null && fieldInfo.DefaultValue !== undefined) {
+ // Compare with default value
+ if (fieldValue === fieldInfo.DefaultValue) {
+ includeField = false;
+ }
+ // Special handling for boolean defaults (might be stored as strings)
+ else if (typeof fieldValue === 'boolean' &&
+ (fieldInfo.DefaultValue === (fieldValue ? '1' : '0') ||
+ fieldInfo.DefaultValue === (fieldValue ? 'true' : 'false'))) {
+ includeField = false;
+ }
+ // Special handling for numeric defaults that might be strings
+ else if (typeof fieldValue === 'number' && String(fieldValue) === String(fieldInfo.DefaultValue)) {
+ includeField = false;
+ }
+ else {
+ includeField = true;
+ }
+ }
+ else {
+ // No default value defined, include if not null/empty
+ includeField = true;
+ }
+ }
+ else {
+ // No entity info, include if not null/empty
+ includeField = true;
+ }
+ }
+ if (includeField) {
+ cleanedFields[fieldName] = fieldValue;
+ }
+ }
+ // Calculate checksum on cleaned fields
+ const checksum = syncEngine.calculateChecksum(cleanedFields);
+ // Build the final record data with proper ordering
+ // Use a new object to ensure property order
+ const recordData = {};
+ // 1. User fields first
+ recordData.fields = cleanedFields;
+ // 2. Related entities (if any)
+ if (Object.keys(relatedEntities).length > 0) {
+ recordData.relatedEntities = relatedEntities;
  }
- // Calculate checksum
- recordData.sync.checksum = syncEngine.calculateChecksum(recordData.fields);
+ // 3. Primary key (system field)
+ recordData.primaryKey = primaryKey;
+ // 4. Sync metadata (system field)
+ recordData.sync = {
+ lastModified: new Date().toISOString(),
+ checksum: checksum
+ };
  return recordData;
  }
+ /**
+ * Convert a foreign key value to a @lookup reference
+ *
+ * Looks up the related record and creates a @lookup string that can be
+ * resolved during push operations.
+ *
+ * @param foreignKeyValue - The foreign key value (ID)
+ * @param targetEntity - Name of the target entity
+ * @param targetField - Field in target entity to use for lookup
+ * @param syncEngine - Sync engine instance
+ * @returns @lookup string or null if lookup fails
+ * @private
+ */
+ async convertToLookup(foreignKeyValue, targetEntity, targetField, syncEngine) {
+ try {
+ // Get the related record
+ const metadata = new core_2.Metadata();
+ const targetEntityInfo = metadata.EntityByName(targetEntity);
+ if (!targetEntityInfo) {
+ this.warn(`Could not find entity ${targetEntity} for lookup`);
+ return null;
+ }
+ // Load the related record
+ const primaryKeyField = targetEntityInfo.PrimaryKeys?.[0]?.Name || 'ID';
+ const rv = new core_2.RunView();
+ const result = await rv.RunView({
+ EntityName: targetEntity,
+ ExtraFilter: `${primaryKeyField} = '${String(foreignKeyValue).replace(/'/g, "''")}'`,
+ ResultType: 'entity_object'
+ }, (0, provider_utils_1.getSystemUser)());
+ if (!result.Success || result.Results.length === 0) {
+ this.warn(`Could not find ${targetEntity} with ${primaryKeyField} = ${foreignKeyValue}`);
+ return null;
+ }
+ const relatedRecord = result.Results[0];
+ const lookupValue = relatedRecord[targetField];
+ if (!lookupValue) {
+ this.warn(`${targetEntity} record missing ${targetField} field`);
+ return null;
+ }
+ // Return the @lookup reference
+ return `@lookup:${targetEntity}.${targetField}=${lookupValue}`;
+ }
+ catch (error) {
+ this.warn(`Failed to create lookup for ${targetEntity}: ${error}`);
+ return null;
+ }
+ }
  /**
  * Determine if a field should be saved to an external file
  *
- * Checks if a field contains substantial text content that would be better
- * stored in a separate file rather than inline in the JSON. Uses heuristics
- * based on field name and content length.
+ * Checks if a field is configured for externalization or contains substantial
+ * text content that would be better stored in a separate file.
  *
  * @param fieldName - Name of the field to check
  * @param fieldValue - Value of the field
- * @param entityConfig - Entity configuration (for future extension)
+ * @param entityConfig - Entity configuration with externalization settings
  * @returns Promise resolving to true if field should be externalized
  * @private
  */
  async shouldExternalizeField(fieldName, fieldValue, entityConfig) {
- // Only externalize string fields with significant content
+ // Only externalize string fields
  if (typeof fieldValue !== 'string') {
  return false;
  }
- // Check if it's a known large text field
- const largeTextFields = ['Prompt', 'Template', 'Notes', 'Description',
- 'Content', 'Body', 'Text', 'HTML', 'SQL'];
- if (largeTextFields.some(f => fieldName.toLowerCase().includes(f.toLowerCase()))) {
- // Only externalize if content is substantial (more than 100 chars or has newlines)
- return fieldValue.length > 100 || fieldValue.includes('\n');
+ // Check if field is configured for externalization
+ const externalizeConfig = entityConfig.pull?.externalizeFields;
+ if (!externalizeConfig) {
+ return false;
+ }
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ return externalizeConfig.includes(fieldName);
+ }
+ else {
+ // New pattern format
+ return externalizeConfig
+ .some(item => item.field === fieldName);
+ }
+ }
+ else {
+ // Object format
+ return fieldName in externalizeConfig;
  }
- return false;
  }
  /**
  * Create an external file for a field value
@@ -316,36 +868,103 @@ class Pull extends core_1.Command {
  * Saves large text content to a separate file and returns the filename.
  * Automatically determines appropriate file extension based on field name
  * and content type (e.g., .md for prompts, .html for templates).
+ * Uses the entity's name field for the filename if available.
  *
  * @param targetDir - Directory to save the file
- * @param primaryKey - Primary key for filename generation
+ * @param record - Full record to extract name field from
+ * @param primaryKey - Primary key for filename generation fallback
  * @param fieldName - Name of the field being externalized
  * @param content - Content to write to the file
+ * @param entityConfig - Entity configuration
  * @returns Promise resolving to the created filename
  * @private
  */
- async createExternalFile(targetDir, primaryKey, fieldName, content) {
- // Determine file extension based on field name and content
- let extension = '.txt';
- if (fieldName.toLowerCase().includes('prompt')) {
- extension = '.md';
- }
- else if (fieldName.toLowerCase().includes('template')) {
- if (content.includes('<html') || content.includes('<!DOCTYPE')) {
- extension = '.html';
+ async createExternalFile(targetDir, record, primaryKey, fieldName, content, entityConfig, pattern) {
+ // If pattern is provided, use it to generate the full path
+ if (pattern) {
+ // Replace placeholders in the pattern
+ let resolvedPattern = pattern;
+ // Get entity metadata for field lookups
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ // Replace {Name} with the entity's name field value
+ if (entityInfo) {
+ const nameField = entityInfo.Fields.find(f => f.IsNameField);
+ if (nameField && record[nameField.Name]) {
+ const nameValue = String(record[nameField.Name])
+ .replace(/[^a-zA-Z0-9\-_ ]/g, '') // Remove disallowed characters
+ .replace(/\s+/g, '-') // Replace spaces with -
+ .toLowerCase(); // Make lowercase
+ resolvedPattern = resolvedPattern.replace(/{Name}/g, nameValue);
+ }
+ }
+ // Replace {ID} with the primary key
+ const idValue = primaryKey.ID || Object.values(primaryKey)[0];
+ if (idValue) {
+ resolvedPattern = resolvedPattern.replace(/{ID}/g, String(idValue).toLowerCase());
  }
- else if (content.includes('{{') || content.includes('{%')) {
- extension = '.liquid';
+ // Replace {FieldName} with the current field name
+ resolvedPattern = resolvedPattern.replace(/{FieldName}/g, fieldName.toLowerCase());
+ // Replace any other {field} placeholders with field values from the record
+ const placeholderRegex = /{(\w+)}/g;
+ resolvedPattern = resolvedPattern.replace(placeholderRegex, (match, fieldName) => {
+ const value = record[fieldName];
+ if (value !== undefined && value !== null) {
+ return String(value)
+ .replace(/[^a-zA-Z0-9\-_ ]/g, '')
+ .replace(/\s+/g, '-')
+ .toLowerCase();
+ }
+ return match; // Keep placeholder if field not found
+ });
+ // Extract the file path from the pattern
+ const filePath = path_1.default.join(targetDir, resolvedPattern.replace('@file:', ''));
+ // Ensure directory exists
+ await fs_extra_1.default.ensureDir(path_1.default.dirname(filePath));
+ // Write the file
+ await fs_extra_1.default.writeFile(filePath, content, 'utf-8');
+ // Return the pattern as-is (it includes @file: prefix)
+ return resolvedPattern;
+ }
+ // Original logic for non-pattern based externalization
+ let extension = '.md'; // default to markdown
+ const externalizeConfig = entityConfig.pull?.externalizeFields;
+ if (externalizeConfig && !Array.isArray(externalizeConfig) && externalizeConfig[fieldName]?.extension) {
+ extension = externalizeConfig[fieldName].extension;
+ // Ensure extension starts with a dot
+ if (!extension.startsWith('.')) {
+ extension = '.' + extension;
  }
  }
- else if (fieldName.toLowerCase().includes('sql')) {
- extension = '.sql';
+ // Try to use the entity's name field for the filename
+ let baseFileName;
+ // Get entity metadata to find the name field
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ if (entityInfo) {
+ // Find the name field
+ const nameField = entityInfo.Fields.find(f => f.IsNameField);
+ if (nameField && record[nameField.Name]) {
+ // Use the name field value, sanitized for filesystem
+ const nameValue = String(record[nameField.Name]);
+ // Remove disallowed characters (don't replace with _), replace spaces with -, and lowercase
+ baseFileName = nameValue
+ .replace(/[^a-zA-Z0-9\-_ ]/g, '') // Remove disallowed characters
+ .replace(/\s+/g, '-') // Replace spaces with -
+ .toLowerCase(); // Make lowercase
+ }
+ else {
+ // Fallback to primary key
+ baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
+ }
  }
- else if (fieldName.toLowerCase().includes('notes') || fieldName.toLowerCase().includes('description')) {
- extension = '.md';
+ else {
+ // Fallback to primary key
+ baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
  }
- const baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
- const fileName = `${baseFileName}.${fieldName.toLowerCase()}${extension}`;
+ // Remove dot prefix from baseFileName if it exists (it will be a dot-prefixed name from buildFileName)
+ const cleanBaseFileName = baseFileName.startsWith('.') ? baseFileName.substring(1) : baseFileName;
+ const fileName = `.${cleanBaseFileName}.${fieldName.toLowerCase()}${extension}`;
  const filePath = path_1.default.join(targetDir, fileName);
  await fs_extra_1.default.writeFile(filePath, content, 'utf-8');
  return fileName;
@@ -356,6 +975,7 @@ class Pull extends core_1.Command {
  * Creates a safe filename based on the entity's primary key values.
  * Handles GUIDs by using first 8 characters, sanitizes special characters,
  * and creates composite names for multi-field keys.
+ * Files are prefixed with a dot to follow the metadata file convention.
  *
  * @param primaryKey - Primary key fields and values
  * @param entityConfig - Entity configuration (for future extension)
@@ -369,16 +989,16 @@ class Pull extends core_1.Command {
  // Single string key - use as base if it's a guid
  const key = keys[0];
  if (key.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i)) {
- // It's a GUID, use first 8 chars
- return `${key.substring(0, 8)}.json`;
+ // It's a GUID, use first 8 chars, prefixed with dot, lowercase
+ return `.${key.substring(0, 8).toLowerCase()}.json`;
  }
- // Use the whole key if not too long
+ // Use the whole key if not too long, prefixed with dot
  if (key.length <= 50) {
- return `${key.replace(/[^a-zA-Z0-9-_]/g, '_')}.json`;
+ return `.${key.replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()}.json`;
  }
  }
- // Multiple keys or numeric - create composite name
- return keys.map(k => String(k).replace(/[^a-zA-Z0-9-_]/g, '_')).join('-') + '.json';
+ // Multiple keys or numeric - create composite name, prefixed with dot
+ return '.' + keys.map(k => String(k).replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()).join('-') + '.json';
  }
  /**
  * Pull related entities for a parent record
@@ -386,23 +1006,38 @@ class Pull extends core_1.Command {
  * Retrieves child records that have foreign key relationships to the parent.
  * Converts foreign key values to @parent references and supports nested
  * related entities for deep object graphs.
+ * NEW: Supports automatic recursive patterns for self-referencing entities.
  *
  * @param parentRecord - Parent entity record
  * @param relatedConfig - Configuration for related entities to pull
  * @param syncEngine - Sync engine instance
+ * @param entityConfig - Entity configuration
+ * @param flags - Command flags
+ * @param currentDepth - Current recursion depth for recursive entities
+ * @param ancestryPath - Set of IDs in current ancestry chain to prevent circular references
  * @returns Promise resolving to map of entity names to related records
  * @private
  */
- async pullRelatedEntities(parentRecord, relatedConfig, syncEngine) {
+ async pullRelatedEntities(parentRecord, relatedConfig, syncEngine, entityConfig, flags, currentDepth = 0, ancestryPath = new Set()) {
  const relatedEntities = {};
  for (const [key, config] of Object.entries(relatedConfig)) {
  try {
- // Get the parent's primary key value
- const parentKeyValue = parentRecord[config.foreignKey];
+ // Get entity metadata to find primary key
+ const metadata = new core_2.Metadata();
+ const parentEntity = metadata.EntityByName(entityConfig.entity);
+ if (!parentEntity) {
+ this.warn(`Could not find entity metadata for ${entityConfig.entity}`);
+ continue;
+ }
+ // Get the parent's primary key value (usually ID)
+ const primaryKeyField = parentEntity.PrimaryKeys?.[0]?.Name || 'ID';
+ const parentKeyValue = parentRecord[primaryKeyField];
  if (!parentKeyValue) {
- continue; // Skip if parent doesn't have the foreign key field
+ this.warn(`Parent record missing primary key field ${primaryKeyField}`);
+ continue;
  }
  // Build filter for related records
+ // The foreignKey is the field in the CHILD entity that points to this parent
  let filter = `${config.foreignKey} = '${String(parentKeyValue).replace(/'/g, "''")}'`;
  if (config.filter) {
  filter += ` AND (${config.filter})`;
@@ -411,39 +1046,115 @@ class Pull extends core_1.Command {
  const rv = new core_2.RunView();
  const result = await rv.RunView({
  EntityName: config.entity,
- ExtraFilter: filter
+ ExtraFilter: filter,
+ ResultType: 'entity_object'
  }, (0, provider_utils_1.getSystemUser)());
  if (!result.Success) {
  this.warn(`Failed to pull related ${config.entity}: ${result.ErrorMessage}`);
  continue;
  }
+ // Get child entity metadata
+ const childEntity = metadata.EntityByName(config.entity);
+ if (!childEntity) {
+ this.warn(`Could not find entity metadata for ${config.entity}`);
+ continue;
+ }
+ // Check if we need to wait for async property loading for related entities
+ if (config.externalizeFields && result.Results.length > 0) {
+ let fieldsToExternalize = [];
+ if (Array.isArray(config.externalizeFields)) {
+ if (config.externalizeFields.length > 0 && typeof config.externalizeFields[0] === 'string') {
+ // Simple string array
+ fieldsToExternalize = config.externalizeFields;
+ }
+ else {
+ // New pattern format
+ fieldsToExternalize = config.externalizeFields
+ .map(item => item.field);
+ }
+ }
+ else {
+ // Object format
+ fieldsToExternalize = Object.keys(config.externalizeFields);
+ }
+ // Get all field names from entity metadata
+ const metadataFieldNames = childEntity.Fields.map(f => f.Name);
+ // Check if any externalized fields are NOT in metadata (likely computed properties)
+ const computedFields = fieldsToExternalize.filter(f => !metadataFieldNames.includes(f));
+ if (computedFields.length > 0) {
+ console.log(`Waiting 5 seconds for async property loading in related entity ${config.entity} (${computedFields.join(', ')})...`);
+ await new Promise(resolve => setTimeout(resolve, 5000));
+ }
+ }
  // Process each related record
  const relatedRecords = [];
+ if (flags?.verbose && result.Results.length > 0) {
+ this.log(`Found ${result.Results.length} related ${config.entity} records at depth ${currentDepth}`);
+ }
  for (const relatedRecord of result.Results) {
- const recordData = {
- fields: {}
- };
- // Process fields, omitting the foreign key since it will be set via @parent
- for (const [fieldName, fieldValue] of Object.entries(relatedRecord)) {
- // Skip internal fields
- if (fieldName.startsWith('__mj_')) {
- continue;
+ // Build primary key for the related record
+ const relatedPrimaryKey = {};
+ for (const pk of childEntity.PrimaryKeys) {
+ relatedPrimaryKey[pk.Name] = relatedRecord[pk.Name];
+ }
+ // Check for circular references in the current ancestry path
+ const recordId = String(relatedPrimaryKey[childEntity.PrimaryKeys[0]?.Name || 'ID']);
+ if (config.recursive && ancestryPath.has(recordId)) {
+ if (flags?.verbose) {
+ this.log(`Skipping circular reference for ${config.entity} with ID: ${recordId} (detected in ancestry path)`);
+ }
+ continue;
+ }
+ // Create new ancestry path for this branch (only track current hierarchy chain)
+ const newAncestryPath = new Set(ancestryPath);
+ if (config.recursive) {
+ newAncestryPath.add(recordId);
+ }
+ // Determine related entities configuration for recursion
+ let childRelatedConfig = config.relatedEntities;
+ // If recursive is enabled, continue recursive fetching at child level
+ if (config.recursive) {
+ const maxDepth = config.maxDepth || 10;
+ if (currentDepth < maxDepth) {
+ // Create recursive configuration that references the same entity
+ childRelatedConfig = {
+ [key]: {
+ ...config,
+ // Keep same configuration but increment depth internally
+ }
+ };
+ if (flags?.verbose) {
+ this.log(`Processing recursive level ${currentDepth + 1} for ${config.entity} record ${recordId}`);
+ }
  }
- // Convert foreign key reference to @parent
- if (fieldName === config.foreignKey) {
- const parentFieldName = this.findParentField(parentRecord, parentKeyValue);
- if (parentFieldName) {
- recordData.fields[fieldName] = `@parent:${parentFieldName}`;
+ else {
+ // At max depth, don't recurse further
+ childRelatedConfig = undefined;
+ if (flags?.verbose) {
+ this.log(`Max depth ${maxDepth} reached for recursive entity ${config.entity} at record ${recordId}`);
  }
- continue;
  }
- recordData.fields[fieldName] = fieldValue;
  }
- // Pull nested related entities if configured
- if (config.relatedEntities) {
- recordData.relatedEntities = await this.pullRelatedEntities(relatedRecord, config.relatedEntities, syncEngine);
+ // Process the related record using the same logic as parent records
+ const relatedData = await this.processRecordData(relatedRecord, relatedPrimaryKey, '', // Not used for related entities since we don't externalize their fields
+ {
+ entity: config.entity,
+ pull: {
+ excludeFields: config.excludeFields || entityConfig.pull?.excludeFields,
+ lookupFields: config.lookupFields || entityConfig.pull?.lookupFields,
+ externalizeFields: config.externalizeFields,
+ relatedEntities: childRelatedConfig
+ }
+ }, syncEngine, flags, true, // isNewRecord
+ undefined, // existingRecordData
+ currentDepth + 1, newAncestryPath);
+ // Convert foreign key reference to @parent
+ if (relatedData.fields[config.foreignKey]) {
+ relatedData.fields[config.foreignKey] = `@parent:${primaryKeyField}`;
  }
- relatedRecords.push(recordData);
+ // The processRecordData method already filters nulls and defaults
+ // No need to do it again here
+ relatedRecords.push(relatedData);
  }
  if (relatedRecords.length > 0) {
  relatedEntities[key] = relatedRecords;
@@ -477,6 +1188,189 @@ class Pull extends core_1.Command {
  }
  return null;
  }
+ /**
+ * Find existing files in a directory matching a pattern
+ *
+ * Searches for files that match the configured file pattern, used to identify
+ * which records already exist locally for smart update functionality.
+ *
+ * @param dir - Directory to search in
+ * @param pattern - Glob pattern to match files (e.g., "*.json")
+ * @returns Promise resolving to array of file paths
+ * @private
+ */
+ async findExistingFiles(dir, pattern) {
+ const files = [];
+ try {
+ const entries = await fs_extra_1.default.readdir(dir, { withFileTypes: true });
+ for (const entry of entries) {
+ if (entry.isFile()) {
+ const fileName = entry.name;
+ // Simple pattern matching - could be enhanced with proper glob support
+ if (pattern === '*.json' && fileName.endsWith('.json')) {
+ files.push(path_1.default.join(dir, fileName));
+ }
+ else if (pattern === '.*.json' && fileName.startsWith('.') && fileName.endsWith('.json')) {
+ // Handle dot-prefixed JSON files
+ files.push(path_1.default.join(dir, fileName));
+ }
+ else if (pattern === fileName) {
+ files.push(path_1.default.join(dir, fileName));
+ }
+ // TODO: Add more sophisticated glob pattern matching if needed
+ }
+ }
+ }
+ catch (error) {
+ // Directory might not exist yet
+ if (error.code !== 'ENOENT') {
+ throw error;
+ }
+ }
+ return files;
+ }
+ /**
+ * Load existing records from files and build a lookup map
+ *
+ * Reads all existing files and creates a map from primary key to file location,
+ * enabling efficient lookup during the update process.
+ *
+ * @param files - Array of file paths to load
+ * @param entityInfo - Entity metadata for primary key information
+ * @returns Map from primary key string to file info
+ * @private
+ */
+ async loadExistingRecords(files, entityInfo) {
+ const recordsMap = new Map();
+ for (const filePath of files) {
+ try {
+ const fileData = await fs_extra_1.default.readJson(filePath);
+ const records = Array.isArray(fileData) ? fileData : [fileData];
+ for (const record of records) {
+ if (record.primaryKey) {
+ const lookupKey = this.createPrimaryKeyLookup(record.primaryKey);
+ recordsMap.set(lookupKey, { filePath, recordData: record });
+ }
+ }
+ }
+ catch (error) {
+ // Skip files that can't be parsed
+ this.warn(`Could not load file ${filePath}: ${error}`);
+ }
+ }
+ return recordsMap;
+ }
+ /**
+ * Create a string lookup key from primary key values
+ *
+ * Generates a consistent string representation of primary key values
+ * for use in maps and comparisons.
+ *
+ * @param primaryKey - Primary key field names and values
+ * @returns String representation of the primary key
+ * @private
+ */
+ createPrimaryKeyLookup(primaryKey) {
+ const keys = Object.keys(primaryKey).sort();
+ return keys.map(k => `${k}:${primaryKey[k]}`).join('|');
+ }
+ /**
+ * Merge two record data objects based on configured strategy
+ *
+ * Combines existing and new record data according to the merge strategy:
+ * - 'overwrite': Replace all fields with new values
+ * - 'merge': Combine fields, with new values taking precedence
+ * - 'skip': Keep existing record unchanged
+ *
+ * @param existing - Existing record data
+ * @param newData - New record data from database
+ * @param strategy - Merge strategy to apply
+ * @param preserveFields - Field names that should never be overwritten
+ * @returns Merged record data
+ * @private
+ */
+ async mergeRecords(existing, newData, strategy, preserveFields) {
+ if (strategy === 'skip') {
+ return existing;
+ }
+ if (strategy === 'overwrite') {
+ // Build with proper ordering
+ const result = {};
+ // 1. Fields first
+ result.fields = { ...newData.fields };
+ // Restore preserved fields from existing
+ if (preserveFields.length > 0 && existing.fields) {
+ for (const field of preserveFields) {
+ if (field in existing.fields) {
+ result.fields[field] = existing.fields[field];
+ }
+ }
+ }
+ // 2. Related entities (if any)
+ if (newData.relatedEntities) {
+ result.relatedEntities = newData.relatedEntities;
+ }
+ // 3. Primary key
+ result.primaryKey = newData.primaryKey;
+ // 4. Sync metadata
+ result.sync = newData.sync;
+ return result;
+ }
+ // Default 'merge' strategy
+ // Build with proper ordering
+ const result = {};
+ // 1. Fields first
+ result.fields = { ...existing.fields, ...newData.fields };
+ // Restore preserved fields
+ if (preserveFields.length > 0 && existing.fields) {
+ for (const field of preserveFields) {
+ if (field in existing.fields) {
+ result.fields[field] = existing.fields[field];
+ }
+ }
+ }
+ // 2. Related entities (if any)
+ if (existing.relatedEntities || newData.relatedEntities) {
+ result.relatedEntities = {
+ ...existing.relatedEntities,
+ ...newData.relatedEntities
+ };
+ }
+ // 3. Primary key
+ result.primaryKey = newData.primaryKey || existing.primaryKey;
+ // 4. Sync metadata
+ result.sync = newData.sync;
+ return result;
+ }
+ /**
+ * Create a backup of a file before updating
+ *
+ * Creates a timestamped backup copy of the file in a backup directory
+ * with the original filename, timestamp suffix, and .backup extension.
+ * The backup directory defaults to .backups but can be configured.
+ *
+ * @param filePath - Path to the file to backup
+ * @param backupDirName - Name of the backup directory (optional)
+ * @returns Promise that resolves when backup is created
+ * @private
+ */
+ async createBackup(filePath, backupDirName) {
+ const dir = path_1.default.dirname(filePath);
+ const fileName = path_1.default.basename(filePath);
+ const backupDir = path_1.default.join(dir, backupDirName || '.backups');
+ // Ensure backup directory exists
+ await fs_extra_1.default.ensureDir(backupDir);
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+ // Remove .json extension, add timestamp, then add .backup extension
+ const backupFileName = fileName.replace(/\.json$/, `.${timestamp}.backup`);
+ const backupPath = path_1.default.join(backupDir, backupFileName);
+ try {
+ await fs_extra_1.default.copy(filePath, backupPath);
+ }
+ catch (error) {
+ this.warn(`Could not create backup of ${filePath}: ${error}`);
+ }
+ }
  }
  exports.default = Pull;
  //# sourceMappingURL=index.js.map