@memberjunction/metadata-sync 2.47.0 → 2.48.0

This diff shows the changes between these publicly released package versions as they appear in their public registry; it is provided for informational purposes only.
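The changes below rework the pull command around per-directory entity configuration loaded from .mj-sync.json (accessed in the code as entityConfig and entityConfig.pull?.*). A rough sketch of the options this code reads is shown next; only the key names are taken from the diff, while the values and the entity/field names ("Templates", "TemplateText", "CategoryID") are hypothetical:

{
  "entity": "Templates",
  "pull": {
    "filePattern": ".*.json",
    "excludeFields": ["CreatedAt", "UpdatedAt"],
    "lookupFields": {
      "CategoryID": { "entity": "Template Categories", "field": "Name" }
    },
    "externalizeFields": [
      { "field": "TemplateText", "pattern": "@file:templates/{Name}.md" }
    ],
    "updateExistingRecords": true,
    "createNewFileIfNotFound": true,
    "mergeStrategy": "merge",
    "preserveFields": ["TemplateText"],
    "appendRecordsToExistingFile": true,
    "newFileName": "new-records",
    "backupBeforeUpdate": true,
    "backupDirectory": ".backups"
  }
}

As the diff shows, externalizeFields also accepts a plain string array or an object keyed by field name (with an optional extension), and mergeStrategy may be "merge", "overwrite", or "skip" as handled in mergeRecords below.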
@@ -21,9 +21,10 @@ const path_1 = __importDefault(require("path"));
  const prompts_1 = require("@inquirer/prompts");
  const ora_classic_1 = __importDefault(require("ora-classic"));
  const config_1 = require("../../config");
- const sync_engine_1 = require("../../lib/sync-engine");
  const core_2 = require("@memberjunction/core");
  const provider_utils_1 = require("../../lib/provider-utils");
+ const config_manager_1 = require("../../lib/config-manager");
+ const singleton_manager_1 = require("../../lib/singleton-manager");
  /**
  * Pull metadata records from database to local files
  *
@@ -53,42 +54,76 @@ class Pull extends core_1.Command {
  filter: core_1.Flags.string({ description: 'Additional filter for pulling specific records' }),
  'dry-run': core_1.Flags.boolean({ description: 'Show what would be pulled without actually pulling' }),
  'multi-file': core_1.Flags.string({ description: 'Create a single file with multiple records (provide filename)' }),
+ verbose: core_1.Flags.boolean({ char: 'v', description: 'Show detailed output' }),
  };
  async run() {
  const { flags } = await this.parse(Pull);
  const spinner = (0, ora_classic_1.default)();
  try {
- // Load MJ config
+ // Load MJ config first (before changing directory)
  spinner.start('Loading configuration');
  const mjConfig = (0, config_1.loadMJConfig)();
  if (!mjConfig) {
  this.error('No mj.config.cjs found in current directory or parent directories');
  }
+ // Stop spinner before provider initialization (which logs to console)
+ spinner.stop();
  // Initialize data provider
  const provider = await (0, provider_utils_1.initializeProvider)(mjConfig);
- // Initialize sync engine
- const syncEngine = new sync_engine_1.SyncEngine((0, provider_utils_1.getSystemUser)());
- await syncEngine.initialize();
- spinner.succeed('Configuration loaded');
- // Find entity directory
- const entityDirs = await this.findEntityDirectories(flags.entity);
- if (entityDirs.length === 0) {
- this.error(`No directory found for entity "${flags.entity}". Run "mj-sync init" first.`);
- }
+ // Get singleton sync engine
+ const syncEngine = await (0, singleton_manager_1.getSyncEngine)((0, provider_utils_1.getSystemUser)());
+ // Show success after all initialization is complete
+ spinner.succeed('Configuration and metadata loaded');
  let targetDir;
- if (entityDirs.length === 1) {
- targetDir = entityDirs[0];
+ let entityConfig;
+ // Check if we should use a specific target directory
+ const envTargetDir = process.env.METADATA_SYNC_TARGET_DIR;
+ if (envTargetDir) {
+ if (flags.verbose) {
+ console.log(`Using specified target directory: ${envTargetDir}`);
+ }
+ process.chdir(envTargetDir);
+ targetDir = process.cwd();
+ // Load entity config from the current directory
+ entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
+ if (!entityConfig) {
+ this.error(`No .mj-sync.json found in ${targetDir}`);
+ }
+ if (entityConfig.entity !== flags.entity) {
+ this.error(`Directory ${targetDir} is configured for entity "${entityConfig.entity}", not "${flags.entity}"`);
+ }
  }
  else {
- // Multiple directories found, ask user
- targetDir = await (0, prompts_1.select)({
- message: `Multiple directories found for entity "${flags.entity}". Which one to use?`,
- choices: entityDirs.map(dir => ({ name: dir, value: dir }))
- });
+ // Original behavior - find entity directory
+ const entityDirs = await this.findEntityDirectories(flags.entity);
+ if (entityDirs.length === 0) {
+ this.error(`No directory found for entity "${flags.entity}". Run "mj-sync init" first.`);
+ }
+ if (entityDirs.length === 1) {
+ targetDir = entityDirs[0];
+ }
+ else {
+ // Multiple directories found, ask user
+ targetDir = await (0, prompts_1.select)({
+ message: `Multiple directories found for entity "${flags.entity}". Which one to use?`,
+ choices: entityDirs.map(dir => ({ name: dir, value: dir }))
+ });
+ }
+ entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
+ if (!entityConfig) {
+ this.error(`Invalid entity configuration in ${targetDir}`);
+ }
  }
- const entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
- if (!entityConfig) {
- this.error(`Invalid entity configuration in ${targetDir}`);
+ // Show configuration notice only if relevant
+ if (entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
+ const targetFile = path_1.default.join(targetDir, entityConfig.pull.newFileName.endsWith('.json')
+ ? entityConfig.pull.newFileName
+ : `${entityConfig.pull.newFileName}.json`);
+ if (await fs_extra_1.default.pathExists(targetFile)) {
+ // File exists - inform about append behavior
+ this.log(`\n📝 Configuration: New records will be appended to existing file '${path_1.default.basename(targetFile)}'`);
+ }
+ // If file doesn't exist, no need to mention anything special - we're just creating it
  }
  // Pull records
  spinner.start(`Pulling ${flags.entity} records`);
@@ -102,7 +137,8 @@ class Pull extends core_1.Command {
  }
  const result = await rv.RunView({
  EntityName: flags.entity,
- ExtraFilter: filter
+ ExtraFilter: filter,
+ ResultType: 'entity_object'
  }, (0, provider_utils_1.getSystemUser)());
  if (!result.Success) {
  this.error(`Failed to pull records: ${result.ErrorMessage}`);
@@ -112,6 +148,39 @@ class Pull extends core_1.Command {
  this.log(`\nDry run mode - would pull ${result.Results.length} records to ${targetDir}`);
  return;
  }
+ // Check if we need to wait for async property loading
+ if (entityConfig.pull?.externalizeFields && result.Results.length > 0) {
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(flags.entity);
+ if (entityInfo) {
+ const externalizeConfig = entityConfig.pull.externalizeFields;
+ let fieldsToExternalize = [];
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ fieldsToExternalize = externalizeConfig;
+ }
+ else {
+ // New pattern format
+ fieldsToExternalize = externalizeConfig
+ .map(item => item.field);
+ }
+ }
+ else {
+ // Object format
+ fieldsToExternalize = Object.keys(externalizeConfig);
+ }
+ // Get all field names from entity metadata
+ const metadataFieldNames = entityInfo.Fields.map(f => f.Name);
+ // Check if any externalized fields are NOT in metadata (likely computed properties)
+ const computedFields = fieldsToExternalize.filter(f => !metadataFieldNames.includes(f));
+ if (computedFields.length > 0) {
+ spinner.start(`Waiting 5 seconds for async property loading in ${flags.entity} (${computedFields.join(', ')})...`);
+ await new Promise(resolve => setTimeout(resolve, 5000));
+ spinner.succeed('Async property loading wait complete');
+ }
+ }
+ }
  // Process each record
  const entityInfo = syncEngine.getEntityInfo(flags.entity);
  if (!entityInfo) {
@@ -119,6 +188,9 @@ class Pull extends core_1.Command {
  }
  spinner.start('Processing records');
  let processed = 0;
+ let updated = 0;
+ let created = 0;
+ let skipped = 0;
  // If multi-file flag is set, collect all records
  if (flags['multi-file']) {
  const allRecords = [];
@@ -130,10 +202,12 @@ class Pull extends core_1.Command {
  primaryKey[pk.Name] = record[pk.Name];
  }
  // Process record for multi-file
- const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine);
+ const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
  allRecords.push(recordData);
  processed++;
- spinner.text = `Processing records (${processed}/${result.Results.length})`;
+ if (flags.verbose) {
+ spinner.text = `Processing records (${processed}/${result.Results.length})`;
+ }
  }
  catch (error) {
  this.warn(`Failed to process record: ${error.message || error}`);
@@ -148,24 +222,169 @@ class Pull extends core_1.Command {
  }
  }
  else {
- // Original single-file-per-record logic
+ // Smart update logic for single-file-per-record
+ spinner.text = 'Scanning for existing files...';
+ // Find existing files
+ const filePattern = entityConfig.pull?.filePattern || entityConfig.filePattern || '*.json';
+ const existingFiles = await this.findExistingFiles(targetDir, filePattern);
+ if (flags.verbose) {
+ this.log(`Found ${existingFiles.length} existing files matching pattern '${filePattern}'`);
+ existingFiles.forEach(f => this.log(` - ${path_1.default.basename(f)}`));
+ }
+ // Load existing records and build lookup map
+ const existingRecordsMap = await this.loadExistingRecords(existingFiles, entityInfo);
+ if (flags.verbose) {
+ this.log(`Loaded ${existingRecordsMap.size} existing records from files`);
+ }
+ // Separate records into new and existing
+ const newRecords = [];
+ const existingRecordsToUpdate = [];
  for (const record of result.Results) {
+ // Build primary key
+ const primaryKey = {};
+ for (const pk of entityInfo.PrimaryKeys) {
+ primaryKey[pk.Name] = record[pk.Name];
+ }
+ // Create lookup key
+ const lookupKey = this.createPrimaryKeyLookup(primaryKey);
+ const existingFileInfo = existingRecordsMap.get(lookupKey);
+ if (existingFileInfo) {
+ // Record exists locally
+ if (entityConfig.pull?.updateExistingRecords !== false) {
+ existingRecordsToUpdate.push({ record, primaryKey, filePath: existingFileInfo.filePath });
+ }
+ else {
+ skipped++;
+ if (flags.verbose) {
+ this.log(`Skipping existing record: ${lookupKey}`);
+ }
+ }
+ }
+ else {
+ // Record doesn't exist locally
+ if (entityConfig.pull?.createNewFileIfNotFound !== false) {
+ newRecords.push({ record, primaryKey });
+ }
+ else {
+ skipped++;
+ if (flags.verbose) {
+ this.log(`Skipping new record (createNewFileIfNotFound=false): ${lookupKey}`);
+ }
+ }
+ }
+ }
+ // Track which files have been backed up to avoid duplicates
+ const backedUpFiles = new Set();
+ // Process existing records updates
+ for (const { record, primaryKey, filePath } of existingRecordsToUpdate) {
  try {
- // Build primary key
- const primaryKey = {};
- for (const pk of entityInfo.PrimaryKeys) {
- primaryKey[pk.Name] = record[pk.Name];
+ spinner.text = `Updating existing records (${updated + 1}/${existingRecordsToUpdate.length})`;
+ // Create backup if configured (only once per file)
+ if (entityConfig.pull?.backupBeforeUpdate && !backedUpFiles.has(filePath)) {
+ await this.createBackup(filePath, entityConfig.pull?.backupDirectory);
+ backedUpFiles.add(filePath);
  }
- // Process record
- await this.processRecord(record, primaryKey, targetDir, entityConfig, syncEngine);
+ // Load existing file data
+ const existingData = await fs_extra_1.default.readJson(filePath);
+ // Find the specific existing record that matches this primary key
+ let existingRecordData;
+ if (Array.isArray(existingData)) {
+ // Find the matching record in the array
+ const matchingRecord = existingData.find(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
+ existingRecordData = matchingRecord || existingData[0]; // Fallback to first if not found
+ }
+ else {
+ existingRecordData = existingData;
+ }
+ // Process the new record data (isNewRecord = false for updates)
+ const newRecordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, false, existingRecordData);
+ // Apply merge strategy
+ const mergedData = await this.mergeRecords(existingRecordData, newRecordData, entityConfig.pull?.mergeStrategy || 'merge', entityConfig.pull?.preserveFields || []);
+ // Write updated data
+ if (Array.isArray(existingData)) {
+ // Update the record in the array
+ const index = existingData.findIndex(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
+ if (index >= 0) {
+ existingData[index] = mergedData;
+ await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
+ }
+ }
+ else {
+ await fs_extra_1.default.writeJson(filePath, mergedData, { spaces: 2 });
+ }
+ updated++;
  processed++;
- spinner.text = `Processing records (${processed}/${result.Results.length})`;
+ if (flags.verbose) {
+ this.log(`Updated: ${filePath}`);
+ }
  }
  catch (error) {
- this.warn(`Failed to process record: ${error.message || error}`);
+ this.warn(`Failed to update record: ${error.message || error}`);
  }
  }
- spinner.succeed(`Pulled ${processed} records to ${targetDir}`);
+ // Process new records
+ if (newRecords.length > 0) {
+ spinner.text = `Creating new records (0/${newRecords.length})`;
+ if (entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
+ // Append all new records to a single file
+ const fileName = entityConfig.pull.newFileName.endsWith('.json')
+ ? entityConfig.pull.newFileName
+ : `${entityConfig.pull.newFileName}.json`;
+ const filePath = path_1.default.join(targetDir, fileName);
+ // Load existing file if it exists
+ let existingData = [];
+ if (await fs_extra_1.default.pathExists(filePath)) {
+ const fileData = await fs_extra_1.default.readJson(filePath);
+ existingData = Array.isArray(fileData) ? fileData : [fileData];
+ }
+ // Process and append all new records
+ for (const { record, primaryKey } of newRecords) {
+ try {
+ // For new records, pass isNewRecord = true (default)
+ const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
+ existingData.push(recordData);
+ created++;
+ processed++;
+ if (flags.verbose) {
+ spinner.text = `Creating new records (${created}/${newRecords.length})`;
+ }
+ }
+ catch (error) {
+ this.warn(`Failed to process new record: ${error.message || error}`);
+ }
+ }
+ // Write the combined data
+ await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
+ if (flags.verbose) {
+ this.log(`Appended ${created} new records to: ${filePath}`);
+ }
+ }
+ else {
+ // Create individual files for each new record
+ for (const { record, primaryKey } of newRecords) {
+ try {
+ await this.processRecord(record, primaryKey, targetDir, entityConfig, syncEngine, flags);
+ created++;
+ processed++;
+ if (flags.verbose) {
+ spinner.text = `Creating new records (${created}/${newRecords.length})`;
+ }
+ }
+ catch (error) {
+ this.warn(`Failed to process new record: ${error.message || error}`);
+ }
+ }
+ }
+ }
+ // Final status
+ const statusParts = [`Processed ${processed} records`];
+ if (updated > 0)
+ statusParts.push(`updated ${updated}`);
+ if (created > 0)
+ statusParts.push(`created ${created}`);
+ if (skipped > 0)
+ statusParts.push(`skipped ${skipped}`);
+ spinner.succeed(statusParts.join(', '));
  }
  }
  catch (error) {
@@ -173,8 +392,11 @@ class Pull extends core_1.Command {
  this.error(error);
  }
  finally {
- // Clean up database connection
+ // Clean up database connection and reset singletons
  await (0, provider_utils_1.cleanupProvider)();
+ (0, singleton_manager_1.resetSyncEngine)();
+ // Exit process to prevent background MJ tasks from throwing errors
+ process.exit(0);
  }
  }
  /**
@@ -207,7 +429,7 @@ class Pull extends core_1.Command {
  }
  }
  };
- await searchDirs(process.cwd());
+ await searchDirs(config_manager_1.configManager.getOriginalCwd());
  return dirs;
  }
  /**
@@ -224,8 +446,8 @@ class Pull extends core_1.Command {
  * @returns Promise that resolves when file is written
  * @private
  */
- async processRecord(record, primaryKey, targetDir, entityConfig, syncEngine) {
- const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine);
+ async processRecord(record, primaryKey, targetDir, entityConfig, syncEngine, flags) {
+ const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
  // Determine file path
  const fileName = this.buildFileName(primaryKey, entityConfig);
  const filePath = path_1.default.join(targetDir, fileName);
@@ -243,21 +465,54 @@ class Pull extends core_1.Command {
  * @param targetDir - Directory where files will be saved
  * @param entityConfig - Entity configuration with defaults and settings
  * @param syncEngine - Sync engine for checksum calculation
+ * @param flags - Command flags
+ * @param isNewRecord - Whether this is a new record
+ * @param existingRecordData - Existing record data to preserve field selection
  * @returns Promise resolving to formatted RecordData
  * @private
  */
- async processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine) {
- // Build record data
- const recordData = {
- primaryKey: primaryKey,
- fields: {},
- sync: {
- lastModified: new Date().toISOString(),
- checksum: ''
+ async processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, isNewRecord = true, existingRecordData) {
+ // Build record data - we'll restructure at the end for proper ordering
+ const fields = {};
+ const relatedEntities = {};
+ // Debug: Log all fields in first record (only in verbose mode)
+ if (flags?.verbose) {
+ const recordKeys = Object.keys(record);
+ console.log('\n=== DEBUG: Processing record ===');
+ console.log('Entity:', entityConfig.entity);
+ console.log('Total fields:', recordKeys.length);
+ console.log('Field names:', recordKeys.filter(k => !k.startsWith('__mj_')).join(', '));
+ console.log('Has TemplateText?:', recordKeys.includes('TemplateText'));
+ console.log('externalizeFields config:', entityConfig.pull?.externalizeFields);
+ }
+ // Get the underlying data from the entity object
+ // If it's an entity object, it will have a GetAll() method
+ let dataToProcess = record;
+ if (typeof record.GetAll === 'function') {
+ // It's an entity object, get the underlying data
+ dataToProcess = record.GetAll();
+ }
+ // Get externalize configuration for pattern lookup
+ const externalizeConfig = entityConfig.pull?.externalizeFields;
+ let externalizeMap = new Map();
+ if (externalizeConfig) {
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ externalizeConfig.forEach(f => externalizeMap.set(f, undefined));
+ }
+ else {
+ // New pattern format
+ externalizeConfig.forEach(item => externalizeMap.set(item.field, item.pattern));
+ }
  }
- };
- // Process fields
- for (const [fieldName, fieldValue] of Object.entries(record)) {
+ else {
+ // Object format
+ Object.keys(externalizeConfig).forEach(f => externalizeMap.set(f, undefined));
+ }
+ }
+ // Process regular fields from the underlying data
+ for (const [fieldName, fieldValue] of Object.entries(dataToProcess)) {
  // Skip primary key fields
  if (primaryKey[fieldName] !== undefined) {
  continue;
@@ -266,49 +521,312 @@ class Pull extends core_1.Command {
  if (fieldName.startsWith('__mj_')) {
  continue;
  }
+ // Skip excluded fields
+ if (entityConfig.pull?.excludeFields?.includes(fieldName)) {
+ continue;
+ }
+ // Skip fields already externalized
+ if (fields[fieldName]) {
+ continue;
+ }
+ // Skip virtual/computed fields - check entity metadata
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ if (entityInfo) {
+ const fieldInfo = entityInfo.Fields.find(f => f.Name === fieldName);
+ if (fieldInfo && !fieldInfo.IsVirtual) {
+ // Field exists in metadata and is not virtual, keep it
+ }
+ else if (fieldInfo && fieldInfo.IsVirtual) {
+ // Skip virtual fields
+ continue;
+ }
+ else if (!fieldInfo) {
+ // Field not in metadata at all
+ // Check if it's explicitly configured for externalization, lookup, or exclusion
+ const isConfiguredField = entityConfig.pull?.externalizeFields?.includes(fieldName) ||
+ entityConfig.pull?.lookupFields?.[fieldName] ||
+ entityConfig.pull?.excludeFields?.includes(fieldName);
+ if (!isConfiguredField) {
+ // Skip fields not in metadata and not explicitly configured
+ continue;
+ }
+ // Otherwise, allow the field to be processed since it's explicitly configured
+ }
+ }
+ // Check if this field should be converted to a lookup
+ const lookupConfig = entityConfig.pull?.lookupFields?.[fieldName];
+ if (lookupConfig && fieldValue) {
+ // Convert foreign key to @lookup reference
+ const lookupValue = await this.convertToLookup(fieldValue, lookupConfig.entity, lookupConfig.field, syncEngine);
+ if (lookupValue) {
+ fields[fieldName] = lookupValue;
+ continue;
+ }
+ }
  // Check if this is an external file field
  if (await this.shouldExternalizeField(fieldName, fieldValue, entityConfig)) {
- const fileName = await this.createExternalFile(targetDir, primaryKey, fieldName, String(fieldValue));
- recordData.fields[fieldName] = `@file:${fileName}`;
+ // Check if this field is preserved and already has a @file: reference
+ const isPreservedField = entityConfig.pull?.preserveFields?.includes(fieldName);
+ const existingFieldValue = existingRecordData?.fields?.[fieldName];
+ if (isPreservedField && existingFieldValue && typeof existingFieldValue === 'string' && existingFieldValue.startsWith('@file:')) {
+ // Field is preserved and has existing @file: reference - update the existing file
+ const existingFilePath = existingFieldValue.replace('@file:', '');
+ const fullPath = path_1.default.join(targetDir, existingFilePath);
+ // Ensure directory exists
+ await fs_extra_1.default.ensureDir(path_1.default.dirname(fullPath));
+ // Write the content to the existing file path
+ await fs_extra_1.default.writeFile(fullPath, String(fieldValue), 'utf-8');
+ // Keep the existing @file: reference
+ fields[fieldName] = existingFieldValue;
+ }
+ else {
+ // Normal externalization - create new file
+ const pattern = externalizeMap.get(fieldName);
+ const fileName = await this.createExternalFile(targetDir, record, primaryKey, fieldName, String(fieldValue), entityConfig, pattern);
+ fields[fieldName] = fileName; // fileName already includes @file: prefix if pattern-based
+ }
  }
  else {
- recordData.fields[fieldName] = fieldValue;
+ fields[fieldName] = fieldValue;
+ }
+ }
+ // Now check for externalized fields that might be computed properties
+ // We process ALL externalized fields, including those not in the data
+ if (entityConfig.pull?.externalizeFields && typeof record.GetAll === 'function') {
+ const externalizeConfig = entityConfig.pull.externalizeFields;
+ // Normalize configuration to array format
+ let externalizeItems = [];
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ externalizeItems = externalizeConfig.map(f => ({ field: f }));
+ }
+ else {
+ // Already in the new format
+ externalizeItems = externalizeConfig;
+ }
+ }
+ else {
+ // Object format
+ externalizeItems = Object.entries(externalizeConfig).map(([field, config]) => ({
+ field,
+ pattern: undefined // Will use default pattern
+ }));
+ }
+ // Get the keys from the underlying data to identify computed properties
+ const dataKeys = Object.keys(dataToProcess);
+ for (const externalItem of externalizeItems) {
+ const externalField = externalItem.field;
+ // Only process fields that are NOT in the underlying data
+ // (these are likely computed properties)
+ if (dataKeys.includes(externalField)) {
+ continue; // This was already processed in the main loop
+ }
+ try {
+ // Use bracket notation to access properties (including getters)
+ const fieldValue = record[externalField];
+ if (fieldValue !== undefined && fieldValue !== null && fieldValue !== '') {
+ if (await this.shouldExternalizeField(externalField, fieldValue, entityConfig)) {
+ // Check if this field is preserved and already has a @file: reference
+ const isPreservedField = entityConfig.pull?.preserveFields?.includes(externalField);
+ const existingFieldValue = existingRecordData?.fields?.[externalField];
+ if (isPreservedField && existingFieldValue && typeof existingFieldValue === 'string' && existingFieldValue.startsWith('@file:')) {
+ // Field is preserved and has existing @file: reference - update the existing file
+ const existingFilePath = existingFieldValue.replace('@file:', '');
+ const fullPath = path_1.default.join(targetDir, existingFilePath);
+ // Ensure directory exists
+ await fs_extra_1.default.ensureDir(path_1.default.dirname(fullPath));
+ // Write the content to the existing file path
+ await fs_extra_1.default.writeFile(fullPath, String(fieldValue), 'utf-8');
+ // Keep the existing @file: reference
+ fields[externalField] = existingFieldValue;
+ }
+ else {
+ // Normal externalization - create new file
+ const fileName = await this.createExternalFile(targetDir, record, primaryKey, externalField, String(fieldValue), entityConfig, externalItem.pattern);
+ fields[externalField] = fileName; // fileName already includes @file: prefix if pattern-based
+ }
+ }
+ else {
+ // Include the field value if not externalized
+ fields[externalField] = fieldValue;
+ }
+ }
+ }
+ catch (error) {
+ // Property might not exist, that's okay
+ if (flags?.verbose) {
+ console.log(`Could not get property ${externalField}: ${error}`);
+ }
+ }
  }
  }
  // Pull related entities if configured
  if (entityConfig.pull?.relatedEntities) {
- recordData.relatedEntities = await this.pullRelatedEntities(record, entityConfig.pull.relatedEntities, syncEngine);
+ const related = await this.pullRelatedEntities(record, entityConfig.pull.relatedEntities, syncEngine, entityConfig, flags);
+ Object.assign(relatedEntities, related);
  }
- // Calculate checksum
- recordData.sync.checksum = syncEngine.calculateChecksum(recordData.fields);
+ // Get entity metadata to check defaults
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ // Filter out null values and fields matching their defaults
+ const cleanedFields = {};
+ // Get the set of fields that existed in the original record (if updating)
+ const existingFieldNames = existingRecordData?.fields ? new Set(Object.keys(existingRecordData.fields)) : new Set();
+ for (const [fieldName, fieldValue] of Object.entries(fields)) {
+ let includeField = false;
+ if (!isNewRecord && existingFieldNames.has(fieldName)) {
+ // For updates: Always preserve fields that existed in the original record
+ includeField = true;
+ }
+ else {
+ // For new records or new fields in existing records:
+ // Skip null/undefined/empty string values
+ if (fieldValue === null || fieldValue === undefined || fieldValue === '') {
+ includeField = false;
+ }
+ else if (entityInfo) {
+ // Check if value matches the field's default
+ const fieldInfo = entityInfo.Fields.find(f => f.Name === fieldName);
+ if (fieldInfo && fieldInfo.DefaultValue !== null && fieldInfo.DefaultValue !== undefined) {
+ // Compare with default value
+ if (fieldValue === fieldInfo.DefaultValue) {
+ includeField = false;
+ }
+ // Special handling for boolean defaults (might be stored as strings)
+ else if (typeof fieldValue === 'boolean' &&
+ (fieldInfo.DefaultValue === (fieldValue ? '1' : '0') ||
+ fieldInfo.DefaultValue === (fieldValue ? 'true' : 'false'))) {
+ includeField = false;
+ }
+ // Special handling for numeric defaults that might be strings
+ else if (typeof fieldValue === 'number' && String(fieldValue) === String(fieldInfo.DefaultValue)) {
+ includeField = false;
+ }
+ else {
+ includeField = true;
+ }
+ }
+ else {
+ // No default value defined, include if not null/empty
+ includeField = true;
+ }
+ }
+ else {
+ // No entity info, include if not null/empty
+ includeField = true;
+ }
+ }
+ if (includeField) {
+ cleanedFields[fieldName] = fieldValue;
+ }
+ }
+ // Calculate checksum on cleaned fields
+ const checksum = syncEngine.calculateChecksum(cleanedFields);
+ // Build the final record data with proper ordering
+ // Use a new object to ensure property order
+ const recordData = {};
+ // 1. User fields first
+ recordData.fields = cleanedFields;
+ // 2. Related entities (if any)
+ if (Object.keys(relatedEntities).length > 0) {
+ recordData.relatedEntities = relatedEntities;
+ }
+ // 3. Primary key (system field)
+ recordData.primaryKey = primaryKey;
+ // 4. Sync metadata (system field)
+ recordData.sync = {
+ lastModified: new Date().toISOString(),
+ checksum: checksum
+ };
  return recordData;
  }
+ /**
+ * Convert a foreign key value to a @lookup reference
+ *
+ * Looks up the related record and creates a @lookup string that can be
+ * resolved during push operations.
+ *
+ * @param foreignKeyValue - The foreign key value (ID)
+ * @param targetEntity - Name of the target entity
+ * @param targetField - Field in target entity to use for lookup
+ * @param syncEngine - Sync engine instance
+ * @returns @lookup string or null if lookup fails
+ * @private
+ */
+ async convertToLookup(foreignKeyValue, targetEntity, targetField, syncEngine) {
+ try {
+ // Get the related record
+ const metadata = new core_2.Metadata();
+ const targetEntityInfo = metadata.EntityByName(targetEntity);
+ if (!targetEntityInfo) {
+ this.warn(`Could not find entity ${targetEntity} for lookup`);
+ return null;
+ }
+ // Load the related record
+ const primaryKeyField = targetEntityInfo.PrimaryKeys?.[0]?.Name || 'ID';
+ const rv = new core_2.RunView();
+ const result = await rv.RunView({
+ EntityName: targetEntity,
+ ExtraFilter: `${primaryKeyField} = '${String(foreignKeyValue).replace(/'/g, "''")}'`,
+ ResultType: 'entity_object'
+ }, (0, provider_utils_1.getSystemUser)());
+ if (!result.Success || result.Results.length === 0) {
+ this.warn(`Could not find ${targetEntity} with ${primaryKeyField} = ${foreignKeyValue}`);
+ return null;
+ }
+ const relatedRecord = result.Results[0];
+ const lookupValue = relatedRecord[targetField];
+ if (!lookupValue) {
+ this.warn(`${targetEntity} record missing ${targetField} field`);
+ return null;
+ }
+ // Return the @lookup reference
+ return `@lookup:${targetEntity}.${targetField}=${lookupValue}`;
+ }
+ catch (error) {
+ this.warn(`Failed to create lookup for ${targetEntity}: ${error}`);
+ return null;
+ }
+ }
  /**
  * Determine if a field should be saved to an external file
  *
- * Checks if a field contains substantial text content that would be better
- * stored in a separate file rather than inline in the JSON. Uses heuristics
- * based on field name and content length.
+ * Checks if a field is configured for externalization or contains substantial
+ * text content that would be better stored in a separate file.
  *
  * @param fieldName - Name of the field to check
  * @param fieldValue - Value of the field
- * @param entityConfig - Entity configuration (for future extension)
+ * @param entityConfig - Entity configuration with externalization settings
  * @returns Promise resolving to true if field should be externalized
  * @private
  */
  async shouldExternalizeField(fieldName, fieldValue, entityConfig) {
- // Only externalize string fields with significant content
+ // Only externalize string fields
  if (typeof fieldValue !== 'string') {
  return false;
  }
- // Check if it's a known large text field
- const largeTextFields = ['Prompt', 'Template', 'Notes', 'Description',
- 'Content', 'Body', 'Text', 'HTML', 'SQL'];
- if (largeTextFields.some(f => fieldName.toLowerCase().includes(f.toLowerCase()))) {
- // Only externalize if content is substantial (more than 100 chars or has newlines)
- return fieldValue.length > 100 || fieldValue.includes('\n');
+ // Check if field is configured for externalization
+ const externalizeConfig = entityConfig.pull?.externalizeFields;
+ if (!externalizeConfig) {
+ return false;
+ }
+ if (Array.isArray(externalizeConfig)) {
+ if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
+ // Simple string array
+ return externalizeConfig.includes(fieldName);
+ }
+ else {
+ // New pattern format
+ return externalizeConfig
+ .some(item => item.field === fieldName);
+ }
+ }
+ else {
+ // Object format
+ return fieldName in externalizeConfig;
  }
- return false;
  }
  /**
  * Create an external file for a field value
@@ -316,36 +834,103 @@ class Pull extends core_1.Command {
  * Saves large text content to a separate file and returns the filename.
  * Automatically determines appropriate file extension based on field name
  * and content type (e.g., .md for prompts, .html for templates).
+ * Uses the entity's name field for the filename if available.
  *
  * @param targetDir - Directory to save the file
- * @param primaryKey - Primary key for filename generation
+ * @param record - Full record to extract name field from
+ * @param primaryKey - Primary key for filename generation fallback
  * @param fieldName - Name of the field being externalized
  * @param content - Content to write to the file
+ * @param entityConfig - Entity configuration
  * @returns Promise resolving to the created filename
  * @private
  */
- async createExternalFile(targetDir, primaryKey, fieldName, content) {
- // Determine file extension based on field name and content
- let extension = '.txt';
- if (fieldName.toLowerCase().includes('prompt')) {
- extension = '.md';
- }
- else if (fieldName.toLowerCase().includes('template')) {
- if (content.includes('<html') || content.includes('<!DOCTYPE')) {
- extension = '.html';
+ async createExternalFile(targetDir, record, primaryKey, fieldName, content, entityConfig, pattern) {
+ // If pattern is provided, use it to generate the full path
+ if (pattern) {
+ // Replace placeholders in the pattern
+ let resolvedPattern = pattern;
+ // Get entity metadata for field lookups
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ // Replace {Name} with the entity's name field value
+ if (entityInfo) {
+ const nameField = entityInfo.Fields.find(f => f.IsNameField);
+ if (nameField && record[nameField.Name]) {
+ const nameValue = String(record[nameField.Name])
+ .replace(/[^a-zA-Z0-9\-_ ]/g, '') // Remove disallowed characters
+ .replace(/\s+/g, '-') // Replace spaces with -
+ .toLowerCase(); // Make lowercase
+ resolvedPattern = resolvedPattern.replace(/{Name}/g, nameValue);
+ }
+ }
+ // Replace {ID} with the primary key
+ const idValue = primaryKey.ID || Object.values(primaryKey)[0];
+ if (idValue) {
+ resolvedPattern = resolvedPattern.replace(/{ID}/g, String(idValue).toLowerCase());
  }
- else if (content.includes('{{') || content.includes('{%')) {
- extension = '.liquid';
+ // Replace {FieldName} with the current field name
+ resolvedPattern = resolvedPattern.replace(/{FieldName}/g, fieldName.toLowerCase());
+ // Replace any other {field} placeholders with field values from the record
+ const placeholderRegex = /{(\w+)}/g;
+ resolvedPattern = resolvedPattern.replace(placeholderRegex, (match, fieldName) => {
+ const value = record[fieldName];
+ if (value !== undefined && value !== null) {
+ return String(value)
+ .replace(/[^a-zA-Z0-9\-_ ]/g, '')
+ .replace(/\s+/g, '-')
+ .toLowerCase();
+ }
+ return match; // Keep placeholder if field not found
+ });
+ // Extract the file path from the pattern
+ const filePath = path_1.default.join(targetDir, resolvedPattern.replace('@file:', ''));
+ // Ensure directory exists
+ await fs_extra_1.default.ensureDir(path_1.default.dirname(filePath));
+ // Write the file
+ await fs_extra_1.default.writeFile(filePath, content, 'utf-8');
+ // Return the pattern as-is (it includes @file: prefix)
+ return resolvedPattern;
+ }
+ // Original logic for non-pattern based externalization
+ let extension = '.md'; // default to markdown
+ const externalizeConfig = entityConfig.pull?.externalizeFields;
+ if (externalizeConfig && !Array.isArray(externalizeConfig) && externalizeConfig[fieldName]?.extension) {
+ extension = externalizeConfig[fieldName].extension;
+ // Ensure extension starts with a dot
+ if (!extension.startsWith('.')) {
+ extension = '.' + extension;
  }
  }
- else if (fieldName.toLowerCase().includes('sql')) {
- extension = '.sql';
+ // Try to use the entity's name field for the filename
+ let baseFileName;
+ // Get entity metadata to find the name field
+ const metadata = new core_2.Metadata();
+ const entityInfo = metadata.EntityByName(entityConfig.entity);
+ if (entityInfo) {
+ // Find the name field
+ const nameField = entityInfo.Fields.find(f => f.IsNameField);
+ if (nameField && record[nameField.Name]) {
+ // Use the name field value, sanitized for filesystem
+ const nameValue = String(record[nameField.Name]);
+ // Remove disallowed characters (don't replace with _), replace spaces with -, and lowercase
+ baseFileName = nameValue
+ .replace(/[^a-zA-Z0-9\-_ ]/g, '') // Remove disallowed characters
+ .replace(/\s+/g, '-') // Replace spaces with -
+ .toLowerCase(); // Make lowercase
+ }
+ else {
+ // Fallback to primary key
+ baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
+ }
  }
- else if (fieldName.toLowerCase().includes('notes') || fieldName.toLowerCase().includes('description')) {
- extension = '.md';
+ else {
+ // Fallback to primary key
+ baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
  }
- const baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
- const fileName = `${baseFileName}.${fieldName.toLowerCase()}${extension}`;
+ // Remove dot prefix from baseFileName if it exists (it will be a dot-prefixed name from buildFileName)
+ const cleanBaseFileName = baseFileName.startsWith('.') ? baseFileName.substring(1) : baseFileName;
+ const fileName = `.${cleanBaseFileName}.${fieldName.toLowerCase()}${extension}`;
  const filePath = path_1.default.join(targetDir, fileName);
  await fs_extra_1.default.writeFile(filePath, content, 'utf-8');
  return fileName;
@@ -356,6 +941,7 @@ class Pull extends core_1.Command {
  * Creates a safe filename based on the entity's primary key values.
  * Handles GUIDs by using first 8 characters, sanitizes special characters,
  * and creates composite names for multi-field keys.
+ * Files are prefixed with a dot to follow the metadata file convention.
  *
  * @param primaryKey - Primary key fields and values
  * @param entityConfig - Entity configuration (for future extension)
@@ -369,16 +955,16 @@ class Pull extends core_1.Command {
  // Single string key - use as base if it's a guid
  const key = keys[0];
  if (key.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i)) {
- // It's a GUID, use first 8 chars
- return `${key.substring(0, 8)}.json`;
+ // It's a GUID, use first 8 chars, prefixed with dot, lowercase
+ return `.${key.substring(0, 8).toLowerCase()}.json`;
  }
- // Use the whole key if not too long
+ // Use the whole key if not too long, prefixed with dot
  if (key.length <= 50) {
- return `${key.replace(/[^a-zA-Z0-9-_]/g, '_')}.json`;
+ return `.${key.replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()}.json`;
  }
  }
- // Multiple keys or numeric - create composite name
- return keys.map(k => String(k).replace(/[^a-zA-Z0-9-_]/g, '_')).join('-') + '.json';
+ // Multiple keys or numeric - create composite name, prefixed with dot
+ return '.' + keys.map(k => String(k).replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()).join('-') + '.json';
  }
  /**
  * Pull related entities for a parent record
@@ -393,16 +979,26 @@ class Pull extends core_1.Command {
  * @returns Promise resolving to map of entity names to related records
  * @private
  */
- async pullRelatedEntities(parentRecord, relatedConfig, syncEngine) {
+ async pullRelatedEntities(parentRecord, relatedConfig, syncEngine, entityConfig, flags) {
  const relatedEntities = {};
  for (const [key, config] of Object.entries(relatedConfig)) {
  try {
- // Get the parent's primary key value
- const parentKeyValue = parentRecord[config.foreignKey];
+ // Get entity metadata to find primary key
+ const metadata = new core_2.Metadata();
+ const parentEntity = metadata.EntityByName(entityConfig.entity);
+ if (!parentEntity) {
+ this.warn(`Could not find entity metadata for ${entityConfig.entity}`);
+ continue;
+ }
+ // Get the parent's primary key value (usually ID)
+ const primaryKeyField = parentEntity.PrimaryKeys?.[0]?.Name || 'ID';
+ const parentKeyValue = parentRecord[primaryKeyField];
  if (!parentKeyValue) {
- continue; // Skip if parent doesn't have the foreign key field
+ this.warn(`Parent record missing primary key field ${primaryKeyField}`);
+ continue;
  }
  // Build filter for related records
+ // The foreignKey is the field in the CHILD entity that points to this parent
  let filter = `${config.foreignKey} = '${String(parentKeyValue).replace(/'/g, "''")}'`;
  if (config.filter) {
  filter += ` AND (${config.filter})`;
@@ -411,39 +1007,72 @@ class Pull extends core_1.Command {
  const rv = new core_2.RunView();
  const result = await rv.RunView({
  EntityName: config.entity,
- ExtraFilter: filter
+ ExtraFilter: filter,
+ ResultType: 'entity_object'
  }, (0, provider_utils_1.getSystemUser)());
  if (!result.Success) {
  this.warn(`Failed to pull related ${config.entity}: ${result.ErrorMessage}`);
  continue;
  }
+ // Get child entity metadata
+ const childEntity = metadata.EntityByName(config.entity);
+ if (!childEntity) {
+ this.warn(`Could not find entity metadata for ${config.entity}`);
+ continue;
+ }
+ // Check if we need to wait for async property loading for related entities
+ if (config.externalizeFields && result.Results.length > 0) {
+ let fieldsToExternalize = [];
+ if (Array.isArray(config.externalizeFields)) {
+ if (config.externalizeFields.length > 0 && typeof config.externalizeFields[0] === 'string') {
+ // Simple string array
+ fieldsToExternalize = config.externalizeFields;
+ }
+ else {
+ // New pattern format
+ fieldsToExternalize = config.externalizeFields
+ .map(item => item.field);
+ }
+ }
+ else {
+ // Object format
+ fieldsToExternalize = Object.keys(config.externalizeFields);
+ }
+ // Get all field names from entity metadata
+ const metadataFieldNames = childEntity.Fields.map(f => f.Name);
+ // Check if any externalized fields are NOT in metadata (likely computed properties)
+ const computedFields = fieldsToExternalize.filter(f => !metadataFieldNames.includes(f));
+ if (computedFields.length > 0) {
+ console.log(`Waiting 5 seconds for async property loading in related entity ${config.entity} (${computedFields.join(', ')})...`);
+ await new Promise(resolve => setTimeout(resolve, 5000));
+ }
+ }
  // Process each related record
  const relatedRecords = [];
  for (const relatedRecord of result.Results) {
- const recordData = {
- fields: {}
- };
- // Process fields, omitting the foreign key since it will be set via @parent
- for (const [fieldName, fieldValue] of Object.entries(relatedRecord)) {
- // Skip internal fields
- if (fieldName.startsWith('__mj_')) {
- continue;
- }
- // Convert foreign key reference to @parent
- if (fieldName === config.foreignKey) {
- const parentFieldName = this.findParentField(parentRecord, parentKeyValue);
- if (parentFieldName) {
- recordData.fields[fieldName] = `@parent:${parentFieldName}`;
- }
- continue;
- }
- recordData.fields[fieldName] = fieldValue;
+ // Build primary key for the related record
+ const relatedPrimaryKey = {};
+ for (const pk of childEntity.PrimaryKeys) {
+ relatedPrimaryKey[pk.Name] = relatedRecord[pk.Name];
  }
- // Pull nested related entities if configured
- if (config.relatedEntities) {
- recordData.relatedEntities = await this.pullRelatedEntities(relatedRecord, config.relatedEntities, syncEngine);
+ // Process the related record using the same logic as parent records
+ const relatedData = await this.processRecordData(relatedRecord, relatedPrimaryKey, '', // Not used for related entities since we don't externalize their fields
+ {
+ entity: config.entity,
+ pull: {
+ excludeFields: config.excludeFields || entityConfig.pull?.excludeFields,
+ lookupFields: config.lookupFields || entityConfig.pull?.lookupFields,
+ externalizeFields: config.externalizeFields,
+ relatedEntities: config.relatedEntities
+ }
+ }, syncEngine, flags, true);
+ // Convert foreign key reference to @parent
+ if (relatedData.fields[config.foreignKey]) {
+ relatedData.fields[config.foreignKey] = `@parent:${primaryKeyField}`;
  }
- relatedRecords.push(recordData);
+ // The processRecordData method already filters nulls and defaults
+ // No need to do it again here
+ relatedRecords.push(relatedData);
  }
  if (relatedRecords.length > 0) {
  relatedEntities[key] = relatedRecords;
@@ -477,6 +1106,189 @@ class Pull extends core_1.Command {
  }
  return null;
  }
+ /**
+ * Find existing files in a directory matching a pattern
+ *
+ * Searches for files that match the configured file pattern, used to identify
+ * which records already exist locally for smart update functionality.
+ *
+ * @param dir - Directory to search in
+ * @param pattern - Glob pattern to match files (e.g., "*.json")
+ * @returns Promise resolving to array of file paths
+ * @private
+ */
+ async findExistingFiles(dir, pattern) {
+ const files = [];
+ try {
+ const entries = await fs_extra_1.default.readdir(dir, { withFileTypes: true });
+ for (const entry of entries) {
+ if (entry.isFile()) {
+ const fileName = entry.name;
+ // Simple pattern matching - could be enhanced with proper glob support
+ if (pattern === '*.json' && fileName.endsWith('.json')) {
+ files.push(path_1.default.join(dir, fileName));
+ }
+ else if (pattern === '.*.json' && fileName.startsWith('.') && fileName.endsWith('.json')) {
+ // Handle dot-prefixed JSON files
+ files.push(path_1.default.join(dir, fileName));
+ }
+ else if (pattern === fileName) {
+ files.push(path_1.default.join(dir, fileName));
+ }
+ // TODO: Add more sophisticated glob pattern matching if needed
+ }
+ }
+ }
+ catch (error) {
+ // Directory might not exist yet
+ if (error.code !== 'ENOENT') {
+ throw error;
+ }
+ }
+ return files;
+ }
+ /**
+ * Load existing records from files and build a lookup map
+ *
+ * Reads all existing files and creates a map from primary key to file location,
+ * enabling efficient lookup during the update process.
+ *
+ * @param files - Array of file paths to load
+ * @param entityInfo - Entity metadata for primary key information
+ * @returns Map from primary key string to file info
+ * @private
+ */
+ async loadExistingRecords(files, entityInfo) {
+ const recordsMap = new Map();
+ for (const filePath of files) {
+ try {
+ const fileData = await fs_extra_1.default.readJson(filePath);
+ const records = Array.isArray(fileData) ? fileData : [fileData];
+ for (const record of records) {
+ if (record.primaryKey) {
+ const lookupKey = this.createPrimaryKeyLookup(record.primaryKey);
+ recordsMap.set(lookupKey, { filePath, recordData: record });
+ }
+ }
+ }
+ catch (error) {
+ // Skip files that can't be parsed
+ this.warn(`Could not load file ${filePath}: ${error}`);
+ }
+ }
+ return recordsMap;
+ }
+ /**
+ * Create a string lookup key from primary key values
+ *
+ * Generates a consistent string representation of primary key values
+ * for use in maps and comparisons.
+ *
+ * @param primaryKey - Primary key field names and values
+ * @returns String representation of the primary key
+ * @private
+ */
+ createPrimaryKeyLookup(primaryKey) {
+ const keys = Object.keys(primaryKey).sort();
+ return keys.map(k => `${k}:${primaryKey[k]}`).join('|');
+ }
+ /**
+ * Merge two record data objects based on configured strategy
+ *
+ * Combines existing and new record data according to the merge strategy:
+ * - 'overwrite': Replace all fields with new values
+ * - 'merge': Combine fields, with new values taking precedence
+ * - 'skip': Keep existing record unchanged
+ *
+ * @param existing - Existing record data
+ * @param newData - New record data from database
+ * @param strategy - Merge strategy to apply
+ * @param preserveFields - Field names that should never be overwritten
+ * @returns Merged record data
+ * @private
+ */
+ async mergeRecords(existing, newData, strategy, preserveFields) {
+ if (strategy === 'skip') {
+ return existing;
+ }
+ if (strategy === 'overwrite') {
+ // Build with proper ordering
+ const result = {};
+ // 1. Fields first
+ result.fields = { ...newData.fields };
+ // Restore preserved fields from existing
+ if (preserveFields.length > 0 && existing.fields) {
+ for (const field of preserveFields) {
+ if (field in existing.fields) {
+ result.fields[field] = existing.fields[field];
+ }
+ }
+ }
+ // 2. Related entities (if any)
+ if (newData.relatedEntities) {
+ result.relatedEntities = newData.relatedEntities;
+ }
+ // 3. Primary key
+ result.primaryKey = newData.primaryKey;
+ // 4. Sync metadata
+ result.sync = newData.sync;
+ return result;
+ }
+ // Default 'merge' strategy
+ // Build with proper ordering
+ const result = {};
+ // 1. Fields first
+ result.fields = { ...existing.fields, ...newData.fields };
+ // Restore preserved fields
+ if (preserveFields.length > 0 && existing.fields) {
+ for (const field of preserveFields) {
+ if (field in existing.fields) {
+ result.fields[field] = existing.fields[field];
+ }
+ }
+ }
+ // 2. Related entities (if any)
+ if (existing.relatedEntities || newData.relatedEntities) {
+ result.relatedEntities = {
+ ...existing.relatedEntities,
+ ...newData.relatedEntities
+ };
+ }
+ // 3. Primary key
+ result.primaryKey = newData.primaryKey || existing.primaryKey;
+ // 4. Sync metadata
+ result.sync = newData.sync;
+ return result;
+ }
+ /**
+ * Create a backup of a file before updating
+ *
+ * Creates a timestamped backup copy of the file in a backup directory
+ * with the original filename, timestamp suffix, and .backup extension.
+ * The backup directory defaults to .backups but can be configured.
+ *
+ * @param filePath - Path to the file to backup
+ * @param backupDirName - Name of the backup directory (optional)
+ * @returns Promise that resolves when backup is created
+ * @private
+ */
+ async createBackup(filePath, backupDirName) {
+ const dir = path_1.default.dirname(filePath);
+ const fileName = path_1.default.basename(filePath);
+ const backupDir = path_1.default.join(dir, backupDirName || '.backups');
+ // Ensure backup directory exists
+ await fs_extra_1.default.ensureDir(backupDir);
+ const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+ // Remove .json extension, add timestamp, then add .backup extension
+ const backupFileName = fileName.replace(/\.json$/, `.${timestamp}.backup`);
+ const backupPath = path_1.default.join(backupDir, backupFileName);
+ try {
+ await fs_extra_1.default.copy(filePath, backupPath);
+ }
+ catch (error) {
+ this.warn(`Could not create backup of ${filePath}: ${error}`);
+ }
+ }
  }
  exports.default = Pull;
  //# sourceMappingURL=index.js.map
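For orientation, a record file produced by the new single-file-per-record path would look roughly like the sketch below: a dot-prefixed file name from buildFileName, user fields first, then relatedEntities, primaryKey, and sync, with @file:, @lookup:, and @parent: references as emitted by createExternalFile, convertToLookup, and pullRelatedEntities. All values and entity/field names here are illustrative, not taken from the package.

.a1b2c3d4.json:
{
  "fields": {
    "Name": "Welcome Email",
    "CategoryID": "@lookup:Template Categories.Name=Onboarding",
    "TemplateText": "@file:templates/welcome-email.md"
  },
  "relatedEntities": {
    "Template Params": [
      {
        "fields": {
          "TemplateID": "@parent:ID",
          "Name": "FirstName"
        },
        "primaryKey": { "ID": "9f8e7d6c-1111-2222-3333-444444444444" },
        "sync": { "lastModified": "2025-01-01T00:00:00.000Z", "checksum": "..." }
      }
    ]
  },
  "primaryKey": { "ID": "a1b2c3d4-5555-6666-7777-888888888888" },
  "sync": {
    "lastModified": "2025-01-01T00:00:00.000Z",
    "checksum": "..."
  }
}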