@memberjunction/metadata-sync 2.46.0 → 2.48.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +341 -28
- package/dist/commands/pull/index.d.ts +220 -0
- package/dist/commands/pull/index.js +1094 -113
- package/dist/commands/pull/index.js.map +1 -1
- package/dist/commands/push/index.d.ts +1 -0
- package/dist/commands/push/index.js +90 -40
- package/dist/commands/push/index.js.map +1 -1
- package/dist/commands/status/index.js +51 -7
- package/dist/commands/status/index.js.map +1 -1
- package/dist/commands/watch/index.js +20 -7
- package/dist/commands/watch/index.js.map +1 -1
- package/dist/config.d.ts +210 -0
- package/dist/config.js +83 -13
- package/dist/config.js.map +1 -1
- package/dist/hooks/init.js +9 -1
- package/dist/hooks/init.js.map +1 -1
- package/dist/lib/config-manager.d.ts +56 -0
- package/dist/lib/config-manager.js +104 -0
- package/dist/lib/config-manager.js.map +1 -0
- package/dist/lib/provider-utils.d.ts +76 -4
- package/dist/lib/provider-utils.js +136 -52
- package/dist/lib/provider-utils.js.map +1 -1
- package/dist/lib/singleton-manager.d.ts +34 -0
- package/dist/lib/singleton-manager.js +62 -0
- package/dist/lib/singleton-manager.js.map +1 -0
- package/dist/lib/sync-engine.d.ts +239 -5
- package/dist/lib/sync-engine.js +314 -5
- package/dist/lib/sync-engine.js.map +1 -1
- package/oclif.manifest.json +51 -37
- package/package.json +6 -6
|
@@ -1,4 +1,16 @@
|
|
|
1
1
|
"use strict";
|
|
2
|
+
/**
|
|
3
|
+
* @fileoverview Pull command implementation for MetadataSync
|
|
4
|
+
* @module commands/pull
|
|
5
|
+
*
|
|
6
|
+
* This module implements the pull command which retrieves metadata records from
|
|
7
|
+
* the MemberJunction database and saves them as local JSON files. It supports:
|
|
8
|
+
* - Filtering records with SQL expressions
|
|
9
|
+
* - Pulling related entities with foreign key relationships
|
|
10
|
+
* - Externalizing large text fields to separate files
|
|
11
|
+
* - Creating multi-record JSON files
|
|
12
|
+
* - Recursive directory search for entity configurations
|
|
13
|
+
*/
|
|
2
14
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
3
15
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
4
16
|
};
|
|
@@ -9,9 +21,28 @@ const path_1 = __importDefault(require("path"));
|
|
|
9
21
|
const prompts_1 = require("@inquirer/prompts");
|
|
10
22
|
const ora_classic_1 = __importDefault(require("ora-classic"));
|
|
11
23
|
const config_1 = require("../../config");
|
|
12
|
-
const sync_engine_1 = require("../../lib/sync-engine");
|
|
13
24
|
const core_2 = require("@memberjunction/core");
|
|
14
25
|
const provider_utils_1 = require("../../lib/provider-utils");
|
|
26
|
+
const config_manager_1 = require("../../lib/config-manager");
|
|
27
|
+
const singleton_manager_1 = require("../../lib/singleton-manager");
|
|
28
|
+
/**
|
|
29
|
+
* Pull metadata records from database to local files
|
|
30
|
+
*
|
|
31
|
+
* @class Pull
|
|
32
|
+
* @extends Command
|
|
33
|
+
*
|
|
34
|
+
* @example
|
|
35
|
+
* ```bash
|
|
36
|
+
* # Pull all records for an entity
|
|
37
|
+
* mj-sync pull --entity="AI Prompts"
|
|
38
|
+
*
|
|
39
|
+
* # Pull with filter
|
|
40
|
+
* mj-sync pull --entity="AI Prompts" --filter="CategoryID='123'"
|
|
41
|
+
*
|
|
42
|
+
* # Pull to multi-record file
|
|
43
|
+
* mj-sync pull --entity="AI Prompts" --multi-file="all-prompts.json"
|
|
44
|
+
* ```
|
|
45
|
+
*/
|
|
15
46
|
class Pull extends core_1.Command {
|
|
16
47
|
static description = 'Pull metadata from database to local files';
|
|
17
48
|
static examples = [
|
|
@@ -22,42 +53,77 @@ class Pull extends core_1.Command {
|
|
|
22
53
|
entity: core_1.Flags.string({ description: 'Entity name to pull', required: true }),
|
|
23
54
|
filter: core_1.Flags.string({ description: 'Additional filter for pulling specific records' }),
|
|
24
55
|
'dry-run': core_1.Flags.boolean({ description: 'Show what would be pulled without actually pulling' }),
|
|
56
|
+
'multi-file': core_1.Flags.string({ description: 'Create a single file with multiple records (provide filename)' }),
|
|
57
|
+
verbose: core_1.Flags.boolean({ char: 'v', description: 'Show detailed output' }),
|
|
25
58
|
};
|
|
26
59
|
async run() {
|
|
27
60
|
const { flags } = await this.parse(Pull);
|
|
28
61
|
const spinner = (0, ora_classic_1.default)();
|
|
29
62
|
try {
|
|
30
|
-
// Load MJ config
|
|
63
|
+
// Load MJ config first (before changing directory)
|
|
31
64
|
spinner.start('Loading configuration');
|
|
32
65
|
const mjConfig = (0, config_1.loadMJConfig)();
|
|
33
66
|
if (!mjConfig) {
|
|
34
67
|
this.error('No mj.config.cjs found in current directory or parent directories');
|
|
35
68
|
}
|
|
69
|
+
// Stop spinner before provider initialization (which logs to console)
|
|
70
|
+
spinner.stop();
|
|
36
71
|
// Initialize data provider
|
|
37
72
|
const provider = await (0, provider_utils_1.initializeProvider)(mjConfig);
|
|
38
|
-
//
|
|
39
|
-
const syncEngine =
|
|
40
|
-
|
|
41
|
-
spinner.succeed('Configuration loaded');
|
|
42
|
-
// Find entity directory
|
|
43
|
-
const entityDirs = await this.findEntityDirectories(flags.entity);
|
|
44
|
-
if (entityDirs.length === 0) {
|
|
45
|
-
this.error(`No directory found for entity "${flags.entity}". Run "mj-sync init" first.`);
|
|
46
|
-
}
|
|
73
|
+
// Get singleton sync engine
|
|
74
|
+
const syncEngine = await (0, singleton_manager_1.getSyncEngine)((0, provider_utils_1.getSystemUser)());
|
|
75
|
+
// Show success after all initialization is complete
|
|
76
|
+
spinner.succeed('Configuration and metadata loaded');
|
|
47
77
|
let targetDir;
|
|
48
|
-
|
|
49
|
-
|
|
78
|
+
let entityConfig;
|
|
79
|
+
// Check if we should use a specific target directory
|
|
80
|
+
const envTargetDir = process.env.METADATA_SYNC_TARGET_DIR;
|
|
81
|
+
if (envTargetDir) {
|
|
82
|
+
if (flags.verbose) {
|
|
83
|
+
console.log(`Using specified target directory: ${envTargetDir}`);
|
|
84
|
+
}
|
|
85
|
+
process.chdir(envTargetDir);
|
|
86
|
+
targetDir = process.cwd();
|
|
87
|
+
// Load entity config from the current directory
|
|
88
|
+
entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
|
|
89
|
+
if (!entityConfig) {
|
|
90
|
+
this.error(`No .mj-sync.json found in ${targetDir}`);
|
|
91
|
+
}
|
|
92
|
+
if (entityConfig.entity !== flags.entity) {
|
|
93
|
+
this.error(`Directory ${targetDir} is configured for entity "${entityConfig.entity}", not "${flags.entity}"`);
|
|
94
|
+
}
|
|
50
95
|
}
|
|
51
96
|
else {
|
|
52
|
-
//
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
}
|
|
97
|
+
// Original behavior - find entity directory
|
|
98
|
+
const entityDirs = await this.findEntityDirectories(flags.entity);
|
|
99
|
+
if (entityDirs.length === 0) {
|
|
100
|
+
this.error(`No directory found for entity "${flags.entity}". Run "mj-sync init" first.`);
|
|
101
|
+
}
|
|
102
|
+
if (entityDirs.length === 1) {
|
|
103
|
+
targetDir = entityDirs[0];
|
|
104
|
+
}
|
|
105
|
+
else {
|
|
106
|
+
// Multiple directories found, ask user
|
|
107
|
+
targetDir = await (0, prompts_1.select)({
|
|
108
|
+
message: `Multiple directories found for entity "${flags.entity}". Which one to use?`,
|
|
109
|
+
choices: entityDirs.map(dir => ({ name: dir, value: dir }))
|
|
110
|
+
});
|
|
111
|
+
}
|
|
112
|
+
entityConfig = await (0, config_1.loadEntityConfig)(targetDir);
|
|
113
|
+
if (!entityConfig) {
|
|
114
|
+
this.error(`Invalid entity configuration in ${targetDir}`);
|
|
115
|
+
}
|
|
57
116
|
}
|
|
58
|
-
|
|
59
|
-
if (
|
|
60
|
-
|
|
117
|
+
// Show configuration notice only if relevant
|
|
118
|
+
if (entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
|
|
119
|
+
const targetFile = path_1.default.join(targetDir, entityConfig.pull.newFileName.endsWith('.json')
|
|
120
|
+
? entityConfig.pull.newFileName
|
|
121
|
+
: `${entityConfig.pull.newFileName}.json`);
|
|
122
|
+
if (await fs_extra_1.default.pathExists(targetFile)) {
|
|
123
|
+
// File exists - inform about append behavior
|
|
124
|
+
this.log(`\n📝 Configuration: New records will be appended to existing file '${path_1.default.basename(targetFile)}'`);
|
|
125
|
+
}
|
|
126
|
+
// If file doesn't exist, no need to mention anything special - we're just creating it
|
|
61
127
|
}
|
|
62
128
|
// Pull records
|
|
63
129
|
spinner.start(`Pulling ${flags.entity} records`);
|
|
@@ -71,7 +137,8 @@ class Pull extends core_1.Command {
|
|
|
71
137
|
}
|
|
72
138
|
const result = await rv.RunView({
|
|
73
139
|
EntityName: flags.entity,
|
|
74
|
-
ExtraFilter: filter
|
|
140
|
+
ExtraFilter: filter,
|
|
141
|
+
ResultType: 'entity_object'
|
|
75
142
|
}, (0, provider_utils_1.getSystemUser)());
|
|
76
143
|
if (!result.Success) {
|
|
77
144
|
this.error(`Failed to pull records: ${result.ErrorMessage}`);
|
|
@@ -81,6 +148,39 @@ class Pull extends core_1.Command {
|
|
|
81
148
|
this.log(`\nDry run mode - would pull ${result.Results.length} records to ${targetDir}`);
|
|
82
149
|
return;
|
|
83
150
|
}
|
|
151
|
+
// Check if we need to wait for async property loading
|
|
152
|
+
if (entityConfig.pull?.externalizeFields && result.Results.length > 0) {
|
|
153
|
+
const metadata = new core_2.Metadata();
|
|
154
|
+
const entityInfo = metadata.EntityByName(flags.entity);
|
|
155
|
+
if (entityInfo) {
|
|
156
|
+
const externalizeConfig = entityConfig.pull.externalizeFields;
|
|
157
|
+
let fieldsToExternalize = [];
|
|
158
|
+
if (Array.isArray(externalizeConfig)) {
|
|
159
|
+
if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
|
|
160
|
+
// Simple string array
|
|
161
|
+
fieldsToExternalize = externalizeConfig;
|
|
162
|
+
}
|
|
163
|
+
else {
|
|
164
|
+
// New pattern format
|
|
165
|
+
fieldsToExternalize = externalizeConfig
|
|
166
|
+
.map(item => item.field);
|
|
167
|
+
}
|
|
168
|
+
}
|
|
169
|
+
else {
|
|
170
|
+
// Object format
|
|
171
|
+
fieldsToExternalize = Object.keys(externalizeConfig);
|
|
172
|
+
}
|
|
173
|
+
// Get all field names from entity metadata
|
|
174
|
+
const metadataFieldNames = entityInfo.Fields.map(f => f.Name);
|
|
175
|
+
// Check if any externalized fields are NOT in metadata (likely computed properties)
|
|
176
|
+
const computedFields = fieldsToExternalize.filter(f => !metadataFieldNames.includes(f));
|
|
177
|
+
if (computedFields.length > 0) {
|
|
178
|
+
spinner.start(`Waiting 5 seconds for async property loading in ${flags.entity} (${computedFields.join(', ')})...`);
|
|
179
|
+
await new Promise(resolve => setTimeout(resolve, 5000));
|
|
180
|
+
spinner.succeed('Async property loading wait complete');
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
}
|
|
84
184
|
// Process each record
|
|
85
185
|
const entityInfo = syncEngine.getEntityInfo(flags.entity);
|
|
86
186
|
if (!entityInfo) {
|
|
@@ -88,33 +188,228 @@ class Pull extends core_1.Command {
|
|
|
88
188
|
}
|
|
89
189
|
spinner.start('Processing records');
|
|
90
190
|
let processed = 0;
|
|
91
|
-
|
|
92
|
-
|
|
191
|
+
let updated = 0;
|
|
192
|
+
let created = 0;
|
|
193
|
+
let skipped = 0;
|
|
194
|
+
// If multi-file flag is set, collect all records
|
|
195
|
+
if (flags['multi-file']) {
|
|
196
|
+
const allRecords = [];
|
|
197
|
+
for (const record of result.Results) {
|
|
198
|
+
try {
|
|
199
|
+
// Build primary key
|
|
200
|
+
const primaryKey = {};
|
|
201
|
+
for (const pk of entityInfo.PrimaryKeys) {
|
|
202
|
+
primaryKey[pk.Name] = record[pk.Name];
|
|
203
|
+
}
|
|
204
|
+
// Process record for multi-file
|
|
205
|
+
const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
|
|
206
|
+
allRecords.push(recordData);
|
|
207
|
+
processed++;
|
|
208
|
+
if (flags.verbose) {
|
|
209
|
+
spinner.text = `Processing records (${processed}/${result.Results.length})`;
|
|
210
|
+
}
|
|
211
|
+
}
|
|
212
|
+
catch (error) {
|
|
213
|
+
this.warn(`Failed to process record: ${error.message || error}`);
|
|
214
|
+
}
|
|
215
|
+
}
|
|
216
|
+
// Write all records to single file
|
|
217
|
+
if (allRecords.length > 0) {
|
|
218
|
+
const fileName = flags['multi-file'].endsWith('.json') ? flags['multi-file'] : `${flags['multi-file']}.json`;
|
|
219
|
+
const filePath = path_1.default.join(targetDir, fileName);
|
|
220
|
+
await fs_extra_1.default.writeJson(filePath, allRecords, { spaces: 2 });
|
|
221
|
+
spinner.succeed(`Pulled ${processed} records to ${filePath}`);
|
|
222
|
+
}
|
|
223
|
+
}
|
|
224
|
+
else {
|
|
225
|
+
// Smart update logic for single-file-per-record
|
|
226
|
+
spinner.text = 'Scanning for existing files...';
|
|
227
|
+
// Find existing files
|
|
228
|
+
const filePattern = entityConfig.pull?.filePattern || entityConfig.filePattern || '*.json';
|
|
229
|
+
const existingFiles = await this.findExistingFiles(targetDir, filePattern);
|
|
230
|
+
if (flags.verbose) {
|
|
231
|
+
this.log(`Found ${existingFiles.length} existing files matching pattern '${filePattern}'`);
|
|
232
|
+
existingFiles.forEach(f => this.log(` - ${path_1.default.basename(f)}`));
|
|
233
|
+
}
|
|
234
|
+
// Load existing records and build lookup map
|
|
235
|
+
const existingRecordsMap = await this.loadExistingRecords(existingFiles, entityInfo);
|
|
236
|
+
if (flags.verbose) {
|
|
237
|
+
this.log(`Loaded ${existingRecordsMap.size} existing records from files`);
|
|
238
|
+
}
|
|
239
|
+
// Separate records into new and existing
|
|
240
|
+
const newRecords = [];
|
|
241
|
+
const existingRecordsToUpdate = [];
|
|
242
|
+
for (const record of result.Results) {
|
|
93
243
|
// Build primary key
|
|
94
244
|
const primaryKey = {};
|
|
95
245
|
for (const pk of entityInfo.PrimaryKeys) {
|
|
96
246
|
primaryKey[pk.Name] = record[pk.Name];
|
|
97
247
|
}
|
|
98
|
-
//
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
248
|
+
// Create lookup key
|
|
249
|
+
const lookupKey = this.createPrimaryKeyLookup(primaryKey);
|
|
250
|
+
const existingFileInfo = existingRecordsMap.get(lookupKey);
|
|
251
|
+
if (existingFileInfo) {
|
|
252
|
+
// Record exists locally
|
|
253
|
+
if (entityConfig.pull?.updateExistingRecords !== false) {
|
|
254
|
+
existingRecordsToUpdate.push({ record, primaryKey, filePath: existingFileInfo.filePath });
|
|
255
|
+
}
|
|
256
|
+
else {
|
|
257
|
+
skipped++;
|
|
258
|
+
if (flags.verbose) {
|
|
259
|
+
this.log(`Skipping existing record: ${lookupKey}`);
|
|
260
|
+
}
|
|
261
|
+
}
|
|
262
|
+
}
|
|
263
|
+
else {
|
|
264
|
+
// Record doesn't exist locally
|
|
265
|
+
if (entityConfig.pull?.createNewFileIfNotFound !== false) {
|
|
266
|
+
newRecords.push({ record, primaryKey });
|
|
267
|
+
}
|
|
268
|
+
else {
|
|
269
|
+
skipped++;
|
|
270
|
+
if (flags.verbose) {
|
|
271
|
+
this.log(`Skipping new record (createNewFileIfNotFound=false): ${lookupKey}`);
|
|
272
|
+
}
|
|
273
|
+
}
|
|
274
|
+
}
|
|
102
275
|
}
|
|
103
|
-
|
|
104
|
-
|
|
276
|
+
// Track which files have been backed up to avoid duplicates
|
|
277
|
+
const backedUpFiles = new Set();
|
|
278
|
+
// Process existing records updates
|
|
279
|
+
for (const { record, primaryKey, filePath } of existingRecordsToUpdate) {
|
|
280
|
+
try {
|
|
281
|
+
spinner.text = `Updating existing records (${updated + 1}/${existingRecordsToUpdate.length})`;
|
|
282
|
+
// Create backup if configured (only once per file)
|
|
283
|
+
if (entityConfig.pull?.backupBeforeUpdate && !backedUpFiles.has(filePath)) {
|
|
284
|
+
await this.createBackup(filePath, entityConfig.pull?.backupDirectory);
|
|
285
|
+
backedUpFiles.add(filePath);
|
|
286
|
+
}
|
|
287
|
+
// Load existing file data
|
|
288
|
+
const existingData = await fs_extra_1.default.readJson(filePath);
|
|
289
|
+
// Find the specific existing record that matches this primary key
|
|
290
|
+
let existingRecordData;
|
|
291
|
+
if (Array.isArray(existingData)) {
|
|
292
|
+
// Find the matching record in the array
|
|
293
|
+
const matchingRecord = existingData.find(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
|
|
294
|
+
existingRecordData = matchingRecord || existingData[0]; // Fallback to first if not found
|
|
295
|
+
}
|
|
296
|
+
else {
|
|
297
|
+
existingRecordData = existingData;
|
|
298
|
+
}
|
|
299
|
+
// Process the new record data (isNewRecord = false for updates)
|
|
300
|
+
const newRecordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, false, existingRecordData);
|
|
301
|
+
// Apply merge strategy
|
|
302
|
+
const mergedData = await this.mergeRecords(existingRecordData, newRecordData, entityConfig.pull?.mergeStrategy || 'merge', entityConfig.pull?.preserveFields || []);
|
|
303
|
+
// Write updated data
|
|
304
|
+
if (Array.isArray(existingData)) {
|
|
305
|
+
// Update the record in the array
|
|
306
|
+
const index = existingData.findIndex(r => this.createPrimaryKeyLookup(r.primaryKey || {}) === this.createPrimaryKeyLookup(primaryKey));
|
|
307
|
+
if (index >= 0) {
|
|
308
|
+
existingData[index] = mergedData;
|
|
309
|
+
await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
|
|
310
|
+
}
|
|
311
|
+
}
|
|
312
|
+
else {
|
|
313
|
+
await fs_extra_1.default.writeJson(filePath, mergedData, { spaces: 2 });
|
|
314
|
+
}
|
|
315
|
+
updated++;
|
|
316
|
+
processed++;
|
|
317
|
+
if (flags.verbose) {
|
|
318
|
+
this.log(`Updated: ${filePath}`);
|
|
319
|
+
}
|
|
320
|
+
}
|
|
321
|
+
catch (error) {
|
|
322
|
+
this.warn(`Failed to update record: ${error.message || error}`);
|
|
323
|
+
}
|
|
324
|
+
}
|
|
325
|
+
// Process new records
|
|
326
|
+
if (newRecords.length > 0) {
|
|
327
|
+
spinner.text = `Creating new records (0/${newRecords.length})`;
|
|
328
|
+
if (entityConfig.pull?.appendRecordsToExistingFile && entityConfig.pull?.newFileName) {
|
|
329
|
+
// Append all new records to a single file
|
|
330
|
+
const fileName = entityConfig.pull.newFileName.endsWith('.json')
|
|
331
|
+
? entityConfig.pull.newFileName
|
|
332
|
+
: `${entityConfig.pull.newFileName}.json`;
|
|
333
|
+
const filePath = path_1.default.join(targetDir, fileName);
|
|
334
|
+
// Load existing file if it exists
|
|
335
|
+
let existingData = [];
|
|
336
|
+
if (await fs_extra_1.default.pathExists(filePath)) {
|
|
337
|
+
const fileData = await fs_extra_1.default.readJson(filePath);
|
|
338
|
+
existingData = Array.isArray(fileData) ? fileData : [fileData];
|
|
339
|
+
}
|
|
340
|
+
// Process and append all new records
|
|
341
|
+
for (const { record, primaryKey } of newRecords) {
|
|
342
|
+
try {
|
|
343
|
+
// For new records, pass isNewRecord = true (default)
|
|
344
|
+
const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
|
|
345
|
+
existingData.push(recordData);
|
|
346
|
+
created++;
|
|
347
|
+
processed++;
|
|
348
|
+
if (flags.verbose) {
|
|
349
|
+
spinner.text = `Creating new records (${created}/${newRecords.length})`;
|
|
350
|
+
}
|
|
351
|
+
}
|
|
352
|
+
catch (error) {
|
|
353
|
+
this.warn(`Failed to process new record: ${error.message || error}`);
|
|
354
|
+
}
|
|
355
|
+
}
|
|
356
|
+
// Write the combined data
|
|
357
|
+
await fs_extra_1.default.writeJson(filePath, existingData, { spaces: 2 });
|
|
358
|
+
if (flags.verbose) {
|
|
359
|
+
this.log(`Appended ${created} new records to: ${filePath}`);
|
|
360
|
+
}
|
|
361
|
+
}
|
|
362
|
+
else {
|
|
363
|
+
// Create individual files for each new record
|
|
364
|
+
for (const { record, primaryKey } of newRecords) {
|
|
365
|
+
try {
|
|
366
|
+
await this.processRecord(record, primaryKey, targetDir, entityConfig, syncEngine, flags);
|
|
367
|
+
created++;
|
|
368
|
+
processed++;
|
|
369
|
+
if (flags.verbose) {
|
|
370
|
+
spinner.text = `Creating new records (${created}/${newRecords.length})`;
|
|
371
|
+
}
|
|
372
|
+
}
|
|
373
|
+
catch (error) {
|
|
374
|
+
this.warn(`Failed to process new record: ${error.message || error}`);
|
|
375
|
+
}
|
|
376
|
+
}
|
|
377
|
+
}
|
|
105
378
|
}
|
|
379
|
+
// Final status
|
|
380
|
+
const statusParts = [`Processed ${processed} records`];
|
|
381
|
+
if (updated > 0)
|
|
382
|
+
statusParts.push(`updated ${updated}`);
|
|
383
|
+
if (created > 0)
|
|
384
|
+
statusParts.push(`created ${created}`);
|
|
385
|
+
if (skipped > 0)
|
|
386
|
+
statusParts.push(`skipped ${skipped}`);
|
|
387
|
+
spinner.succeed(statusParts.join(', '));
|
|
106
388
|
}
|
|
107
|
-
spinner.succeed(`Pulled ${processed} records to ${targetDir}`);
|
|
108
389
|
}
|
|
109
390
|
catch (error) {
|
|
110
391
|
spinner.fail('Pull failed');
|
|
111
392
|
this.error(error);
|
|
112
393
|
}
|
|
113
394
|
finally {
|
|
114
|
-
// Clean up database connection
|
|
395
|
+
// Clean up database connection and reset singletons
|
|
115
396
|
await (0, provider_utils_1.cleanupProvider)();
|
|
397
|
+
(0, singleton_manager_1.resetSyncEngine)();
|
|
398
|
+
// Exit process to prevent background MJ tasks from throwing errors
|
|
399
|
+
process.exit(0);
|
|
116
400
|
}
|
|
117
401
|
}
|
|
402
|
+
/**
|
|
403
|
+
* Find directories containing configuration for the specified entity
|
|
404
|
+
*
|
|
405
|
+
* Recursively searches the current working directory for .mj-sync.json files
|
|
406
|
+
* that specify the given entity name. Returns all matching directories to
|
|
407
|
+
* allow user selection when multiple locations exist.
|
|
408
|
+
*
|
|
409
|
+
* @param entityName - Name of the entity to search for
|
|
410
|
+
* @returns Promise resolving to array of directory paths
|
|
411
|
+
* @private
|
|
412
|
+
*/
|
|
118
413
|
async findEntityDirectories(entityName) {
|
|
119
414
|
const dirs = [];
|
|
120
415
|
// Search for directories with matching entity config
|
|
@@ -134,21 +429,90 @@ class Pull extends core_1.Command {
|
|
|
134
429
|
}
|
|
135
430
|
}
|
|
136
431
|
};
|
|
137
|
-
await searchDirs(
|
|
432
|
+
await searchDirs(config_manager_1.configManager.getOriginalCwd());
|
|
138
433
|
return dirs;
|
|
139
434
|
}
|
|
140
|
-
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
435
|
+
/**
|
|
436
|
+
* Process a single record and save to file
|
|
437
|
+
*
|
|
438
|
+
* Converts a database record into the file format and writes it to disk.
|
|
439
|
+
* This is a wrapper around processRecordData that handles file writing.
|
|
440
|
+
*
|
|
441
|
+
* @param record - Raw database record
|
|
442
|
+
* @param primaryKey - Primary key fields and values
|
|
443
|
+
* @param targetDir - Directory to save the file
|
|
444
|
+
* @param entityConfig - Entity configuration with pull settings
|
|
445
|
+
* @param syncEngine - Sync engine instance
|
|
446
|
+
* @returns Promise that resolves when file is written
|
|
447
|
+
* @private
|
|
448
|
+
*/
|
|
449
|
+
async processRecord(record, primaryKey, targetDir, entityConfig, syncEngine, flags) {
|
|
450
|
+
const recordData = await this.processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, true);
|
|
451
|
+
// Determine file path
|
|
452
|
+
const fileName = this.buildFileName(primaryKey, entityConfig);
|
|
453
|
+
const filePath = path_1.default.join(targetDir, fileName);
|
|
454
|
+
// Write JSON file
|
|
455
|
+
await fs_extra_1.default.writeJson(filePath, recordData, { spaces: 2 });
|
|
456
|
+
}
|
|
457
|
+
/**
|
|
458
|
+
* Process record data for storage
|
|
459
|
+
*
|
|
460
|
+
* Transforms a raw database record into the RecordData format used for file storage.
|
|
461
|
+
* Handles field externalization, related entity pulling, and checksum calculation.
|
|
462
|
+
*
|
|
463
|
+
* @param record - Raw database record
|
|
464
|
+
* @param primaryKey - Primary key fields and values
|
|
465
|
+
* @param targetDir - Directory where files will be saved
|
|
466
|
+
* @param entityConfig - Entity configuration with defaults and settings
|
|
467
|
+
* @param syncEngine - Sync engine for checksum calculation
|
|
468
|
+
* @param flags - Command flags
|
|
469
|
+
* @param isNewRecord - Whether this is a new record
|
|
470
|
+
* @param existingRecordData - Existing record data to preserve field selection
|
|
471
|
+
* @returns Promise resolving to formatted RecordData
|
|
472
|
+
* @private
|
|
473
|
+
*/
|
|
474
|
+
async processRecordData(record, primaryKey, targetDir, entityConfig, syncEngine, flags, isNewRecord = true, existingRecordData) {
|
|
475
|
+
// Build record data - we'll restructure at the end for proper ordering
|
|
476
|
+
const fields = {};
|
|
477
|
+
const relatedEntities = {};
|
|
478
|
+
// Debug: Log all fields in first record (only in verbose mode)
|
|
479
|
+
if (flags?.verbose) {
|
|
480
|
+
const recordKeys = Object.keys(record);
|
|
481
|
+
console.log('\n=== DEBUG: Processing record ===');
|
|
482
|
+
console.log('Entity:', entityConfig.entity);
|
|
483
|
+
console.log('Total fields:', recordKeys.length);
|
|
484
|
+
console.log('Field names:', recordKeys.filter(k => !k.startsWith('__mj_')).join(', '));
|
|
485
|
+
console.log('Has TemplateText?:', recordKeys.includes('TemplateText'));
|
|
486
|
+
console.log('externalizeFields config:', entityConfig.pull?.externalizeFields);
|
|
487
|
+
}
|
|
488
|
+
// Get the underlying data from the entity object
|
|
489
|
+
// If it's an entity object, it will have a GetAll() method
|
|
490
|
+
let dataToProcess = record;
|
|
491
|
+
if (typeof record.GetAll === 'function') {
|
|
492
|
+
// It's an entity object, get the underlying data
|
|
493
|
+
dataToProcess = record.GetAll();
|
|
494
|
+
}
|
|
495
|
+
// Get externalize configuration for pattern lookup
|
|
496
|
+
const externalizeConfig = entityConfig.pull?.externalizeFields;
|
|
497
|
+
let externalizeMap = new Map();
|
|
498
|
+
if (externalizeConfig) {
|
|
499
|
+
if (Array.isArray(externalizeConfig)) {
|
|
500
|
+
if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
|
|
501
|
+
// Simple string array
|
|
502
|
+
externalizeConfig.forEach(f => externalizeMap.set(f, undefined));
|
|
503
|
+
}
|
|
504
|
+
else {
|
|
505
|
+
// New pattern format
|
|
506
|
+
externalizeConfig.forEach(item => externalizeMap.set(item.field, item.pattern));
|
|
507
|
+
}
|
|
148
508
|
}
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
509
|
+
else {
|
|
510
|
+
// Object format
|
|
511
|
+
Object.keys(externalizeConfig).forEach(f => externalizeMap.set(f, undefined));
|
|
512
|
+
}
|
|
513
|
+
}
|
|
514
|
+
// Process regular fields from the underlying data
|
|
515
|
+
for (const [fieldName, fieldValue] of Object.entries(dataToProcess)) {
|
|
152
516
|
// Skip primary key fields
|
|
153
517
|
if (primaryKey[fieldName] !== undefined) {
|
|
154
518
|
continue;
|
|
@@ -157,67 +521,433 @@ class Pull extends core_1.Command {
|
|
|
157
521
|
if (fieldName.startsWith('__mj_')) {
|
|
158
522
|
continue;
|
|
159
523
|
}
|
|
524
|
+
// Skip excluded fields
|
|
525
|
+
if (entityConfig.pull?.excludeFields?.includes(fieldName)) {
|
|
526
|
+
continue;
|
|
527
|
+
}
|
|
528
|
+
// Skip fields already externalized
|
|
529
|
+
if (fields[fieldName]) {
|
|
530
|
+
continue;
|
|
531
|
+
}
|
|
532
|
+
// Skip virtual/computed fields - check entity metadata
|
|
533
|
+
const metadata = new core_2.Metadata();
|
|
534
|
+
const entityInfo = metadata.EntityByName(entityConfig.entity);
|
|
535
|
+
if (entityInfo) {
|
|
536
|
+
const fieldInfo = entityInfo.Fields.find(f => f.Name === fieldName);
|
|
537
|
+
if (fieldInfo && !fieldInfo.IsVirtual) {
|
|
538
|
+
// Field exists in metadata and is not virtual, keep it
|
|
539
|
+
}
|
|
540
|
+
else if (fieldInfo && fieldInfo.IsVirtual) {
|
|
541
|
+
// Skip virtual fields
|
|
542
|
+
continue;
|
|
543
|
+
}
|
|
544
|
+
else if (!fieldInfo) {
|
|
545
|
+
// Field not in metadata at all
|
|
546
|
+
// Check if it's explicitly configured for externalization, lookup, or exclusion
|
|
547
|
+
const isConfiguredField = entityConfig.pull?.externalizeFields?.includes(fieldName) ||
|
|
548
|
+
entityConfig.pull?.lookupFields?.[fieldName] ||
|
|
549
|
+
entityConfig.pull?.excludeFields?.includes(fieldName);
|
|
550
|
+
if (!isConfiguredField) {
|
|
551
|
+
// Skip fields not in metadata and not explicitly configured
|
|
552
|
+
continue;
|
|
553
|
+
}
|
|
554
|
+
// Otherwise, allow the field to be processed since it's explicitly configured
|
|
555
|
+
}
|
|
556
|
+
}
|
|
557
|
+
// Check if this field should be converted to a lookup
|
|
558
|
+
const lookupConfig = entityConfig.pull?.lookupFields?.[fieldName];
|
|
559
|
+
if (lookupConfig && fieldValue) {
|
|
560
|
+
// Convert foreign key to @lookup reference
|
|
561
|
+
const lookupValue = await this.convertToLookup(fieldValue, lookupConfig.entity, lookupConfig.field, syncEngine);
|
|
562
|
+
if (lookupValue) {
|
|
563
|
+
fields[fieldName] = lookupValue;
|
|
564
|
+
continue;
|
|
565
|
+
}
|
|
566
|
+
}
|
|
160
567
|
// Check if this is an external file field
|
|
161
568
|
if (await this.shouldExternalizeField(fieldName, fieldValue, entityConfig)) {
|
|
162
|
-
|
|
163
|
-
|
|
569
|
+
// Check if this field is preserved and already has a @file: reference
|
|
570
|
+
const isPreservedField = entityConfig.pull?.preserveFields?.includes(fieldName);
|
|
571
|
+
const existingFieldValue = existingRecordData?.fields?.[fieldName];
|
|
572
|
+
if (isPreservedField && existingFieldValue && typeof existingFieldValue === 'string' && existingFieldValue.startsWith('@file:')) {
|
|
573
|
+
// Field is preserved and has existing @file: reference - update the existing file
|
|
574
|
+
const existingFilePath = existingFieldValue.replace('@file:', '');
|
|
575
|
+
const fullPath = path_1.default.join(targetDir, existingFilePath);
|
|
576
|
+
// Ensure directory exists
|
|
577
|
+
await fs_extra_1.default.ensureDir(path_1.default.dirname(fullPath));
|
|
578
|
+
// Write the content to the existing file path
|
|
579
|
+
await fs_extra_1.default.writeFile(fullPath, String(fieldValue), 'utf-8');
|
|
580
|
+
// Keep the existing @file: reference
|
|
581
|
+
fields[fieldName] = existingFieldValue;
|
|
582
|
+
}
|
|
583
|
+
else {
|
|
584
|
+
// Normal externalization - create new file
|
|
585
|
+
const pattern = externalizeMap.get(fieldName);
|
|
586
|
+
const fileName = await this.createExternalFile(targetDir, record, primaryKey, fieldName, String(fieldValue), entityConfig, pattern);
|
|
587
|
+
fields[fieldName] = fileName; // fileName already includes @file: prefix if pattern-based
|
|
588
|
+
}
|
|
589
|
+
}
|
|
590
|
+
else {
|
|
591
|
+
fields[fieldName] = fieldValue;
|
|
592
|
+
}
|
|
593
|
+
}
|
|
594
|
+
// Now check for externalized fields that might be computed properties
|
|
595
|
+
// We process ALL externalized fields, including those not in the data
|
|
596
|
+
if (entityConfig.pull?.externalizeFields && typeof record.GetAll === 'function') {
|
|
597
|
+
const externalizeConfig = entityConfig.pull.externalizeFields;
|
|
598
|
+
// Normalize configuration to array format
|
|
599
|
+
let externalizeItems = [];
|
|
600
|
+
if (Array.isArray(externalizeConfig)) {
|
|
601
|
+
if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
|
|
602
|
+
// Simple string array
|
|
603
|
+
externalizeItems = externalizeConfig.map(f => ({ field: f }));
|
|
604
|
+
}
|
|
605
|
+
else {
|
|
606
|
+
// Already in the new format
|
|
607
|
+
externalizeItems = externalizeConfig;
|
|
608
|
+
}
|
|
164
609
|
}
|
|
165
610
|
else {
|
|
166
|
-
|
|
611
|
+
// Object format
|
|
612
|
+
externalizeItems = Object.entries(externalizeConfig).map(([field, config]) => ({
|
|
613
|
+
field,
|
|
614
|
+
pattern: undefined // Will use default pattern
|
|
615
|
+
}));
|
|
616
|
+
}
|
|
617
|
+
// Get the keys from the underlying data to identify computed properties
|
|
618
|
+
const dataKeys = Object.keys(dataToProcess);
|
|
619
|
+
for (const externalItem of externalizeItems) {
|
|
620
|
+
const externalField = externalItem.field;
|
|
621
|
+
// Only process fields that are NOT in the underlying data
|
|
622
|
+
// (these are likely computed properties)
|
|
623
|
+
if (dataKeys.includes(externalField)) {
|
|
624
|
+
continue; // This was already processed in the main loop
|
|
625
|
+
}
|
|
626
|
+
try {
|
|
627
|
+
// Use bracket notation to access properties (including getters)
|
|
628
|
+
const fieldValue = record[externalField];
|
|
629
|
+
if (fieldValue !== undefined && fieldValue !== null && fieldValue !== '') {
|
|
630
|
+
if (await this.shouldExternalizeField(externalField, fieldValue, entityConfig)) {
|
|
631
|
+
// Check if this field is preserved and already has a @file: reference
|
|
632
|
+
const isPreservedField = entityConfig.pull?.preserveFields?.includes(externalField);
|
|
633
|
+
const existingFieldValue = existingRecordData?.fields?.[externalField];
|
|
634
|
+
if (isPreservedField && existingFieldValue && typeof existingFieldValue === 'string' && existingFieldValue.startsWith('@file:')) {
|
|
635
|
+
// Field is preserved and has existing @file: reference - update the existing file
|
|
636
|
+
const existingFilePath = existingFieldValue.replace('@file:', '');
|
|
637
|
+
const fullPath = path_1.default.join(targetDir, existingFilePath);
|
|
638
|
+
// Ensure directory exists
|
|
639
|
+
await fs_extra_1.default.ensureDir(path_1.default.dirname(fullPath));
|
|
640
|
+
// Write the content to the existing file path
|
|
641
|
+
await fs_extra_1.default.writeFile(fullPath, String(fieldValue), 'utf-8');
|
|
642
|
+
// Keep the existing @file: reference
|
|
643
|
+
fields[externalField] = existingFieldValue;
|
|
644
|
+
}
|
|
645
|
+
else {
|
|
646
|
+
// Normal externalization - create new file
|
|
647
|
+
const fileName = await this.createExternalFile(targetDir, record, primaryKey, externalField, String(fieldValue), entityConfig, externalItem.pattern);
|
|
648
|
+
fields[externalField] = fileName; // fileName already includes @file: prefix if pattern-based
|
|
649
|
+
}
|
|
650
|
+
}
|
|
651
|
+
else {
|
|
652
|
+
// Include the field value if not externalized
|
|
653
|
+
fields[externalField] = fieldValue;
|
|
654
|
+
}
|
|
655
|
+
}
|
|
656
|
+
}
|
|
657
|
+
catch (error) {
|
|
658
|
+
// Property might not exist, that's okay
|
|
659
|
+
if (flags?.verbose) {
|
|
660
|
+
console.log(`Could not get property ${externalField}: ${error}`);
|
|
661
|
+
}
|
|
662
|
+
}
|
|
167
663
|
}
|
|
168
664
|
}
|
|
169
665
|
// Pull related entities if configured
|
|
170
666
|
if (entityConfig.pull?.relatedEntities) {
|
|
171
|
-
|
|
667
|
+
const related = await this.pullRelatedEntities(record, entityConfig.pull.relatedEntities, syncEngine, entityConfig, flags);
|
|
668
|
+
Object.assign(relatedEntities, related);
|
|
669
|
+
}
|
|
670
|
+
// Get entity metadata to check defaults
|
|
671
|
+
const metadata = new core_2.Metadata();
|
|
672
|
+
const entityInfo = metadata.EntityByName(entityConfig.entity);
|
|
673
|
+
// Filter out null values and fields matching their defaults
|
|
674
|
+
const cleanedFields = {};
|
|
675
|
+
// Get the set of fields that existed in the original record (if updating)
|
|
676
|
+
const existingFieldNames = existingRecordData?.fields ? new Set(Object.keys(existingRecordData.fields)) : new Set();
|
|
677
|
+
for (const [fieldName, fieldValue] of Object.entries(fields)) {
|
|
678
|
+
let includeField = false;
|
|
679
|
+
if (!isNewRecord && existingFieldNames.has(fieldName)) {
|
|
680
|
+
// For updates: Always preserve fields that existed in the original record
|
|
681
|
+
includeField = true;
|
|
682
|
+
}
|
|
683
|
+
else {
|
|
684
|
+
// For new records or new fields in existing records:
|
|
685
|
+
// Skip null/undefined/empty string values
|
|
686
|
+
if (fieldValue === null || fieldValue === undefined || fieldValue === '') {
|
|
687
|
+
includeField = false;
|
|
688
|
+
}
|
|
689
|
+
else if (entityInfo) {
|
|
690
|
+
// Check if value matches the field's default
|
|
691
|
+
const fieldInfo = entityInfo.Fields.find(f => f.Name === fieldName);
|
|
692
|
+
if (fieldInfo && fieldInfo.DefaultValue !== null && fieldInfo.DefaultValue !== undefined) {
|
|
693
|
+
// Compare with default value
|
|
694
|
+
if (fieldValue === fieldInfo.DefaultValue) {
|
|
695
|
+
includeField = false;
|
|
696
|
+
}
|
|
697
|
+
// Special handling for boolean defaults (might be stored as strings)
|
|
698
|
+
else if (typeof fieldValue === 'boolean' &&
|
|
699
|
+
(fieldInfo.DefaultValue === (fieldValue ? '1' : '0') ||
|
|
700
|
+
fieldInfo.DefaultValue === (fieldValue ? 'true' : 'false'))) {
|
|
701
|
+
includeField = false;
|
|
702
|
+
}
|
|
703
|
+
// Special handling for numeric defaults that might be strings
|
|
704
|
+
else if (typeof fieldValue === 'number' && String(fieldValue) === String(fieldInfo.DefaultValue)) {
|
|
705
|
+
includeField = false;
|
|
706
|
+
}
|
|
707
|
+
else {
|
|
708
|
+
includeField = true;
|
|
709
|
+
}
|
|
710
|
+
}
|
|
711
|
+
else {
|
|
712
|
+
// No default value defined, include if not null/empty
|
|
713
|
+
includeField = true;
|
|
714
|
+
}
|
|
715
|
+
}
|
|
716
|
+
else {
|
|
717
|
+
// No entity info, include if not null/empty
|
|
718
|
+
includeField = true;
|
|
719
|
+
}
|
|
720
|
+
}
|
|
721
|
+
if (includeField) {
|
|
722
|
+
cleanedFields[fieldName] = fieldValue;
|
|
723
|
+
}
|
|
724
|
+
}
|
|
725
|
+
// Calculate checksum on cleaned fields
|
|
726
|
+
const checksum = syncEngine.calculateChecksum(cleanedFields);
|
|
727
|
+
// Build the final record data with proper ordering
|
|
728
|
+
// Use a new object to ensure property order
|
|
729
|
+
const recordData = {};
|
|
730
|
+
// 1. User fields first
|
|
731
|
+
recordData.fields = cleanedFields;
|
|
732
|
+
// 2. Related entities (if any)
|
|
733
|
+
if (Object.keys(relatedEntities).length > 0) {
|
|
734
|
+
recordData.relatedEntities = relatedEntities;
|
|
735
|
+
}
|
|
736
|
+
// 3. Primary key (system field)
|
|
737
|
+
recordData.primaryKey = primaryKey;
|
|
738
|
+
// 4. Sync metadata (system field)
|
|
739
|
+
recordData.sync = {
|
|
740
|
+
lastModified: new Date().toISOString(),
|
|
741
|
+
checksum: checksum
|
|
742
|
+
};
|
|
743
|
+
return recordData;
|
|
744
|
+
}
|
|
745
|
+
/**
|
|
746
|
+
* Convert a foreign key value to a @lookup reference
|
|
747
|
+
*
|
|
748
|
+
* Looks up the related record and creates a @lookup string that can be
|
|
749
|
+
* resolved during push operations.
|
|
750
|
+
*
|
|
751
|
+
* @param foreignKeyValue - The foreign key value (ID)
|
|
752
|
+
* @param targetEntity - Name of the target entity
|
|
753
|
+
* @param targetField - Field in target entity to use for lookup
|
|
754
|
+
* @param syncEngine - Sync engine instance
|
|
755
|
+
* @returns @lookup string or null if lookup fails
|
|
756
|
+
* @private
|
|
757
|
+
*/
|
|
758
|
+
async convertToLookup(foreignKeyValue, targetEntity, targetField, syncEngine) {
|
|
759
|
+
try {
|
|
760
|
+
// Get the related record
|
|
761
|
+
const metadata = new core_2.Metadata();
|
|
762
|
+
const targetEntityInfo = metadata.EntityByName(targetEntity);
|
|
763
|
+
if (!targetEntityInfo) {
|
|
764
|
+
this.warn(`Could not find entity ${targetEntity} for lookup`);
|
|
765
|
+
return null;
|
|
766
|
+
}
|
|
767
|
+
// Load the related record
|
|
768
|
+
const primaryKeyField = targetEntityInfo.PrimaryKeys?.[0]?.Name || 'ID';
|
|
769
|
+
const rv = new core_2.RunView();
|
|
770
|
+
const result = await rv.RunView({
|
|
771
|
+
EntityName: targetEntity,
|
|
772
|
+
ExtraFilter: `${primaryKeyField} = '${String(foreignKeyValue).replace(/'/g, "''")}'`,
|
|
773
|
+
ResultType: 'entity_object'
|
|
774
|
+
}, (0, provider_utils_1.getSystemUser)());
|
|
775
|
+
if (!result.Success || result.Results.length === 0) {
|
|
776
|
+
this.warn(`Could not find ${targetEntity} with ${primaryKeyField} = ${foreignKeyValue}`);
|
|
777
|
+
return null;
|
|
778
|
+
}
|
|
779
|
+
const relatedRecord = result.Results[0];
|
|
780
|
+
const lookupValue = relatedRecord[targetField];
|
|
781
|
+
if (!lookupValue) {
|
|
782
|
+
this.warn(`${targetEntity} record missing ${targetField} field`);
|
|
783
|
+
return null;
|
|
784
|
+
}
|
|
785
|
+
// Return the @lookup reference
|
|
786
|
+
return `@lookup:${targetEntity}.${targetField}=${lookupValue}`;
|
|
787
|
+
}
|
|
788
|
+
catch (error) {
|
|
789
|
+
this.warn(`Failed to create lookup for ${targetEntity}: ${error}`);
|
|
790
|
+
return null;
|
|
172
791
|
}
|
|
173
|
-
// Calculate checksum
|
|
174
|
-
recordData.sync.checksum = syncEngine.calculateChecksum(recordData.fields);
|
|
175
|
-
// Determine file path
|
|
176
|
-
const fileName = this.buildFileName(primaryKey, entityConfig);
|
|
177
|
-
const filePath = path_1.default.join(targetDir, fileName);
|
|
178
|
-
// Write JSON file
|
|
179
|
-
await fs_extra_1.default.writeJson(filePath, recordData, { spaces: 2 });
|
|
180
792
|
}
|
|
793
|
+
/**
|
|
794
|
+
* Determine if a field should be saved to an external file
|
|
795
|
+
*
|
|
796
|
+
* Checks if a field is configured for externalization or contains substantial
|
|
797
|
+
* text content that would be better stored in a separate file.
|
|
798
|
+
*
|
|
799
|
+
* @param fieldName - Name of the field to check
|
|
800
|
+
* @param fieldValue - Value of the field
|
|
801
|
+
* @param entityConfig - Entity configuration with externalization settings
|
|
802
|
+
* @returns Promise resolving to true if field should be externalized
|
|
803
|
+
* @private
|
|
804
|
+
*/
|
|
181
805
|
async shouldExternalizeField(fieldName, fieldValue, entityConfig) {
|
|
182
|
-
// Only externalize string fields
|
|
806
|
+
// Only externalize string fields
|
|
183
807
|
if (typeof fieldValue !== 'string') {
|
|
184
808
|
return false;
|
|
185
809
|
}
|
|
186
|
-
// Check if
|
|
187
|
-
const
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
// Only externalize if content is substantial (more than 100 chars or has newlines)
|
|
191
|
-
return fieldValue.length > 100 || fieldValue.includes('\n');
|
|
810
|
+
// Check if field is configured for externalization
|
|
811
|
+
const externalizeConfig = entityConfig.pull?.externalizeFields;
|
|
812
|
+
if (!externalizeConfig) {
|
|
813
|
+
return false;
|
|
192
814
|
}
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
815
|
+
if (Array.isArray(externalizeConfig)) {
|
|
816
|
+
if (externalizeConfig.length > 0 && typeof externalizeConfig[0] === 'string') {
|
|
817
|
+
// Simple string array
|
|
818
|
+
return externalizeConfig.includes(fieldName);
|
|
819
|
+
}
|
|
820
|
+
else {
|
|
821
|
+
// New pattern format
|
|
822
|
+
return externalizeConfig
|
|
823
|
+
.some(item => item.field === fieldName);
|
|
824
|
+
}
|
|
200
825
|
}
|
|
201
|
-
else
|
|
202
|
-
|
|
203
|
-
|
|
826
|
+
else {
|
|
827
|
+
// Object format
|
|
828
|
+
return fieldName in externalizeConfig;
|
|
829
|
+
}
|
|
830
|
+
}
|
|
831
|
+
/**
|
|
832
|
+
* Create an external file for a field value
|
|
833
|
+
*
|
|
834
|
+
* Saves large text content to a separate file and returns the filename.
|
|
835
|
+
* Automatically determines appropriate file extension based on field name
|
|
836
|
+
* and content type (e.g., .md for prompts, .html for templates).
|
|
837
|
+
* Uses the entity's name field for the filename if available.
|
|
838
|
+
*
|
|
839
|
+
* @param targetDir - Directory to save the file
|
|
840
|
+
* @param record - Full record to extract name field from
|
|
841
|
+
* @param primaryKey - Primary key for filename generation fallback
|
|
842
|
+
* @param fieldName - Name of the field being externalized
|
|
843
|
+
* @param content - Content to write to the file
|
|
844
|
+
* @param entityConfig - Entity configuration
|
|
845
|
+
* @returns Promise resolving to the created filename
|
|
846
|
+
* @private
|
|
847
|
+
*/
|
|
848
|
+
async createExternalFile(targetDir, record, primaryKey, fieldName, content, entityConfig, pattern) {
|
|
849
|
+
// If pattern is provided, use it to generate the full path
|
|
850
|
+
if (pattern) {
|
|
851
|
+
// Replace placeholders in the pattern
|
|
852
|
+
let resolvedPattern = pattern;
|
|
853
|
+
// Get entity metadata for field lookups
|
|
854
|
+
const metadata = new core_2.Metadata();
|
|
855
|
+
const entityInfo = metadata.EntityByName(entityConfig.entity);
|
|
856
|
+
// Replace {Name} with the entity's name field value
|
|
857
|
+
if (entityInfo) {
|
|
858
|
+
const nameField = entityInfo.Fields.find(f => f.IsNameField);
|
|
859
|
+
if (nameField && record[nameField.Name]) {
|
|
860
|
+
const nameValue = String(record[nameField.Name])
|
|
861
|
+
.replace(/[^a-zA-Z0-9\-_ ]/g, '') // Remove disallowed characters
|
|
862
|
+
.replace(/\s+/g, '-') // Replace spaces with -
|
|
863
|
+
.toLowerCase(); // Make lowercase
|
|
864
|
+
resolvedPattern = resolvedPattern.replace(/{Name}/g, nameValue);
|
|
865
|
+
}
|
|
204
866
|
}
|
|
205
|
-
|
|
206
|
-
|
|
867
|
+
// Replace {ID} with the primary key
|
|
868
|
+
const idValue = primaryKey.ID || Object.values(primaryKey)[0];
|
|
869
|
+
if (idValue) {
|
|
870
|
+
resolvedPattern = resolvedPattern.replace(/{ID}/g, String(idValue).toLowerCase());
|
|
207
871
|
}
|
|
872
|
+
// Replace {FieldName} with the current field name
|
|
873
|
+
resolvedPattern = resolvedPattern.replace(/{FieldName}/g, fieldName.toLowerCase());
|
|
874
|
+
// Replace any other {field} placeholders with field values from the record
|
|
875
|
+
const placeholderRegex = /{(\w+)}/g;
|
|
876
|
+
resolvedPattern = resolvedPattern.replace(placeholderRegex, (match, fieldName) => {
|
|
877
|
+
const value = record[fieldName];
|
|
878
|
+
if (value !== undefined && value !== null) {
|
|
879
|
+
return String(value)
|
|
880
|
+
.replace(/[^a-zA-Z0-9\-_ ]/g, '')
|
|
881
|
+
.replace(/\s+/g, '-')
|
|
882
|
+
.toLowerCase();
|
|
883
|
+
}
|
|
884
|
+
return match; // Keep placeholder if field not found
|
|
885
|
+
});
|
|
886
|
+
// Extract the file path from the pattern
|
|
887
|
+
const filePath = path_1.default.join(targetDir, resolvedPattern.replace('@file:', ''));
|
|
888
|
+
// Ensure directory exists
|
|
889
|
+
await fs_extra_1.default.ensureDir(path_1.default.dirname(filePath));
|
|
890
|
+
// Write the file
|
|
891
|
+
await fs_extra_1.default.writeFile(filePath, content, 'utf-8');
|
|
892
|
+
// Return the pattern as-is (it includes @file: prefix)
|
|
893
|
+
return resolvedPattern;
|
|
208
894
|
}
|
|
209
|
-
|
|
210
|
-
|
|
895
|
+
// Original logic for non-pattern based externalization
|
|
896
|
+
let extension = '.md'; // default to markdown
|
|
897
|
+
const externalizeConfig = entityConfig.pull?.externalizeFields;
|
|
898
|
+
if (externalizeConfig && !Array.isArray(externalizeConfig) && externalizeConfig[fieldName]?.extension) {
|
|
899
|
+
extension = externalizeConfig[fieldName].extension;
|
|
900
|
+
// Ensure extension starts with a dot
|
|
901
|
+
if (!extension.startsWith('.')) {
|
|
902
|
+
extension = '.' + extension;
|
|
903
|
+
}
|
|
211
904
|
}
|
|
212
|
-
|
|
213
|
-
|
|
905
|
+
// Try to use the entity's name field for the filename
|
|
906
|
+
let baseFileName;
|
|
907
|
+
// Get entity metadata to find the name field
|
|
908
|
+
const metadata = new core_2.Metadata();
|
|
909
|
+
const entityInfo = metadata.EntityByName(entityConfig.entity);
|
|
910
|
+
if (entityInfo) {
|
|
911
|
+
// Find the name field
|
|
912
|
+
const nameField = entityInfo.Fields.find(f => f.IsNameField);
|
|
913
|
+
if (nameField && record[nameField.Name]) {
|
|
914
|
+
// Use the name field value, sanitized for filesystem
|
|
915
|
+
const nameValue = String(record[nameField.Name]);
|
|
916
|
+
// Remove disallowed characters (don't replace with _), replace spaces with -, and lowercase
|
|
917
|
+
baseFileName = nameValue
|
|
918
|
+
.replace(/[^a-zA-Z0-9\-_ ]/g, '') // Remove disallowed characters
|
|
919
|
+
.replace(/\s+/g, '-') // Replace spaces with -
|
|
920
|
+
.toLowerCase(); // Make lowercase
|
|
921
|
+
}
|
|
922
|
+
else {
|
|
923
|
+
// Fallback to primary key
|
|
924
|
+
baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
|
|
925
|
+
}
|
|
214
926
|
}
|
|
215
|
-
|
|
216
|
-
|
|
927
|
+
else {
|
|
928
|
+
// Fallback to primary key
|
|
929
|
+
baseFileName = this.buildFileName(primaryKey, null).replace('.json', '');
|
|
930
|
+
}
|
|
931
|
+
// Remove dot prefix from baseFileName if it exists (it will be a dot-prefixed name from buildFileName)
|
|
932
|
+
const cleanBaseFileName = baseFileName.startsWith('.') ? baseFileName.substring(1) : baseFileName;
|
|
933
|
+
const fileName = `.${cleanBaseFileName}.${fieldName.toLowerCase()}${extension}`;
|
|
217
934
|
const filePath = path_1.default.join(targetDir, fileName);
|
|
218
935
|
await fs_extra_1.default.writeFile(filePath, content, 'utf-8');
|
|
219
936
|
return fileName;
|
|
220
937
|
}
|
|
938
|
+
/**
|
|
939
|
+
* Build a filename from primary key values
|
|
940
|
+
*
|
|
941
|
+
* Creates a safe filename based on the entity's primary key values.
|
|
942
|
+
* Handles GUIDs by using first 8 characters, sanitizes special characters,
|
|
943
|
+
* and creates composite names for multi-field keys.
|
|
944
|
+
* Files are prefixed with a dot to follow the metadata file convention.
|
|
945
|
+
*
|
|
946
|
+
* @param primaryKey - Primary key fields and values
|
|
947
|
+
* @param entityConfig - Entity configuration (for future extension)
|
|
948
|
+
* @returns Filename with .json extension
|
|
949
|
+
* @private
|
|
950
|
+
*/
|
|
221
951
|
buildFileName(primaryKey, entityConfig) {
|
|
222
952
|
// Use primary key values to build filename
|
|
223
953
|
const keys = Object.values(primaryKey);
|
|
@@ -225,27 +955,50 @@ class Pull extends core_1.Command {
|
|
|
225
955
|
// Single string key - use as base if it's a guid
|
|
226
956
|
const key = keys[0];
|
|
227
957
|
if (key.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i)) {
|
|
228
|
-
// It's a GUID, use first 8 chars
|
|
229
|
-
return
|
|
958
|
+
// It's a GUID, use first 8 chars, prefixed with dot, lowercase
|
|
959
|
+
return `.${key.substring(0, 8).toLowerCase()}.json`;
|
|
230
960
|
}
|
|
231
|
-
// Use the whole key if not too long
|
|
961
|
+
// Use the whole key if not too long, prefixed with dot
|
|
232
962
|
if (key.length <= 50) {
|
|
233
|
-
return
|
|
963
|
+
return `.${key.replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()}.json`;
|
|
234
964
|
}
|
|
235
965
|
}
|
|
236
|
-
// Multiple keys or numeric - create composite name
|
|
237
|
-
return keys.map(k => String(k).replace(/[^a-zA-Z0-9
|
|
966
|
+
// Multiple keys or numeric - create composite name, prefixed with dot
|
|
967
|
+
return '.' + keys.map(k => String(k).replace(/[^a-zA-Z0-9\-_]/g, '').toLowerCase()).join('-') + '.json';
|
|
238
968
|
}
|
|
239
|
-
|
|
969
|
+
/**
|
|
970
|
+
* Pull related entities for a parent record
|
|
971
|
+
*
|
|
972
|
+
* Retrieves child records that have foreign key relationships to the parent.
|
|
973
|
+
* Converts foreign key values to @parent references and supports nested
|
|
974
|
+
* related entities for deep object graphs.
|
|
975
|
+
*
|
|
976
|
+
* @param parentRecord - Parent entity record
|
|
977
|
+
* @param relatedConfig - Configuration for related entities to pull
|
|
978
|
+
* @param syncEngine - Sync engine instance
|
|
979
|
+
* @returns Promise resolving to map of entity names to related records
|
|
980
|
+
* @private
|
|
981
|
+
*/
|
|
982
|
+
async pullRelatedEntities(parentRecord, relatedConfig, syncEngine, entityConfig, flags) {
|
|
240
983
|
const relatedEntities = {};
|
|
241
984
|
for (const [key, config] of Object.entries(relatedConfig)) {
|
|
242
985
|
try {
|
|
243
|
-
// Get
|
|
244
|
-
const
|
|
986
|
+
// Get entity metadata to find primary key
|
|
987
|
+
const metadata = new core_2.Metadata();
|
|
988
|
+
const parentEntity = metadata.EntityByName(entityConfig.entity);
|
|
989
|
+
if (!parentEntity) {
|
|
990
|
+
this.warn(`Could not find entity metadata for ${entityConfig.entity}`);
|
|
991
|
+
continue;
|
|
992
|
+
}
|
|
993
|
+
// Get the parent's primary key value (usually ID)
|
|
994
|
+
const primaryKeyField = parentEntity.PrimaryKeys?.[0]?.Name || 'ID';
|
|
995
|
+
const parentKeyValue = parentRecord[primaryKeyField];
|
|
245
996
|
if (!parentKeyValue) {
|
|
246
|
-
|
|
997
|
+
this.warn(`Parent record missing primary key field ${primaryKeyField}`);
|
|
998
|
+
continue;
|
|
247
999
|
}
|
|
248
1000
|
// Build filter for related records
|
|
1001
|
+
// The foreignKey is the field in the CHILD entity that points to this parent
|
|
249
1002
|
let filter = `${config.foreignKey} = '${String(parentKeyValue).replace(/'/g, "''")}'`;
|
|
250
1003
|
if (config.filter) {
|
|
251
1004
|
filter += ` AND (${config.filter})`;
|
|
@@ -254,39 +1007,72 @@ class Pull extends core_1.Command {
|
|
|
254
1007
|
const rv = new core_2.RunView();
|
|
255
1008
|
const result = await rv.RunView({
|
|
256
1009
|
EntityName: config.entity,
|
|
257
|
-
ExtraFilter: filter
|
|
1010
|
+
ExtraFilter: filter,
|
|
1011
|
+
ResultType: 'entity_object'
|
|
258
1012
|
}, (0, provider_utils_1.getSystemUser)());
|
|
259
1013
|
if (!result.Success) {
|
|
260
1014
|
this.warn(`Failed to pull related ${config.entity}: ${result.ErrorMessage}`);
|
|
261
1015
|
continue;
|
|
262
1016
|
}
|
|
1017
|
+
// Get child entity metadata
|
|
1018
|
+
const childEntity = metadata.EntityByName(config.entity);
|
|
1019
|
+
if (!childEntity) {
|
|
1020
|
+
this.warn(`Could not find entity metadata for ${config.entity}`);
|
|
1021
|
+
continue;
|
|
1022
|
+
}
|
|
1023
|
+
// Check if we need to wait for async property loading for related entities
|
|
1024
|
+
if (config.externalizeFields && result.Results.length > 0) {
|
|
1025
|
+
let fieldsToExternalize = [];
|
|
1026
|
+
if (Array.isArray(config.externalizeFields)) {
|
|
1027
|
+
if (config.externalizeFields.length > 0 && typeof config.externalizeFields[0] === 'string') {
|
|
1028
|
+
// Simple string array
|
|
1029
|
+
fieldsToExternalize = config.externalizeFields;
|
|
1030
|
+
}
|
|
1031
|
+
else {
|
|
1032
|
+
// New pattern format
|
|
1033
|
+
fieldsToExternalize = config.externalizeFields
|
|
1034
|
+
.map(item => item.field);
|
|
1035
|
+
}
|
|
1036
|
+
}
|
|
1037
|
+
else {
|
|
1038
|
+
// Object format
|
|
1039
|
+
fieldsToExternalize = Object.keys(config.externalizeFields);
|
|
1040
|
+
}
|
|
1041
|
+
// Get all field names from entity metadata
|
|
1042
|
+
const metadataFieldNames = childEntity.Fields.map(f => f.Name);
|
|
1043
|
+
// Check if any externalized fields are NOT in metadata (likely computed properties)
|
|
1044
|
+
const computedFields = fieldsToExternalize.filter(f => !metadataFieldNames.includes(f));
|
|
1045
|
+
if (computedFields.length > 0) {
|
|
1046
|
+
console.log(`Waiting 5 seconds for async property loading in related entity ${config.entity} (${computedFields.join(', ')})...`);
|
|
1047
|
+
await new Promise(resolve => setTimeout(resolve, 5000));
|
|
1048
|
+
}
|
|
1049
|
+
}
|
|
263
1050
|
// Process each related record
|
|
264
1051
|
const relatedRecords = [];
|
|
265
1052
|
for (const relatedRecord of result.Results) {
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
for (const [fieldName, fieldValue] of Object.entries(relatedRecord)) {
|
|
271
|
-
// Skip internal fields
|
|
272
|
-
if (fieldName.startsWith('__mj_')) {
|
|
273
|
-
continue;
|
|
274
|
-
}
|
|
275
|
-
// Convert foreign key reference to @parent
|
|
276
|
-
if (fieldName === config.foreignKey) {
|
|
277
|
-
const parentFieldName = this.findParentField(parentRecord, parentKeyValue);
|
|
278
|
-
if (parentFieldName) {
|
|
279
|
-
recordData.fields[fieldName] = `@parent:${parentFieldName}`;
|
|
280
|
-
}
|
|
281
|
-
continue;
|
|
282
|
-
}
|
|
283
|
-
recordData.fields[fieldName] = fieldValue;
|
|
1053
|
+
// Build primary key for the related record
|
|
1054
|
+
const relatedPrimaryKey = {};
|
|
1055
|
+
for (const pk of childEntity.PrimaryKeys) {
|
|
1056
|
+
relatedPrimaryKey[pk.Name] = relatedRecord[pk.Name];
|
|
284
1057
|
}
|
|
285
|
-
//
|
|
286
|
-
|
|
287
|
-
|
|
1058
|
+
// Process the related record using the same logic as parent records
|
|
1059
|
+
const relatedData = await this.processRecordData(relatedRecord, relatedPrimaryKey, '', // Not used for related entities since we don't externalize their fields
|
|
1060
|
+
{
|
|
1061
|
+
entity: config.entity,
|
|
1062
|
+
pull: {
|
|
1063
|
+
excludeFields: config.excludeFields || entityConfig.pull?.excludeFields,
|
|
1064
|
+
lookupFields: config.lookupFields || entityConfig.pull?.lookupFields,
|
|
1065
|
+
externalizeFields: config.externalizeFields,
|
|
1066
|
+
relatedEntities: config.relatedEntities
|
|
1067
|
+
}
|
|
1068
|
+
}, syncEngine, flags, true);
|
|
1069
|
+
// Convert foreign key reference to @parent
|
|
1070
|
+
if (relatedData.fields[config.foreignKey]) {
|
|
1071
|
+
relatedData.fields[config.foreignKey] = `@parent:${primaryKeyField}`;
|
|
288
1072
|
}
|
|
289
|
-
|
|
1073
|
+
// The processRecordData method already filters nulls and defaults
|
|
1074
|
+
// No need to do it again here
|
|
1075
|
+
relatedRecords.push(relatedData);
|
|
290
1076
|
}
|
|
291
1077
|
if (relatedRecords.length > 0) {
|
|
292
1078
|
relatedEntities[key] = relatedRecords;
|
|
@@ -298,6 +1084,18 @@ class Pull extends core_1.Command {
|
|
|
298
1084
|
}
|
|
299
1085
|
return relatedEntities;
|
|
300
1086
|
}
|
|
1087
|
+
/**
|
|
1088
|
+
* Find which field in the parent record contains a specific value
|
|
1089
|
+
*
|
|
1090
|
+
* Used to convert foreign key references to @parent references by finding
|
|
1091
|
+
* the parent field that contains the foreign key value. Typically finds
|
|
1092
|
+
* the primary key field but can match any field.
|
|
1093
|
+
*
|
|
1094
|
+
* @param parentRecord - Parent record to search
|
|
1095
|
+
* @param value - Value to search for
|
|
1096
|
+
* @returns Field name containing the value, or null if not found
|
|
1097
|
+
* @private
|
|
1098
|
+
*/
|
|
301
1099
|
findParentField(parentRecord, value) {
|
|
302
1100
|
// Find which field in the parent contains this value
|
|
303
1101
|
// Typically this will be the primary key field
|
|
@@ -308,6 +1106,189 @@ class Pull extends core_1.Command {
|
|
|
308
1106
|
}
|
|
309
1107
|
return null;
|
|
310
1108
|
}
|
|
1109
|
+
/**
|
|
1110
|
+
* Find existing files in a directory matching a pattern
|
|
1111
|
+
*
|
|
1112
|
+
* Searches for files that match the configured file pattern, used to identify
|
|
1113
|
+
* which records already exist locally for smart update functionality.
|
|
1114
|
+
*
|
|
1115
|
+
* @param dir - Directory to search in
|
|
1116
|
+
* @param pattern - Glob pattern to match files (e.g., "*.json")
|
|
1117
|
+
* @returns Promise resolving to array of file paths
|
|
1118
|
+
* @private
|
|
1119
|
+
*/
|
|
1120
|
+
async findExistingFiles(dir, pattern) {
|
|
1121
|
+
const files = [];
|
|
1122
|
+
try {
|
|
1123
|
+
const entries = await fs_extra_1.default.readdir(dir, { withFileTypes: true });
|
|
1124
|
+
for (const entry of entries) {
|
|
1125
|
+
if (entry.isFile()) {
|
|
1126
|
+
const fileName = entry.name;
|
|
1127
|
+
// Simple pattern matching - could be enhanced with proper glob support
|
|
1128
|
+
if (pattern === '*.json' && fileName.endsWith('.json')) {
|
|
1129
|
+
files.push(path_1.default.join(dir, fileName));
|
|
1130
|
+
}
|
|
1131
|
+
else if (pattern === '.*.json' && fileName.startsWith('.') && fileName.endsWith('.json')) {
|
|
1132
|
+
// Handle dot-prefixed JSON files
|
|
1133
|
+
files.push(path_1.default.join(dir, fileName));
|
|
1134
|
+
}
|
|
1135
|
+
else if (pattern === fileName) {
|
|
1136
|
+
files.push(path_1.default.join(dir, fileName));
|
|
1137
|
+
}
|
|
1138
|
+
// TODO: Add more sophisticated glob pattern matching if needed
|
|
1139
|
+
}
|
|
1140
|
+
}
|
|
1141
|
+
}
|
|
1142
|
+
catch (error) {
|
|
1143
|
+
// Directory might not exist yet
|
|
1144
|
+
if (error.code !== 'ENOENT') {
|
|
1145
|
+
throw error;
|
|
1146
|
+
}
|
|
1147
|
+
}
|
|
1148
|
+
return files;
|
|
1149
|
+
}
|
|
1150
|
+
/**
|
|
1151
|
+
* Load existing records from files and build a lookup map
|
|
1152
|
+
*
|
|
1153
|
+
* Reads all existing files and creates a map from primary key to file location,
|
|
1154
|
+
* enabling efficient lookup during the update process.
|
|
1155
|
+
*
|
|
1156
|
+
* @param files - Array of file paths to load
|
|
1157
|
+
* @param entityInfo - Entity metadata for primary key information
|
|
1158
|
+
* @returns Map from primary key string to file info
|
|
1159
|
+
* @private
|
|
1160
|
+
*/
|
|
1161
|
+
async loadExistingRecords(files, entityInfo) {
|
|
1162
|
+
const recordsMap = new Map();
|
|
1163
|
+
for (const filePath of files) {
|
|
1164
|
+
try {
|
|
1165
|
+
const fileData = await fs_extra_1.default.readJson(filePath);
|
|
1166
|
+
const records = Array.isArray(fileData) ? fileData : [fileData];
|
|
1167
|
+
for (const record of records) {
|
|
1168
|
+
if (record.primaryKey) {
|
|
1169
|
+
const lookupKey = this.createPrimaryKeyLookup(record.primaryKey);
|
|
1170
|
+
recordsMap.set(lookupKey, { filePath, recordData: record });
|
|
1171
|
+
}
|
|
1172
|
+
}
|
|
1173
|
+
}
|
|
1174
|
+
catch (error) {
|
|
1175
|
+
// Skip files that can't be parsed
|
|
1176
|
+
this.warn(`Could not load file ${filePath}: ${error}`);
|
|
1177
|
+
}
|
|
1178
|
+
}
|
|
1179
|
+
return recordsMap;
|
|
1180
|
+
}
|
|
1181
|
+
/**
|
|
1182
|
+
* Create a string lookup key from primary key values
|
|
1183
|
+
*
|
|
1184
|
+
* Generates a consistent string representation of primary key values
|
|
1185
|
+
* for use in maps and comparisons.
|
|
1186
|
+
*
|
|
1187
|
+
* @param primaryKey - Primary key field names and values
|
|
1188
|
+
* @returns String representation of the primary key
|
|
1189
|
+
* @private
|
|
1190
|
+
*/
|
|
1191
|
+
createPrimaryKeyLookup(primaryKey) {
|
|
1192
|
+
const keys = Object.keys(primaryKey).sort();
|
|
1193
|
+
return keys.map(k => `${k}:${primaryKey[k]}`).join('|');
|
|
1194
|
+
}
|
|
1195
|
+
/**
|
|
1196
|
+
* Merge two record data objects based on configured strategy
|
|
1197
|
+
*
|
|
1198
|
+
* Combines existing and new record data according to the merge strategy:
|
|
1199
|
+
* - 'overwrite': Replace all fields with new values
|
|
1200
|
+
* - 'merge': Combine fields, with new values taking precedence
|
|
1201
|
+
* - 'skip': Keep existing record unchanged
|
|
1202
|
+
*
|
|
1203
|
+
* @param existing - Existing record data
|
|
1204
|
+
* @param newData - New record data from database
|
|
1205
|
+
* @param strategy - Merge strategy to apply
|
|
1206
|
+
* @param preserveFields - Field names that should never be overwritten
|
|
1207
|
+
* @returns Merged record data
|
|
1208
|
+
* @private
|
|
1209
|
+
*/
|
|
1210
|
+
async mergeRecords(existing, newData, strategy, preserveFields) {
|
|
1211
|
+
if (strategy === 'skip') {
|
|
1212
|
+
return existing;
|
|
1213
|
+
}
|
|
1214
|
+
if (strategy === 'overwrite') {
|
|
1215
|
+
// Build with proper ordering
|
|
1216
|
+
const result = {};
|
|
1217
|
+
// 1. Fields first
|
|
1218
|
+
result.fields = { ...newData.fields };
|
|
1219
|
+
// Restore preserved fields from existing
|
|
1220
|
+
if (preserveFields.length > 0 && existing.fields) {
|
|
1221
|
+
for (const field of preserveFields) {
|
|
1222
|
+
if (field in existing.fields) {
|
|
1223
|
+
result.fields[field] = existing.fields[field];
|
|
1224
|
+
}
|
|
1225
|
+
}
|
|
1226
|
+
}
|
|
1227
|
+
// 2. Related entities (if any)
|
|
1228
|
+
if (newData.relatedEntities) {
|
|
1229
|
+
result.relatedEntities = newData.relatedEntities;
|
|
1230
|
+
}
|
|
1231
|
+
// 3. Primary key
|
|
1232
|
+
result.primaryKey = newData.primaryKey;
|
|
1233
|
+
// 4. Sync metadata
|
|
1234
|
+
result.sync = newData.sync;
|
|
1235
|
+
return result;
|
|
1236
|
+
}
|
|
1237
|
+
// Default 'merge' strategy
|
|
1238
|
+
// Build with proper ordering
|
|
1239
|
+
const result = {};
|
|
1240
|
+
// 1. Fields first
|
|
1241
|
+
result.fields = { ...existing.fields, ...newData.fields };
|
|
1242
|
+
// Restore preserved fields
|
|
1243
|
+
if (preserveFields.length > 0 && existing.fields) {
|
|
1244
|
+
for (const field of preserveFields) {
|
|
1245
|
+
if (field in existing.fields) {
|
|
1246
|
+
result.fields[field] = existing.fields[field];
|
|
1247
|
+
}
|
|
1248
|
+
}
|
|
1249
|
+
}
|
|
1250
|
+
// 2. Related entities (if any)
|
|
1251
|
+
if (existing.relatedEntities || newData.relatedEntities) {
|
|
1252
|
+
result.relatedEntities = {
|
|
1253
|
+
...existing.relatedEntities,
|
|
1254
|
+
...newData.relatedEntities
|
|
1255
|
+
};
|
|
1256
|
+
}
|
|
1257
|
+
// 3. Primary key
|
|
1258
|
+
result.primaryKey = newData.primaryKey || existing.primaryKey;
|
|
1259
|
+
// 4. Sync metadata
|
|
1260
|
+
result.sync = newData.sync;
|
|
1261
|
+
return result;
|
|
1262
|
+
}
|
|
1263
|
+
/**
|
|
1264
|
+
* Create a backup of a file before updating
|
|
1265
|
+
*
|
|
1266
|
+
* Creates a timestamped backup copy of the file in a backup directory
|
|
1267
|
+
* with the original filename, timestamp suffix, and .backup extension.
|
|
1268
|
+
* The backup directory defaults to .backups but can be configured.
|
|
1269
|
+
*
|
|
1270
|
+
* @param filePath - Path to the file to backup
|
|
1271
|
+
* @param backupDirName - Name of the backup directory (optional)
|
|
1272
|
+
* @returns Promise that resolves when backup is created
|
|
1273
|
+
* @private
|
|
1274
|
+
*/
|
|
1275
|
+
async createBackup(filePath, backupDirName) {
|
|
1276
|
+
const dir = path_1.default.dirname(filePath);
|
|
1277
|
+
const fileName = path_1.default.basename(filePath);
|
|
1278
|
+
const backupDir = path_1.default.join(dir, backupDirName || '.backups');
|
|
1279
|
+
// Ensure backup directory exists
|
|
1280
|
+
await fs_extra_1.default.ensureDir(backupDir);
|
|
1281
|
+
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
|
1282
|
+
// Remove .json extension, add timestamp, then add .backup extension
|
|
1283
|
+
const backupFileName = fileName.replace(/\.json$/, `.${timestamp}.backup`);
|
|
1284
|
+
const backupPath = path_1.default.join(backupDir, backupFileName);
|
|
1285
|
+
try {
|
|
1286
|
+
await fs_extra_1.default.copy(filePath, backupPath);
|
|
1287
|
+
}
|
|
1288
|
+
catch (error) {
|
|
1289
|
+
this.warn(`Could not create backup of ${filePath}: ${error}`);
|
|
1290
|
+
}
|
|
1291
|
+
}
|
|
311
1292
|
}
|
|
312
1293
|
// Expose the Pull command class as the module's default export so the
// oclif CLI framework can discover and instantiate it.
exports.default = Pull;
//# sourceMappingURL=index.js.map
|