@memberjunction/metadata-sync 2.54.0 → 2.56.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +92 -51
- package/dist/index.d.ts +21 -1
- package/dist/index.js +41 -3
- package/dist/index.js.map +1 -1
- package/dist/lib/file-backup-manager.js +2 -2
- package/dist/lib/file-backup-manager.js.map +1 -1
- package/dist/lib/sql-logger.d.ts +44 -0
- package/dist/lib/sql-logger.js +140 -0
- package/dist/lib/sql-logger.js.map +1 -0
- package/dist/lib/sync-engine.js +2 -2
- package/dist/lib/sync-engine.js.map +1 -1
- package/dist/lib/transaction-manager.d.ts +36 -0
- package/dist/lib/transaction-manager.js +117 -0
- package/dist/lib/transaction-manager.js.map +1 -0
- package/dist/services/FileResetService.d.ts +30 -0
- package/dist/services/FileResetService.js +182 -0
- package/dist/services/FileResetService.js.map +1 -0
- package/dist/services/InitService.d.ts +17 -0
- package/dist/services/InitService.js +118 -0
- package/dist/services/InitService.js.map +1 -0
- package/dist/services/PullService.d.ts +45 -0
- package/dist/services/PullService.js +564 -0
- package/dist/services/PullService.js.map +1 -0
- package/dist/services/PushService.d.ts +45 -0
- package/dist/services/PushService.js +394 -0
- package/dist/services/PushService.js.map +1 -0
- package/dist/services/StatusService.d.ts +32 -0
- package/dist/services/StatusService.js +138 -0
- package/dist/services/StatusService.js.map +1 -0
- package/dist/services/WatchService.d.ts +32 -0
- package/dist/services/WatchService.js +242 -0
- package/dist/services/WatchService.js.map +1 -0
- package/dist/services/index.d.ts +16 -0
- package/dist/services/index.js +28 -0
- package/dist/services/index.js.map +1 -0
- package/package.json +14 -45
- package/bin/debug.js +0 -7
- package/bin/run +0 -17
- package/bin/run.js +0 -6
- package/dist/commands/file-reset/index.d.ts +0 -15
- package/dist/commands/file-reset/index.js +0 -221
- package/dist/commands/file-reset/index.js.map +0 -1
- package/dist/commands/init/index.d.ts +0 -7
- package/dist/commands/init/index.js +0 -155
- package/dist/commands/init/index.js.map +0 -1
- package/dist/commands/pull/index.d.ts +0 -246
- package/dist/commands/pull/index.js +0 -1448
- package/dist/commands/pull/index.js.map +0 -1
- package/dist/commands/push/index.d.ts +0 -41
- package/dist/commands/push/index.js +0 -1131
- package/dist/commands/push/index.js.map +0 -1
- package/dist/commands/status/index.d.ts +0 -10
- package/dist/commands/status/index.js +0 -199
- package/dist/commands/status/index.js.map +0 -1
- package/dist/commands/validate/index.d.ts +0 -15
- package/dist/commands/validate/index.js +0 -149
- package/dist/commands/validate/index.js.map +0 -1
- package/dist/commands/watch/index.d.ts +0 -15
- package/dist/commands/watch/index.js +0 -300
- package/dist/commands/watch/index.js.map +0 -1
- package/dist/hooks/init.d.ts +0 -3
- package/dist/hooks/init.js +0 -59
- package/dist/hooks/init.js.map +0 -1
- package/oclif.manifest.json +0 -376
|
@@ -1,1131 +0,0 @@
|
|
|
1
|
-
"use strict";
|
|
2
|
-
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
|
3
|
-
if (k2 === undefined) k2 = k;
|
|
4
|
-
var desc = Object.getOwnPropertyDescriptor(m, k);
|
|
5
|
-
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
|
6
|
-
desc = { enumerable: true, get: function() { return m[k]; } };
|
|
7
|
-
}
|
|
8
|
-
Object.defineProperty(o, k2, desc);
|
|
9
|
-
}) : (function(o, m, k, k2) {
|
|
10
|
-
if (k2 === undefined) k2 = k;
|
|
11
|
-
o[k2] = m[k];
|
|
12
|
-
}));
|
|
13
|
-
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
|
14
|
-
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
|
15
|
-
}) : function(o, v) {
|
|
16
|
-
o["default"] = v;
|
|
17
|
-
});
|
|
18
|
-
var __importStar = (this && this.__importStar) || function (mod) {
|
|
19
|
-
if (mod && mod.__esModule) return mod;
|
|
20
|
-
var result = {};
|
|
21
|
-
if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
|
|
22
|
-
__setModuleDefault(result, mod);
|
|
23
|
-
return result;
|
|
24
|
-
};
|
|
25
|
-
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
26
|
-
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
27
|
-
};
|
|
28
|
-
Object.defineProperty(exports, "__esModule", { value: true });
|
|
29
|
-
const core_1 = require("@oclif/core");
|
|
30
|
-
const fs_extra_1 = __importDefault(require("fs-extra"));
|
|
31
|
-
const path_1 = __importDefault(require("path"));
|
|
32
|
-
const prompts_1 = require("@inquirer/prompts");
|
|
33
|
-
const ora_classic_1 = __importDefault(require("ora-classic"));
|
|
34
|
-
const fast_glob_1 = __importDefault(require("fast-glob"));
|
|
35
|
-
const chalk_1 = __importDefault(require("chalk"));
|
|
36
|
-
const config_1 = require("../../config");
|
|
37
|
-
const provider_utils_1 = require("../../lib/provider-utils");
|
|
38
|
-
const core_2 = require("@memberjunction/core");
|
|
39
|
-
const config_manager_1 = require("../../lib/config-manager");
|
|
40
|
-
const singleton_manager_1 = require("../../lib/singleton-manager");
|
|
41
|
-
const sqlserver_dataprovider_1 = require("@memberjunction/sqlserver-dataprovider");
|
|
42
|
-
const global_1 = require("@memberjunction/global");
|
|
43
|
-
const file_backup_manager_1 = require("../../lib/file-backup-manager");
|
|
44
|
-
class Push extends core_1.Command {
|
|
45
|
-
static description = 'Push local file changes to the database';
|
|
46
|
-
warnings = [];
|
|
47
|
-
errors = [];
|
|
48
|
-
processedRecords = new Map();
|
|
49
|
-
static examples = [
|
|
50
|
-
`<%= config.bin %> <%= command.id %>`,
|
|
51
|
-
`<%= config.bin %> <%= command.id %> --dry-run`,
|
|
52
|
-
`<%= config.bin %> <%= command.id %> --dir="ai-prompts"`,
|
|
53
|
-
`<%= config.bin %> <%= command.id %> --ci`,
|
|
54
|
-
];
|
|
55
|
-
static flags = {
|
|
56
|
-
dir: core_1.Flags.string({ description: 'Specific entity directory to push' }),
|
|
57
|
-
'dry-run': core_1.Flags.boolean({ description: 'Show what would be pushed without actually pushing' }),
|
|
58
|
-
ci: core_1.Flags.boolean({ description: 'CI mode - no prompts, fail on issues' }),
|
|
59
|
-
verbose: core_1.Flags.boolean({ char: 'v', description: 'Show detailed field-level output' }),
|
|
60
|
-
'no-validate': core_1.Flags.boolean({ description: 'Skip validation before push' }),
|
|
61
|
-
};
|
|
62
|
-
// Override warn to collect warnings
|
|
63
|
-
warn(input) {
|
|
64
|
-
const message = typeof input === 'string' ? input : input.message;
|
|
65
|
-
this.warnings.push(message);
|
|
66
|
-
return super.warn(input);
|
|
67
|
-
}
|
|
68
|
-
async run() {
|
|
69
|
-
const { flags } = await this.parse(Push);
|
|
70
|
-
const spinner = (0, ora_classic_1.default)();
|
|
71
|
-
let sqlLogger = null;
|
|
72
|
-
const fileBackupManager = new file_backup_manager_1.FileBackupManager();
|
|
73
|
-
let hasActiveTransaction = false;
|
|
74
|
-
const startTime = Date.now();
|
|
75
|
-
// Reset the processed records tracking for this push operation
|
|
76
|
-
this.processedRecords.clear();
|
|
77
|
-
try {
|
|
78
|
-
// Load configurations
|
|
79
|
-
spinner.start('Loading configuration');
|
|
80
|
-
const mjConfig = (0, config_1.loadMJConfig)();
|
|
81
|
-
if (!mjConfig) {
|
|
82
|
-
this.error('No mj.config.cjs found in current directory or parent directories');
|
|
83
|
-
}
|
|
84
|
-
// Load sync config from target directory if --dir is specified, otherwise from current directory
|
|
85
|
-
const syncConfigDir = flags.dir ? path_1.default.resolve(config_manager_1.configManager.getOriginalCwd(), flags.dir) : config_manager_1.configManager.getOriginalCwd();
|
|
86
|
-
const syncConfig = await (0, config_1.loadSyncConfig)(syncConfigDir);
|
|
87
|
-
// Stop spinner before provider initialization (which logs to console)
|
|
88
|
-
spinner.stop();
|
|
89
|
-
// Initialize data provider
|
|
90
|
-
await (0, provider_utils_1.initializeProvider)(mjConfig);
|
|
91
|
-
// Initialize sync engine using singleton pattern
|
|
92
|
-
const syncEngine = await (0, singleton_manager_1.getSyncEngine)((0, provider_utils_1.getSystemUser)());
|
|
93
|
-
// Show success after all initialization is complete
|
|
94
|
-
if (flags.verbose) {
|
|
95
|
-
spinner.succeed('Configuration and metadata loaded');
|
|
96
|
-
}
|
|
97
|
-
else {
|
|
98
|
-
spinner.stop();
|
|
99
|
-
}
|
|
100
|
-
// Initialize SQL logging AFTER provider setup is complete
|
|
101
|
-
if (syncConfig?.sqlLogging?.enabled) {
|
|
102
|
-
const outputDir = syncConfig.sqlLogging.outputDirectory || './sql_logging';
|
|
103
|
-
const formatAsMigration = syncConfig.sqlLogging.formatAsMigration || false;
|
|
104
|
-
// Ensure output directory exists
|
|
105
|
-
const fullOutputDir = path_1.default.resolve(outputDir);
|
|
106
|
-
await fs_extra_1.default.ensureDir(fullOutputDir);
|
|
107
|
-
// Generate filename with timestamp and directory name
|
|
108
|
-
const now = new Date();
|
|
109
|
-
const humanReadableTimestamp = now.toISOString()
|
|
110
|
-
.replace('T', '_')
|
|
111
|
-
.replace(/:/g, '-')
|
|
112
|
-
.slice(0, -5); // Remove milliseconds and Z
|
|
113
|
-
// Get directory name for filename
|
|
114
|
-
const targetDir = flags.dir ? path_1.default.resolve(config_manager_1.configManager.getOriginalCwd(), flags.dir) : config_manager_1.configManager.getOriginalCwd();
|
|
115
|
-
const dirName = path_1.default.basename(targetDir);
|
|
116
|
-
const filename = formatAsMigration
|
|
117
|
-
? `V${now.toISOString().replace(/[:.T-]/g, '').slice(0, -5)}__MetadataSync_Push.sql`
|
|
118
|
-
: `metadata-sync-push_${dirName}_${humanReadableTimestamp}.sql`;
|
|
119
|
-
const logFilePath = path_1.default.join(fullOutputDir, filename);
|
|
120
|
-
// Import and access the data provider from the provider utils
|
|
121
|
-
const { getDataProvider } = await Promise.resolve().then(() => __importStar(require('../../lib/provider-utils')));
|
|
122
|
-
const dataProvider = getDataProvider();
|
|
123
|
-
if (dataProvider && typeof dataProvider.CreateSqlLogger === 'function') {
|
|
124
|
-
sqlLogger = await dataProvider.CreateSqlLogger(logFilePath, {
|
|
125
|
-
formatAsMigration,
|
|
126
|
-
description: 'MetadataSync Push Operation',
|
|
127
|
-
statementTypes: 'mutations', // Only log mutations (data changes)
|
|
128
|
-
batchSeparator: 'GO', // Add GO statements for SQL Server batch processing
|
|
129
|
-
prettyPrint: true // Enable pretty printing for readable output
|
|
130
|
-
});
|
|
131
|
-
if (flags.verbose) {
|
|
132
|
-
this.log(`📝 SQL logging enabled: ${path_1.default.relative(process.cwd(), logFilePath)}`);
|
|
133
|
-
}
|
|
134
|
-
}
|
|
135
|
-
else {
|
|
136
|
-
this.warn('SQL logging requested but data provider does not support CreateSqlLogger');
|
|
137
|
-
}
|
|
138
|
-
}
|
|
139
|
-
// Find entity directories to process
|
|
140
|
-
const entityDirs = (0, provider_utils_1.findEntityDirectories)(config_manager_1.configManager.getOriginalCwd(), flags.dir, syncConfig?.directoryOrder, syncConfig?.ignoreDirectories);
|
|
141
|
-
if (entityDirs.length === 0) {
|
|
142
|
-
this.error('No entity directories found');
|
|
143
|
-
}
|
|
144
|
-
if (flags.verbose) {
|
|
145
|
-
this.log(`Found ${entityDirs.length} entity ${entityDirs.length === 1 ? 'directory' : 'directories'} to process`);
|
|
146
|
-
}
|
|
147
|
-
// Run validation unless disabled
|
|
148
|
-
if (!flags['no-validate']) {
|
|
149
|
-
const { ValidationService } = await Promise.resolve().then(() => __importStar(require('../../services/ValidationService')));
|
|
150
|
-
const { FormattingService } = await Promise.resolve().then(() => __importStar(require('../../services/FormattingService')));
|
|
151
|
-
spinner.start('Validating metadata...');
|
|
152
|
-
const validator = new ValidationService({ verbose: flags.verbose });
|
|
153
|
-
const formatter = new FormattingService();
|
|
154
|
-
const targetDir = flags.dir ? path_1.default.resolve(config_manager_1.configManager.getOriginalCwd(), flags.dir) : config_manager_1.configManager.getOriginalCwd();
|
|
155
|
-
const validationResult = await validator.validateDirectory(targetDir);
|
|
156
|
-
spinner.stop();
|
|
157
|
-
if (!validationResult.isValid || validationResult.warnings.length > 0) {
|
|
158
|
-
// Show validation results
|
|
159
|
-
this.log('\n' + formatter.formatValidationResult(validationResult, flags.verbose));
|
|
160
|
-
if (!validationResult.isValid) {
|
|
161
|
-
// In CI mode, fail immediately
|
|
162
|
-
if (flags.ci) {
|
|
163
|
-
this.error('Validation failed. Cannot proceed with push.');
|
|
164
|
-
}
|
|
165
|
-
// Otherwise, ask for confirmation
|
|
166
|
-
const shouldContinue = await (0, prompts_1.confirm)({
|
|
167
|
-
message: 'Validation failed with errors. Do you want to continue anyway?',
|
|
168
|
-
default: false
|
|
169
|
-
});
|
|
170
|
-
if (!shouldContinue) {
|
|
171
|
-
this.log(chalk_1.default.yellow('\n⚠️ Push cancelled due to validation errors.'));
|
|
172
|
-
// Exit cleanly without throwing an error
|
|
173
|
-
return;
|
|
174
|
-
}
|
|
175
|
-
}
|
|
176
|
-
}
|
|
177
|
-
else {
|
|
178
|
-
this.log(chalk_1.default.green('✓ Validation passed'));
|
|
179
|
-
}
|
|
180
|
-
}
|
|
181
|
-
// Initialize file backup manager (unless in dry-run mode)
|
|
182
|
-
if (!flags['dry-run']) {
|
|
183
|
-
await fileBackupManager.initialize();
|
|
184
|
-
if (flags.verbose) {
|
|
185
|
-
this.log('📁 File backup manager initialized');
|
|
186
|
-
}
|
|
187
|
-
}
|
|
188
|
-
// Start a database transaction for the entire push operation (unless in dry-run mode)
|
|
189
|
-
// IMPORTANT: We start the transaction AFTER metadata loading and validation to avoid
|
|
190
|
-
// transaction conflicts with background refresh operations
|
|
191
|
-
if (!flags['dry-run']) {
|
|
192
|
-
const { getDataProvider } = await Promise.resolve().then(() => __importStar(require('../../lib/provider-utils')));
|
|
193
|
-
const dataProvider = getDataProvider();
|
|
194
|
-
// Ensure we have SQLServerDataProvider for transaction support
|
|
195
|
-
if (!(dataProvider instanceof sqlserver_dataprovider_1.SQLServerDataProvider)) {
|
|
196
|
-
const errorMsg = 'MetadataSync requires SQLServerDataProvider for transaction support. Current provider does not support transactions.';
|
|
197
|
-
// Rollback file backups since we're not proceeding
|
|
198
|
-
try {
|
|
199
|
-
await fileBackupManager.rollback();
|
|
200
|
-
}
|
|
201
|
-
catch (rollbackError) {
|
|
202
|
-
this.warn(`Failed to rollback file backup initialization: ${rollbackError}`);
|
|
203
|
-
}
|
|
204
|
-
this.error(errorMsg);
|
|
205
|
-
}
|
|
206
|
-
if (dataProvider && typeof dataProvider.BeginTransaction === 'function') {
|
|
207
|
-
try {
|
|
208
|
-
await dataProvider.BeginTransaction();
|
|
209
|
-
hasActiveTransaction = true;
|
|
210
|
-
if (flags.verbose) {
|
|
211
|
-
this.log('🔄 Transaction started - all changes will be committed or rolled back as a unit');
|
|
212
|
-
}
|
|
213
|
-
}
|
|
214
|
-
catch (error) {
|
|
215
|
-
// Transaction start failure is critical - we should not proceed without it
|
|
216
|
-
const errorMsg = `Failed to start database transaction: ${error instanceof Error ? error.message : String(error)}`;
|
|
217
|
-
// Rollback file backups since we're not proceeding
|
|
218
|
-
try {
|
|
219
|
-
await fileBackupManager.rollback();
|
|
220
|
-
}
|
|
221
|
-
catch (rollbackError) {
|
|
222
|
-
this.warn(`Failed to rollback file backup initialization: ${rollbackError}`);
|
|
223
|
-
}
|
|
224
|
-
this.error(errorMsg);
|
|
225
|
-
}
|
|
226
|
-
}
|
|
227
|
-
else {
|
|
228
|
-
// No transaction support is also critical for data integrity
|
|
229
|
-
const errorMsg = 'Transaction support not available - cannot ensure data integrity';
|
|
230
|
-
// Rollback file backups since we're not proceeding
|
|
231
|
-
try {
|
|
232
|
-
await fileBackupManager.rollback();
|
|
233
|
-
}
|
|
234
|
-
catch (rollbackError) {
|
|
235
|
-
this.warn(`Failed to rollback file backup initialization: ${rollbackError}`);
|
|
236
|
-
}
|
|
237
|
-
this.error(errorMsg);
|
|
238
|
-
}
|
|
239
|
-
}
|
|
240
|
-
// Process each entity directory
|
|
241
|
-
let totalCreated = 0;
|
|
242
|
-
let totalUpdated = 0;
|
|
243
|
-
let totalUnchanged = 0;
|
|
244
|
-
let totalErrors = 0;
|
|
245
|
-
for (const entityDir of entityDirs) {
|
|
246
|
-
const entityConfig = await (0, config_1.loadEntityConfig)(entityDir);
|
|
247
|
-
if (!entityConfig) {
|
|
248
|
-
this.warn(`Skipping ${entityDir} - no valid entity configuration`);
|
|
249
|
-
continue;
|
|
250
|
-
}
|
|
251
|
-
if (flags.verbose) {
|
|
252
|
-
this.log(`\nProcessing ${entityConfig.entity} in ${entityDir}`);
|
|
253
|
-
}
|
|
254
|
-
// Combine root ignoreDirectories with entity-level ignoreDirectories
|
|
255
|
-
const initialIgnoreDirectories = [
|
|
256
|
-
...(syncConfig?.ignoreDirectories || []),
|
|
257
|
-
...(entityConfig?.ignoreDirectories || [])
|
|
258
|
-
];
|
|
259
|
-
const result = await this.processEntityDirectory(entityDir, entityConfig, syncEngine, flags, syncConfig, fileBackupManager, initialIgnoreDirectories);
|
|
260
|
-
// Show per-directory summary
|
|
261
|
-
const dirName = path_1.default.relative(process.cwd(), entityDir) || '.';
|
|
262
|
-
const dirTotal = result.created + result.updated + result.unchanged;
|
|
263
|
-
if (dirTotal > 0 || result.errors > 0) {
|
|
264
|
-
this.log(`\n📁 ${dirName}:`);
|
|
265
|
-
this.log(` Total processed: ${dirTotal} unique records`);
|
|
266
|
-
if (result.created > 0) {
|
|
267
|
-
this.log(` ✓ Created: ${result.created}`);
|
|
268
|
-
}
|
|
269
|
-
if (result.updated > 0) {
|
|
270
|
-
this.log(` ✓ Updated: ${result.updated}`);
|
|
271
|
-
}
|
|
272
|
-
if (result.unchanged > 0) {
|
|
273
|
-
this.log(` - Unchanged: ${result.unchanged}`);
|
|
274
|
-
}
|
|
275
|
-
if (result.errors > 0) {
|
|
276
|
-
this.log(` ✗ Errors: ${result.errors}`);
|
|
277
|
-
}
|
|
278
|
-
}
|
|
279
|
-
totalCreated += result.created;
|
|
280
|
-
totalUpdated += result.updated;
|
|
281
|
-
totalUnchanged += result.unchanged;
|
|
282
|
-
totalErrors += result.errors;
|
|
283
|
-
}
|
|
284
|
-
// Summary using FormattingService
|
|
285
|
-
const endTime = Date.now();
|
|
286
|
-
const { FormattingService } = await Promise.resolve().then(() => __importStar(require('../../services/FormattingService')));
|
|
287
|
-
const formatter = new FormattingService();
|
|
288
|
-
this.log('\n' + formatter.formatSyncSummary('push', {
|
|
289
|
-
created: totalCreated,
|
|
290
|
-
updated: totalUpdated,
|
|
291
|
-
unchanged: totalUnchanged,
|
|
292
|
-
deleted: 0,
|
|
293
|
-
skipped: 0,
|
|
294
|
-
errors: totalErrors,
|
|
295
|
-
duration: endTime - startTime
|
|
296
|
-
}));
|
|
297
|
-
// Handle transaction commit/rollback
|
|
298
|
-
if (!flags['dry-run'] && hasActiveTransaction) {
|
|
299
|
-
const dataProvider = core_2.Metadata.Provider;
|
|
300
|
-
// We know we have an active transaction at this point
|
|
301
|
-
if (dataProvider) {
|
|
302
|
-
let shouldCommit = true;
|
|
303
|
-
// If there are any errors, always rollback
|
|
304
|
-
if (totalErrors > 0 || this.errors.length > 0) {
|
|
305
|
-
shouldCommit = false;
|
|
306
|
-
this.log('\n❌ Errors detected - rolling back all changes');
|
|
307
|
-
}
|
|
308
|
-
// If there are warnings, ask user (unless in CI mode)
|
|
309
|
-
else if (this.warnings.length > 0) {
|
|
310
|
-
// Filter out transaction-related warnings since we're now using transactions
|
|
311
|
-
const nonTransactionWarnings = this.warnings.filter(w => !w.includes('Transaction support not available') &&
|
|
312
|
-
!w.includes('Failed to start transaction'));
|
|
313
|
-
if (nonTransactionWarnings.length > 0) {
|
|
314
|
-
if (flags.ci) {
|
|
315
|
-
// In CI mode, rollback on warnings
|
|
316
|
-
shouldCommit = false;
|
|
317
|
-
this.log('\n⚠️ Warnings detected in CI mode - rolling back all changes');
|
|
318
|
-
}
|
|
319
|
-
else {
|
|
320
|
-
// Show warnings to user
|
|
321
|
-
this.log('\n⚠️ The following warnings were encountered:');
|
|
322
|
-
for (const warning of nonTransactionWarnings) {
|
|
323
|
-
this.log(` - ${warning}`);
|
|
324
|
-
}
|
|
325
|
-
// Ask user whether to commit or rollback
|
|
326
|
-
shouldCommit = await (0, prompts_1.confirm)({
|
|
327
|
-
message: 'Do you want to commit these changes despite the warnings?',
|
|
328
|
-
default: false // Default to rollback
|
|
329
|
-
});
|
|
330
|
-
}
|
|
331
|
-
}
|
|
332
|
-
}
|
|
333
|
-
try {
|
|
334
|
-
if (shouldCommit) {
|
|
335
|
-
await dataProvider.CommitTransaction();
|
|
336
|
-
this.log('\n✅ All changes committed successfully');
|
|
337
|
-
// Clean up file backups after successful commit
|
|
338
|
-
await fileBackupManager.cleanup();
|
|
339
|
-
}
|
|
340
|
-
else {
|
|
341
|
-
// User chose to rollback or errors/warnings in CI mode
|
|
342
|
-
this.log('\n🔙 Rolling back all changes...');
|
|
343
|
-
// Rollback database transaction
|
|
344
|
-
await dataProvider.RollbackTransaction();
|
|
345
|
-
// Rollback file changes
|
|
346
|
-
this.log('🔙 Rolling back file changes...');
|
|
347
|
-
await fileBackupManager.rollback();
|
|
348
|
-
this.log('✅ Rollback completed - no changes were made to the database or files');
|
|
349
|
-
}
|
|
350
|
-
}
|
|
351
|
-
catch (error) {
|
|
352
|
-
// Try to rollback on any error
|
|
353
|
-
this.log('\n❌ Transaction error - attempting to roll back changes');
|
|
354
|
-
try {
|
|
355
|
-
await dataProvider.RollbackTransaction();
|
|
356
|
-
this.log('✅ Database rollback completed');
|
|
357
|
-
}
|
|
358
|
-
catch (rollbackError) {
|
|
359
|
-
this.log('❌ Database rollback failed: ' + (rollbackError instanceof Error ? rollbackError.message : String(rollbackError)));
|
|
360
|
-
}
|
|
361
|
-
// Also rollback file changes
|
|
362
|
-
try {
|
|
363
|
-
this.log('🔙 Rolling back file changes...');
|
|
364
|
-
await fileBackupManager.rollback();
|
|
365
|
-
this.log('✅ File rollback completed');
|
|
366
|
-
}
|
|
367
|
-
catch (fileRollbackError) {
|
|
368
|
-
this.log('❌ File rollback failed: ' + (fileRollbackError instanceof Error ? fileRollbackError.message : String(fileRollbackError)));
|
|
369
|
-
}
|
|
370
|
-
throw error;
|
|
371
|
-
}
|
|
372
|
-
}
|
|
373
|
-
}
|
|
374
|
-
// Exit with error if there were errors in CI mode
|
|
375
|
-
if ((totalErrors > 0 || this.errors.length > 0 || (this.warnings.length > 0 && flags.ci)) && flags.ci) {
|
|
376
|
-
this.error('Push failed in CI mode');
|
|
377
|
-
}
|
|
378
|
-
}
|
|
379
|
-
catch (error) {
|
|
380
|
-
spinner.fail('Push failed');
|
|
381
|
-
// Try to rollback the transaction and files if not in dry-run mode
|
|
382
|
-
if (!flags['dry-run']) {
|
|
383
|
-
const { getDataProvider } = await Promise.resolve().then(() => __importStar(require('../../lib/provider-utils')));
|
|
384
|
-
const dataProvider = getDataProvider();
|
|
385
|
-
// Rollback database transaction if we have one
|
|
386
|
-
if (hasActiveTransaction && dataProvider && typeof dataProvider.RollbackTransaction === 'function') {
|
|
387
|
-
try {
|
|
388
|
-
this.log('\n🔙 Rolling back database transaction due to error...');
|
|
389
|
-
await dataProvider.RollbackTransaction();
|
|
390
|
-
this.log('✅ Database rollback completed');
|
|
391
|
-
}
|
|
392
|
-
catch (rollbackError) {
|
|
393
|
-
this.log('❌ Database rollback failed: ' + (rollbackError instanceof Error ? rollbackError.message : String(rollbackError)));
|
|
394
|
-
}
|
|
395
|
-
}
|
|
396
|
-
// Rollback file changes
|
|
397
|
-
try {
|
|
398
|
-
this.log('🔙 Rolling back file changes...');
|
|
399
|
-
await fileBackupManager.rollback();
|
|
400
|
-
this.log('✅ File rollback completed - all files restored to original state');
|
|
401
|
-
}
|
|
402
|
-
catch (fileRollbackError) {
|
|
403
|
-
this.log('❌ File rollback failed: ' + (fileRollbackError instanceof Error ? fileRollbackError.message : String(fileRollbackError)));
|
|
404
|
-
}
|
|
405
|
-
}
|
|
406
|
-
// Enhanced error logging for debugging
|
|
407
|
-
this.log('\n=== Push Error Details ===');
|
|
408
|
-
this.log(`Error type: ${error?.constructor?.name || 'Unknown'}`);
|
|
409
|
-
this.log(`Error message: ${error instanceof Error ? error.message : String(error)}`);
|
|
410
|
-
if (error instanceof Error && error.stack) {
|
|
411
|
-
this.log(`\nStack trace:`);
|
|
412
|
-
this.log(error.stack);
|
|
413
|
-
}
|
|
414
|
-
// Log context information
|
|
415
|
-
this.log(`\nContext:`);
|
|
416
|
-
this.log(`- Working directory: ${config_manager_1.configManager.getOriginalCwd()}`);
|
|
417
|
-
this.log(`- Flags: ${JSON.stringify(flags, null, 2)}`);
|
|
418
|
-
// Check if error is related to common issues
|
|
419
|
-
const errorMessage = error instanceof Error ? error.message : String(error);
|
|
420
|
-
if (errorMessage.includes('entity directories')) {
|
|
421
|
-
this.log(`\nHint: This appears to be an entity directory configuration issue.`);
|
|
422
|
-
this.log(`Make sure each entity directory has a .mj-sync.json file.`);
|
|
423
|
-
}
|
|
424
|
-
else if (errorMessage.includes('database') || errorMessage.includes('connection')) {
|
|
425
|
-
this.log(`\nHint: This appears to be a database connectivity issue.`);
|
|
426
|
-
this.log(`Check your mj.config.cjs configuration and database connectivity.`);
|
|
427
|
-
}
|
|
428
|
-
else if (errorMessage.includes('config') || errorMessage.includes('mj.config.cjs')) {
|
|
429
|
-
this.log(`\nHint: This appears to be a configuration file issue.`);
|
|
430
|
-
this.log(`Make sure mj.config.cjs exists and is properly configured.`);
|
|
431
|
-
}
|
|
432
|
-
this.error(error);
|
|
433
|
-
}
|
|
434
|
-
finally {
|
|
435
|
-
// Dispose SQL logging session if active
|
|
436
|
-
if (sqlLogger) {
|
|
437
|
-
try {
|
|
438
|
-
await sqlLogger.dispose();
|
|
439
|
-
if (flags.verbose) {
|
|
440
|
-
this.log('✅ SQL logging session closed');
|
|
441
|
-
}
|
|
442
|
-
}
|
|
443
|
-
catch (error) {
|
|
444
|
-
this.warn(`Failed to close SQL logging session: ${error}`);
|
|
445
|
-
}
|
|
446
|
-
}
|
|
447
|
-
// Reset sync engine singleton
|
|
448
|
-
(0, singleton_manager_1.resetSyncEngine)();
|
|
449
|
-
// Exit process to prevent background MJ tasks from throwing errors
|
|
450
|
-
// We don't explicitly close the connection - let the process termination handle it
|
|
451
|
-
process.exit(0);
|
|
452
|
-
}
|
|
453
|
-
}
|
|
454
|
-
async processEntityDirectory(entityDir, entityConfig, syncEngine, flags, syncConfig, fileBackupManager, parentIgnoreDirectories) {
|
|
455
|
-
const result = { created: 0, updated: 0, unchanged: 0, errors: 0 };
|
|
456
|
-
// Find files matching the configured pattern
|
|
457
|
-
const pattern = entityConfig.filePattern || '*.json';
|
|
458
|
-
const jsonFiles = await (0, fast_glob_1.default)(pattern, {
|
|
459
|
-
cwd: entityDir,
|
|
460
|
-
ignore: ['.mj-sync.json', '.mj-folder.json', '**/*.backup'],
|
|
461
|
-
dot: true, // Include dotfiles (files starting with .)
|
|
462
|
-
onlyFiles: true
|
|
463
|
-
});
|
|
464
|
-
// Check if no JSON files were found
|
|
465
|
-
if (jsonFiles.length === 0) {
|
|
466
|
-
const relativePath = path_1.default.relative(process.cwd(), entityDir) || '.';
|
|
467
|
-
const parentPath = path_1.default.dirname(entityDir);
|
|
468
|
-
const dirName = path_1.default.basename(entityDir);
|
|
469
|
-
// Check if this is a subdirectory (not a top-level entity directory)
|
|
470
|
-
const isSubdirectory = parentPath !== path_1.default.resolve(config_manager_1.configManager.getOriginalCwd(), flags.dir || '.');
|
|
471
|
-
if (isSubdirectory) {
|
|
472
|
-
// For subdirectories, make it a warning instead of an error
|
|
473
|
-
let warningMessage = `No JSON files found in ${relativePath} matching pattern: ${pattern}`;
|
|
474
|
-
// Try to be more helpful by checking what files do exist
|
|
475
|
-
const allFiles = await (0, fast_glob_1.default)('*', {
|
|
476
|
-
cwd: entityDir,
|
|
477
|
-
onlyFiles: true,
|
|
478
|
-
dot: true
|
|
479
|
-
});
|
|
480
|
-
if (allFiles.length > 0) {
|
|
481
|
-
warningMessage += `\n Files found: ${allFiles.slice(0, 3).join(', ')}`;
|
|
482
|
-
if (allFiles.length > 3) {
|
|
483
|
-
warningMessage += ` (and ${allFiles.length - 3} more)`;
|
|
484
|
-
}
|
|
485
|
-
}
|
|
486
|
-
const rootConfigPath = path_1.default.join(config_manager_1.configManager.getOriginalCwd(), flags.dir || '.', '.mj-sync.json');
|
|
487
|
-
warningMessage += `\n 💡 If this directory should be ignored, add "${dirName}" to the "ignoreDirectories" array in:\n ${rootConfigPath}`;
|
|
488
|
-
this.warn(warningMessage);
|
|
489
|
-
return result; // Return early without processing further
|
|
490
|
-
}
|
|
491
|
-
else {
|
|
492
|
-
// For top-level entity directories, this is still an error
|
|
493
|
-
const configFile = path_1.default.join(entityDir, '.mj-sync.json');
|
|
494
|
-
let errorMessage = `No JSON files found in ${relativePath} matching pattern: ${pattern}\n`;
|
|
495
|
-
errorMessage += `\nPlease check:\n`;
|
|
496
|
-
errorMessage += ` 1. Files exist with the expected extension (.json)\n`;
|
|
497
|
-
errorMessage += ` 2. The filePattern in ${configFile} matches your files\n`;
|
|
498
|
-
errorMessage += ` 3. Files are not in ignored patterns: .mj-sync.json, .mj-folder.json, *.backup\n`;
|
|
499
|
-
// Try to be more helpful by checking what files do exist
|
|
500
|
-
const allFiles = await (0, fast_glob_1.default)('*', {
|
|
501
|
-
cwd: entityDir,
|
|
502
|
-
onlyFiles: true,
|
|
503
|
-
dot: true
|
|
504
|
-
});
|
|
505
|
-
if (allFiles.length > 0) {
|
|
506
|
-
errorMessage += `\nFiles found in directory: ${allFiles.slice(0, 5).join(', ')}`;
|
|
507
|
-
if (allFiles.length > 5) {
|
|
508
|
-
errorMessage += ` (and ${allFiles.length - 5} more)`;
|
|
509
|
-
}
|
|
510
|
-
}
|
|
511
|
-
throw new Error(errorMessage);
|
|
512
|
-
}
|
|
513
|
-
}
|
|
514
|
-
if (flags.verbose) {
|
|
515
|
-
this.log(`Processing ${jsonFiles.length} records in ${path_1.default.relative(process.cwd(), entityDir) || '.'}`);
|
|
516
|
-
}
|
|
517
|
-
// First, process all JSON files in this directory
|
|
518
|
-
await this.processJsonFiles(jsonFiles, entityDir, entityConfig, syncEngine, flags, result, fileBackupManager);
|
|
519
|
-
// Then, recursively process subdirectories
|
|
520
|
-
const entries = await fs_extra_1.default.readdir(entityDir, { withFileTypes: true });
|
|
521
|
-
for (const entry of entries) {
|
|
522
|
-
if (entry.isDirectory() && !entry.name.startsWith('.')) {
|
|
523
|
-
// Build cumulative ignore list: parent + current directory's ignores
|
|
524
|
-
const currentDirConfig = await (0, config_1.loadSyncConfig)(entityDir);
|
|
525
|
-
const currentEntityConfig = await (0, config_1.loadEntityConfig)(entityDir);
|
|
526
|
-
const cumulativeIgnoreDirectories = [
|
|
527
|
-
...(parentIgnoreDirectories || []),
|
|
528
|
-
...(currentDirConfig?.ignoreDirectories || []),
|
|
529
|
-
...(currentEntityConfig?.ignoreDirectories || [])
|
|
530
|
-
];
|
|
531
|
-
// Check if this directory should be ignored
|
|
532
|
-
if (cumulativeIgnoreDirectories.some((pattern) => {
|
|
533
|
-
// Simple pattern matching: exact name or ends with pattern
|
|
534
|
-
return entry.name === pattern || entry.name.endsWith(pattern);
|
|
535
|
-
})) {
|
|
536
|
-
if (flags.verbose) {
|
|
537
|
-
this.log(` Ignoring directory: ${entry.name} (matched ignore pattern)`);
|
|
538
|
-
}
|
|
539
|
-
continue;
|
|
540
|
-
}
|
|
541
|
-
const subDir = path_1.default.join(entityDir, entry.name);
|
|
542
|
-
// Load subdirectory config and merge with parent config
|
|
543
|
-
let subEntityConfig = { ...entityConfig };
|
|
544
|
-
const subDirConfig = await (0, config_1.loadEntityConfig)(subDir);
|
|
545
|
-
if (subDirConfig) {
|
|
546
|
-
// Check if this is a new entity type (has different entity name)
|
|
547
|
-
if (subDirConfig.entity && subDirConfig.entity !== entityConfig.entity) {
|
|
548
|
-
// This is a different entity type, skip it (will be processed separately)
|
|
549
|
-
continue;
|
|
550
|
-
}
|
|
551
|
-
// Merge defaults: parent defaults + subdirectory overrides
|
|
552
|
-
subEntityConfig = {
|
|
553
|
-
...entityConfig,
|
|
554
|
-
...subDirConfig,
|
|
555
|
-
defaults: {
|
|
556
|
-
...entityConfig.defaults,
|
|
557
|
-
...(subDirConfig.defaults || {})
|
|
558
|
-
}
|
|
559
|
-
};
|
|
560
|
-
}
|
|
561
|
-
// Process subdirectory with merged config and cumulative ignore directories
|
|
562
|
-
const subResult = await this.processEntityDirectory(subDir, subEntityConfig, syncEngine, flags, syncConfig, fileBackupManager, cumulativeIgnoreDirectories);
|
|
563
|
-
result.created += subResult.created;
|
|
564
|
-
result.updated += subResult.updated;
|
|
565
|
-
result.unchanged += subResult.unchanged;
|
|
566
|
-
result.errors += subResult.errors;
|
|
567
|
-
}
|
|
568
|
-
}
|
|
569
|
-
return result;
|
|
570
|
-
}
|
|
571
|
-
    /**
     * Process every JSON metadata file in a single entity directory.
     *
     * For each file: backs it up (unless dry-run), parses it with line-number
     * tracking, expands templates, builds defaults, then pushes each record via
     * pushRecord(). Aggregates created/updated/unchanged/error counts into the
     * caller-owned `result` object (mutated in place).
     *
     * @param jsonFiles         File names (relative to entityDir) to process.
     * @param entityDir         Directory containing the files.
     * @param entityConfig      Entity configuration (supplies `entity` name and defaults).
     * @param syncEngine        Engine used for template/default/record processing.
     * @param flags             CLI flags; reads 'dry-run' and 'verbose'.
     * @param result            Mutable stats accumulator { created, updated, unchanged, errors }.
     * @param fileBackupManager Optional backup manager; files are backed up before modification.
     */
    async processJsonFiles(jsonFiles, entityDir, entityConfig, syncEngine, flags, result, fileBackupManager) {
        if (jsonFiles.length === 0) {
            return;
        }
        const spinner = (0, ora_classic_1.default)();
        spinner.start('Processing records');
        for (const file of jsonFiles) {
            try {
                const filePath = path_1.default.join(entityDir, file);
                // Backup the file before any modifications (unless dry-run)
                if (!flags['dry-run'] && fileBackupManager) {
                    await fileBackupManager.backupFile(filePath);
                }
                // Parse JSON with line number tracking (line numbers feed duplicate-warning output)
                const { content: fileContent, lineNumbers } = await this.parseJsonWithLineNumbers(filePath);
                // Process templates in the loaded content
                const processedContent = await syncEngine.processTemplates(fileContent, entityDir);
                // Check if the file contains a single record or an array of records
                const isArray = Array.isArray(processedContent);
                const records = isArray ? processedContent : [processedContent];
                // Build and process defaults (including lookups)
                const defaults = await syncEngine.buildDefaults(filePath, entityConfig);
                // Process each record in the file
                for (let i = 0; i < records.length; i++) {
                    const recordData = records[i];
                    // Process the record
                    const recordLineNumber = lineNumbers.get(i); // Get line number for this array index
                    const pushResult = await this.pushRecord(recordData, entityConfig.entity, path_1.default.dirname(filePath), file, defaults, syncEngine, flags['dry-run'], flags.verbose, isArray ? i : undefined, fileBackupManager, recordLineNumber);
                    if (!flags['dry-run']) {
                        // Don't count duplicates in stats
                        if (!pushResult.isDuplicate) {
                            if (pushResult.isNew) {
                                result.created++;
                            }
                            else if (pushResult.wasActuallyUpdated) {
                                result.updated++;
                            }
                            else {
                                result.unchanged++;
                            }
                        }
                        // Add related entity stats (children saved by pushRecord)
                        if (pushResult.relatedStats) {
                            result.created += pushResult.relatedStats.created;
                            result.updated += pushResult.relatedStats.updated;
                            result.unchanged += pushResult.relatedStats.unchanged;
                            // Debug logging for related entities
                            if (flags.verbose && pushResult.relatedStats.unchanged > 0) {
                                this.log(` Related entities: ${pushResult.relatedStats.unchanged} unchanged`);
                            }
                        }
                    }
                    spinner.text = `Processing records (${result.created + result.updated + result.unchanged + result.errors} processed)`;
                }
                // Write back the entire file if it's an array.
                // NOTE(review): `records` holds the template-processed content at this
                // point, so the write-back persists processed values — confirm this is
                // intended for templated array files.
                if (isArray && !flags['dry-run']) {
                    await fs_extra_1.default.writeJson(filePath, records, { spaces: 2 });
                }
            }
            catch (error) {
                // One bad file does not stop the run; the error count is checked later
                // by the caller, which rolls back all changes if any errors occurred.
                result.errors++;
                const errorMessage = error instanceof Error ? error.message : String(error);
                const fullErrorMessage = `Failed to process ${file}: ${errorMessage}`;
                this.errors.push(fullErrorMessage);
                this.error(fullErrorMessage, { exit: false });
                this.log(' ⚠️ This error will cause all changes to be rolled back at the end of processing');
            }
        }
        if (flags.verbose) {
            spinner.succeed(`Processed ${result.created + result.updated + result.unchanged} records from ${jsonFiles.length} files`);
        }
        else {
            spinner.stop();
        }
    }
|
|
646
|
-
    /**
     * Push a single record to the database: load it by primaryKey (or create a
     * new entity), apply defaults and fields, save, process related entities,
     * and write updated primaryKey/sync metadata back into `recordData`
     * (mutated in place; single-record files are also rewritten on disk here).
     *
     * @param recordData        Record from the JSON file: { primaryKey?, fields, relatedEntities?, sync? }.
     * @param entityName        Target entity name.
     * @param baseDir           Directory containing the source file.
     * @param fileName          Source file name (used for write-back and warnings).
     * @param defaults          Pre-resolved default field values.
     * @param syncEngine        Engine for entity load/create and field processing.
     * @param dryRun            When true, logs the intended action and returns without saving.
     * @param verbose           Extra per-field logging.
     * @param arrayIndex        Index within an array file, or undefined for single-record files.
     * @param fileBackupManager Passed through to related-entity processing.
     * @param lineNumber        1-based source line of this record, when known.
     * @returns {Promise<{isNew, wasActuallyUpdated, isDuplicate, relatedStats?}>}
     */
    async pushRecord(recordData, entityName, baseDir, fileName, defaults, syncEngine, dryRun, verbose = false, arrayIndex, fileBackupManager, lineNumber) {
        // Load or create entity
        let entity = null;
        let isNew = false;
        if (recordData.primaryKey) {
            entity = await syncEngine.loadEntity(entityName, recordData.primaryKey);
            // Warn if record has primaryKey but wasn't found
            if (!entity) {
                const pkDisplay = Object.entries(recordData.primaryKey)
                    .map(([key, value]) => `${key}=${value}`)
                    .join(', ');
                // Load sync config to check autoCreateMissingRecords setting
                const syncConfig = await (0, config_1.loadSyncConfig)(config_manager_1.configManager.getOriginalCwd());
                const autoCreate = syncConfig?.push?.autoCreateMissingRecords ?? false;
                if (!autoCreate) {
                    const fileRef = lineNumber ? `${fileName}:${lineNumber}` : fileName;
                    this.warn(`⚠️ Record not found: ${entityName} with primaryKey {${pkDisplay}} at ${fileRef}`);
                    this.warn(` To auto-create missing records, set push.autoCreateMissingRecords=true in .mj-sync.json`);
                    // Skip this record
                    return { isNew: false, wasActuallyUpdated: false, isDuplicate: false };
                }
                else {
                    if (verbose) {
                        this.log(` Auto-creating missing ${entityName} record with primaryKey {${pkDisplay}}`);
                    }
                }
            }
        }
        if (!entity) {
            // New record
            entity = await syncEngine.createEntityObject(entityName);
            entity.NewRecord();
            isNew = true;
            // Handle primary keys for new records: non-auto-increment keys either
            // take the user-specified value or get a generated UUID (GUID columns only)
            const entityInfo = syncEngine.getEntityInfo(entityName);
            if (entityInfo) {
                for (const pk of entityInfo.PrimaryKeys) {
                    if (!pk.AutoIncrement) {
                        // Check if we have a value in primaryKey object
                        if (recordData.primaryKey?.[pk.Name]) {
                            // User specified a primary key for new record, set it on entity directly
                            // Don't add to fields as it will be in primaryKey section
                            entity[pk.Name] = recordData.primaryKey[pk.Name];
                            if (verbose) {
                                this.log(` Using specified primary key ${pk.Name}: ${recordData.primaryKey[pk.Name]}`);
                            }
                        }
                        else if (pk.Type.toLowerCase() === 'uniqueidentifier' && !recordData.fields[pk.Name]) {
                            // Generate UUID for this primary key and set it on entity directly
                            const uuid = (0, global_1.uuidv4)();
                            // Don't add to fields as it will be in primaryKey section after save
                            if (verbose) {
                                this.log(` Generated UUID for primary key ${pk.Name}: ${uuid}`);
                            }
                            // Set the generated UUID on the entity
                            entity[pk.Name] = uuid;
                        }
                    }
                }
            }
        }
        // Apply defaults first (record fields below may override them)
        for (const [field, value] of Object.entries(defaults)) {
            if (field in entity) {
                entity[field] = value;
            }
        }
        // Apply record fields
        for (const [field, value] of Object.entries(recordData.fields)) {
            if (field in entity) {
                try {
                    const processedValue = await syncEngine.processFieldValue(value, baseDir, null, null);
                    if (verbose) {
                        this.log(` Setting ${field}: ${this.formatFieldValue(value)} -> ${this.formatFieldValue(processedValue)}`);
                    }
                    entity[field] = processedValue;
                }
                catch (error) {
                    throw new Error(`Failed to process field '${field}': ${error}`);
                }
            }
            else {
                // Unknown fields are warned about and skipped, not treated as errors
                this.warn(`Field '${field}' does not exist on entity '${entityName}'`);
            }
        }
        if (dryRun) {
            this.log(`Would ${isNew ? 'create' : 'update'} ${entityName} record`);
            return { isNew, wasActuallyUpdated: true, isDuplicate: false, relatedStats: undefined };
        }
        // Check for duplicate processing (but only for existing records that were loaded);
        // new records are tracked after save, once their primary key exists
        let isDuplicate = false;
        if (!isNew && entity) {
            const fullFilePath = path_1.default.join(baseDir, fileName);
            isDuplicate = this.checkAndTrackRecord(entityName, entity, fullFilePath, arrayIndex, lineNumber);
        }
        // Check if the record is dirty before saving, so "unchanged" records can
        // be counted separately and a change summary can be printed
        let wasActuallyUpdated = false;
        if (!isNew && entity.Dirty) {
            // Record is dirty, get the changes
            const changes = entity.GetChangesSinceLastSave();
            const changeKeys = Object.keys(changes);
            if (changeKeys.length > 0) {
                wasActuallyUpdated = true;
                // Get primary key info for display
                const entityInfo = syncEngine.getEntityInfo(entityName);
                const primaryKeyDisplay = [];
                if (entityInfo) {
                    for (const pk of entityInfo.PrimaryKeys) {
                        primaryKeyDisplay.push(`${pk.Name}: ${entity.Get(pk.Name)}`);
                    }
                }
                this.log(''); // Add newline before update output
                this.log(`📝 Updating ${entityName} record:`);
                if (primaryKeyDisplay.length > 0) {
                    this.log(` Primary Key: ${primaryKeyDisplay.join(', ')}`);
                }
                this.log(` Changes:`);
                for (const fieldName of changeKeys) {
                    const field = entity.GetFieldByName(fieldName);
                    const oldValue = field ? field.OldValue : undefined;
                    const newValue = changes[fieldName];
                    this.log(` ${fieldName}: ${this.formatFieldValue(oldValue)} → ${this.formatFieldValue(newValue)}`);
                }
            }
        }
        else if (isNew) {
            wasActuallyUpdated = true;
        }
        // Save the record (Save is called even when nothing changed; the entity
        // layer decides whether a write actually happens)
        const saved = await entity.Save();
        if (!saved) {
            const message = entity.LatestResult?.Message;
            if (message) {
                throw new Error(`Failed to save record: ${message}`);
            }
            const errors = entity.LatestResult?.Errors?.map(err => typeof err === 'string' ? err : (err?.message || JSON.stringify(err)))?.join(', ') || 'Unknown error';
            throw new Error(`Failed to save record: ${errors}`);
        }
        // Process related entities after saving parent, so children can reference
        // the parent's (possibly newly generated) primary key
        let relatedStats;
        if (recordData.relatedEntities && !dryRun) {
            const fullFilePath = path_1.default.join(baseDir, fileName);
            relatedStats = await this.processRelatedEntities(recordData.relatedEntities, entity, entity, // root is same as parent for top level
            baseDir, syncEngine, verbose, fileBackupManager, 1, // indentLevel
            fullFilePath, arrayIndex);
        }
        // Update the local file with new primary key if created
        if (isNew) {
            const entityInfo = syncEngine.getEntityInfo(entityName);
            if (entityInfo) {
                const newPrimaryKey = {};
                for (const pk of entityInfo.PrimaryKeys) {
                    newPrimaryKey[pk.Name] = entity.Get(pk.Name);
                }
                recordData.primaryKey = newPrimaryKey;
            }
            // Track the new record now that we have its primary key
            const fullFilePath = path_1.default.join(baseDir, fileName);
            this.checkAndTrackRecord(entityName, entity, fullFilePath, arrayIndex, lineNumber);
        }
        // Always update sync metadata
        // This ensures related entities are persisted with their metadata
        recordData.sync = {
            lastModified: new Date().toISOString(),
            checksum: syncEngine.calculateChecksum(recordData.fields)
        };
        // Write back to file only if it's a single record (not part of an array)
        // Array records are written back in bulk after all records are processed
        if (arrayIndex === undefined) {
            const filePath = path_1.default.join(baseDir, fileName);
            await fs_extra_1.default.writeJson(filePath, recordData, { spaces: 2 });
        }
        return { isNew, wasActuallyUpdated, isDuplicate, relatedStats };
    }
|
|
820
|
-
    /**
     * Recursively save the related-entity records nested under a parent record.
     *
     * Mirrors pushRecord() for each child: load by primaryKey or create new,
     * apply fields (with @parent/@root context via processFieldValue), detect
     * duplicates, save, then recurse into the child's own relatedEntities.
     * Mutates each relatedRecord in place with its saved primaryKey and sync
     * metadata; the caller persists those changes to disk.
     *
     * @param relatedEntities   Map of entity name -> array of child record objects.
     * @param parentEntity      Immediate parent entity (context for field templates).
     * @param rootEntity        Top-level ancestor entity (context for field templates).
     * @param baseDir           Directory of the source file.
     * @param syncEngine        Engine for entity load/create and field processing.
     * @param verbose           Extra per-field logging.
     * @param fileBackupManager Passed through to nested recursion.
     * @param indentLevel       Log indentation depth (one space per level).
     * @param parentFilePath    Path of the file that defines these children (for warnings).
     * @param parentArrayIndex  Parent's array index, used for duplicate-location reporting.
     * @returns {Promise<{created: number, updated: number, unchanged: number}>} aggregated stats.
     * @throws Error wrapping any per-record failure (aborts the whole batch).
     */
    async processRelatedEntities(relatedEntities, parentEntity, rootEntity, baseDir, syncEngine, verbose = false, fileBackupManager, indentLevel = 1, parentFilePath, parentArrayIndex) {
        const indent = ' '.repeat(indentLevel);
        const stats = { created: 0, updated: 0, unchanged: 0 };
        for (const [entityName, records] of Object.entries(relatedEntities)) {
            if (verbose) {
                this.log(`${indent}↳ Processing ${records.length} related ${entityName} records`);
            }
            for (const relatedRecord of records) {
                try {
                    // Load or create entity
                    let entity = null;
                    let isNew = false;
                    if (relatedRecord.primaryKey) {
                        entity = await syncEngine.loadEntity(entityName, relatedRecord.primaryKey);
                        // Warn if record has primaryKey but wasn't found
                        if (!entity) {
                            const pkDisplay = Object.entries(relatedRecord.primaryKey)
                                .map(([key, value]) => `${key}=${value}`)
                                .join(', ');
                            // Load sync config to check autoCreateMissingRecords setting
                            const syncConfig = await (0, config_1.loadSyncConfig)(config_manager_1.configManager.getOriginalCwd());
                            const autoCreate = syncConfig?.push?.autoCreateMissingRecords ?? false;
                            if (!autoCreate) {
                                const fileRef = parentFilePath ? path_1.default.relative(config_manager_1.configManager.getOriginalCwd(), parentFilePath) : 'unknown';
                                this.warn(`${indent}⚠️ Related record not found: ${entityName} with primaryKey {${pkDisplay}} at ${fileRef}`);
                                this.warn(`${indent} To auto-create missing records, set push.autoCreateMissingRecords=true in .mj-sync.json`);
                                // Skip this record
                                continue;
                            }
                            else {
                                if (verbose) {
                                    this.log(`${indent} Auto-creating missing related ${entityName} record with primaryKey {${pkDisplay}}`);
                                }
                            }
                        }
                    }
                    if (!entity) {
                        entity = await syncEngine.createEntityObject(entityName);
                        entity.NewRecord();
                        isNew = true;
                        // Handle primary keys for new related entity records: keep the
                        // user-specified value or generate a UUID for GUID columns
                        const entityInfo = syncEngine.getEntityInfo(entityName);
                        if (entityInfo) {
                            for (const pk of entityInfo.PrimaryKeys) {
                                if (!pk.AutoIncrement) {
                                    // Check if we have a value in primaryKey object
                                    if (relatedRecord.primaryKey?.[pk.Name]) {
                                        // User specified a primary key for new record, set it on entity directly
                                        // Don't add to fields as it will be in primaryKey section
                                        entity[pk.Name] = relatedRecord.primaryKey[pk.Name];
                                        if (verbose) {
                                            this.log(`${indent} Using specified primary key ${pk.Name}: ${relatedRecord.primaryKey[pk.Name]}`);
                                        }
                                    }
                                    else if (pk.Type.toLowerCase() === 'uniqueidentifier' && !relatedRecord.fields[pk.Name]) {
                                        // Generate UUID for this primary key and set it on entity directly
                                        const uuid = (0, global_1.uuidv4)();
                                        // Don't add to fields as it will be in primaryKey section after save
                                        entity[pk.Name] = uuid;
                                        if (verbose) {
                                            this.log(`${indent} Generated UUID for primary key ${pk.Name}: ${uuid}`);
                                        }
                                    }
                                }
                            }
                        }
                    }
                    // Apply fields with parent/root context
                    for (const [field, value] of Object.entries(relatedRecord.fields)) {
                        if (field in entity) {
                            try {
                                const processedValue = await syncEngine.processFieldValue(value, baseDir, parentEntity, rootEntity);
                                if (verbose) {
                                    this.log(`${indent} Setting ${field}: ${this.formatFieldValue(value)} -> ${this.formatFieldValue(processedValue)}`);
                                }
                                entity[field] = processedValue;
                            }
                            catch (error) {
                                throw new Error(`Failed to process field '${field}' in ${entityName}: ${error}`);
                            }
                        }
                        else {
                            this.warn(`${indent} Field '${field}' does not exist on entity '${entityName}'`);
                        }
                    }
                    // Check for duplicate processing (but only for existing records that were loaded)
                    let isDuplicate = false;
                    if (!isNew && entity) {
                        // Use parent file path for related entities since they're defined in the parent's file
                        const relatedFilePath = parentFilePath || path_1.default.join(baseDir, 'unknown');
                        isDuplicate = this.checkAndTrackRecord(entityName, entity, relatedFilePath, parentArrayIndex);
                    }
                    // Check if the record is dirty before saving
                    let wasActuallyUpdated = false;
                    if (!isNew && entity.Dirty) {
                        // Record is dirty, get the changes
                        const changes = entity.GetChangesSinceLastSave();
                        const changeKeys = Object.keys(changes);
                        if (changeKeys.length > 0) {
                            wasActuallyUpdated = true;
                            // Get primary key info for display
                            const entityInfo = syncEngine.getEntityInfo(entityName);
                            const primaryKeyDisplay = [];
                            if (entityInfo) {
                                for (const pk of entityInfo.PrimaryKeys) {
                                    primaryKeyDisplay.push(`${pk.Name}: ${entity.Get(pk.Name)}`);
                                }
                            }
                            this.log(''); // Add newline before update output
                            this.log(`${indent}📝 Updating related ${entityName} record:`);
                            if (primaryKeyDisplay.length > 0) {
                                this.log(`${indent} Primary Key: ${primaryKeyDisplay.join(', ')}`);
                            }
                            this.log(`${indent} Changes:`);
                            for (const fieldName of changeKeys) {
                                const field = entity.GetFieldByName(fieldName);
                                const oldValue = field ? field.OldValue : undefined;
                                const newValue = changes[fieldName];
                                this.log(`${indent} ${fieldName}: ${this.formatFieldValue(oldValue)} → ${this.formatFieldValue(newValue)}`);
                            }
                        }
                    }
                    else if (isNew) {
                        wasActuallyUpdated = true;
                    }
                    // Save the related entity
                    const saved = await entity.Save();
                    if (!saved) {
                        const message = entity.LatestResult?.Message;
                        if (message) {
                            throw new Error(`Failed to save related ${entityName}: ${message}`);
                        }
                        const errors = entity.LatestResult?.Errors?.map(err => typeof err === 'string' ? err : (err?.message || JSON.stringify(err)))?.join(', ') || 'Unknown error';
                        throw new Error(`Failed to save related ${entityName}: ${errors}`);
                    }
                    // Update stats - don't count duplicates
                    if (!isDuplicate) {
                        if (isNew) {
                            stats.created++;
                        }
                        else if (wasActuallyUpdated) {
                            stats.updated++;
                        }
                        else {
                            stats.unchanged++;
                        }
                    }
                    if (verbose && wasActuallyUpdated) {
                        this.log(`${indent} ✓ ${isNew ? 'Created' : 'Updated'} ${entityName} record`);
                    }
                    else if (verbose && !wasActuallyUpdated) {
                        this.log(`${indent} - No changes to ${entityName} record`);
                    }
                    // Update the related record with primary key and sync metadata
                    const entityInfo = syncEngine.getEntityInfo(entityName);
                    if (entityInfo) {
                        // Update primary key if new
                        if (isNew) {
                            relatedRecord.primaryKey = {};
                            for (const pk of entityInfo.PrimaryKeys) {
                                relatedRecord.primaryKey[pk.Name] = entity.Get(pk.Name);
                            }
                            // Track the new related entity now that we have its primary key
                            const relatedFilePath = parentFilePath || path_1.default.join(baseDir, 'unknown');
                            this.checkAndTrackRecord(entityName, entity, relatedFilePath, parentArrayIndex);
                        }
                        // Always update sync metadata
                        relatedRecord.sync = {
                            lastModified: new Date().toISOString(),
                            checksum: syncEngine.calculateChecksum(relatedRecord.fields)
                        };
                    }
                    // Process nested related entities if any (rootEntity stays the same;
                    // this record becomes the new parent)
                    if (relatedRecord.relatedEntities) {
                        const nestedStats = await this.processRelatedEntities(relatedRecord.relatedEntities, entity, rootEntity, baseDir, syncEngine, verbose, fileBackupManager, indentLevel + 1, parentFilePath, parentArrayIndex);
                        // Accumulate nested stats
                        stats.created += nestedStats.created;
                        stats.updated += nestedStats.updated;
                        stats.unchanged += nestedStats.unchanged;
                    }
                }
                catch (error) {
                    throw new Error(`Failed to process related ${entityName}: ${error}`);
                }
            }
        }
        return stats;
    }
|
|
1008
|
-
/**
|
|
1009
|
-
* Generate a unique tracking key for a record based on entity name and primary key values
|
|
1010
|
-
*/
|
|
1011
|
-
generateRecordKey(entityName, entity) {
|
|
1012
|
-
const entityInfo = entity.EntityInfo;
|
|
1013
|
-
const primaryKeyValues = [];
|
|
1014
|
-
if (entityInfo && entityInfo.PrimaryKeys.length > 0) {
|
|
1015
|
-
for (const pk of entityInfo.PrimaryKeys) {
|
|
1016
|
-
const value = entity.Get(pk.Name);
|
|
1017
|
-
primaryKeyValues.push(`${pk.Name}:${value}`);
|
|
1018
|
-
}
|
|
1019
|
-
}
|
|
1020
|
-
return `${entityName}|${primaryKeyValues.join('|')}`;
|
|
1021
|
-
}
|
|
1022
|
-
    /**
     * Check if a record has already been processed during this push run and
     * warn (with clickable file:line locations) if it is a duplicate.
     *
     * Uses generateRecordKey() (entity name + primary-key values) as the map
     * key in this.processedRecords. First sighting is recorded with its source
     * location; later sightings warn but still proceed.
     *
     * @param entityName Entity name used in the tracking key and warnings.
     * @param entity     Loaded entity (primary keys must be populated).
     * @param filePath   Source file of this occurrence.
     * @param arrayIndex Index within an array file, or undefined for single records.
     * @param lineNumber 1-based source line, when known.
     * @returns {boolean} true if this record was already processed (duplicate).
     */
    checkAndTrackRecord(entityName, entity, filePath, arrayIndex, lineNumber) {
        const recordKey = this.generateRecordKey(entityName, entity);
        const existing = this.processedRecords.get(recordKey);
        if (existing) {
            const primaryKeyDisplay = entity.EntityInfo?.PrimaryKeys
                .map(pk => `${pk.Name}: ${entity.Get(pk.Name)}`)
                .join(', ') || 'unknown';
            // Format file location with clickable link for VSCode
            // Create maps with just the line numbers we have.
            // NOTE(review): the map is keyed with `arrayIndex || 0`, but
            // formatFileLocation only consults the map when arrayIndex is
            // defined — so a known lineNumber for a single-record file
            // (arrayIndex === undefined) is silently dropped. Confirm intended.
            const currentLineMap = lineNumber ? new Map([[arrayIndex || 0, lineNumber]]) : undefined;
            const originalLineMap = existing.lineNumber ? new Map([[existing.arrayIndex || 0, existing.lineNumber]]) : undefined;
            const currentLocation = this.formatFileLocation(filePath, arrayIndex, currentLineMap);
            const originalLocation = this.formatFileLocation(existing.filePath, existing.arrayIndex, originalLineMap);
            this.warn(`⚠️ Duplicate record detected for ${entityName} (${primaryKeyDisplay})`);
            this.warn(` Current location: ${currentLocation}`);
            this.warn(` Original location: ${originalLocation}`);
            this.warn(` The duplicate update will proceed, but you should review your data for unintended duplicates.`);
            return true; // is duplicate
        }
        // Track the record with its source location
        this.processedRecords.set(recordKey, {
            filePath: filePath || 'unknown',
            arrayIndex,
            lineNumber
        });
        return false; // not duplicate
    }
|
|
1052
|
-
/**
|
|
1053
|
-
* Format field value for console display
|
|
1054
|
-
*/
|
|
1055
|
-
formatFieldValue(value, maxLength = 50) {
|
|
1056
|
-
// Convert value to string representation
|
|
1057
|
-
let strValue = JSON.stringify(value);
|
|
1058
|
-
// Trim the string
|
|
1059
|
-
strValue = strValue.trim();
|
|
1060
|
-
// If it's longer than maxLength, truncate and add ellipsis
|
|
1061
|
-
if (strValue.length > maxLength) {
|
|
1062
|
-
return strValue.substring(0, maxLength) + '...';
|
|
1063
|
-
}
|
|
1064
|
-
return strValue;
|
|
1065
|
-
}
|
|
1066
|
-
    /**
     * Parse a JSON file and, for top-level array files, record the 1-based
     * line number where each array element's object begins.
     *
     * The line scan is a lightweight heuristic, not a full JSON tokenizer:
     * it tracks string boundaries and brace depth character by character and
     * records an element only when an opening '{' at depth 1 is also the first
     * non-whitespace character of its line. Elements written inline (e.g.
     * `[{...}, {...}]` on one line) or not starting their own line are missed;
     * consumers fall back to an estimate in that case.
     *
     * NOTE(review): the escape check `prevChar !== '\\'` misreads an escaped
     * backslash before a quote (`"...\\\\"`), flipping the in-string state —
     * acceptable for this best-effort map, but worth confirming.
     *
     * @param filePath Absolute or relative path of the JSON file.
     * @returns {Promise<{content: any, lineNumbers: Map<number, number>}>}
     *          Parsed content plus array-index -> line-number map (empty for
     *          non-array files).
     * @throws SyntaxError if the file is not valid JSON.
     */
    async parseJsonWithLineNumbers(filePath) {
        const fileText = await fs_extra_1.default.readFile(filePath, 'utf-8');
        const lines = fileText.split('\n');
        const lineNumbers = new Map();
        // Parse the JSON
        const content = JSON.parse(fileText);
        // If it's an array, try to find where each element starts
        if (Array.isArray(content)) {
            let inString = false;
            let bracketDepth = 0;
            let currentIndex = -1;
            for (let lineNum = 0; lineNum < lines.length; lineNum++) {
                const line = lines[lineNum];
                // Simple tracking of string boundaries and bracket depth
                for (let i = 0; i < line.length; i++) {
                    const char = line[i];
                    const prevChar = i > 0 ? line[i - 1] : '';
                    if (char === '"' && prevChar !== '\\') {
                        inString = !inString;
                    }
                    if (!inString) {
                        if (char === '{') {
                            bracketDepth++;
                            // If we're at depth 1 in the main array, this is a new object
                            if (bracketDepth === 1 && line.trim().startsWith('{')) {
                                currentIndex++;
                                lineNumbers.set(currentIndex, lineNum + 1); // 1-based line numbers
                            }
                        }
                        else if (char === '}') {
                            bracketDepth--;
                        }
                    }
                }
            }
        }
        return { content, lineNumbers };
    }
|
|
1107
|
-
    /**
     * Format a source location as an absolute "path:line" string, which VSCode
     * renders as a clickable terminal link.
     *
     * Line resolution, in order: the tracked line for `arrayIndex` from
     * `lineNumbers` (built by parseJsonWithLineNumbers); otherwise a rough
     * estimate of `2 + arrayIndex * 15` (assumes ~15 lines per record — a
     * heuristic only, may point at the wrong line); otherwise line 1.
     *
     * @param filePath    Source file path, or 'unknown'/undefined when not tracked.
     * @param arrayIndex  Element index within an array file, or undefined.
     * @param lineNumbers Optional array-index -> 1-based line-number map.
     * @returns {string} "absolutePath:line", or 'unknown' when no path is available.
     */
    formatFileLocation(filePath, arrayIndex, lineNumbers) {
        if (!filePath || filePath === 'unknown') {
            return 'unknown';
        }
        // Get absolute path for better VSCode integration
        const absolutePath = path_1.default.resolve(filePath);
        // Try to get actual line number from our tracking
        let lineNumber = 1;
        if (arrayIndex !== undefined && lineNumbers && lineNumbers.has(arrayIndex)) {
            lineNumber = lineNumbers.get(arrayIndex);
        }
        else if (arrayIndex !== undefined) {
            // Fallback estimation if we don't have actual line numbers
            lineNumber = 2 + (arrayIndex * 15);
        }
        // Create clickable file path for VSCode - format: file:line
        // VSCode will make this clickable in the terminal
        return `${absolutePath}:${lineNumber}`;
    }
|
|
1129
|
-
}
|
|
1130
|
-
exports.default = Push;
|
|
1131
|
-
//# sourceMappingURL=index.js.map
|