wfu-migrate 0.1.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110) hide show
  1. package/README.md +199 -0
  2. package/dist/commands/cleanup.d.ts +10 -0
  3. package/dist/commands/cleanup.d.ts.map +1 -0
  4. package/dist/commands/cleanup.js +231 -0
  5. package/dist/commands/cleanup.js.map +1 -0
  6. package/dist/commands/config.d.ts +15 -0
  7. package/dist/commands/config.d.ts.map +1 -0
  8. package/dist/commands/config.js +285 -0
  9. package/dist/commands/config.js.map +1 -0
  10. package/dist/commands/doctor.d.ts +15 -0
  11. package/dist/commands/doctor.d.ts.map +1 -0
  12. package/dist/commands/doctor.js +234 -0
  13. package/dist/commands/doctor.js.map +1 -0
  14. package/dist/commands/env-migrate.d.ts +37 -0
  15. package/dist/commands/env-migrate.d.ts.map +1 -0
  16. package/dist/commands/env-migrate.js +703 -0
  17. package/dist/commands/env-migrate.js.map +1 -0
  18. package/dist/commands/migrate.d.ts +19 -0
  19. package/dist/commands/migrate.d.ts.map +1 -0
  20. package/dist/commands/migrate.js +374 -0
  21. package/dist/commands/migrate.js.map +1 -0
  22. package/dist/index.d.ts +15 -0
  23. package/dist/index.d.ts.map +1 -0
  24. package/dist/index.js +31 -0
  25. package/dist/index.js.map +1 -0
  26. package/dist/lib/compress.d.ts +86 -0
  27. package/dist/lib/compress.d.ts.map +1 -0
  28. package/dist/lib/compress.js +203 -0
  29. package/dist/lib/compress.js.map +1 -0
  30. package/dist/lib/config-manager.d.ts +34 -0
  31. package/dist/lib/config-manager.d.ts.map +1 -0
  32. package/dist/lib/config-manager.js +332 -0
  33. package/dist/lib/config-manager.js.map +1 -0
  34. package/dist/lib/database.d.ts +460 -0
  35. package/dist/lib/database.d.ts.map +1 -0
  36. package/dist/lib/database.js +1267 -0
  37. package/dist/lib/database.js.map +1 -0
  38. package/dist/lib/docker.d.ts +240 -0
  39. package/dist/lib/docker.d.ts.map +1 -0
  40. package/dist/lib/docker.js +653 -0
  41. package/dist/lib/docker.js.map +1 -0
  42. package/dist/lib/health.d.ts +99 -0
  43. package/dist/lib/health.d.ts.map +1 -0
  44. package/dist/lib/health.js +139 -0
  45. package/dist/lib/health.js.map +1 -0
  46. package/dist/lib/logger.d.ts +105 -0
  47. package/dist/lib/logger.d.ts.map +1 -0
  48. package/dist/lib/logger.js +259 -0
  49. package/dist/lib/logger.js.map +1 -0
  50. package/dist/lib/migration-engine.d.ts +130 -0
  51. package/dist/lib/migration-engine.d.ts.map +1 -0
  52. package/dist/lib/migration-engine.js +1175 -0
  53. package/dist/lib/migration-engine.js.map +1 -0
  54. package/dist/lib/parallel.d.ts +85 -0
  55. package/dist/lib/parallel.d.ts.map +1 -0
  56. package/dist/lib/parallel.js +138 -0
  57. package/dist/lib/parallel.js.map +1 -0
  58. package/dist/lib/progress.d.ts +101 -0
  59. package/dist/lib/progress.d.ts.map +1 -0
  60. package/dist/lib/progress.js +179 -0
  61. package/dist/lib/progress.js.map +1 -0
  62. package/dist/lib/prompt.d.ts +71 -0
  63. package/dist/lib/prompt.d.ts.map +1 -0
  64. package/dist/lib/prompt.js +116 -0
  65. package/dist/lib/prompt.js.map +1 -0
  66. package/dist/lib/retry.d.ts +61 -0
  67. package/dist/lib/retry.d.ts.map +1 -0
  68. package/dist/lib/retry.js +136 -0
  69. package/dist/lib/retry.js.map +1 -0
  70. package/dist/lib/s3.d.ts +238 -0
  71. package/dist/lib/s3.d.ts.map +1 -0
  72. package/dist/lib/s3.js +507 -0
  73. package/dist/lib/s3.js.map +1 -0
  74. package/dist/lib/serialized.d.ts +114 -0
  75. package/dist/lib/serialized.d.ts.map +1 -0
  76. package/dist/lib/serialized.js +236 -0
  77. package/dist/lib/serialized.js.map +1 -0
  78. package/dist/lib/shutdown.d.ts +78 -0
  79. package/dist/lib/shutdown.d.ts.map +1 -0
  80. package/dist/lib/shutdown.js +153 -0
  81. package/dist/lib/shutdown.js.map +1 -0
  82. package/dist/lib/site-filter.d.ts +98 -0
  83. package/dist/lib/site-filter.d.ts.map +1 -0
  84. package/dist/lib/site-filter.js +213 -0
  85. package/dist/lib/site-filter.js.map +1 -0
  86. package/dist/lib/spinner.d.ts +174 -0
  87. package/dist/lib/spinner.d.ts.map +1 -0
  88. package/dist/lib/spinner.js +273 -0
  89. package/dist/lib/spinner.js.map +1 -0
  90. package/dist/lib/state.d.ts +363 -0
  91. package/dist/lib/state.d.ts.map +1 -0
  92. package/dist/lib/state.js +905 -0
  93. package/dist/lib/state.js.map +1 -0
  94. package/dist/lib/system.d.ts +227 -0
  95. package/dist/lib/system.d.ts.map +1 -0
  96. package/dist/lib/system.js +636 -0
  97. package/dist/lib/system.js.map +1 -0
  98. package/dist/lib/url-patterns.d.ts +46 -0
  99. package/dist/lib/url-patterns.d.ts.map +1 -0
  100. package/dist/lib/url-patterns.js +259 -0
  101. package/dist/lib/url-patterns.js.map +1 -0
  102. package/dist/lib/wp-tables.d.ts +139 -0
  103. package/dist/lib/wp-tables.d.ts.map +1 -0
  104. package/dist/lib/wp-tables.js +296 -0
  105. package/dist/lib/wp-tables.js.map +1 -0
  106. package/dist/types/index.d.ts +96 -0
  107. package/dist/types/index.d.ts.map +1 -0
  108. package/dist/types/index.js +46 -0
  109. package/dist/types/index.js.map +1 -0
  110. package/package.json +58 -0
@@ -0,0 +1,1175 @@
1
+ /**
2
+ * Migration Engine
3
+ *
4
+ * Implements the core migration workflow (Steps 3-9):
5
+ * - Step 3: Import to migration database
6
+ * - Step 4: WP-CLI search-replace via Docker
7
+ * - Step 5: Export transformed tables
8
+ * - Step 6: Backup target tables
9
+ * - Step 7: Import to target
10
+ * - Step 8: S3 file sync
11
+ * - Step 9: Archive and cleanup
12
+ */
13
+ import { spawn } from 'child_process';
14
+ import { existsSync, writeFileSync, mkdirSync, statSync } from 'fs';
15
+ import { rm } from 'fs/promises';
16
+ import { join } from 'path';
17
+ import { mysqlImport, mysqldump, cleanupSiteTables, discoverSiteTables, discoverNetworkTables, countSiteTables, } from './database.js';
18
+ import { getUrlReplacementPatterns, parseCustomDomain } from './url-patterns.js';
19
+ import { compressFile } from './compress.js';
20
+ import { s3Sync, s3Upload } from './s3.js';
21
+ import { logger } from './logger.js';
22
+ import { SITE_TABLES, getSiteTablePrefix } from './wp-tables.js';
23
/**
 * Network tables to skip during search-replace (shared across all sites).
 * These are WordPress multisite network tables that should not be modified
 * during individual site migrations.
 *
 * NOTE(review): this list assumes the default `wp_` base prefix; verify it
 * matches the installation's actual prefix if that is ever configurable.
 */
const SKIP_NETWORK_TABLES = [
    'wp_blogmeta', 'wp_blogs', 'wp_registration_log', 'wp_signups',
    'wp_site', 'wp_sitemeta', 'wp_usermeta', 'wp_users',
];
32
/**
 * Build the table list for WP-CLI's `--skip-tables=` flag.
 * WP-CLI uses the --all-tables --skip-tables= approach, not a hardcoded
 * include list.
 *
 * - homepage (site 1): skip only the shared network tables.
 * - subsite: additionally skip the main site's tables (wp_posts, ...) so a
 *   subsite migration can never touch the main site's data.
 *
 * @param {boolean} homepage - true when migrating the main site (site 1)
 * @returns {string[]} table names to exclude from search-replace
 */
function getSkipTables(homepage) {
    const skip = [...SKIP_NETWORK_TABLES];
    if (!homepage) {
        const mainPrefix = getSiteTablePrefix(1);
        for (const table of SITE_TABLES) {
            skip.push(`${mainPrefix}${table}`);
        }
    }
    return skip;
}
47
/**
 * Download WordPress core files into `wpDir` via the `wordpress:cli` Docker
 * image (`wp core download --skip-content`). Required for search-replace to
 * work properly.
 *
 * A `.wp-cli-cache` directory is created inside `wpDir` and mounted into the
 * container so repeated downloads reuse the WP-CLI cache.
 *
 * @param {string} wpDir - host directory mounted as /var/www/html
 * @param {boolean} verbose - emit a debug log line before downloading
 * @returns {Promise<{success: boolean, error?: string}>} never rejects
 */
async function downloadWordPressCore(wpDir, verbose) {
    // Run the container as the invoking user so the downloaded files are not
    // root-owned; fall back to 1000:1000 where getuid/getgid are unavailable.
    const uid = process.getuid?.() ?? 1000;
    const gid = process.getgid?.() ?? 1000;

    const cacheDir = join(wpDir, '.wp-cli-cache');
    if (!existsSync(cacheDir)) {
        mkdirSync(cacheDir, { recursive: true });
    }

    const dockerArgs = [
        'run',
        '--rm',
        '-v', `${wpDir}:/var/www/html`,
        '-v', `${cacheDir}:/var/www/.wp-cli/cache`,
        '-e', 'WP_CLI_CACHE_DIR=/var/www/.wp-cli/cache',
        '-u', `${uid}:${gid}`,
        'wordpress:cli',
        'wp', 'core', 'download',
        '--path=/var/www/html',
        '--skip-content',
    ];
    if (verbose) {
        logger.debug('Downloading WordPress core files...');
    }

    return new Promise((resolve) => {
        const child = spawn('docker', dockerArgs, { stdio: ['pipe', 'pipe', 'pipe'] });
        let errorText = '';
        child.stderr.on('data', (chunk) => { errorText += chunk.toString(); });
        child.on('error', (err) => {
            // spawn itself failed (e.g. docker not installed)
            resolve({ success: false, error: err.message });
        });
        child.on('close', (exitCode) => {
            if (exitCode === 0) {
                resolve({ success: true });
            }
            else {
                resolve({ success: false, error: errorText || 'Download failed' });
            }
        });
    });
}
90
/**
 * Map a database host to one reachable from inside a Docker container.
 * On macOS/Windows, a container cannot reach the host via localhost, so
 * loopback addresses are rewritten to the special name host.docker.internal.
 *
 * @param {string} host - host from the DB config
 * @returns {string} host name usable inside the container
 */
function getDockerDbHost(host) {
    const loopbackAliases = new Set(['localhost', '127.0.0.1']);
    return loopbackAliases.has(host) ? 'host.docker.internal' : host;
}
101
/**
 * Create wp-config.php for WP-CLI to connect to the database.
 * The table_prefix must match the site being migrated.
 *
 * Credential values are escaped for PHP single-quoted string context: a
 * database name, user, or password containing ' or \ previously produced a
 * syntactically invalid wp-config.php, making every WP-CLI call fail with an
 * opaque PHP parse error.
 *
 * @param {string} wpDir - directory where wp-config.php is written
 * @param {{host: string, port: number|string, database: string, user: string, password: string}} dbConfig
 * @param {number} siteId - multisite blog id; 1 selects the base `wp_` prefix
 */
function createWpConfig(wpDir, dbConfig, siteId) {
    // In PHP single-quoted strings only backslash and single quote are special.
    const phpQuote = (value) => String(value).replace(/\\/g, '\\\\').replace(/'/g, "\\'");
    const tablePrefix = siteId === 1 ? 'wp_' : `wp_${siteId}_`;
    const dockerHost = getDockerDbHost(dbConfig.host);
    const wpConfigContent = `<?php
define('DB_NAME', '${phpQuote(dbConfig.database)}');
define('DB_USER', '${phpQuote(dbConfig.user)}');
define('DB_PASSWORD', '${phpQuote(dbConfig.password)}');
define('DB_HOST', '${phpQuote(dockerHost)}:${dbConfig.port}');
define('DB_CHARSET', 'utf8mb4');
define('DB_COLLATE', '');
$table_prefix = '${tablePrefix}';
define('WP_DEBUG', false);
if ( ! defined( 'ABSPATH' ) ) {
define( 'ABSPATH', __DIR__ . '/' );
}
require_once ABSPATH . 'wp-settings.php';
`;
    writeFileSync(join(wpDir, 'wp-config.php'), wpConfigContent);
}
124
/**
 * Execute WP-CLI search-replace via Docker.
 * This is the critical step that handles serialized PHP data correctly.
 *
 * Workflow: download WordPress core into a per-run cache dir, write a
 * wp-config.php pointing at the migration database, then run one
 * `wp search-replace` container invocation per URL pattern, accumulating the
 * replacement counts parsed from WP-CLI's summary line.
 *
 * @param {object} migrationDbConfig - connection info for the migration DB
 * @param {{search: string, replace: string}[]} patterns - applied in order
 * @param {boolean} homepage - main site? (controls the skip-table list)
 * @param {number} siteId - multisite blog id (used for logs and table prefix)
 * @param {string} workDir - scratch directory; `wp-cache` is created inside it
 * @param {boolean} verbose - stream filtered WP-CLI output to debug logs
 * @returns {Promise<{success: boolean, replacements: number, error?: string}>}
 *          never rejects; stops at the first failing pattern, reporting the
 *          replacements made so far
 */
async function runWpCliSearchReplace(migrationDbConfig, patterns, homepage, siteId, workDir, verbose) {
    const skipTables = getSkipTables(homepage);
    const skipTablesArg = skipTables.join(',');
    let totalReplacements = 0;
    // Reusable WordPress install for this run; WP-CLI needs core files present.
    const wpCacheDir = join(workDir, 'wp-cache');
    if (!existsSync(wpCacheDir)) {
        mkdirSync(wpCacheDir, { recursive: true });
    }
    logger.debug(`Site ${siteId}: Downloading WordPress core files...`);
    const downloadResult = await downloadWordPressCore(wpCacheDir, verbose);
    if (!downloadResult.success) {
        return {
            success: false,
            error: `Failed to download WordPress core: ${downloadResult.error}`,
            replacements: 0,
        };
    }
    logger.debug(`Site ${siteId}: WordPress core ready, starting search-replace...`);
    // wp-config.php carries DB credentials and the site's table prefix.
    createWpConfig(wpCacheDir, migrationDbConfig, siteId);
    const totalPatterns = patterns.length;
    // Patterns run sequentially: each replacement may feed the next one.
    for (let i = 0; i < patterns.length; i++) {
        const pattern = patterns[i];
        const patternNum = i + 1;
        const patternStart = Date.now();
        logger.debug(`Site ${siteId}: Pattern ${patternNum}/${totalPatterns}: "${pattern.search}" → "${pattern.replace}"`);
        const result = await new Promise((resolve) => {
            // --network host so the container can reach the DB at the host
            // address written into wp-config.php.
            // NOTE(review): --network host is a no-op for published ports on
            // Docker Desktop (macOS/Windows); presumably host.docker.internal
            // from createWpConfig covers that case — verify.
            const args = [
                'run',
                '--rm',
                '--network', 'host',
                '-v', `${wpCacheDir}:/var/www/html`,
                'wordpress:cli',
                'wp', 'search-replace',
                pattern.search,
                pattern.replace,
                '--all-tables',
                `--skip-tables=${skipTablesArg}`,
                '--precise',            // PHP-unserialize-aware replacement
                '--skip-columns=guid',  // WordPress GUIDs must never change
                '--skip-plugins',
                '--skip-themes',
                '--path=/var/www/html',
            ];
            if (verbose) {
                args.push('--verbose');
            }
            const proc = spawn('docker', args, { stdio: ['pipe', 'pipe', 'pipe'] });
            let stdout = '';
            let stderr = '';
            proc.stdout.on('data', (data) => {
                stdout += data.toString();
                if (verbose) {
                    // filterVerboseOutput is defined elsewhere in this module
                    // (not visible in this chunk); it gates which WP-CLI lines
                    // reach the debug log.
                    const lines = data.toString().split('\n');
                    for (const line of lines) {
                        if (filterVerboseOutput(line)) {
                            logger.debug(line);
                        }
                    }
                }
            });
            proc.stderr.on('data', (data) => {
                stderr += data.toString();
                if (verbose) {
                    const lines = data.toString().split('\n');
                    for (const line of lines) {
                        if (filterVerboseOutput(line)) {
                            logger.debug(line);
                        }
                    }
                }
            });
            proc.on('close', (code) => {
                if (code === 0) {
                    resolve({ success: true, output: stdout });
                }
                else {
                    resolve({ success: false, output: stdout, error: stderr || stdout });
                }
            });
            proc.on('error', (err) => {
                // spawn failure (e.g. docker missing) — surface as pattern failure
                resolve({ success: false, output: '', error: err.message });
            });
        });
        if (!result.success) {
            // Abort on first failure; replacements so far are reported.
            return {
                success: false,
                error: `Search-replace failed for pattern "${pattern.search}" → "${pattern.replace}": ${result.error}`,
                replacements: totalReplacements,
            };
        }
        // Parse WP-CLI's summary line, e.g. "Success: Made 42 replacements."
        const match = result.output.match(/Made (\d+) replacements/);
        const patternReplacements = match ? parseInt(match[1], 10) : 0;
        totalReplacements += patternReplacements;
        const patternDuration = Math.round((Date.now() - patternStart) / 1000);
        logger.debug(`Site ${siteId}: Pattern ${patternNum}/${totalPatterns} complete: ${patternReplacements} replacements in ${patternDuration}s`);
    }
    return { success: true, replacements: totalReplacements };
}
226
/**
 * Run the full migration workflow (Steps 3-9) for a single site.
 *
 * Preconditions: `workDir` contains `source-export-<siteId>.sql` (Steps 1-2,
 * produced elsewhere). Each step appends a record to `steps`; the first
 * failing step returns immediately with `success: false`. Steps 6 (backup)
 * and 8 (S3 sync) can be skipped via options; step 8 failure is recorded but
 * does NOT abort the migration.
 *
 * @param {object} options - see destructuring below for the expected keys
 * @returns {Promise<{success: boolean, steps: object[], error?: string,
 *          archiveLocation?: string, tablesProcessed?: number,
 *          replacementsApplied?: number}>}
 */
export async function runMigrationEngine(options) {
    const steps = [];
    const { siteId, source, target, config, workDir, homepage, customDomain, skipBackup, skipS3, keepFiles, verbose, siteName, } = options;
    // Fixed file layout inside workDir; step 9 archives the .gz forms.
    const sourceExportFile = join(workDir, `source-export-${siteId}.sql`);
    const transformedExportFile = join(workDir, `transformed-export-${siteId}.sql`);
    const targetBackupFile = join(workDir, `target-backup-${siteId}.sql`);
    if (!existsSync(sourceExportFile)) {
        return {
            success: false,
            error: `Source export file not found: ${sourceExportFile}`,
            steps,
        };
    }
    // ---- Step 3: import source dump into the scratch migration database ----
    const step3Start = Date.now();
    logger.debug(`Site ${siteId}: Step 3 - Importing to migration database...`);
    logger.debug(`Site ${siteId}: Cleaning up existing tables in migration DB...`);
    const cleanupResult = await cleanupSiteTables(config.migration, siteId);
    if (!cleanupResult.success) {
        steps.push({
            step: 3,
            name: 'Import to migration database',
            success: false,
            error: `Failed to clean existing tables: ${cleanupResult.error}`,
            duration: Date.now() - step3Start,
        });
        return { success: false, error: steps[0].error, steps };
    }
    logger.debug(`Site ${siteId}: Importing source export file...`);
    const importResult = await mysqlImport(config.migration, sourceExportFile);
    if (!importResult.success) {
        steps.push({
            step: 3,
            name: 'Import to migration database',
            success: false,
            error: `Import failed: ${importResult.error}`,
            duration: Date.now() - step3Start,
        });
        return { success: false, error: steps[0].error, steps };
    }
    const tableCount = await countSiteTables(config.migration, siteId);
    const step3Duration = Math.round((Date.now() - step3Start) / 1000);
    logger.debug(`Site ${siteId}: Step 3 complete - Imported ${tableCount.count} tables in ${step3Duration}s`);
    steps.push({
        step: 3,
        name: 'Import to migration database',
        success: true,
        message: `Imported ${tableCount.count} tables to migration database`,
        duration: Date.now() - step3Start,
    });
    // ---- Step 4: rewrite URLs in the migration DB via WP-CLI ----
    const step4Start = Date.now();
    logger.debug(`Site ${siteId}: Step 4 - Running WP-CLI search-replace...`);
    const patterns = getUrlReplacementPatterns(source, target);
    if (customDomain) {
        // Optional extra pattern for a site served from a vanity domain.
        const customPattern = parseCustomDomain(customDomain);
        if (customPattern) {
            patterns.push(customPattern);
        }
    }
    logger.debug(`Site ${siteId}: Applying ${patterns.length} URL replacement patterns...`);
    const searchReplaceResult = await runWpCliSearchReplace(config.migration, patterns, homepage, siteId, workDir, verbose);
    if (!searchReplaceResult.success) {
        steps.push({
            step: 4,
            name: 'WP-CLI search-replace',
            success: false,
            error: searchReplaceResult.error,
            duration: Date.now() - step4Start,
        });
        return { success: false, error: steps[steps.length - 1].error, steps };
    }
    const step4Duration = Math.round((Date.now() - step4Start) / 1000);
    logger.debug(`Site ${siteId}: Step 4 complete - ${searchReplaceResult.replacements} total replacements in ${step4Duration}s`);
    steps.push({
        step: 4,
        name: 'WP-CLI search-replace',
        success: true,
        message: `Applied ${patterns.length} replacement patterns (${searchReplaceResult.replacements} total replacements)`,
        duration: Date.now() - step4Start,
    });
    // ---- Step 5: dump the transformed tables back out of the migration DB ----
    const step5Start = Date.now();
    logger.debug(`Site ${siteId}: Step 5 - Exporting transformed tables...`);
    const transformedTables = await discoverSiteTables(config.migration, siteId);
    if (!transformedTables.success) {
        steps.push({
            step: 5,
            name: 'Export transformed tables',
            success: false,
            error: `Failed to discover tables: ${transformedTables.error}`,
            duration: Date.now() - step5Start,
        });
        return { success: false, error: steps[steps.length - 1].error, steps };
    }
    logger.debug(`Site ${siteId}: Exporting ${transformedTables.tables.length} tables...`);
    const exportTransformedResult = await mysqldump(config.migration, transformedTables.tables, transformedExportFile);
    if (!exportTransformedResult.success) {
        steps.push({
            step: 5,
            name: 'Export transformed tables',
            success: false,
            error: `Export failed: ${exportTransformedResult.error}`,
            duration: Date.now() - step5Start,
        });
        return { success: false, error: steps[steps.length - 1].error, steps };
    }
    const step5Duration = Math.round((Date.now() - step5Start) / 1000);
    logger.debug(`Site ${siteId}: Step 5 complete - Exported ${transformedTables.tables.length} tables in ${step5Duration}s`);
    steps.push({
        step: 5,
        name: 'Export transformed tables',
        success: true,
        message: `Exported ${transformedTables.tables.length} transformed tables`,
        duration: Date.now() - step5Start,
    });
    // ---- Step 6: safety backup of the target's current tables (optional) ----
    const step6Start = Date.now();
    logger.debug(`Site ${siteId}: Step 6 - Backing up target tables...`);
    if (skipBackup) {
        logger.debug(`Site ${siteId}: Step 6 skipped (--skip-backup)`);
        steps.push({
            step: 6,
            name: 'Backup target tables',
            success: true,
            message: 'Skipped (--skip-backup)',
            duration: 0,
        });
    }
    else {
        const targetDbConfig = target === 'local' ? config.environments.local : config.environments[target];
        const targetTables = await discoverSiteTables(targetDbConfig, siteId);
        if (targetTables.success && targetTables.tables.length > 0) {
            logger.debug(`Site ${siteId}: Backing up ${targetTables.tables.length} tables from ${target}...`);
            const backupResult = await mysqldump(targetDbConfig, targetTables.tables, targetBackupFile);
            if (!backupResult.success) {
                steps.push({
                    step: 6,
                    name: 'Backup target tables',
                    success: false,
                    error: `Backup failed: ${backupResult.error}`,
                    duration: Date.now() - step6Start,
                });
                return { success: false, error: steps[steps.length - 1].error, steps };
            }
            const step6Duration = Math.round((Date.now() - step6Start) / 1000);
            logger.debug(`Site ${siteId}: Step 6 complete - Backed up ${targetTables.tables.length} tables in ${step6Duration}s`);
            steps.push({
                step: 6,
                name: 'Backup target tables',
                success: true,
                message: `Backed up ${targetTables.tables.length} tables`,
                duration: Date.now() - step6Start,
            });
        }
        else {
            // First migration to this target: nothing to back up.
            logger.debug(`Site ${siteId}: Step 6 complete - No existing tables to backup`);
            steps.push({
                step: 6,
                name: 'Backup target tables',
                success: true,
                message: 'No existing tables to backup',
                duration: Date.now() - step6Start,
            });
        }
    }
    // ---- Step 7: replace the target's tables with the transformed dump ----
    const step7Start = Date.now();
    logger.debug(`Site ${siteId}: Step 7 - Importing to ${target}...`);
    const targetDbConfig = target === 'local' ? config.environments.local : config.environments[target];
    logger.debug(`Site ${siteId}: Cleaning up existing tables in ${target}...`);
    const targetCleanup = await cleanupSiteTables(targetDbConfig, siteId);
    if (!targetCleanup.success) {
        // Non-fatal: the dump's DROP/CREATE statements may still succeed.
        logger.debug(`Site ${siteId}: Warning - Failed to clean target tables: ${targetCleanup.error}`);
    }
    logger.debug(`Site ${siteId}: Importing transformed data to ${target}...`);
    const importToTargetResult = await mysqlImport(targetDbConfig, transformedExportFile);
    if (!importToTargetResult.success) {
        steps.push({
            step: 7,
            name: 'Import to target',
            success: false,
            error: `Import to target failed: ${importToTargetResult.error}`,
            duration: Date.now() - step7Start,
        });
        return { success: false, error: steps[steps.length - 1].error, steps };
    }
    const targetTableCount = await countSiteTables(targetDbConfig, siteId);
    const step7Duration = Math.round((Date.now() - step7Start) / 1000);
    logger.debug(`Site ${siteId}: Step 7 complete - Imported ${targetTableCount.count} tables in ${step7Duration}s`);
    steps.push({
        step: 7,
        name: 'Import to target',
        success: true,
        message: `Imported ${targetTableCount.count} tables to ${target}`,
        duration: Date.now() - step7Start,
    });
    // ---- Step 8: sync uploaded media between environment S3 buckets ----
    // A failure here is recorded in `steps` but does not abort the migration.
    const step8Start = Date.now();
    logger.debug(`Site ${siteId}: Step 8 - Syncing S3 files...`);
    if (skipS3) {
        logger.debug(`Site ${siteId}: Step 8 skipped (--skip-s3)`);
        steps.push({
            step: 8,
            name: 'S3 file sync',
            success: true,
            message: 'Skipped (--skip-s3)',
            duration: 0,
        });
    }
    else if (config.s3) {
        // Bucket naming is hard-coded to this organization's convention;
        // 'local' targets read from the 'dev' bucket.
        const sourceBucket = `wfu-cer-wordpress-${source}-us-east-1`;
        const targetBucket = `wfu-cer-wordpress-${target === 'local' ? 'dev' : target}-us-east-1`;
        const sourcePath = `s3://${sourceBucket}/sites/${siteId}/`;
        const targetPath = `s3://${targetBucket}/sites/${siteId}/`;
        logger.debug(`Site ${siteId}: Syncing files from ${sourcePath} to ${targetPath}...`);
        const s3Result = await s3Sync(config.s3, sourcePath, targetPath);
        if (!s3Result.success) {
            steps.push({
                step: 8,
                name: 'S3 file sync',
                success: false,
                error: `S3 sync failed: ${s3Result.error}`,
                duration: Date.now() - step8Start,
            });
        }
        else {
            const fileCount = s3Result.filesTransferred ?? 0;
            const fileMsg = fileCount === 0 ? 'No files to sync' : `${fileCount} files synced`;
            const step8Duration = Math.round((Date.now() - step8Start) / 1000);
            logger.debug(`Site ${siteId}: Step 8 complete - ${fileMsg} in ${step8Duration}s`);
            steps.push({
                step: 8,
                name: 'S3 file sync',
                success: true,
                message: `${fileMsg} from ${source} to ${target}`,
                duration: Date.now() - step8Start,
            });
        }
    }
    else {
        logger.debug(`Site ${siteId}: Step 8 skipped (S3 not configured)`);
        steps.push({
            step: 8,
            name: 'S3 file sync',
            success: true,
            message: 'Skipped (S3 not configured)',
            duration: 0,
        });
    }
    // ---- Step 9: compress + archive artifacts to S3, then clean up ----
    const step9Start = Date.now();
    logger.debug(`Site ${siteId}: Step 9 - Archive and cleanup...`);
    let archiveLocation;
    let archiveFilesUploaded = 0;
    if (!skipS3 && config.s3?.bucket) {
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        const archivePrefix = siteName || 'site';
        const archivePath = `${archivePrefix}-${siteId}-${source}-to-${target}-${timestamp}`;
        try {
            // compressFile appends .gz to each artifact (see uploadFiles below).
            await compressFile(sourceExportFile);
            await compressFile(transformedExportFile);
            if (existsSync(targetBackupFile)) {
                await compressFile(targetBackupFile);
            }
            const metadata = {
                siteId,
                source,
                target,
                timestamp: new Date().toISOString(),
                tablesCount: tableCount.count,
                replacementsApplied: searchReplaceResult.replacements,
                files: [
                    `source-export-${siteId}.sql.gz`,
                    `transformed-export-${siteId}.sql.gz`,
                    existsSync(targetBackupFile + '.gz') ? `target-backup-${siteId}.sql.gz` : null,
                ].filter(Boolean),
            };
            const metadataPath = join(workDir, 'metadata.json');
            writeFileSync(metadataPath, JSON.stringify(metadata, null, 2));
            const uploadFiles = [
                { local: sourceExportFile + '.gz', key: `${archivePath}/source-export-${siteId}.sql.gz` },
                { local: transformedExportFile + '.gz', key: `${archivePath}/transformed-export-${siteId}.sql.gz` },
                { local: metadataPath, key: `${archivePath}/metadata.json` },
            ];
            if (existsSync(targetBackupFile + '.gz')) {
                uploadFiles.push({ local: targetBackupFile + '.gz', key: `${archivePath}/target-backup-${siteId}.sql.gz` });
            }
            // Best-effort uploads: failures are logged, not fatal.
            for (const file of uploadFiles) {
                if (existsSync(file.local)) {
                    const uploadResult = await s3Upload(config.s3, file.local, file.key);
                    if (uploadResult.success) {
                        archiveFilesUploaded++;
                        logger.debug(`Uploaded: ${file.key}`);
                    }
                    else {
                        logger.debug(`Upload failed: ${file.key} - ${uploadResult.error}`);
                    }
                }
            }
            // NOTE(review): location assumes s3Upload prepends config.s3.prefix
            // to the key — verify against s3.js.
            archiveLocation = `s3://${config.s3.bucket}/${config.s3.prefix}/${archivePath}/`;
            logger.debug(`Archive complete: ${archiveFilesUploaded}/${uploadFiles.length} files uploaded to ${archiveLocation}`);
        }
        catch (err) {
            // Archiving is best-effort; the migration itself already succeeded.
            logger.debug(`Archive upload failed: ${err}`);
        }
    }
    logger.debug(`Site ${siteId}: Cleaning up migration database tables...`);
    await cleanupSiteTables(config.migration, siteId);
    if (!keepFiles) {
        logger.debug(`Site ${siteId}: Removing temporary work directory...`);
        try {
            await rm(workDir, { recursive: true, force: true });
        }
        catch (err) {
            logger.debug(`Site ${siteId}: Cleanup warning: ${err}`);
        }
    }
    const step9Duration = Math.round((Date.now() - step9Start) / 1000);
    const archiveMsg = archiveLocation
        ? `${archiveFilesUploaded} files archived to ${archiveLocation}`
        : 'Cleanup complete';
    logger.debug(`Site ${siteId}: Step 9 complete - ${archiveMsg} in ${step9Duration}s`);
    steps.push({
        step: 9,
        name: 'Archive and cleanup',
        success: true,
        message: archiveMsg,
        duration: Date.now() - step9Start,
    });
    return {
        success: true,
        steps,
        archiveLocation,
        tablesProcessed: tableCount.count,
        replacementsApplied: searchReplaceResult.replacements,
    };
}
560
/**
 * Migrate network tables from source to target environment.
 * Network tables are shared across all sites in a WordPress multisite:
 * - wp_blogmeta, wp_blogs, wp_registration_log, wp_signups
 * - wp_site, wp_sitemeta, wp_usermeta, wp_users
 *
 * Flow: discover → export from source → stage in the migration DB →
 * search-replace URLs → export transformed → (optional backup) → drop and
 * re-import on the target → verify → clean the migration DB.
 *
 * @param {object} options - { source, target, config, workDir, skipBackup, verbose }
 * @returns {Promise<{success: boolean, tablesProcessed: number,
 *          replacementsApplied?: number, backupPath?: string, error?: string}>}
 */
export async function migrateNetworkTables(options) {
    const { source, target, config, workDir, skipBackup, verbose } = options;
    if (!existsSync(workDir)) {
        mkdirSync(workDir, { recursive: true });
    }
    const sourceDbConfig = config.environments[source];
    const targetDbConfig = target === 'local' ? config.environments.local : config.environments[target];
    logger.debug(`Discovering network tables from ${source}...`);
    const discoveryResult = await discoverNetworkTables(sourceDbConfig);
    if (!discoveryResult.success) {
        return { success: false, error: `Table discovery failed: ${discoveryResult.error}`, tablesProcessed: 0 };
    }
    const networkTableNames = discoveryResult.tables;
    logger.debug(`Found ${networkTableNames.length} network tables: ${networkTableNames.slice(0, 5).join(', ')}${networkTableNames.length > 5 ? '...' : ''}`);
    logger.debug(`Exporting network tables from ${source}...`);
    const exportFile = join(workDir, `network-tables-${source}.sql`);
    const exportResult = await mysqldump(sourceDbConfig, networkTableNames, exportFile);
    if (!exportResult.success) {
        return { success: false, error: `Export failed: ${exportResult.error}`, tablesProcessed: 0 };
    }
    // Drop ALL leftover wp_* tables so stale site tables from earlier runs
    // cannot be caught by search-replace. Failure is non-fatal (warning only).
    logger.debug('Cleaning up migration database...');
    const cleanupResult = await cleanupAllMigrationDbTables(config.migration);
    if (!cleanupResult.success) {
        logger.debug(`Warning: cleanup failed: ${cleanupResult.error}`);
    }
    else if (cleanupResult.tablesDropped && cleanupResult.tablesDropped > 0) {
        logger.debug(`Dropped ${cleanupResult.tablesDropped} leftover tables from migration database`);
    }
    logger.debug('Importing network tables to migration database...');
    const importToMigrationResult = await mysqlImport(config.migration, exportFile);
    if (!importToMigrationResult.success) {
        return { success: false, error: `Import to migration DB failed: ${importToMigrationResult.error}`, tablesProcessed: 0 };
    }
    logger.debug('Running search-replace on network tables...');
    const patterns = getUrlReplacementPatterns(source, target);
    // runNetworkTableSearchReplace is defined elsewhere in this module (not
    // visible in this chunk).
    const searchReplaceResult = await runNetworkTableSearchReplace(config.migration, patterns, networkTableNames, workDir, verbose);
    if (!searchReplaceResult.success) {
        return { success: false, error: `Search-replace failed: ${searchReplaceResult.error}`, tablesProcessed: 0 };
    }
    const transformedExportFile = join(workDir, `network-tables-transformed.sql`);
    logger.debug('Exporting transformed network tables...');
    const transformedExportResult = await mysqldump(config.migration, networkTableNames, transformedExportFile);
    if (!transformedExportResult.success) {
        return { success: false, error: `Export transformed failed: ${transformedExportResult.error}`, tablesProcessed: 0 };
    }
    // Optional safety backup of the target's current network tables.
    // A failed/empty backup is only logged — it does not block the import.
    let backupPath;
    if (!skipBackup) {
        logger.debug('Backing up target network tables...');
        const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
        backupPath = join(workDir, `network-tables-backup-${target}-${timestamp}.sql`);
        const backupResult = await mysqldump(targetDbConfig, networkTableNames, backupPath);
        if (backupResult.success && existsSync(backupPath)) {
            const stats = statSync(backupPath);
            if (stats.size === 0) {
                logger.debug('Warning: Backup file is empty');
            }
            else {
                logger.debug(`Backup created: ${backupPath} (${stats.size} bytes)`);
            }
        }
    }
    logger.debug(`Importing network tables to ${target}...`);
    const targetCleanup = await cleanupNetworkTables(targetDbConfig, networkTableNames);
    if (!targetCleanup.success) {
        // Non-fatal: the dump's own DROP/CREATE may still succeed.
        logger.debug(`Warning: target cleanup failed: ${targetCleanup.error}`);
    }
    const importToTargetResult = await mysqlImport(targetDbConfig, transformedExportFile);
    if (!importToTargetResult.success) {
        return { success: false, error: `Import to target failed: ${importToTargetResult.error}`, tablesProcessed: 0 };
    }
    // verifyNetworkTablesImport is defined elsewhere in this module.
    const verifyResult = await verifyNetworkTablesImport(targetDbConfig, networkTableNames);
    if (!verifyResult.success) {
        return { success: false, error: `Verification failed: ${verifyResult.error}`, tablesProcessed: networkTableNames.length };
    }
    // Leave the migration DB clean for the next run (best effort).
    await cleanupNetworkTables(config.migration, networkTableNames);
    return {
        success: true,
        tablesProcessed: networkTableNames.length,
        replacementsApplied: searchReplaceResult.replacements,
        backupPath,
    };
}
648
/**
 * Drop the given tables from a database (DROP TABLE IF EXISTS ...), best-effort.
 *
 * @param {{host: string, port: number|string, user: string, password: string, database: string}} dbConfig
 *   Connection settings passed to the `mysql` CLI.
 * @param {string[]} tables - Table names to drop. Names are interpolated
 *   directly into SQL, so callers must only pass trusted, discovered names.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function cleanupNetworkTables(dbConfig, tables) {
    return new Promise((resolve) => {
        const dropStatements = tables.map(t => `DROP TABLE IF EXISTS ${t}`).join('; ');
        const args = [
            '-h', dbConfig.host,
            '-P', String(dbConfig.port),
            '-u', dbConfig.user,
            `-p${dbConfig.password}`,
            dbConfig.database,
            '-e', dropStatements,
        ];
        const proc = spawn('mysql', args, { stdio: ['pipe', 'pipe', 'pipe'] });
        let stderr = '';
        proc.stderr.on('data', (data) => { stderr += data.toString(); });
        proc.on('close', (code) => {
            // Non-zero exit with empty stderr is still treated as success:
            // cleanup is best-effort and callers only log failures as warnings.
            if (code !== 0 && stderr) {
                resolve({ success: false, error: stderr.trim() });
            }
            else {
                resolve({ success: true });
            }
        });
        // BUG FIX: previously resolved { success: true } when the process could
        // not be spawned at all (e.g. the `mysql` binary is missing), silently
        // hiding the failure. Report it like every other spawn wrapper here.
        proc.on('error', (err) => {
            resolve({ success: false, error: err.message });
        });
    });
}
675
/**
 * Clean up ALL tables in the migration database.
 * This ensures no leftover tables from previous migrations interfere with search-replace.
 *
 * @param {{host: string, port: number|string, user: string, password: string, database: string}} dbConfig
 * @returns {Promise<{success: boolean, tablesDropped?: number, error?: string}>}
 */
async function cleanupAllMigrationDbTables(dbConfig) {
    // List every wp_-prefixed table in the schema (`\\_` escapes the SQL `_` wildcard).
    const query = `SELECT table_name FROM information_schema.tables WHERE table_schema = '${dbConfig.database}' AND table_name LIKE 'wp\\_%'`;
    return new Promise((resolve) => {
        const child = spawn('mysql', [
            '-h', dbConfig.host,
            '-P', String(dbConfig.port),
            '-u', dbConfig.user,
            `-p${dbConfig.password}`,
            '-N',
            '-e', query,
        ], { stdio: ['pipe', 'pipe', 'pipe'] });
        let out = '';
        let errOut = '';
        child.stdout.on('data', (chunk) => { out += chunk.toString(); });
        child.stderr.on('data', (chunk) => { errOut += chunk.toString(); });
        child.on('error', (err) => resolve({ success: false, error: err.message }));
        child.on('close', async (code) => {
            if (code !== 0) {
                resolve({ success: false, error: errOut.trim() || 'Failed to list tables' });
                return;
            }
            // -N output: one bare table name per line.
            const found = out.trim().split('\n').filter(Boolean);
            if (found.length === 0) {
                resolve({ success: true, tablesDropped: 0 });
                return;
            }
            const dropped = await cleanupNetworkTables(dbConfig, found);
            resolve(dropped.success
                ? { success: true, tablesDropped: found.length }
                : { success: false, error: dropped.error });
        });
    });
}
718
/**
 * Create a minimal wp_options table for WP-CLI to work.
 * WordPress requires wp_options to initialize, even for search-replace.
 *
 * Runs the mysql client inside a throwaway `mysql:8.0` container on the host
 * network so no local mysql binary is required.
 *
 * @param {{host: string, port: number|string, user: string, password: string, database: string}} dbConfig
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function ensureWpOptionsTable(dbConfig) {
    // Schema mirrors core's wp_options closely enough for WP-CLI to bootstrap.
    const createTableSql = `
CREATE TABLE IF NOT EXISTS wp_options (
option_id bigint(20) unsigned NOT NULL AUTO_INCREMENT,
option_name varchar(191) NOT NULL DEFAULT '',
option_value longtext NOT NULL,
autoload varchar(20) NOT NULL DEFAULT 'yes',
PRIMARY KEY (option_id),
UNIQUE KEY option_name (option_name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci;
`;
    const insertDefaultsSql = `
INSERT IGNORE INTO wp_options (option_name, option_value, autoload) VALUES
('siteurl', 'http://localhost', 'yes'),
('home', 'http://localhost', 'yes'),
('blogname', 'Migration Temp', 'yes'),
('admin_email', 'admin@localhost', 'yes');
`;
    return new Promise((resolve) => {
        const dockerArgs = [
            'run', '--rm', '--network', 'host',
            'mysql:8.0', 'mysql',
            '-h', dbConfig.host,
            '-P', String(dbConfig.port),
            '-u', dbConfig.user,
            `-p${dbConfig.password}`,
            dbConfig.database,
            '-e', createTableSql + insertDefaultsSql,
        ];
        const docker = spawn('docker', dockerArgs, { stdio: ['pipe', 'pipe', 'pipe'] });
        let errText = '';
        docker.stderr.on('data', (chunk) => { errText += chunk.toString(); });
        docker.on('error', (err) => resolve({ success: false, error: err.message }));
        docker.on('close', (code) => {
            resolve(code === 0
                ? { success: true }
                : { success: false, error: errText || 'Failed to create wp_options' });
        });
    });
}
767
/**
 * Run WP-CLI search-replace (via docker) against the migration database for
 * every URL pattern.
 *
 * Downloads WordPress core into a cache dir under workDir, writes a
 * wp-config.php pointing at the migration DB, and best-effort ensures a
 * minimal wp_options table exists so WP-CLI can bootstrap.
 *
 * @param {object} dbConfig - Migration database connection settings.
 * @param {{search: string, replace: string}[]} patterns - Replacement pairs, applied in order.
 * @param {string[]} tables - Table names (used only for progress logging; the
 *   actual run uses --all-tables).
 * @param {string} workDir - Working directory for the WP core cache.
 * @param {boolean} verbose - Forward --verbose to WP-CLI and echo output.
 * @returns {Promise<{success: boolean, replacements?: number, error?: string}>}
 */
async function runNetworkTableSearchReplace(dbConfig, patterns, tables, workDir, verbose) {
    const wpCacheDir = join(workDir, 'wp-cache');
    if (!existsSync(wpCacheDir)) {
        mkdirSync(wpCacheDir, { recursive: true });
    }
    const downloadResult = await downloadWordPressCore(wpCacheDir, verbose);
    if (!downloadResult.success) {
        return {
            success: false,
            error: `Failed to download WordPress core: ${downloadResult.error}`,
        };
    }
    createWpConfig(wpCacheDir, dbConfig, 1);
    // wp_options is only needed for WP-CLI bootstrap; a failure here is not fatal.
    const optionsResult = await ensureWpOptionsTable(dbConfig);
    if (!optionsResult.success) {
        logger.debug(`Warning: Could not create wp_options: ${optionsResult.error}`);
    }
    const totalPatterns = patterns.length;
    let totalReplacements = 0;
    for (const [patternIdx, pattern] of patterns.entries()) {
        const patternNum = patternIdx + 1;
        const patternStart = Date.now();
        logger.debug(`Pattern ${patternNum}/${totalPatterns}: "${pattern.search}" → "${pattern.replace}" (${tables.length} tables)`);
        const dockerArgs = [
            'run', '--rm',
            '--network', 'host',
            '-v', `${wpCacheDir}:/var/www/html`,
            'wordpress:cli',
            'wp', 'search-replace',
            pattern.search,
            pattern.replace,
            '--all-tables',
            '--precise',
            '--skip-columns=guid',
            '--skip-plugins',
            '--skip-themes',
            '--path=/var/www/html',
        ];
        if (verbose) {
            dockerArgs.push('--verbose');
        }
        const result = await runDockerCommand(dockerArgs, verbose);
        if (!result.success) {
            return { success: false, error: `Pattern '${pattern.search}' failed: ${result.error}` };
        }
        // WP-CLI prints e.g. "Success: Made 42 replacements."; absence counts as 0.
        const match = result.output?.match(/Made (\d+) replacements/);
        const patternReplacements = match ? parseInt(match[1], 10) : 0;
        totalReplacements += patternReplacements;
        const patternDuration = Math.round((Date.now() - patternStart) / 1000);
        logger.debug(`Pattern ${patternNum}/${totalPatterns} complete: ${patternReplacements} replacements in ${patternDuration}s`);
    }
    return { success: true, replacements: totalReplacements };
}
821
/**
 * Verify an import of network tables: all tables must exist, and the critical
 * multisite tables (wp_users / wp_blogs / wp_site) must not be empty.
 *
 * @param {object} dbConfig - Target database connection settings.
 * @param {string[]} tables - Tables that were imported.
 * @returns {Promise<{success: boolean, error?: string, warnings?: string[], rowCounts?: object}>}
 */
async function verifyNetworkTablesImport(dbConfig, tables) {
    // Only treat the critical tables that were actually part of this import.
    const critical = ['wp_users', 'wp_blogs', 'wp_site'].filter(name => tables.includes(name));
    const existence = await verifyTableExistence(dbConfig, tables);
    if (!existence.success) {
        return existence;
    }
    return verifyRowCounts(dbConfig, tables, critical);
}
830
/**
 * Check that every expected table exists in the target schema by counting
 * matches in information_schema.
 *
 * @param {object} dbConfig - Database connection settings.
 * @param {string[]} tables - Expected table names.
 * @returns {Promise<{success: boolean, error?: string}>}
 */
async function verifyTableExistence(dbConfig, tables) {
    return new Promise((resolve) => {
        const query = `SELECT COUNT(*) as cnt FROM information_schema.tables WHERE table_schema = '${dbConfig.database}' AND table_name IN (${tables.map(t => `'${t}'`).join(',')})`;
        const child = spawn('mysql', [
            '-h', dbConfig.host,
            '-P', String(dbConfig.port),
            '-u', dbConfig.user,
            `-p${dbConfig.password}`,
            '-N',
            '-e', query,
        ], { stdio: ['pipe', 'pipe', 'pipe'] });
        let out = '';
        let errOut = '';
        child.stdout.on('data', (chunk) => { out += chunk.toString(); });
        child.stderr.on('data', (chunk) => { errOut += chunk.toString(); });
        child.on('error', (err) => resolve({ success: false, error: err.message }));
        child.on('close', (code) => {
            if (code !== 0) {
                resolve({ success: false, error: errOut.trim() || 'Table count query failed' });
                return;
            }
            const count = parseInt(out.trim(), 10);
            resolve(count === tables.length
                ? { success: true }
                : { success: false, error: `Expected ${tables.length} tables, found ${count}` });
        });
    });
}
863
/**
 * Count rows in each imported table. Zero rows in a critical table is a hard
 * failure; zero rows elsewhere only produces a warning.
 *
 * @param {object} dbConfig - Database connection settings.
 * @param {string[]} tables - Tables to count.
 * @param {string[]} criticalTables - Tables that must be non-empty.
 * @returns {Promise<{success: boolean, error?: string, warnings?: string[], rowCounts?: object}>}
 */
async function verifyRowCounts(dbConfig, tables, criticalTables) {
    return new Promise((resolve) => {
        // One round trip: UNION ALL of per-table counts, emitted as "name<TAB>count".
        const queries = tables.map(t => `SELECT '${t}' as tbl, COUNT(*) as cnt FROM ${t}`).join(' UNION ALL ');
        const child = spawn('mysql', [
            '-h', dbConfig.host,
            '-P', String(dbConfig.port),
            '-u', dbConfig.user,
            `-p${dbConfig.password}`,
            dbConfig.database,
            '-N',
            '-e', queries,
        ], { stdio: ['pipe', 'pipe', 'pipe'] });
        let out = '';
        let errOut = '';
        child.stdout.on('data', (chunk) => { out += chunk.toString(); });
        child.stderr.on('data', (chunk) => { errOut += chunk.toString(); });
        child.on('error', (err) => resolve({ success: false, error: err.message }));
        child.on('close', (code) => {
            if (code !== 0) {
                resolve({ success: false, error: errOut.trim() || 'Row count query failed' });
                return;
            }
            const rowCounts = {};
            const warnings = [];
            for (const line of out.trim().split('\n').filter(Boolean)) {
                const [tableName, countText] = line.split('\t');
                const rowCount = parseInt(countText, 10);
                rowCounts[tableName] = rowCount;
                if (criticalTables.includes(tableName) && rowCount === 0) {
                    resolve({ success: false, error: `Critical table ${tableName} has zero rows after import` });
                    return;
                }
                if (rowCount === 0) {
                    warnings.push(`Table ${tableName} has zero rows`);
                }
            }
            if (warnings.length > 0) {
                logger.debug(`Verification warnings: ${warnings.join(', ')}`);
            }
            resolve({ success: true, warnings: warnings.length > 0 ? warnings : undefined, rowCounts });
        });
    });
}
910
/**
 * Filter verbose output to reduce log noise.
 * Only shows lines that indicate actual work (non-zero replacements).
 *
 * @param {string} line - A single line of WP-CLI / docker output.
 * @returns {boolean} true when the line should be logged.
 */
function filterVerboseOutput(line) {
    const trimmed = line.trim();
    if (!trimmed)
        return false;
    // Per-table "Checking: ..." progress lines carry no useful information.
    if (trimmed.startsWith('Checking:')) {
        return false;
    }
    // BUG FIX: the old substring check `includes('0 replacements')` also
    // matched "10 replacements", "20 replacements", etc., hiding lines that
    // report real work. Only suppress a literal zero count: a '0' that is not
    // preceded by another digit.
    if (/(?<!\d)0 replacements/.test(trimmed)) {
        return false;
    }
    if (trimmed.startsWith('Skipping an uninitialized class')) {
        return false;
    }
    return true;
}
929
/**
 * Run a docker command, capturing stdout and stderr.
 * In verbose mode, each captured line that passes filterVerboseOutput is
 * echoed to the debug logger as it arrives.
 *
 * @param {string[]} args - Arguments passed to the `docker` binary.
 * @param {boolean} verbose - Echo filtered output while running.
 * @returns {Promise<{success: boolean, output?: string, error?: string}>}
 */
async function runDockerCommand(args, verbose) {
    return new Promise((resolve) => {
        const child = spawn('docker', args, { stdio: ['pipe', 'pipe', 'pipe'] });
        let capturedOut = '';
        let capturedErr = '';
        // Shared verbose echo for both streams (original duplicated this inline).
        const echo = (chunk) => {
            if (!verbose)
                return;
            for (const line of chunk.toString().split('\n')) {
                if (filterVerboseOutput(line)) {
                    logger.debug(line);
                }
            }
        };
        child.stdout.on('data', (chunk) => {
            capturedOut += chunk.toString();
            echo(chunk);
        });
        child.stderr.on('data', (chunk) => {
            capturedErr += chunk.toString();
            echo(chunk);
        });
        child.on('close', (code) => {
            resolve(code === 0
                ? { success: true, output: capturedOut }
                : { success: false, error: capturedErr.trim() || capturedOut.trim() });
        });
        child.on('error', (err) => {
            resolve({ success: false, error: err.message });
        });
    });
}
969
/**
 * Process sites in batches.
 * Chunks sites into batches by batchSize and processes each batch.
 * In sequential mode (default): processes sites one at a time.
 * In parallel mode: processes sites concurrently within each batch.
 *
 * Each site is migrated with retry (exponential backoff via ./retry.js);
 * progress is reported through the optional callbacks in `options`.
 *
 * @param {Array} siteIds - Site IDs to migrate, in order.
 * @param {object} options - Migration settings (source, target, config,
 *   workDir, batchSize, parallel, concurrency, skipBackup, skipS3, keepFiles,
 *   verbose, timeout, maxRetries) plus optional callbacks: onSiteStart,
 *   onSiteComplete, onBatchStart, onBatchComplete, onRetry, onStateChange.
 * @returns {Promise<{success: boolean, totalSites: number, successCount: number,
 *   failureCount: number, siteResults: Array}>} success is true only when no
 *   site failed.
 */
export async function processSiteBatch(siteIds, options) {
    const { source, target, config, workDir, batchSize, parallel, concurrency, skipBackup, skipS3, keepFiles, verbose, timeout, maxRetries, onSiteStart, onSiteComplete, onBatchStart, onBatchComplete, onRetry, onStateChange, } = options;
    // Lazily import batching/retry helpers (keeps module load light).
    const { splitIntoBatches, runWithConcurrency, createSiteTasks } = await import('./parallel.js');
    const { withRetry, DEFAULT_INITIAL_DELAY_MS, DEFAULT_MAX_DELAY_MS, DEFAULT_BACKOFF_MULTIPLIER } = await import('./retry.js');
    const batches = splitIntoBatches(siteIds, batchSize);
    const siteResults = [];
    let successCount = 0;
    let failureCount = 0;
    // Single migration attempt for one site: discover tables, export from
    // source, then run the engine. Throws on any step failure so withRetry
    // can catch and re-attempt.
    const migrateSiteOnce = async (siteId) => {
        const siteWorkDir = join(workDir, `site-${siteId}`);
        if (!existsSync(siteWorkDir)) {
            mkdirSync(siteWorkDir, { recursive: true });
        }
        const sourceExportFile = join(siteWorkDir, `source-export-${siteId}.sql`);
        const sourceDbConfig = config.environments[source];
        const tables = await discoverSiteTables(sourceDbConfig, siteId);
        if (!tables.success) {
            throw new Error(`Table discovery failed: ${tables.error}`);
        }
        const exportResult = await mysqldump(sourceDbConfig, tables.tables, sourceExportFile);
        if (!exportResult.success) {
            throw new Error(`Export failed: ${exportResult.error}`);
        }
        const result = await runMigrationEngine({
            siteId,
            source,
            target,
            config,
            workDir: siteWorkDir,
            // Site 1 is treated as the network homepage.
            homepage: siteId === 1,
            skipBackup,
            skipS3,
            keepFiles,
            verbose,
            timeout,
        });
        if (!result.success) {
            throw new Error(result.error || 'Migration failed');
        }
        return result;
    };
    // Wraps migrateSiteOnce with retry/backoff and state-change notifications.
    // Never throws: failures are returned as a { success: false } result.
    const migrateSiteWithRetry = async (siteId) => {
        if (onSiteStart)
            onSiteStart(siteId);
        if (onStateChange)
            onStateChange(siteId, 'in_progress');
        const retryResult = await withRetry(() => migrateSiteOnce(siteId), {
            maxRetries,
            initialDelayMs: DEFAULT_INITIAL_DELAY_MS,
            maxDelayMs: DEFAULT_MAX_DELAY_MS,
            backoffMultiplier: DEFAULT_BACKOFF_MULTIPLIER,
            onRetry: (attempt, error, delay) => {
                if (onRetry) {
                    onRetry(siteId, attempt, maxRetries, delay, error.message);
                }
                logger.debug(`Site ${siteId}: Retry ${attempt}/${maxRetries} in ${delay}ms: ${error.message}`);
            },
        });
        if (retryResult.success && retryResult.value) {
            if (onStateChange)
                onStateChange(siteId, 'completed');
            return { result: retryResult.value, attempts: retryResult.attempts, errors: retryResult.errors };
        }
        // All attempts failed: surface the most recent error.
        const errorMsg = retryResult.errors[retryResult.errors.length - 1] || 'Unknown error';
        if (onStateChange)
            onStateChange(siteId, 'failed', errorMsg);
        return {
            result: { success: false, error: errorMsg, steps: [] },
            attempts: retryResult.attempts,
            errors: retryResult.errors,
        };
    };
    for (let batchIdx = 0; batchIdx < batches.length; batchIdx++) {
        const batch = batches[batchIdx];
        if (onBatchStart)
            onBatchStart(batchIdx, batch.items.length);
        let batchSuccesses = 0;
        let batchFailures = 0;
        if (parallel) {
            // Parallel mode: run this batch's sites concurrently (bounded by
            // `concurrency`), collecting results via the completion callback.
            const tasks = createSiteTasks(batch.items, async (siteId) => {
                const { result, attempts } = await migrateSiteWithRetry(siteId);
                return { ...result, attempts };
            });
            await runWithConcurrency(tasks, concurrency, (result) => {
                // result.success = task ran without throwing;
                // result.result?.success = the migration itself succeeded.
                const wasSuccess = Boolean(result.success && result.result?.success);
                const attempts = result.result?.attempts ?? 1;
                siteResults.push({
                    siteId: result.id,
                    success: wasSuccess,
                    error: result.success ? result.result?.error : result.error?.message,
                    attempts,
                });
                if (wasSuccess) {
                    successCount++;
                    batchSuccesses++;
                }
                else {
                    failureCount++;
                    batchFailures++;
                }
                if (onSiteComplete) {
                    onSiteComplete(result.id, wasSuccess, result.success ? result.result?.error : result.error?.message, attempts);
                }
            });
        }
        else {
            // Sequential mode: one site at a time, in batch order.
            for (const siteId of batch.items) {
                try {
                    const { result, attempts } = await migrateSiteWithRetry(siteId);
                    const wasSuccess = result?.success ?? false;
                    siteResults.push({ siteId, success: wasSuccess, error: result?.error, attempts });
                    if (wasSuccess) {
                        successCount++;
                        batchSuccesses++;
                    }
                    else {
                        failureCount++;
                        batchFailures++;
                    }
                    if (onSiteComplete)
                        onSiteComplete(siteId, wasSuccess, result?.error, attempts);
                }
                catch (err) {
                    // Defensive: migrateSiteWithRetry should not throw, but any
                    // unexpected error is recorded as a single-attempt failure.
                    const error = err instanceof Error ? err.message : String(err);
                    siteResults.push({ siteId, success: false, error, attempts: 1 });
                    failureCount++;
                    batchFailures++;
                    if (onStateChange)
                        onStateChange(siteId, 'failed', error);
                    if (onSiteComplete)
                        onSiteComplete(siteId, false, error, 1);
                }
            }
        }
        if (onBatchComplete)
            onBatchComplete(batchIdx, batchSuccesses, batchFailures);
    }
    return {
        success: failureCount === 0,
        totalSites: siteIds.length,
        successCount,
        failureCount,
        siteResults,
    };
}
1120
/**
 * Format a summary report after batch processing completes.
 * Returns structured lines that can be rendered with chalk or other formatters.
 *
 * @param {{result: object, startTime: number, migrationId?: string}} options
 * @returns {Array<{type: string, text: string}>} ordered display lines.
 */
export function formatBatchSummary(options) {
    const { result, startTime, migrationId } = options;
    const elapsedMs = Date.now() - startTime;
    const totalSeconds = Math.round(elapsedMs / 1000);
    const mins = Math.floor(totalSeconds / 60);
    const secs = totalSeconds % 60;
    const elapsedStr = mins > 0 ? `${mins}m ${secs}s` : `${secs}s`;
    const lines = [
        { type: 'header', text: 'Migration Summary' },
        { type: 'header', text: '=================' },
        { type: 'info', text: `Total sites: ${result.totalSites}` },
        { type: 'success', text: `Successful: ${result.successCount}` },
        { type: 'failure', text: `Failed: ${result.failureCount}` },
        { type: 'info', text: `Duration: ${elapsedStr}` },
    ];
    if (result.failureCount === 0) {
        // All sites succeeded: show the per-site average instead of failures.
        const avgMs = result.totalSites > 0 ? Math.round(elapsedMs / result.totalSites) : 0;
        lines.push({ type: 'header', text: '' });
        lines.push({ type: 'success', text: 'All sites migrated successfully!' });
        lines.push({ type: 'info', text: `Average time per site: ${Math.round(avgMs / 1000)}s` });
        return lines;
    }
    lines.push({ type: 'header', text: '' });
    lines.push({ type: 'header', text: 'Failed Sites:' });
    for (const site of result.siteResults.filter(r => !r.success)) {
        const attemptsStr = site.attempts > 1 ? ` (${site.attempts} attempts)` : '';
        const errorStr = site.error ? `: ${site.error}` : '';
        lines.push({ type: 'failure', text: ` Site ${site.siteId}${attemptsStr}${errorStr}` });
    }
    if (migrationId) {
        lines.push({ type: 'header', text: '' });
        lines.push({ type: 'warning', text: 'To resume this migration:' });
        lines.push({ type: 'resume', text: ` wfu-migrate env-migrate --resume ${migrationId}` });
        lines.push({ type: 'info', text: '' });
        lines.push({ type: 'info', text: 'To retry only failed sites:' });
        lines.push({ type: 'resume', text: ` wfu-migrate env-migrate --resume ${migrationId} --retry-failed` });
        lines.push({ type: 'info', text: '' });
        lines.push({ type: 'info', text: 'To skip failed sites and continue:' });
        lines.push({ type: 'resume', text: ` wfu-migrate env-migrate --resume ${migrationId} --skip-failed` });
    }
    return lines;
}
1168
/**
 * Get a plain text version of the batch summary (for logging).
 *
 * @param {{result: object, startTime: number, migrationId?: string}} options
 * @returns {string} newline-joined summary text.
 */
export function getBatchSummaryText(options) {
    const textParts = [];
    for (const line of formatBatchSummary(options)) {
        textParts.push(line.text);
    }
    return textParts.join('\n');
}
1175
+ //# sourceMappingURL=migration-engine.js.map