@njdamstra/appwrite-utils-cli 1.10.1 → 1.11.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,735 @@
1
+ import { Query } from "node-appwrite";
2
+ import yaml from "js-yaml";
3
+ import fs from "node:fs";
4
+ import path from "node:path";
5
+ import inquirer from "inquirer";
6
+ import chalk from "chalk";
7
+ import { MessageFormatter, tryAwaitWithRetry, } from "@njdamstra/appwrite-utils-helpers";
8
+ import { ProgressManager } from "../shared/progressManager.js";
9
+ import { MigrationPlanSchema, MigrationCheckpointSchema, suggestTargetType, generateBackupKey, } from "./migrateStringsTypes.js";
10
+ // ────────────────────────────────────────────────────────
11
+ // Phase 1: Analyze — queries Appwrite server for real state
12
+ // ────────────────────────────────────────────────────────
13
/**
 * Phase 1: Analyze. Scans every configured database on the live Appwrite
 * server, inventories all string attributes, and writes a human-editable
 * YAML migration plan (default: ./migrate-strings-plan.yaml in the CWD).
 *
 * Encrypted attributes are marked action "skip"; indexed attributes are
 * forced to targetType "varchar" since other types cannot keep the index.
 *
 * @param adapter  server adapter exposing listTables/getTable/listIndexes
 * @param config   project config; reads `databases`, `appwriteEndpoint`,
 *                 `appwriteProject`
 * @param {{outputPath?: string}} [options]  override for plan output path
 * @returns the plan object that was also written to disk as YAML
 */
export async function analyzeStringAttributes(adapter, config, options = {}) {
    const databases = config.databases || [];
    if (databases.length === 0) {
        MessageFormatter.warning("No databases configured. Nothing to analyze.", {
            prefix: "Analyze",
        });
        return emptyPlan(config);
    }
    const entries = [];
    for (const db of databases) {
        MessageFormatter.info(`Scanning database: ${db.name} (${db.$id})`, {
            prefix: "Analyze",
        });
        // Fetch all collections/tables from server; tolerate the several
        // response shapes different adapters may return.
        const tablesRes = await tryAwaitWithRetry(() => adapter.listTables({ databaseId: db.$id }));
        const tables = tablesRes?.tables || tablesRes?.collections || tablesRes?.data || [];
        for (const table of tables) {
            const tableId = table.$id || table.key || table.name;
            const tableName = table.name || tableId;
            // Fetch full schema from server
            const schemaRes = await tryAwaitWithRetry(() => adapter.getTable({ databaseId: db.$id, tableId }));
            const attributes = schemaRes?.data?.columns || schemaRes?.data?.attributes || [];
            // Fetch indexes from server
            const indexRes = await tryAwaitWithRetry(() => adapter.listIndexes({ databaseId: db.$id, tableId }));
            const indexes = indexRes?.data || [];
            for (const attr of attributes) {
                if (attr.type !== "string")
                    continue;
                // NOTE(review): `|| 50` treats a size of 0 as 50 — presumably
                // the server never reports 0, but confirm.
                const size = attr.size || 50;
                const isEncrypted = !!attr.encrypt;
                const isRequired = !!attr.required;
                const isArray = !!attr.array;
                const hasDefault = attr.xdefault !== undefined && attr.xdefault !== null;
                // Find indexes that reference this attribute
                const affectedIndexes = indexes
                    .filter((idx) => idx.attributes?.includes(attr.key))
                    .map((idx) => idx.key);
                const hasIndex = affectedIndexes.length > 0;
                const suggested = suggestTargetType(size, hasIndex);
                const entry = {
                    databaseId: db.$id,
                    databaseName: db.name,
                    collectionId: tableId,
                    collectionName: tableName,
                    attributeKey: attr.key,
                    currentType: "string",
                    currentSize: size,
                    isRequired,
                    isArray,
                    isEncrypted,
                    hasDefault,
                    defaultValue: hasDefault ? attr.xdefault : undefined,
                    suggestedType: suggested,
                    targetType: suggested,
                    targetSize: suggested === "varchar" ? size : undefined,
                    // Encrypted attributes are never migrated automatically.
                    action: isEncrypted ? "skip" : "migrate",
                    skipReason: isEncrypted ? "encrypted" : undefined,
                    indexesAffected: affectedIndexes,
                };
                // Indexed attrs must stay varchar
                if (hasIndex && suggested !== "varchar" && !isEncrypted) {
                    entry.targetType = "varchar";
                    entry.targetSize = size;
                }
                entries.push(entry);
            }
        }
    }
    // Aggregate counts for the plan summary section.
    const toMigrate = entries.filter((e) => e.action === "migrate").length;
    const toSkip = entries.filter((e) => e.action === "skip").length;
    const uniqueDbs = new Set(entries.map((e) => e.databaseId));
    const uniqueColls = new Set(entries.map((e) => `${e.databaseId}:${e.collectionId}`));
    const plan = {
        version: 1,
        generatedAt: new Date().toISOString(),
        appwriteEndpoint: config.appwriteEndpoint,
        appwriteProject: config.appwriteProject,
        summary: {
            totalStringAttributes: entries.length,
            toMigrate,
            toSkip,
            databaseCount: uniqueDbs.size,
            collectionCount: uniqueColls.size,
        },
        entries,
    };
    // Write YAML plan
    const outputPath = options.outputPath || path.join(process.cwd(), "migrate-strings-plan.yaml");
    const yamlContent = yaml.dump(plan, {
        lineWidth: 120,
        noRefs: true,
        sortKeys: false, // keep field order stable for human editing
    });
    fs.writeFileSync(outputPath, yamlContent, "utf8");
    // Print summary
    MessageFormatter.info(`Migration plan written to ${outputPath}`, {
        prefix: "Analyze",
    });
    printPlanSummary(plan);
    return plan;
}
114
/**
 * Builds a plan object with zeroed counters and no entries — returned when
 * there is nothing to analyze so callers always receive a valid plan shape.
 * @param config  reads `appwriteEndpoint` and `appwriteProject` only
 */
function emptyPlan(config) {
    const zeroSummary = {
        totalStringAttributes: 0,
        toMigrate: 0,
        toSkip: 0,
        databaseCount: 0,
        collectionCount: 0,
    };
    return {
        version: 1,
        generatedAt: new Date().toISOString(),
        appwriteEndpoint: config.appwriteEndpoint,
        appwriteProject: config.appwriteProject,
        summary: zeroSummary,
        entries: [],
    };
}
130
/**
 * Prints a human-readable summary of an analyze-phase plan: totals, the
 * distribution of target types among migrating entries, and skip reasons.
 * Output-only; does not modify the plan.
 */
function printPlanSummary(plan) {
    const { summary } = plan;
    console.log("");
    console.log(chalk.bold("String Attribute Migration Plan Summary"));
    console.log(chalk.gray("─".repeat(50)));
    console.log(` Total string attributes: ${chalk.yellow(summary.totalStringAttributes)}`);
    console.log(` To migrate: ${chalk.green(summary.toMigrate)}`);
    console.log(` To skip: ${chalk.red(summary.toSkip)}`);
    console.log(` Databases: ${summary.databaseCount}`);
    console.log(` Collections: ${summary.collectionCount}`);
    console.log("");
    // Generic tally: count entries matching `predicate`, keyed by `keyOf`.
    const tally = (predicate, keyOf) => {
        const counts = {};
        for (const item of plan.entries) {
            if (!predicate(item))
                continue;
            const bucket = keyOf(item);
            if (bucket === undefined)
                continue;
            counts[bucket] = (counts[bucket] || 0) + 1;
        }
        return counts;
    };
    const typeDistribution = tally((e) => e.action === "migrate", (e) => e.targetType);
    if (Object.keys(typeDistribution).length > 0) {
        console.log(chalk.bold(" Target type distribution:"));
        for (const [type, count] of Object.entries(typeDistribution)) {
            console.log(` ${type}: ${count}`);
        }
        console.log("");
    }
    const skipReasons = tally((e) => e.action === "skip", (e) => e.skipReason);
    if (Object.keys(skipReasons).length > 0) {
        console.log(chalk.bold(" Skip reasons:"));
        for (const [reason, count] of Object.entries(skipReasons)) {
            console.log(` ${reason}: ${count}`);
        }
        console.log("");
    }
    console.log(chalk.dim(" Edit the YAML plan file to change targetType or action before executing."));
    console.log("");
}
174
+ // ────────────────────────────────────────────────────────
175
+ // Phase 2: Execute — server connection required
176
+ // ────────────────────────────────────────────────────────
177
/**
 * Phase 2: Execute. Loads and validates a YAML plan, then interactively
 * migrates every entry with action "migrate", collection by collection.
 * Progress is persisted to a JSON checkpoint file so a failed or aborted
 * run can be resumed (already-completed attributes are skipped).
 *
 * Interactive: prompts per collection (proceed / skip / abort) and, after a
 * collection succeeds, offers a reminder to update local YAML configs.
 *
 * @param adapter  server adapter (attributes, indexes, rows)
 * @param options  { planPath, checkpointPath?, batchSize?, batchDelayMs?,
 *                   dryRun?, keepBackups? }
 * @returns {Promise<{succeeded: number, failed: number, skipped: number}>}
 * @throws if the plan file is missing or fails schema validation
 */
export async function executeMigrationPlan(adapter, options) {
    // Load and validate plan
    const planYaml = fs.readFileSync(options.planPath, "utf8");
    const planRaw = yaml.load(planYaml);
    const plan = MigrationPlanSchema.parse(planRaw);
    const migrateEntries = plan.entries.filter((e) => e.action === "migrate");
    if (migrateEntries.length === 0) {
        MessageFormatter.info("No attributes to migrate in plan.", {
            prefix: "Execute",
        });
        return { succeeded: 0, failed: 0, skipped: plan.entries.length };
    }
    // Load or create checkpoint (defaults to <plan>.checkpoint.json beside
    // the plan file).
    const checkpointPath = options.checkpointPath ||
        options.planPath.replace(/\.ya?ml$/, ".checkpoint.json");
    const checkpoint = loadOrCreateCheckpoint(checkpointPath, options.planPath);
    const batchSize = options.batchSize || 100;
    const batchDelayMs = options.batchDelayMs || 50;
    // Group by database/collection so prompts and YAML-update offers happen
    // once per collection rather than once per attribute.
    const groups = new Map();
    for (const entry of migrateEntries) {
        const key = `${entry.databaseId}:${entry.collectionId}`;
        if (!groups.has(key))
            groups.set(key, []);
        groups.get(key).push(entry);
    }
    let succeeded = 0;
    let failed = 0;
    // Entries the plan itself marked "skip" are counted up front.
    let skipped = plan.entries.filter((e) => e.action === "skip").length;
    MessageFormatter.info(`Executing migration: ${migrateEntries.length} attributes across ${groups.size} collections`, { prefix: "Execute" });
    if (options.dryRun) {
        MessageFormatter.info("DRY RUN — no changes will be made", {
            prefix: "Execute",
        });
        printDryRunSummary(plan);
        return { succeeded: 0, failed: 0, skipped: plan.entries.length };
    }
    for (const [groupKey, entries] of groups) {
        const first = entries[0];
        console.log("");
        console.log(chalk.bold(`Collection: ${first.collectionName} (${first.databaseName}/${first.collectionId})`));
        console.log(` Attributes to migrate: ${entries.map((e) => e.attributeKey).join(", ")}`);
        // Per-collection confirmation
        const { proceed } = await inquirer.prompt([
            {
                type: "list",
                name: "proceed",
                message: `Migrate ${entries.length} attribute(s) in ${first.collectionName}?`,
                choices: [
                    { name: "Yes, proceed", value: "yes" },
                    { name: "Skip this collection", value: "skip" },
                    { name: "Abort entire migration", value: "abort" },
                ],
            },
        ]);
        if (proceed === "abort") {
            MessageFormatter.info("Migration aborted by user.", {
                prefix: "Execute",
            });
            break;
        }
        if (proceed === "skip") {
            skipped += entries.length;
            continue;
        }
        // Migrate each attribute in this collection
        for (const entry of entries) {
            const cpEntry = getOrCreateCheckpointEntry(checkpoint, entry);
            // Resume support: skip attributes a previous run already finished.
            if (cpEntry.phase === "completed") {
                MessageFormatter.info(` ${entry.attributeKey}: already completed (checkpoint)`, { prefix: "Execute" });
                succeeded++;
                continue;
            }
            try {
                await migrateOneAttribute(adapter, entry, cpEntry, checkpoint, checkpointPath, {
                    batchSize,
                    batchDelayMs,
                    keepBackups: options.keepBackups ?? true, // default: keep backups
                });
                succeeded++;
                MessageFormatter.success(` ${entry.attributeKey}: migrated to ${entry.targetType}`, { prefix: "Execute" });
            }
            catch (err) {
                // A failure is recorded in the checkpoint so a rerun resumes
                // from the last completed phase; remaining attributes proceed.
                failed++;
                cpEntry.phase = "failed";
                cpEntry.error = err.message || String(err);
                saveCheckpoint(checkpoint, checkpointPath);
                MessageFormatter.error(` ${entry.attributeKey}: FAILED — ${cpEntry.error}`, undefined, { prefix: "Execute" });
            }
        }
        // After collection completes, offer to update local YAML
        const successInGroup = entries.filter((e) => {
            const cp = findCheckpointEntry(checkpoint, e);
            return cp?.phase === "completed";
        }).length;
        if (successInGroup > 0) {
            const { updateYaml } = await inquirer.prompt([
                {
                    type: "confirm",
                    name: "updateYaml",
                    message: `Update local YAML config for ${first.collectionName}? (change type: string → new types)`,
                    default: false,
                },
            ]);
            if (updateYaml) {
                // NOTE(review): this is a manual-step reminder only; no file
                // is modified automatically here.
                MessageFormatter.info("Local YAML update: use your editor to change 'type: string' to the new types in your collection YAML files.", { prefix: "Execute" });
            }
        }
    }
    // Final summary
    console.log("");
    console.log(chalk.bold("Migration Results"));
    console.log(chalk.gray("─".repeat(40)));
    console.log(` Succeeded: ${chalk.green(succeeded)}`);
    console.log(` Failed: ${chalk.red(failed)}`);
    console.log(` Skipped: ${chalk.yellow(skipped)}`);
    console.log("");
    if (failed > 0) {
        MessageFormatter.info(`Checkpoint saved at ${checkpointPath} — rerun to resume failed attributes.`, { prefix: "Execute" });
    }
    return { succeeded, failed, skipped };
}
299
/**
 * Migrates a single string attribute to its target type via a 9-phase,
 * checkpointed pipeline:
 *
 *   1. create backup attribute      (same type/size, always optional)
 *   2. copy data → backup
 *   3. verify backup copy
 *   4. delete affected indexes, then the original attribute
 *   5. create new attribute with the target type (optional at first)
 *   6. copy data back from backup
 *   7. verify final copy
 *   8. recreate indexes; delete backup unless opts.keepBackups
 *   9. restore `required: true` if the original was required; mark completed
 *
 * Each phase transition is persisted via `advance()`, so a rerun resumes at
 * the first incomplete phase (see phaseIndex/PHASE_ORDER). Phase 4 is the
 * destructive point — from there until phase 6 completes, the backup
 * attribute is the only copy of the data.
 *
 * @throws if backup/new attribute creation stalls, or verification fails
 */
async function migrateOneAttribute(adapter, entry, cpEntry, checkpoint, checkpointPath, opts) {
    const { databaseId, collectionId, attributeKey, targetType, targetSize } = entry;
    const backupKey = cpEntry.backupKey;
    // Persist a phase transition immediately so a crash can resume here.
    const advance = (phase) => {
        cpEntry.phase = phase;
        checkpoint.lastUpdatedAt = new Date().toISOString();
        saveCheckpoint(checkpoint, checkpointPath);
    };
    // Step 1: Create backup attribute
    if (phaseIndex(cpEntry.phase) < phaseIndex("backup_created")) {
        MessageFormatter.info(` Creating backup attribute ${backupKey}...`, {
            prefix: "Migrate",
        });
        await tryAwaitWithRetry(() => adapter.createAttribute({
            databaseId,
            tableId: collectionId,
            key: backupKey,
            type: "string", // backup keeps original type
            size: entry.currentSize,
            required: false, // always optional for backup
            array: entry.isArray,
        }));
        const available = await waitForAttribute(adapter, databaseId, collectionId, backupKey);
        if (!available)
            throw new Error(`Backup attribute ${backupKey} stuck`);
        advance("backup_created");
    }
    // Step 2: Copy data to backup
    if (phaseIndex(cpEntry.phase) < phaseIndex("data_copied_to_backup")) {
        MessageFormatter.info(` Copying data to backup ${backupKey}...`, {
            prefix: "Migrate",
        });
        await copyAttributeData(adapter, databaseId, collectionId, attributeKey, backupKey, opts.batchSize, opts.batchDelayMs);
        advance("data_copied_to_backup");
    }
    // Step 3: Verify backup
    if (phaseIndex(cpEntry.phase) < phaseIndex("data_verified_backup")) {
        await verifyDataCopy(adapter, databaseId, collectionId, attributeKey, backupKey);
        advance("data_verified_backup");
    }
    // Step 4: Delete indexes + original attribute
    if (phaseIndex(cpEntry.phase) < phaseIndex("original_deleted")) {
        // Save and delete affected indexes (definitions are stored in the
        // checkpoint entry so step 8 can recreate them).
        if (entry.indexesAffected.length > 0) {
            MessageFormatter.info(` Removing ${entry.indexesAffected.length} affected index(es)...`, {
                prefix: "Migrate",
            });
            await saveAndDeleteIndexes(adapter, databaseId, collectionId, entry.indexesAffected, cpEntry);
            saveCheckpoint(checkpoint, checkpointPath);
        }
        MessageFormatter.info(` Deleting original attribute ${attributeKey}...`, {
            prefix: "Migrate",
        });
        await tryAwaitWithRetry(() => adapter.deleteAttribute({
            databaseId,
            tableId: collectionId,
            key: attributeKey,
        }));
        await waitForAttributeGone(adapter, databaseId, collectionId, attributeKey);
        advance("original_deleted");
    }
    // Step 5: Create new attribute with target type
    if (phaseIndex(cpEntry.phase) < phaseIndex("new_attr_created")) {
        MessageFormatter.info(` Creating new attribute ${attributeKey} as ${targetType}...`, { prefix: "Migrate" });
        const createParams = {
            databaseId,
            tableId: collectionId,
            key: attributeKey,
            type: targetType,
            required: false, // create as optional first — data needs to be copied back
            array: entry.isArray,
        };
        // Only varchar carries a size; other target types have none.
        if (targetType === "varchar" && targetSize) {
            createParams.size = targetSize;
        }
        if (entry.hasDefault && entry.defaultValue !== undefined) {
            createParams.default = entry.defaultValue;
        }
        await tryAwaitWithRetry(() => adapter.createAttribute(createParams));
        const available = await waitForAttribute(adapter, databaseId, collectionId, attributeKey);
        if (!available)
            throw new Error(`New attribute ${attributeKey} stuck after creation`);
        advance("new_attr_created");
    }
    // Step 6: Copy data back from backup
    if (phaseIndex(cpEntry.phase) < phaseIndex("data_copied_back")) {
        MessageFormatter.info(` Copying data back from backup...`, {
            prefix: "Migrate",
        });
        await copyAttributeData(adapter, databaseId, collectionId, backupKey, attributeKey, opts.batchSize, opts.batchDelayMs);
        advance("data_copied_back");
    }
    // Step 7: Verify final data
    if (phaseIndex(cpEntry.phase) < phaseIndex("data_verified_final")) {
        await verifyDataCopy(adapter, databaseId, collectionId, backupKey, attributeKey);
        advance("data_verified_final");
    }
    // Step 8: Recreate indexes + delete backup
    if (phaseIndex(cpEntry.phase) < phaseIndex("backup_deleted")) {
        // Recreate indexes stored by saveAndDeleteIndexes in step 4.
        if (cpEntry.storedIndexes.length > 0) {
            MessageFormatter.info(` Recreating ${cpEntry.storedIndexes.length} index(es)...`, { prefix: "Migrate" });
            await recreateIndexes(adapter, databaseId, collectionId, cpEntry);
        }
        // Delete backup (unless keepBackups)
        if (!opts.keepBackups) {
            MessageFormatter.info(` Deleting backup attribute ${backupKey}...`, {
                prefix: "Migrate",
            });
            await tryAwaitWithRetry(() => adapter.deleteAttribute({
                databaseId,
                tableId: collectionId,
                key: backupKey,
            }));
            await waitForAttributeGone(adapter, databaseId, collectionId, backupKey);
        }
        advance("backup_deleted");
    }
    // Step 9: Mark completed
    // If the original attribute was required, update it now (after data is in place)
    if (entry.isRequired) {
        try {
            await tryAwaitWithRetry(() => adapter.updateAttribute({
                databaseId,
                tableId: collectionId,
                key: attributeKey,
                required: true,
            }));
        }
        catch {
            // Non-fatal — attribute is migrated, just not set back to required
            MessageFormatter.info(` Warning: could not set ${attributeKey} back to required`, { prefix: "Migrate" });
        }
    }
    advance("completed");
}
435
+ // ────────────────────────────────────────────────────────
436
+ // Helper: copy attribute data via cursor pagination
437
+ // ────────────────────────────────────────────────────────
438
/**
 * Copies the value of `sourceKey` into `targetKey` for every row in a
 * collection, paging with cursor pagination. Uses the adapter's bulk-upsert
 * path when available; otherwise updates rows one at a time. Rows where the
 * source value is undefined are left untouched.
 *
 * @param batchSize    rows fetched (and bulk-written) per page
 * @param batchDelayMs pause between pages to avoid hammering the server
 */
async function copyAttributeData(adapter, databaseId, collectionId, sourceKey, targetKey, batchSize, batchDelayMs) {
    let lastId;
    let totalCopied = 0;
    let totalDocs;
    // Get initial count (limit-1 query just to read `total`) so a progress
    // bar can be sized; if the adapter reports no total, skip the bar.
    const countRes = await tryAwaitWithRetry(() => adapter.listRows({
        databaseId,
        tableId: collectionId,
        queries: [Query.limit(1)],
    }));
    totalDocs = countRes?.total ?? undefined;
    const progress = totalDocs
        ? ProgressManager.create(`copy-${sourceKey}-${targetKey}`, totalDocs, {
            title: ` Copy ${sourceKey} → ${targetKey}`,
        })
        : undefined;
    while (true) {
        const queries = [Query.limit(batchSize)];
        if (lastId)
            queries.push(Query.cursorAfter(lastId));
        const res = await tryAwaitWithRetry(() => adapter.listRows({ databaseId, tableId: collectionId, queries }));
        const docs = res?.documents || res?.rows || [];
        if (docs.length === 0)
            break;
        // Batch update: copy sourceKey → targetKey
        if (adapter.supportsBulkOperations() && adapter.bulkUpsertRows) {
            const rows = docs
                .filter((d) => d[sourceKey] !== undefined)
                .map((d) => ({
                    id: d.$id,
                    data: { [targetKey]: d[sourceKey] },
                }));
            if (rows.length > 0) {
                await tryAwaitWithRetry(() => adapter.bulkUpsertRows({
                    databaseId,
                    tableId: collectionId,
                    rows,
                }));
            }
        }
        else {
            // Fallback: one update per row that has a source value.
            for (const doc of docs) {
                if (doc[sourceKey] === undefined)
                    continue;
                await tryAwaitWithRetry(() => adapter.updateRow({
                    databaseId,
                    tableId: collectionId,
                    id: doc.$id,
                    data: { [targetKey]: doc[sourceKey] },
                }));
            }
        }
        // Progress counts fetched rows (including ones without a source
        // value), so it tracks pagination, not writes.
        totalCopied += docs.length;
        lastId = docs[docs.length - 1].$id;
        progress?.update(totalCopied);
        if (docs.length < batchSize)
            break; // last page
        if (batchDelayMs > 0)
            await delay(batchDelayMs);
    }
    progress?.stop();
}
500
+ // ────────────────────────────────────────────────────────
501
+ // Helper: verify data copy (count + spot check)
502
+ // ────────────────────────────────────────────────────────
503
/**
 * Spot-checks that a copy succeeded: fetches the first 5 rows and asserts
 * that `targetKey` holds the same value as `sourceKey` wherever the source
 * is defined.
 *
 * Values are compared via JSON serialization rather than `!==`: array
 * attributes (entry.isArray is supported throughout this migrator) come
 * back as distinct array objects after a copy, so strict inequality would
 * always report a mismatch even when contents are identical.
 *
 * @throws Error naming the first mismatching document
 */
async function verifyDataCopy(adapter, databaseId, collectionId, sourceKey, targetKey) {
    // Spot-check first 5 documents
    const res = await tryAwaitWithRetry(() => adapter.listRows({
        databaseId,
        tableId: collectionId,
        queries: [Query.limit(5)],
    }));
    const docs = res?.documents || res?.rows || [];
    for (const doc of docs) {
        const sourceValue = doc[sourceKey];
        if (sourceValue === undefined)
            continue;
        const targetValue = doc[targetKey];
        // Deep compare: handles scalars and arrays alike.
        if (JSON.stringify(sourceValue) !== JSON.stringify(targetValue)) {
            throw new Error(`Verification failed: doc ${doc.$id} has ${sourceKey}=${JSON.stringify(sourceValue)} but ${targetKey}=${JSON.stringify(targetValue)}`);
        }
    }
}
519
+ // ────────────────────────────────────────────────────────
520
+ // Helper: wait for attribute to become available
521
+ // ────────────────────────────────────────────────────────
522
/**
 * Polls the collection schema every 2s until the attribute `key` reports
 * status "available" (true) or "failed"/"stuck" (false). Returns false on
 * timeout as well.
 */
async function waitForAttribute(adapter, databaseId, collectionId, key, maxWaitMs = 60_000) {
    const deadline = Date.now() + maxWaitMs;
    const pollMs = 2000;
    while (Date.now() < deadline) {
        const res = await tryAwaitWithRetry(() => adapter.getTable({ databaseId, tableId: collectionId }));
        const schemaAttrs = res?.data?.attributes || res?.data?.columns || [];
        const match = schemaAttrs.find((a) => a.key === key);
        switch (match?.status) {
            case "available":
                return true;
            case "failed":
            case "stuck":
                return false;
            default:
                // Not yet listed, or still processing — keep polling.
                await delay(pollMs);
        }
    }
    return false;
}
539
+ // ────────────────────────────────────────────────────────
540
+ // Helper: wait for attribute to be fully deleted
541
+ // ────────────────────────────────────────────────────────
542
/**
 * Polls the collection schema every 2s until attribute `key` no longer
 * appears (true), or the timeout elapses (false). While the attribute is
 * still listed — whether status "deleting" or otherwise — it simply waits.
 */
async function waitForAttributeGone(adapter, databaseId, collectionId, key, maxWaitMs = 60_000) {
    const deadline = Date.now() + maxWaitMs;
    const pollMs = 2000;
    while (Date.now() < deadline) {
        const res = await tryAwaitWithRetry(() => adapter.getTable({ databaseId, tableId: collectionId }));
        const schemaAttrs = res?.data?.attributes || res?.data?.columns || [];
        const stillPresent = schemaAttrs.some((a) => a.key === key);
        if (!stillPresent)
            return true;
        await delay(pollMs);
    }
    return false;
}
560
+ // ────────────────────────────────────────────────────────
561
+ // Helper: index management
562
+ // ────────────────────────────────────────────────────────
563
/**
 * Records the definitions of the given indexes into `cpEntry.storedIndexes`
 * (for later recreation) and deletes them from the server, then waits until
 * each is fully gone. Indexes already stored in the checkpoint are not
 * stored twice, so this is safe to re-run on resume.
 */
async function saveAndDeleteIndexes(adapter, databaseId, collectionId, indexKeys, cpEntry) {
    // Fetch the live index list so we store the server's definitions.
    const res = await tryAwaitWithRetry(() => adapter.listIndexes({ databaseId, tableId: collectionId }));
    const serverIndexes = new Map((res?.data || []).map((i) => [i.key, i]));
    for (const indexKey of indexKeys) {
        const definition = serverIndexes.get(indexKey);
        if (!definition)
            continue;
        // Store definition for recreation (skip if a prior run stored it).
        const isStored = cpEntry.storedIndexes.some((s) => s.key === indexKey);
        if (!isStored) {
            cpEntry.storedIndexes.push({
                key: definition.key,
                type: definition.type || "key",
                attributes: definition.attributes || [],
                orders: definition.orders,
            });
        }
        await tryAwaitWithRetry(() => adapter.deleteIndex({ databaseId, tableId: collectionId, key: indexKey }));
    }
    // Deletions are queued server-side; block until each index disappears.
    for (const indexKey of indexKeys) {
        await waitForIndexGone(adapter, databaseId, collectionId, indexKey);
    }
}
589
/**
 * Recreates every index stored in `cpEntry.storedIndexes` (captured by
 * saveAndDeleteIndexes), waiting for each to become available before
 * creating the next.
 */
async function recreateIndexes(adapter, databaseId, collectionId, cpEntry) {
    for (const stored of cpEntry.storedIndexes) {
        const { key, type, attributes, orders } = stored;
        await tryAwaitWithRetry(() => adapter.createIndex({
            databaseId,
            tableId: collectionId,
            key,
            type,
            attributes,
            orders,
        }));
        // Index builds are asynchronous server-side; block until ready.
        await waitForIndexAvailable(adapter, databaseId, collectionId, key);
    }
}
603
/**
 * Polls every 2s until index `key` no longer appears in the collection's
 * index list. Returns silently on timeout (best-effort wait).
 */
async function waitForIndexGone(adapter, databaseId, collectionId, key, maxWaitMs = 60_000) {
    const deadline = Date.now() + maxWaitMs;
    while (Date.now() < deadline) {
        const res = await tryAwaitWithRetry(() => adapter.listIndexes({ databaseId, tableId: collectionId }));
        const stillPresent = (res?.data || []).some((i) => i.key === key);
        if (!stillPresent)
            return;
        await delay(2000);
    }
}
613
/**
 * Polls every 2s until index `key` reports status "available". Throws if
 * the server marks it "failed"; returns silently if the timeout elapses
 * without either status (best-effort wait).
 * @throws Error when the index build fails
 */
async function waitForIndexAvailable(adapter, databaseId, collectionId, key, maxWaitMs = 60_000) {
    const deadline = Date.now() + maxWaitMs;
    while (Date.now() < deadline) {
        const res = await tryAwaitWithRetry(() => adapter.listIndexes({ databaseId, tableId: collectionId }));
        const match = (res?.data || []).find((i) => i.key === key);
        const status = match?.status;
        if (status === "available")
            return;
        if (status === "failed")
            throw new Error(`Index ${key} creation failed`);
        await delay(2000);
    }
}
627
+ // ────────────────────────────────────────────────────────
628
+ // Checkpoint management
629
+ // ────────────────────────────────────────────────────────
630
/**
 * Loads a checkpoint file if it exists and passes schema validation;
 * otherwise (missing, unreadable, or corrupt) returns a fresh empty
 * checkpoint bound to `planFile`. A corrupt file is reported but not
 * deleted — it is simply replaced on the next save.
 */
function loadOrCreateCheckpoint(checkpointPath, planFile) {
    if (fs.existsSync(checkpointPath)) {
        try {
            const raw = fs.readFileSync(checkpointPath, "utf8");
            const parsed = MigrationCheckpointSchema.parse(JSON.parse(raw));
            MessageFormatter.info(`Resuming from checkpoint: ${checkpointPath}`, { prefix: "Checkpoint" });
            return parsed;
        }
        catch {
            // Fall through to a fresh checkpoint on parse/validation errors.
            MessageFormatter.info("Corrupt checkpoint file, creating new one.", { prefix: "Checkpoint" });
        }
    }
    const timestamp = new Date().toISOString();
    return {
        planFile,
        startedAt: timestamp,
        lastUpdatedAt: timestamp,
        entries: [],
    };
}
650
/**
 * Stamps `lastUpdatedAt` and writes the checkpoint to disk as pretty-printed
 * JSON. Mutates the passed checkpoint object (timestamp only).
 */
function saveCheckpoint(checkpoint, checkpointPath) {
    checkpoint.lastUpdatedAt = new Date().toISOString();
    const serialized = JSON.stringify(checkpoint, null, 2);
    fs.writeFileSync(checkpointPath, serialized, "utf8");
}
654
/**
 * Returns the checkpoint entry tracking this plan entry, creating and
 * registering a fresh "pending" one (with a generated backup key) if none
 * exists yet.
 */
function getOrCreateCheckpointEntry(checkpoint, entry) {
    const existing = findCheckpointEntry(checkpoint, entry);
    if (existing)
        return existing;
    const { databaseId, collectionId, attributeKey, targetType, targetSize } = entry;
    const fresh = {
        databaseId,
        collectionId,
        attributeKey,
        backupKey: generateBackupKey(attributeKey),
        phase: "pending",
        targetType,
        targetSize,
        storedIndexes: [],
    };
    checkpoint.entries.push(fresh);
    return fresh;
}
671
/**
 * Looks up the checkpoint entry matching a plan entry's identity triple
 * (database, collection, attribute). Returns undefined if none exists.
 */
function findCheckpointEntry(checkpoint, entry) {
    const { databaseId, collectionId, attributeKey } = entry;
    return checkpoint.entries.find((candidate) =>
        candidate.databaseId === databaseId &&
        candidate.collectionId === collectionId &&
        candidate.attributeKey === attributeKey);
}
676
+ // ────────────────────────────────────────────────────────
677
+ // Phase ordering for checkpoint resume
678
+ // ────────────────────────────────────────────────────────
679
// Ordered lifecycle phases for a single attribute migration. A checkpoint
// entry's position in this list determines which steps are skipped on resume.
const PHASE_ORDER = [
    "pending",
    "backup_created",
    "data_copied_to_backup",
    "data_verified_backup",
    "original_deleted",
    "new_attr_created",
    "data_copied_back",
    "data_verified_final",
    "backup_deleted",
    "completed",
];
/**
 * Returns the position of `phase` in PHASE_ORDER, or -1 for an unknown
 * phase (e.g. "failed"), which makes every migration step re-run — the safe
 * default on resume.
 * @param {string} phase
 * @returns {number}
 */
function phaseIndex(phase) {
    // indexOf already yields -1 on a miss; the previous `idx >= 0 ? idx : -1`
    // guard was redundant.
    return PHASE_ORDER.indexOf(phase);
}
695
+ // ────────────────────────────────────────────────────────
696
+ // Dry run summary
697
+ // ────────────────────────────────────────────────────────
698
/**
 * Prints what executeMigrationPlan would do without making changes:
 * migrating entries grouped by "database/collection", then skipped entries
 * with their reasons. Output-only; does not modify the plan.
 */
function printDryRunSummary(plan) {
    console.log("");
    console.log(chalk.bold("Dry Run — What Would Happen:"));
    console.log(chalk.gray("─".repeat(50)));
    // Bucket migrating entries under a "<db>/<collection>" label.
    const byCollection = new Map();
    for (const entry of plan.entries) {
        if (entry.action !== "migrate")
            continue;
        const label = `${entry.databaseName}/${entry.collectionName}`;
        const bucket = byCollection.get(label) ?? [];
        bucket.push(entry);
        byCollection.set(label, bucket);
    }
    for (const [label, bucket] of byCollection) {
        console.log(`\n ${chalk.cyan(label)}`);
        for (const e of bucket) {
            const sizeInfo = e.targetType === "varchar" ? ` (size: ${e.targetSize})` : "";
            const indexInfo = e.indexesAffected.length > 0
                ? ` [indexes: ${e.indexesAffected.join(", ")}]`
                : "";
            console.log(` ${e.attributeKey}: string(${e.currentSize}) → ${e.targetType}${sizeInfo}${indexInfo}`);
        }
    }
    const skippedEntries = plan.entries.filter((e) => e.action === "skip");
    if (skippedEntries.length > 0) {
        console.log(`\n ${chalk.yellow("Skipped:")}`);
        for (const e of skippedEntries) {
            console.log(` ${e.attributeKey}: ${e.skipReason || "manual skip"}`);
        }
    }
    console.log("");
}
730
+ // ────────────────────────────────────────────────────────
731
+ // Utility
732
+ // ────────────────────────────────────────────────────────
733
/**
 * Resolves after `ms` milliseconds — the awaitable pause used between
 * polling cycles and row batches.
 * @param {number} ms
 * @returns {Promise<void>}
 */
function delay(ms) {
    return new Promise((resolve) => {
        setTimeout(resolve, ms);
    });
}