@njdamstra/appwrite-utils-cli 1.10.1 → 1.11.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,1084 @@
1
+ import { Query } from "node-appwrite";
2
+ import yaml from "js-yaml";
3
+ import fs from "node:fs";
4
+ import path from "node:path";
5
+ import inquirer from "inquirer";
6
+ import chalk from "chalk";
7
+ import {
8
+ type DatabaseAdapter,
9
+ MessageFormatter,
10
+ tryAwaitWithRetry,
11
+ } from "@njdamstra/appwrite-utils-helpers";
12
+ import type { AppwriteConfig } from "@njdamstra/appwrite-utils";
13
+ import { ProgressManager } from "../shared/progressManager.js";
14
+ import {
15
+ type MigrationPlan,
16
+ type MigrationPlanEntry,
17
+ type MigrationCheckpoint,
18
+ type CheckpointEntry,
19
+ type CheckpointPhase,
20
+ type AnalyzeOptions,
21
+ type ExecuteOptions,
22
+ MigrationPlanSchema,
23
+ MigrationCheckpointSchema,
24
+ suggestTargetType,
25
+ generateBackupKey,
26
+ } from "./migrateStringsTypes.js";
27
+
28
+ // ────────────────────────────────────────────────────────
29
+ // Phase 1: Analyze — queries Appwrite server for real state
30
+ // ────────────────────────────────────────────────────────
31
+
32
/**
 * Phase 1 of the string-type migration: scan every configured database on
 * the Appwrite server, inventory all string attributes, and write an
 * editable YAML plan describing how each would be migrated.
 *
 * The plan is advisory — the user may edit `targetType` / `action` in the
 * YAML file before running `executeMigrationPlan`.
 *
 * @param adapter - Database adapter used for all server reads.
 * @param config  - Project config; `config.databases` drives the scan.
 * @param options - Optional `outputPath` for the YAML plan file
 *                  (defaults to ./migrate-strings-plan.yaml in the cwd).
 * @returns The generated plan (also written to disk as YAML).
 */
export async function analyzeStringAttributes(
  adapter: DatabaseAdapter,
  config: AppwriteConfig,
  options: AnalyzeOptions = {}
): Promise<MigrationPlan> {
  const databases = config.databases || [];
  if (databases.length === 0) {
    MessageFormatter.warning("No databases configured. Nothing to analyze.", {
      prefix: "Analyze",
    });
    return emptyPlan(config);
  }

  const entries: MigrationPlanEntry[] = [];

  for (const db of databases) {
    MessageFormatter.info(`Scanning database: ${db.name} (${db.$id})`, {
      prefix: "Analyze",
    });

    // Fetch all collections/tables from server.
    const tablesRes = await tryAwaitWithRetry(() =>
      adapter.listTables({ databaseId: db.$id })
    );
    // Adapter response shapes vary; probe the known payload keys in order.
    const tables: any[] =
      tablesRes?.tables || tablesRes?.collections || tablesRes?.data || [];

    for (const table of tables) {
      const tableId: string = table.$id || table.key || table.name;
      const tableName: string = table.name || tableId;

      // Fetch full schema from server.
      const schemaRes = await tryAwaitWithRetry(() =>
        adapter.getTable({ databaseId: db.$id, tableId })
      );
      const attributes: any[] =
        schemaRes?.data?.columns || schemaRes?.data?.attributes || [];

      // Fetch indexes from server (needed to decide which attrs must stay varchar).
      const indexRes = await tryAwaitWithRetry(() =>
        adapter.listIndexes({ databaseId: db.$id, tableId })
      );
      const indexes: any[] = indexRes?.data || [];

      for (const attr of attributes) {
        // Only string attributes are candidates for migration.
        if (attr.type !== "string") continue;

        // NOTE(review): `||` treats size 0 as missing and substitutes 50;
        // presumably Appwrite string sizes are >= 1 — confirm.
        const size: number = attr.size || 50;
        const isEncrypted = !!(attr as any).encrypt;
        const isRequired = !!attr.required;
        const isArray = !!attr.array;
        const hasDefault =
          attr.xdefault !== undefined && attr.xdefault !== null;

        // Find indexes that reference this attribute.
        const affectedIndexes = indexes
          .filter((idx: any) => idx.attributes?.includes(attr.key))
          .map((idx: any) => idx.key);
        const hasIndex = affectedIndexes.length > 0;

        const suggested = suggestTargetType(size, hasIndex);

        // Encrypted attributes are never auto-migrated (action: "skip").
        const entry: MigrationPlanEntry = {
          databaseId: db.$id,
          databaseName: db.name,
          collectionId: tableId,
          collectionName: tableName,
          attributeKey: attr.key,
          currentType: "string",
          currentSize: size,
          isRequired,
          isArray,
          isEncrypted,
          hasDefault,
          defaultValue: hasDefault ? attr.xdefault : undefined,
          suggestedType: suggested,
          targetType: suggested,
          targetSize: suggested === "varchar" ? size : undefined,
          action: isEncrypted ? "skip" : "migrate",
          skipReason: isEncrypted ? "encrypted" : undefined,
          indexesAffected: affectedIndexes,
        };

        // Indexed attrs must stay varchar (overrides the suggestion).
        if (hasIndex && suggested !== "varchar" && !isEncrypted) {
          entry.targetType = "varchar";
          entry.targetSize = size;
        }

        entries.push(entry);
      }
    }
  }

  // Aggregate counts for the plan summary.
  const toMigrate = entries.filter((e) => e.action === "migrate").length;
  const toSkip = entries.filter((e) => e.action === "skip").length;
  const uniqueDbs = new Set(entries.map((e) => e.databaseId));
  const uniqueColls = new Set(
    entries.map((e) => `${e.databaseId}:${e.collectionId}`)
  );

  const plan: MigrationPlan = {
    version: 1,
    generatedAt: new Date().toISOString(),
    appwriteEndpoint: config.appwriteEndpoint,
    appwriteProject: config.appwriteProject,
    summary: {
      totalStringAttributes: entries.length,
      toMigrate,
      toSkip,
      databaseCount: uniqueDbs.size,
      collectionCount: uniqueColls.size,
    },
    entries,
  };

  // Write YAML plan so the user can review/edit it before executing.
  const outputPath =
    options.outputPath || path.join(process.cwd(), "migrate-strings-plan.yaml");
  const yamlContent = yaml.dump(plan, {
    lineWidth: 120,
    noRefs: true,
    sortKeys: false,
  });
  fs.writeFileSync(outputPath, yamlContent, "utf8");

  // Print summary
  MessageFormatter.info(`Migration plan written to ${outputPath}`, {
    prefix: "Analyze",
  });
  printPlanSummary(plan);

  return plan;
}
166
+
167
+ function emptyPlan(config: AppwriteConfig): MigrationPlan {
168
+ return {
169
+ version: 1,
170
+ generatedAt: new Date().toISOString(),
171
+ appwriteEndpoint: config.appwriteEndpoint,
172
+ appwriteProject: config.appwriteProject,
173
+ summary: {
174
+ totalStringAttributes: 0,
175
+ toMigrate: 0,
176
+ toSkip: 0,
177
+ databaseCount: 0,
178
+ collectionCount: 0,
179
+ },
180
+ entries: [],
181
+ };
182
+ }
183
+
184
/**
 * Prints a human-readable summary of an analysis plan to stdout:
 * overall counts, the distribution of target types among "migrate"
 * entries, and a tally of skip reasons. Output-only; does not mutate
 * the plan.
 */
function printPlanSummary(plan: MigrationPlan): void {
  const { summary } = plan;
  console.log("");
  console.log(chalk.bold("String Attribute Migration Plan Summary"));
  console.log(chalk.gray("─".repeat(50)));
  console.log(
    ` Total string attributes: ${chalk.yellow(summary.totalStringAttributes)}`
  );
  console.log(` To migrate: ${chalk.green(summary.toMigrate)}`);
  console.log(` To skip: ${chalk.red(summary.toSkip)}`);
  console.log(` Databases: ${summary.databaseCount}`);
  console.log(` Collections: ${summary.collectionCount}`);
  console.log("");

  // Type distribution — counts only entries that will actually migrate.
  const typeDistribution: Record<string, number> = {};
  for (const entry of plan.entries) {
    if (entry.action === "migrate") {
      typeDistribution[entry.targetType] =
        (typeDistribution[entry.targetType] || 0) + 1;
    }
  }
  if (Object.keys(typeDistribution).length > 0) {
    console.log(chalk.bold(" Target type distribution:"));
    for (const [type, count] of Object.entries(typeDistribution)) {
      console.log(` ${type}: ${count}`);
    }
    console.log("");
  }

  // Skipped reasons — only entries that carry an explicit skipReason.
  const skipReasons: Record<string, number> = {};
  for (const entry of plan.entries) {
    if (entry.action === "skip" && entry.skipReason) {
      skipReasons[entry.skipReason] =
        (skipReasons[entry.skipReason] || 0) + 1;
    }
  }
  if (Object.keys(skipReasons).length > 0) {
    console.log(chalk.bold(" Skip reasons:"));
    for (const [reason, count] of Object.entries(skipReasons)) {
      console.log(` ${reason}: ${count}`);
    }
    console.log("");
  }

  console.log(
    chalk.dim(
      " Edit the YAML plan file to change targetType or action before executing."
    )
  );
  console.log("");
}
237
+
238
+ // ────────────────────────────────────────────────────────
239
+ // Phase 2: Execute — server connection required
240
+ // ────────────────────────────────────────────────────────
241
+
242
/**
 * Phase 2 of the string-type migration: load a YAML plan produced by
 * `analyzeStringAttributes`, validate it against MigrationPlanSchema,
 * and interactively migrate each "migrate" entry collection-by-collection.
 *
 * Progress is persisted to a JSON checkpoint (default: next to the plan
 * file) after each phase of each attribute, so a failed or aborted run
 * can be resumed; attributes already marked completed are counted as
 * succeeded and skipped on rerun.
 *
 * @param adapter - Database adapter used for all server reads/writes.
 * @param options - Plan path plus optional checkpointPath, batchSize,
 *                  batchDelayMs, dryRun and keepBackups.
 * @returns Counts of succeeded, failed and skipped attributes.
 * @throws If the plan file is unreadable or fails schema validation.
 */
export async function executeMigrationPlan(
  adapter: DatabaseAdapter,
  options: ExecuteOptions
): Promise<{ succeeded: number; failed: number; skipped: number }> {
  // Load and validate plan
  const planYaml = fs.readFileSync(options.planPath, "utf8");
  const planRaw = yaml.load(planYaml);
  const plan = MigrationPlanSchema.parse(planRaw);

  const migrateEntries = plan.entries.filter((e) => e.action === "migrate");
  if (migrateEntries.length === 0) {
    MessageFormatter.info("No attributes to migrate in plan.", {
      prefix: "Execute",
    });
    return { succeeded: 0, failed: 0, skipped: plan.entries.length };
  }

  // Load or create checkpoint (default path derives from the plan filename).
  const checkpointPath =
    options.checkpointPath ||
    options.planPath.replace(/\.ya?ml$/, ".checkpoint.json");
  const checkpoint = loadOrCreateCheckpoint(checkpointPath, options.planPath);

  const batchSize = options.batchSize || 100;
  const batchDelayMs = options.batchDelayMs || 50;

  // Group by database/collection so confirmation happens per collection.
  const groups = new Map<string, MigrationPlanEntry[]>();
  for (const entry of migrateEntries) {
    const key = `${entry.databaseId}:${entry.collectionId}`;
    if (!groups.has(key)) groups.set(key, []);
    groups.get(key)!.push(entry);
  }

  let succeeded = 0;
  let failed = 0;
  // Entries the plan itself marks "skip" are counted up front.
  let skipped = plan.entries.filter((e) => e.action === "skip").length;

  MessageFormatter.info(
    `Executing migration: ${migrateEntries.length} attributes across ${groups.size} collections`,
    { prefix: "Execute" }
  );

  if (options.dryRun) {
    MessageFormatter.info("DRY RUN — no changes will be made", {
      prefix: "Execute",
    });
    printDryRunSummary(plan);
    return { succeeded: 0, failed: 0, skipped: plan.entries.length };
  }

  for (const [groupKey, entries] of groups) {
    const first = entries[0];
    console.log("");
    console.log(
      chalk.bold(
        `Collection: ${first.collectionName} (${first.databaseName}/${first.collectionId})`
      )
    );
    console.log(
      ` Attributes to migrate: ${entries.map((e) => e.attributeKey).join(", ")}`
    );

    // Per-collection confirmation
    const { proceed } = await inquirer.prompt([
      {
        type: "list",
        name: "proceed",
        message: `Migrate ${entries.length} attribute(s) in ${first.collectionName}?`,
        choices: [
          { name: "Yes, proceed", value: "yes" },
          { name: "Skip this collection", value: "skip" },
          { name: "Abort entire migration", value: "abort" },
        ],
      },
    ]);

    if (proceed === "abort") {
      MessageFormatter.info("Migration aborted by user.", {
        prefix: "Execute",
      });
      break;
    }
    if (proceed === "skip") {
      skipped += entries.length;
      continue;
    }

    // Migrate each attribute in this collection
    for (const entry of entries) {
      const cpEntry = getOrCreateCheckpointEntry(checkpoint, entry);

      // Resume support: completed attributes are not re-run.
      if (cpEntry.phase === "completed") {
        MessageFormatter.info(
          ` ${entry.attributeKey}: already completed (checkpoint)`,
          { prefix: "Execute" }
        );
        succeeded++;
        continue;
      }

      try {
        await migrateOneAttribute(
          adapter,
          entry,
          cpEntry,
          checkpoint,
          checkpointPath,
          {
            batchSize,
            batchDelayMs,
            keepBackups: options.keepBackups ?? true,
          }
        );
        succeeded++;
        MessageFormatter.success(
          ` ${entry.attributeKey}: migrated to ${entry.targetType}`,
          { prefix: "Execute" }
        );
      } catch (err: any) {
        // Record the failure in the checkpoint so a rerun can resume
        // from the last successfully persisted phase.
        failed++;
        cpEntry.phase = "failed";
        cpEntry.error = err.message || String(err);
        saveCheckpoint(checkpoint, checkpointPath);
        MessageFormatter.error(
          ` ${entry.attributeKey}: FAILED — ${cpEntry.error}`,
          undefined,
          { prefix: "Execute" }
        );
      }
    }

    // After collection completes, offer to update local YAML
    const successInGroup = entries.filter((e) => {
      const cp = findCheckpointEntry(checkpoint, e);
      return cp?.phase === "completed";
    }).length;

    if (successInGroup > 0) {
      const { updateYaml } = await inquirer.prompt([
        {
          type: "confirm",
          name: "updateYaml",
          message: `Update local YAML config for ${first.collectionName}? (change type: string → new types)`,
          default: false,
        },
      ]);
      // No automatic edit is performed — the user is pointed at the files.
      if (updateYaml) {
        MessageFormatter.info(
          "Local YAML update: use your editor to change 'type: string' to the new types in your collection YAML files.",
          { prefix: "Execute" }
        );
      }
    }
  }

  // Final summary
  console.log("");
  console.log(chalk.bold("Migration Results"));
  console.log(chalk.gray("─".repeat(40)));
  console.log(` Succeeded: ${chalk.green(succeeded)}`);
  console.log(` Failed: ${chalk.red(failed)}`);
  console.log(` Skipped: ${chalk.yellow(skipped)}`);
  console.log("");

  if (failed > 0) {
    MessageFormatter.info(
      `Checkpoint saved at ${checkpointPath} — rerun to resume failed attributes.`,
      { prefix: "Execute" }
    );
  }

  return { succeeded, failed, skipped };
}
416
+
417
+ // ────────────────────────────────────────────────────────
418
+ // Single attribute migration (9 phases)
419
+ // ────────────────────────────────────────────────────────
420
+
421
/** Tuning knobs passed from executeMigrationPlan to migrateOneAttribute. */
interface MigrateOneOptions {
  // Rows fetched/updated per page during data copies.
  batchSize: number;
  // Pause between pages, in milliseconds (0 disables the pause).
  batchDelayMs: number;
  // When true, the backup attribute is left in place after migration.
  keepBackups: boolean;
}
426
+
427
/**
 * Migrates a single string attribute to its target type via a
 * backup-and-recreate sequence, persisting a checkpoint after each phase:
 *
 *   1. create backup attribute (same string type)
 *   2. copy data into the backup
 *   3. verify the backup copy
 *   4. delete affected indexes, then the original attribute
 *   5. create the new attribute with the target type (optional at first)
 *   6. copy data back from the backup
 *   7. verify the final copy
 *   8. recreate indexes; delete the backup unless opts.keepBackups
 *   9. restore `required: true` if the original was required; mark completed
 *
 * Each phase is guarded by a phaseIndex comparison against cpEntry.phase,
 * so a resumed run skips phases that were already persisted as done.
 *
 * @throws Error if a created attribute never becomes available, or if a
 *         data-copy verification fails; the caller records the failure in
 *         the checkpoint.
 */
async function migrateOneAttribute(
  adapter: DatabaseAdapter,
  entry: MigrationPlanEntry,
  cpEntry: CheckpointEntry,
  checkpoint: MigrationCheckpoint,
  checkpointPath: string,
  opts: MigrateOneOptions
): Promise<void> {
  const { databaseId, collectionId, attributeKey, targetType, targetSize } =
    entry;
  const backupKey = cpEntry.backupKey;

  // Persist the new phase immediately so a crash never loses progress.
  const advance = (phase: CheckpointPhase) => {
    cpEntry.phase = phase;
    checkpoint.lastUpdatedAt = new Date().toISOString();
    saveCheckpoint(checkpoint, checkpointPath);
  };

  // Step 1: Create backup attribute
  if (phaseIndex(cpEntry.phase) < phaseIndex("backup_created")) {
    MessageFormatter.info(` Creating backup attribute ${backupKey}...`, {
      prefix: "Migrate",
    });
    await tryAwaitWithRetry(() =>
      adapter.createAttribute({
        databaseId,
        tableId: collectionId,
        key: backupKey,
        type: "string", // backup keeps original type
        size: entry.currentSize,
        required: false, // always optional for backup
        array: entry.isArray,
      })
    );
    const available = await waitForAttribute(
      adapter,
      databaseId,
      collectionId,
      backupKey
    );
    if (!available) throw new Error(`Backup attribute ${backupKey} stuck`);
    advance("backup_created");
  }

  // Step 2: Copy data to backup
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_copied_to_backup")) {
    MessageFormatter.info(` Copying data to backup ${backupKey}...`, {
      prefix: "Migrate",
    });
    await copyAttributeData(
      adapter,
      databaseId,
      collectionId,
      attributeKey,
      backupKey,
      opts.batchSize,
      opts.batchDelayMs
    );
    advance("data_copied_to_backup");
  }

  // Step 3: Verify backup before the original is destroyed.
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_verified_backup")) {
    await verifyDataCopy(
      adapter,
      databaseId,
      collectionId,
      attributeKey,
      backupKey
    );
    advance("data_verified_backup");
  }

  // Step 4: Delete indexes + original attribute
  if (phaseIndex(cpEntry.phase) < phaseIndex("original_deleted")) {
    // Save and delete affected indexes (definitions go into the checkpoint
    // so they can be recreated in step 8, even after a resume).
    if (entry.indexesAffected.length > 0) {
      MessageFormatter.info(` Removing ${entry.indexesAffected.length} affected index(es)...`, {
        prefix: "Migrate",
      });
      await saveAndDeleteIndexes(
        adapter,
        databaseId,
        collectionId,
        entry.indexesAffected,
        cpEntry
      );
      saveCheckpoint(checkpoint, checkpointPath);
    }

    MessageFormatter.info(` Deleting original attribute ${attributeKey}...`, {
      prefix: "Migrate",
    });
    await tryAwaitWithRetry(() =>
      adapter.deleteAttribute({
        databaseId,
        tableId: collectionId,
        key: attributeKey,
      })
    );
    await waitForAttributeGone(adapter, databaseId, collectionId, attributeKey);
    advance("original_deleted");
  }

  // Step 5: Create new attribute with target type
  if (phaseIndex(cpEntry.phase) < phaseIndex("new_attr_created")) {
    MessageFormatter.info(
      ` Creating new attribute ${attributeKey} as ${targetType}...`,
      { prefix: "Migrate" }
    );
    const createParams: Record<string, any> = {
      databaseId,
      tableId: collectionId,
      key: attributeKey,
      type: targetType,
      required: false, // create as optional first — data needs to be copied back
      array: entry.isArray,
    };
    // Only varchar carries a size; other target types presumably reject it.
    if (targetType === "varchar" && targetSize) {
      createParams.size = targetSize;
    }
    if (entry.hasDefault && entry.defaultValue !== undefined) {
      createParams.default = entry.defaultValue;
    }

    await tryAwaitWithRetry(() => adapter.createAttribute(createParams as any));
    const available = await waitForAttribute(
      adapter,
      databaseId,
      collectionId,
      attributeKey
    );
    if (!available)
      throw new Error(`New attribute ${attributeKey} stuck after creation`);
    advance("new_attr_created");
  }

  // Step 6: Copy data back from backup into the re-typed attribute.
  // NOTE(review): the copy sends the backup's string values as-is; for
  // non-varchar targets this relies on server-side coercion — confirm.
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_copied_back")) {
    MessageFormatter.info(` Copying data back from backup...`, {
      prefix: "Migrate",
    });
    await copyAttributeData(
      adapter,
      databaseId,
      collectionId,
      backupKey,
      attributeKey,
      opts.batchSize,
      opts.batchDelayMs
    );
    advance("data_copied_back");
  }

  // Step 7: Verify final data
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_verified_final")) {
    await verifyDataCopy(
      adapter,
      databaseId,
      collectionId,
      backupKey,
      attributeKey
    );
    advance("data_verified_final");
  }

  // Step 8: Recreate indexes + delete backup
  if (phaseIndex(cpEntry.phase) < phaseIndex("backup_deleted")) {
    // Recreate indexes from the definitions stored in the checkpoint.
    if (cpEntry.storedIndexes.length > 0) {
      MessageFormatter.info(
        ` Recreating ${cpEntry.storedIndexes.length} index(es)...`,
        { prefix: "Migrate" }
      );
      await recreateIndexes(adapter, databaseId, collectionId, cpEntry);
    }

    // Delete backup (unless keepBackups)
    if (!opts.keepBackups) {
      MessageFormatter.info(` Deleting backup attribute ${backupKey}...`, {
        prefix: "Migrate",
      });
      await tryAwaitWithRetry(() =>
        adapter.deleteAttribute({
          databaseId,
          tableId: collectionId,
          key: backupKey,
        })
      );
      await waitForAttributeGone(
        adapter,
        databaseId,
        collectionId,
        backupKey
      );
    }
    advance("backup_deleted");
  }

  // Step 9: Mark completed
  // If the original attribute was required, update it now (after data is in place)
  if (entry.isRequired) {
    try {
      await tryAwaitWithRetry(() =>
        adapter.updateAttribute({
          databaseId,
          tableId: collectionId,
          key: attributeKey,
          required: true,
        } as any)
      );
    } catch {
      // Non-fatal — attribute is migrated, just not set back to required
      MessageFormatter.info(
        ` Warning: could not set ${attributeKey} back to required`,
        { prefix: "Migrate" }
      );
    }
  }
  advance("completed");
}
648
+
649
+ // ────────────────────────────────────────────────────────
650
+ // Helper: copy attribute data via cursor pagination
651
+ // ────────────────────────────────────────────────────────
652
+
653
/**
 * Copies the value of `sourceKey` into `targetKey` for every row of a
 * collection, paging with cursor-based pagination (Query.cursorAfter).
 *
 * Rows where the source value is undefined are left untouched. Uses the
 * adapter's bulk upsert when supported, otherwise falls back to one
 * updateRow call per document. An optional progress bar tracks the copy
 * when the initial count query returns a total.
 *
 * @param batchSize    - Page size for listRows and bulk updates.
 * @param batchDelayMs - Pause between pages; 0 disables the pause.
 */
async function copyAttributeData(
  adapter: DatabaseAdapter,
  databaseId: string,
  collectionId: string,
  sourceKey: string,
  targetKey: string,
  batchSize: number,
  batchDelayMs: number
): Promise<void> {
  let lastId: string | undefined;
  let totalCopied = 0;
  let totalDocs: number | undefined;

  // Get initial count (limit 1 — only the `total` field is used).
  const countRes = await tryAwaitWithRetry(() =>
    adapter.listRows({
      databaseId,
      tableId: collectionId,
      queries: [Query.limit(1)],
    })
  );
  totalDocs = countRes?.total ?? undefined;
  const progress = totalDocs
    ? ProgressManager.create(`copy-${sourceKey}-${targetKey}`, totalDocs, {
        title: ` Copy ${sourceKey} → ${targetKey}`,
      })
    : undefined;

  while (true) {
    const queries: string[] = [Query.limit(batchSize)];
    if (lastId) queries.push(Query.cursorAfter(lastId));

    const res = await tryAwaitWithRetry(() =>
      adapter.listRows({ databaseId, tableId: collectionId, queries })
    );

    // Response shape varies by adapter version.
    const docs = res?.documents || res?.rows || [];
    if (docs.length === 0) break;

    // Batch update: copy sourceKey → targetKey
    if (adapter.supportsBulkOperations() && adapter.bulkUpsertRows) {
      const rows = docs
        .filter((d: any) => d[sourceKey] !== undefined)
        .map((d: any) => ({
          id: d.$id,
          data: { [targetKey]: d[sourceKey] },
        }));

      if (rows.length > 0) {
        await tryAwaitWithRetry(() =>
          adapter.bulkUpsertRows!({
            databaseId,
            tableId: collectionId,
            rows,
          })
        );
      }
    } else {
      // Fallback: one round-trip per document.
      for (const doc of docs) {
        if (doc[sourceKey] === undefined) continue;
        await tryAwaitWithRetry(() =>
          adapter.updateRow({
            databaseId,
            tableId: collectionId,
            id: doc.$id,
            data: { [targetKey]: doc[sourceKey] },
          })
        );
      }
    }

    // Progress counts scanned docs (including ones without a source value).
    totalCopied += docs.length;
    lastId = docs[docs.length - 1].$id;
    progress?.update(totalCopied);

    if (docs.length < batchSize) break; // last page
    if (batchDelayMs > 0) await delay(batchDelayMs);
  }

  progress?.stop();
}
734
+
735
+ // ────────────────────────────────────────────────────────
736
+ // Helper: verify data copy (count + spot check)
737
+ // ────────────────────────────────────────────────────────
738
+
739
+ async function verifyDataCopy(
740
+ adapter: DatabaseAdapter,
741
+ databaseId: string,
742
+ collectionId: string,
743
+ sourceKey: string,
744
+ targetKey: string
745
+ ): Promise<void> {
746
+ // Spot-check first 5 documents
747
+ const res = await tryAwaitWithRetry(() =>
748
+ adapter.listRows({
749
+ databaseId,
750
+ tableId: collectionId,
751
+ queries: [Query.limit(5)],
752
+ })
753
+ );
754
+ const docs = res?.documents || res?.rows || [];
755
+ for (const doc of docs) {
756
+ if (doc[sourceKey] === undefined) continue;
757
+ if (doc[sourceKey] !== doc[targetKey]) {
758
+ throw new Error(
759
+ `Verification failed: doc ${doc.$id} has ${sourceKey}=${JSON.stringify(doc[sourceKey])} but ${targetKey}=${JSON.stringify(doc[targetKey])}`
760
+ );
761
+ }
762
+ }
763
+ }
764
+
765
+ // ────────────────────────────────────────────────────────
766
+ // Helper: wait for attribute to become available
767
+ // ────────────────────────────────────────────────────────
768
+
769
+ async function waitForAttribute(
770
+ adapter: DatabaseAdapter,
771
+ databaseId: string,
772
+ collectionId: string,
773
+ key: string,
774
+ maxWaitMs: number = 60_000
775
+ ): Promise<boolean> {
776
+ const start = Date.now();
777
+ const checkInterval = 2000;
778
+
779
+ while (Date.now() - start < maxWaitMs) {
780
+ const res = await tryAwaitWithRetry(() =>
781
+ adapter.getTable({ databaseId, tableId: collectionId })
782
+ );
783
+ const attrs: any[] =
784
+ res?.data?.attributes || res?.data?.columns || [];
785
+ const attr = attrs.find((a: any) => a.key === key);
786
+ if (attr) {
787
+ if (attr.status === "available") return true;
788
+ if (attr.status === "failed" || attr.status === "stuck") return false;
789
+ }
790
+ await delay(checkInterval);
791
+ }
792
+ return false;
793
+ }
794
+
795
+ // ────────────────────────────────────────────────────────
796
+ // Helper: wait for attribute to be fully deleted
797
+ // ────────────────────────────────────────────────────────
798
+
799
+ async function waitForAttributeGone(
800
+ adapter: DatabaseAdapter,
801
+ databaseId: string,
802
+ collectionId: string,
803
+ key: string,
804
+ maxWaitMs: number = 60_000
805
+ ): Promise<boolean> {
806
+ const start = Date.now();
807
+ const checkInterval = 2000;
808
+
809
+ while (Date.now() - start < maxWaitMs) {
810
+ const res = await tryAwaitWithRetry(() =>
811
+ adapter.getTable({ databaseId, tableId: collectionId })
812
+ );
813
+ const attrs: any[] =
814
+ res?.data?.attributes || res?.data?.columns || [];
815
+ const attr = attrs.find((a: any) => a.key === key);
816
+ if (!attr) return true;
817
+ if (attr.status === "deleting") {
818
+ await delay(checkInterval);
819
+ continue;
820
+ }
821
+ // Still present and not deleting — wait
822
+ await delay(checkInterval);
823
+ }
824
+ return false;
825
+ }
826
+
827
+ // ────────────────────────────────────────────────────────
828
+ // Helper: index management
829
+ // ────────────────────────────────────────────────────────
830
+
831
/**
 * Stores the definitions of the given indexes into the checkpoint entry
 * (so they can be recreated later) and then deletes them from the server,
 * waiting until each deletion has taken effect.
 *
 * Index keys that no longer exist on the server are silently skipped;
 * definitions already stored in the checkpoint are not duplicated (this
 * keeps resumed runs idempotent).
 */
async function saveAndDeleteIndexes(
  adapter: DatabaseAdapter,
  databaseId: string,
  collectionId: string,
  indexKeys: string[],
  cpEntry: CheckpointEntry
): Promise<void> {
  // Fetch current indexes from server
  const res = await tryAwaitWithRetry(() =>
    adapter.listIndexes({ databaseId, tableId: collectionId })
  );
  const allIndexes: any[] = res?.data || [];

  for (const idxKey of indexKeys) {
    const idx = allIndexes.find((i: any) => i.key === idxKey);
    if (!idx) continue;

    // Store definition for recreation (skip if a resumed run stored it already)
    const alreadyStored = cpEntry.storedIndexes.some(
      (s) => s.key === idxKey
    );
    if (!alreadyStored) {
      cpEntry.storedIndexes.push({
        key: idx.key,
        type: idx.type || "key",
        attributes: idx.attributes || [],
        orders: idx.orders,
      });
    }

    // Delete
    await tryAwaitWithRetry(() =>
      adapter.deleteIndex({ databaseId, tableId: collectionId, key: idxKey })
    );
  }

  // Wait for indexes to be gone before the caller deletes the attribute.
  for (const idxKey of indexKeys) {
    await waitForIndexGone(adapter, databaseId, collectionId, idxKey);
  }
}
872
+
873
+ async function recreateIndexes(
874
+ adapter: DatabaseAdapter,
875
+ databaseId: string,
876
+ collectionId: string,
877
+ cpEntry: CheckpointEntry
878
+ ): Promise<void> {
879
+ for (const idx of cpEntry.storedIndexes) {
880
+ await tryAwaitWithRetry(() =>
881
+ adapter.createIndex({
882
+ databaseId,
883
+ tableId: collectionId,
884
+ key: idx.key,
885
+ type: idx.type as any,
886
+ attributes: idx.attributes,
887
+ orders: idx.orders,
888
+ })
889
+ );
890
+ // Wait for index to become available
891
+ await waitForIndexAvailable(adapter, databaseId, collectionId, idx.key);
892
+ }
893
+ }
894
+
895
+ async function waitForIndexGone(
896
+ adapter: DatabaseAdapter,
897
+ databaseId: string,
898
+ collectionId: string,
899
+ key: string,
900
+ maxWaitMs: number = 60_000
901
+ ): Promise<void> {
902
+ const start = Date.now();
903
+ while (Date.now() - start < maxWaitMs) {
904
+ const res = await tryAwaitWithRetry(() =>
905
+ adapter.listIndexes({ databaseId, tableId: collectionId })
906
+ );
907
+ const indexes: any[] = res?.data || [];
908
+ if (!indexes.find((i: any) => i.key === key)) return;
909
+ await delay(2000);
910
+ }
911
+ }
912
+
913
+ async function waitForIndexAvailable(
914
+ adapter: DatabaseAdapter,
915
+ databaseId: string,
916
+ collectionId: string,
917
+ key: string,
918
+ maxWaitMs: number = 60_000
919
+ ): Promise<void> {
920
+ const start = Date.now();
921
+ while (Date.now() - start < maxWaitMs) {
922
+ const res = await tryAwaitWithRetry(() =>
923
+ adapter.listIndexes({ databaseId, tableId: collectionId })
924
+ );
925
+ const indexes: any[] = res?.data || [];
926
+ const idx = indexes.find((i: any) => i.key === key);
927
+ if (idx?.status === "available") return;
928
+ if (idx?.status === "failed") {
929
+ throw new Error(`Index ${key} creation failed`);
930
+ }
931
+ await delay(2000);
932
+ }
933
+ }
934
+
935
+ // ────────────────────────────────────────────────────────
936
+ // Checkpoint management
937
+ // ────────────────────────────────────────────────────────
938
+
939
+ function loadOrCreateCheckpoint(
940
+ checkpointPath: string,
941
+ planFile: string
942
+ ): MigrationCheckpoint {
943
+ if (fs.existsSync(checkpointPath)) {
944
+ try {
945
+ const raw = JSON.parse(fs.readFileSync(checkpointPath, "utf8"));
946
+ const parsed = MigrationCheckpointSchema.parse(raw);
947
+ MessageFormatter.info(
948
+ `Resuming from checkpoint: ${checkpointPath}`,
949
+ { prefix: "Checkpoint" }
950
+ );
951
+ return parsed;
952
+ } catch {
953
+ MessageFormatter.info(
954
+ "Corrupt checkpoint file, creating new one.",
955
+ { prefix: "Checkpoint" }
956
+ );
957
+ }
958
+ }
959
+
960
+ const now = new Date().toISOString();
961
+ return {
962
+ planFile,
963
+ startedAt: now,
964
+ lastUpdatedAt: now,
965
+ entries: [],
966
+ };
967
+ }
968
+
969
+ function saveCheckpoint(
970
+ checkpoint: MigrationCheckpoint,
971
+ checkpointPath: string
972
+ ): void {
973
+ checkpoint.lastUpdatedAt = new Date().toISOString();
974
+ fs.writeFileSync(
975
+ checkpointPath,
976
+ JSON.stringify(checkpoint, null, 2),
977
+ "utf8"
978
+ );
979
+ }
980
+
981
+ function getOrCreateCheckpointEntry(
982
+ checkpoint: MigrationCheckpoint,
983
+ entry: MigrationPlanEntry
984
+ ): CheckpointEntry {
985
+ const existing = findCheckpointEntry(checkpoint, entry);
986
+ if (existing) return existing;
987
+
988
+ const cpEntry: CheckpointEntry = {
989
+ databaseId: entry.databaseId,
990
+ collectionId: entry.collectionId,
991
+ attributeKey: entry.attributeKey,
992
+ backupKey: generateBackupKey(entry.attributeKey),
993
+ phase: "pending",
994
+ targetType: entry.targetType,
995
+ targetSize: entry.targetSize,
996
+ storedIndexes: [],
997
+ };
998
+ checkpoint.entries.push(cpEntry);
999
+ return cpEntry;
1000
+ }
1001
+
1002
+ function findCheckpointEntry(
1003
+ checkpoint: MigrationCheckpoint,
1004
+ entry: MigrationPlanEntry
1005
+ ): CheckpointEntry | undefined {
1006
+ return checkpoint.entries.find(
1007
+ (e) =>
1008
+ e.databaseId === entry.databaseId &&
1009
+ e.collectionId === entry.collectionId &&
1010
+ e.attributeKey === entry.attributeKey
1011
+ );
1012
+ }
1013
+
1014
+ // ────────────────────────────────────────────────────────
1015
+ // Phase ordering for checkpoint resume
1016
+ // ────────────────────────────────────────────────────────
1017
+
1018
+ const PHASE_ORDER: CheckpointPhase[] = [
1019
+ "pending",
1020
+ "backup_created",
1021
+ "data_copied_to_backup",
1022
+ "data_verified_backup",
1023
+ "original_deleted",
1024
+ "new_attr_created",
1025
+ "data_copied_back",
1026
+ "data_verified_final",
1027
+ "backup_deleted",
1028
+ "completed",
1029
+ ];
1030
+
1031
+ function phaseIndex(phase: CheckpointPhase): number {
1032
+ const idx = PHASE_ORDER.indexOf(phase);
1033
+ return idx >= 0 ? idx : -1;
1034
+ }
1035
+
1036
+ // ────────────────────────────────────────────────────────
1037
+ // Dry run summary
1038
+ // ────────────────────────────────────────────────────────
1039
+
1040
/**
 * Prints what a non-dry-run execution would do: per-collection lists of
 * attribute type changes (with varchar sizes and affected indexes), plus
 * the entries that would be skipped. Output-only; does not mutate the plan.
 */
function printDryRunSummary(plan: MigrationPlan): void {
  console.log("");
  console.log(chalk.bold("Dry Run — What Would Happen:"));
  console.log(chalk.gray("─".repeat(50)));

  // Group "migrate" entries by database/collection for readable output.
  const groups = new Map<string, MigrationPlanEntry[]>();
  for (const entry of plan.entries) {
    if (entry.action !== "migrate") continue;
    const key = `${entry.databaseName}/${entry.collectionName}`;
    if (!groups.has(key)) groups.set(key, []);
    groups.get(key)!.push(entry);
  }

  for (const [groupName, entries] of groups) {
    console.log(`\n ${chalk.cyan(groupName)}`);
    for (const e of entries) {
      // Only varchar targets carry a size worth showing.
      const sizeInfo =
        e.targetType === "varchar" ? ` (size: ${e.targetSize})` : "";
      const indexInfo =
        e.indexesAffected.length > 0
          ? ` [indexes: ${e.indexesAffected.join(", ")}]`
          : "";
      console.log(
        ` ${e.attributeKey}: string(${e.currentSize}) → ${e.targetType}${sizeInfo}${indexInfo}`
      );
    }
  }

  const skipped = plan.entries.filter((e) => e.action === "skip");
  if (skipped.length > 0) {
    console.log(`\n ${chalk.yellow("Skipped:")}`);
    for (const e of skipped) {
      console.log(` ${e.attributeKey}: ${e.skipReason || "manual skip"}`);
    }
  }
  console.log("");
}
1077
+
1078
+ // ────────────────────────────────────────────────────────
1079
+ // Utility
1080
+ // ────────────────────────────────────────────────────────
1081
+
1082
+ function delay(ms: number): Promise<void> {
1083
+ return new Promise((resolve) => setTimeout(resolve, ms));
1084
+ }