@njdamstra/appwrite-utils-cli 1.10.0 → 1.11.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/commands/migrateCommands.d.ts +6 -0
- package/dist/cli/commands/migrateCommands.js +118 -0
- package/dist/collections/attributes.js +83 -0
- package/dist/collections/indexes.js +1 -1
- package/dist/collections/tableOperations.js +35 -0
- package/dist/interactiveCLI.js +7 -1
- package/dist/main.js +56 -0
- package/dist/migrations/appwriteToX.d.ts +96 -0
- package/dist/migrations/dataLoader.d.ts +194 -2
- package/dist/migrations/migrateStrings.d.ts +9 -0
- package/dist/migrations/migrateStrings.js +724 -0
- package/dist/migrations/migrateStringsTypes.d.ts +195 -0
- package/dist/migrations/migrateStringsTypes.js +117 -0
- package/dist/storage/schemas.d.ts +384 -0
- package/package.json +4 -4
- package/src/cli/commands/migrateCommands.ts +157 -0
- package/src/collections/attributes.ts +152 -0
- package/src/collections/indexes.ts +3 -3
- package/src/collections/tableOperations.ts +35 -0
- package/src/functions/methods.ts +2 -2
- package/src/interactiveCLI.ts +9 -3
- package/src/main.ts +69 -0
- package/src/migrations/migrateStrings.ts +1064 -0
- package/src/migrations/migrateStringsTypes.ts +158 -0
|
@@ -0,0 +1,1064 @@
|
|
|
1
|
+
import { Query } from "node-appwrite";
|
|
2
|
+
import yaml from "js-yaml";
|
|
3
|
+
import fs from "node:fs";
|
|
4
|
+
import path from "node:path";
|
|
5
|
+
import inquirer from "inquirer";
|
|
6
|
+
import chalk from "chalk";
|
|
7
|
+
import {
|
|
8
|
+
type DatabaseAdapter,
|
|
9
|
+
MessageFormatter,
|
|
10
|
+
tryAwaitWithRetry,
|
|
11
|
+
} from "@njdamstra/appwrite-utils-helpers";
|
|
12
|
+
import type {
|
|
13
|
+
AppwriteConfig,
|
|
14
|
+
Attribute,
|
|
15
|
+
Index,
|
|
16
|
+
} from "@njdamstra/appwrite-utils";
|
|
17
|
+
import { ProgressManager } from "../shared/progressManager.js";
|
|
18
|
+
import {
|
|
19
|
+
type MigrationPlan,
|
|
20
|
+
type MigrationPlanEntry,
|
|
21
|
+
type MigrationCheckpoint,
|
|
22
|
+
type CheckpointEntry,
|
|
23
|
+
type CheckpointPhase,
|
|
24
|
+
type AnalyzeOptions,
|
|
25
|
+
type ExecuteOptions,
|
|
26
|
+
MigrationPlanSchema,
|
|
27
|
+
MigrationCheckpointSchema,
|
|
28
|
+
suggestTargetType,
|
|
29
|
+
generateBackupKey,
|
|
30
|
+
} from "./migrateStringsTypes.js";
|
|
31
|
+
|
|
32
|
+
// ────────────────────────────────────────────────────────
|
|
33
|
+
// Phase 1: Analyze — offline, reads local YAML configs
|
|
34
|
+
// ────────────────────────────────────────────────────────
|
|
35
|
+
|
|
36
|
+
/**
 * Phase 1 (offline): scan the local Appwrite config for string attributes and
 * produce an editable YAML migration plan.
 *
 * For each database, every collection/table assigned to it (or unassigned —
 * treated as "applies to all") is scanned once (deduped via `collectionsSeen`).
 * Each string attribute becomes a MigrationPlanEntry whose target type comes
 * from suggestTargetType(size, hasIndex). Encrypted attributes are marked
 * action "skip" (reason "encrypted"); indexed attributes are forced back to
 * "varchar" so affected indexes can be recreated after migration.
 *
 * Side effects: writes the plan as YAML to options.outputPath (default
 * ./migrate-strings-plan.yaml) and prints a summary to the console.
 *
 * @param config  Parsed Appwrite config (databases + collections/tables).
 * @param options Optional overrides; currently only outputPath is read here.
 * @returns The generated MigrationPlan (the same object written to disk).
 */
export function analyzeStringAttributes(
  config: AppwriteConfig,
  options: AnalyzeOptions = {}
): MigrationPlan {
  // Collections and tables are treated uniformly for analysis purposes.
  const collections = [
    ...(config.collections || []),
    ...(config.tables || []),
  ];
  const databases = config.databases || [];

  const entries: MigrationPlanEntry[] = [];
  // Dedup key is "databaseId:collectionId" — guards against a collection
  // appearing in both `collections` and `tables` or via multiple databaseIds.
  const collectionsSeen = new Set<string>();

  for (const db of databases) {
    // Find collections assigned to this database (or unassigned = applied to all)
    const dbCollections = collections.filter((c) => {
      const coll = c as any;
      if (coll.databaseId) return coll.databaseId === db.$id;
      if (coll.databaseIds?.length) return coll.databaseIds.includes(db.$id);
      return true; // unassigned → applied to all databases
    });

    for (const coll of dbCollections) {
      const collId = (coll as any).$id || coll.name;
      const dedupKey = `${db.$id}:${collId}`;
      if (collectionsSeen.has(dedupKey)) continue;
      collectionsSeen.add(dedupKey);

      const attributes: Attribute[] = (coll as any).attributes || [];
      const indexes: Index[] = (coll as any).indexes || [];

      for (const attr of attributes) {
        // Only plain string attributes are migration candidates.
        if (attr.type !== "string") continue;

        const stringAttr = attr as any;
        // Fallback size of 50 when the config omits one — TODO confirm this
        // matches the server-side default for string attributes.
        const size: number = stringAttr.size || 50;
        const isEncrypted = !!stringAttr.encrypt;
        const isRequired = !!attr.required;
        const isArray = !!attr.array;
        const hasDefault =
          stringAttr.xdefault !== undefined && stringAttr.xdefault !== null;

        // Find indexes that reference this attribute
        const affectedIndexes = indexes
          .filter((idx) => idx.attributes.includes(attr.key))
          .map((idx) => idx.key);
        const hasIndex = affectedIndexes.length > 0;

        const suggested = suggestTargetType(size, hasIndex);

        // Build entry
        const entry: MigrationPlanEntry = {
          databaseId: db.$id,
          databaseName: db.name,
          collectionId: collId,
          collectionName: coll.name,
          attributeKey: attr.key,
          currentType: "string",
          currentSize: size,
          isRequired,
          isArray,
          isEncrypted,
          hasDefault,
          defaultValue: hasDefault ? stringAttr.xdefault : undefined,
          suggestedType: suggested,
          targetType: suggested,
          // Only varchar keeps an explicit size; other target types don't.
          targetSize: suggested === "varchar" ? size : undefined,
          action: isEncrypted ? "skip" : "migrate",
          skipReason: isEncrypted ? "encrypted" : undefined,
          indexesAffected: affectedIndexes,
        };

        // Warn if indexed attr gets non-varchar suggestion: force varchar so
        // the indexes recorded in indexesAffected can be recreated.
        if (
          hasIndex &&
          suggested !== "varchar" &&
          !isEncrypted
        ) {
          entry.targetType = "varchar";
          entry.targetSize = size;
        }

        entries.push(entry);
      }
    }
  }

  const toMigrate = entries.filter((e) => e.action === "migrate").length;
  const toSkip = entries.filter((e) => e.action === "skip").length;
  const uniqueDbs = new Set(entries.map((e) => e.databaseId));
  const uniqueColls = new Set(
    entries.map((e) => `${e.databaseId}:${e.collectionId}`)
  );

  const plan: MigrationPlan = {
    version: 1,
    generatedAt: new Date().toISOString(),
    appwriteEndpoint: config.appwriteEndpoint,
    appwriteProject: config.appwriteProject,
    summary: {
      totalStringAttributes: entries.length,
      toMigrate,
      toSkip,
      databaseCount: uniqueDbs.size,
      collectionCount: uniqueColls.size,
    },
    entries,
  };

  // Write YAML plan (sortKeys: false keeps the human-friendly field order
  // above; noRefs avoids YAML anchors when objects repeat).
  const outputPath =
    options.outputPath || path.join(process.cwd(), "migrate-strings-plan.yaml");
  const yamlContent = yaml.dump(plan, {
    lineWidth: 120,
    noRefs: true,
    sortKeys: false,
  });
  fs.writeFileSync(outputPath, yamlContent, "utf8");

  // Print summary
  MessageFormatter.info(`Migration plan written to ${outputPath}`, {
    prefix: "Analyze",
  });
  printPlanSummary(plan);

  return plan;
}
|
|
163
|
+
|
|
164
|
+
function printPlanSummary(plan: MigrationPlan): void {
|
|
165
|
+
const { summary } = plan;
|
|
166
|
+
console.log("");
|
|
167
|
+
console.log(chalk.bold("String Attribute Migration Plan Summary"));
|
|
168
|
+
console.log(chalk.gray("─".repeat(50)));
|
|
169
|
+
console.log(
|
|
170
|
+
` Total string attributes: ${chalk.yellow(summary.totalStringAttributes)}`
|
|
171
|
+
);
|
|
172
|
+
console.log(` To migrate: ${chalk.green(summary.toMigrate)}`);
|
|
173
|
+
console.log(` To skip: ${chalk.red(summary.toSkip)}`);
|
|
174
|
+
console.log(` Databases: ${summary.databaseCount}`);
|
|
175
|
+
console.log(` Collections: ${summary.collectionCount}`);
|
|
176
|
+
console.log("");
|
|
177
|
+
|
|
178
|
+
// Type distribution
|
|
179
|
+
const typeDistribution: Record<string, number> = {};
|
|
180
|
+
for (const entry of plan.entries) {
|
|
181
|
+
if (entry.action === "migrate") {
|
|
182
|
+
typeDistribution[entry.targetType] =
|
|
183
|
+
(typeDistribution[entry.targetType] || 0) + 1;
|
|
184
|
+
}
|
|
185
|
+
}
|
|
186
|
+
if (Object.keys(typeDistribution).length > 0) {
|
|
187
|
+
console.log(chalk.bold(" Target type distribution:"));
|
|
188
|
+
for (const [type, count] of Object.entries(typeDistribution)) {
|
|
189
|
+
console.log(` ${type}: ${count}`);
|
|
190
|
+
}
|
|
191
|
+
console.log("");
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
// Skipped reasons
|
|
195
|
+
const skipReasons: Record<string, number> = {};
|
|
196
|
+
for (const entry of plan.entries) {
|
|
197
|
+
if (entry.action === "skip" && entry.skipReason) {
|
|
198
|
+
skipReasons[entry.skipReason] =
|
|
199
|
+
(skipReasons[entry.skipReason] || 0) + 1;
|
|
200
|
+
}
|
|
201
|
+
}
|
|
202
|
+
if (Object.keys(skipReasons).length > 0) {
|
|
203
|
+
console.log(chalk.bold(" Skip reasons:"));
|
|
204
|
+
for (const [reason, count] of Object.entries(skipReasons)) {
|
|
205
|
+
console.log(` ${reason}: ${count}`);
|
|
206
|
+
}
|
|
207
|
+
console.log("");
|
|
208
|
+
}
|
|
209
|
+
|
|
210
|
+
console.log(
|
|
211
|
+
chalk.dim(
|
|
212
|
+
" Edit the YAML plan file to change targetType or action before executing."
|
|
213
|
+
)
|
|
214
|
+
);
|
|
215
|
+
console.log("");
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
// ────────────────────────────────────────────────────────
|
|
219
|
+
// Phase 2: Execute — server connection required
|
|
220
|
+
// ────────────────────────────────────────────────────────
|
|
221
|
+
|
|
222
|
+
/**
 * Phase 2 (online): execute a previously generated — and possibly hand-edited —
 * migration plan against the server.
 *
 * Flow: load and schema-validate the YAML plan, load/create a JSON checkpoint
 * next to the plan file, group "migrate" entries by database:collection, then
 * for each group prompt the user (proceed / skip / abort) and migrate each
 * attribute via migrateOneAttribute. Entries already marked "completed" in the
 * checkpoint are counted as succeeded without re-running, which makes a rerun
 * after a partial failure resumable. A failed attribute records its phase and
 * error in the checkpoint but does not stop the rest of the group.
 *
 * @param adapter Connected database adapter used for all server calls.
 * @param options planPath (required); optional checkpointPath, batchSize
 *                (default 100), batchDelayMs (default 50), dryRun, keepBackups
 *                (default true — backup attributes are retained).
 * @returns Counts of succeeded / failed / skipped attributes.
 * @throws ZodError when the plan file fails MigrationPlanSchema validation;
 *         filesystem errors from reading the plan propagate as well.
 */
export async function executeMigrationPlan(
  adapter: DatabaseAdapter,
  options: ExecuteOptions
): Promise<{ succeeded: number; failed: number; skipped: number }> {
  // Load and validate plan
  const planYaml = fs.readFileSync(options.planPath, "utf8");
  const planRaw = yaml.load(planYaml);
  const plan = MigrationPlanSchema.parse(planRaw);

  const migrateEntries = plan.entries.filter((e) => e.action === "migrate");
  if (migrateEntries.length === 0) {
    MessageFormatter.info("No attributes to migrate in plan.", {
      prefix: "Execute",
    });
    return { succeeded: 0, failed: 0, skipped: plan.entries.length };
  }

  // Load or create checkpoint (defaults to <plan>.checkpoint.json)
  const checkpointPath =
    options.checkpointPath ||
    options.planPath.replace(/\.ya?ml$/, ".checkpoint.json");
  const checkpoint = loadOrCreateCheckpoint(checkpointPath, options.planPath);

  const batchSize = options.batchSize || 100;
  const batchDelayMs = options.batchDelayMs || 50;

  // Group by database/collection so confirmation happens per collection
  const groups = new Map<string, MigrationPlanEntry[]>();
  for (const entry of migrateEntries) {
    const key = `${entry.databaseId}:${entry.collectionId}`;
    if (!groups.has(key)) groups.set(key, []);
    groups.get(key)!.push(entry);
  }

  let succeeded = 0;
  let failed = 0;
  // Entries the plan itself marked as "skip" count as skipped from the start.
  let skipped = plan.entries.filter((e) => e.action === "skip").length;

  MessageFormatter.info(
    `Executing migration: ${migrateEntries.length} attributes across ${groups.size} collections`,
    { prefix: "Execute" }
  );

  if (options.dryRun) {
    MessageFormatter.info("DRY RUN — no changes will be made", {
      prefix: "Execute",
    });
    printDryRunSummary(plan);
    return { succeeded: 0, failed: 0, skipped: plan.entries.length };
  }

  for (const [groupKey, entries] of groups) {
    // All entries in a group share database/collection metadata.
    const first = entries[0];
    console.log("");
    console.log(
      chalk.bold(
        `Collection: ${first.collectionName} (${first.databaseName}/${first.collectionId})`
      )
    );
    console.log(
      `  Attributes to migrate: ${entries.map((e) => e.attributeKey).join(", ")}`
    );

    // Per-collection confirmation
    const { proceed } = await inquirer.prompt([
      {
        type: "list",
        name: "proceed",
        message: `Migrate ${entries.length} attribute(s) in ${first.collectionName}?`,
        choices: [
          { name: "Yes, proceed", value: "yes" },
          { name: "Skip this collection", value: "skip" },
          { name: "Abort entire migration", value: "abort" },
        ],
      },
    ]);

    if (proceed === "abort") {
      MessageFormatter.info("Migration aborted by user.", {
        prefix: "Execute",
      });
      break;
    }
    if (proceed === "skip") {
      skipped += entries.length;
      continue;
    }

    // Migrate each attribute in this collection
    for (const entry of entries) {
      const cpEntry = getOrCreateCheckpointEntry(checkpoint, entry);

      // Resume support: previously completed attributes are not re-run.
      if (cpEntry.phase === "completed") {
        MessageFormatter.info(
          `  ${entry.attributeKey}: already completed (checkpoint)`,
          { prefix: "Execute" }
        );
        succeeded++;
        continue;
      }

      try {
        await migrateOneAttribute(
          adapter,
          entry,
          cpEntry,
          checkpoint,
          checkpointPath,
          {
            batchSize,
            batchDelayMs,
            keepBackups: options.keepBackups ?? true,
          }
        );
        succeeded++;
        MessageFormatter.success(
          `  ${entry.attributeKey}: migrated to ${entry.targetType}`,
          { prefix: "Execute" }
        );
      } catch (err: any) {
        // Record the failure in the checkpoint so a rerun can resume from
        // the attribute's last completed phase.
        failed++;
        cpEntry.phase = "failed";
        cpEntry.error = err.message || String(err);
        saveCheckpoint(checkpoint, checkpointPath);
        MessageFormatter.error(
          `  ${entry.attributeKey}: FAILED — ${cpEntry.error}`,
          undefined,
          { prefix: "Execute" }
        );
      }
    }

    // After collection completes, offer to update local YAML
    const successInGroup = entries.filter((e) => {
      const cp = findCheckpointEntry(checkpoint, e);
      return cp?.phase === "completed";
    }).length;

    if (successInGroup > 0) {
      const { updateYaml } = await inquirer.prompt([
        {
          type: "confirm",
          name: "updateYaml",
          message: `Update local YAML config for ${first.collectionName}? (change type: string → new types)`,
          default: false,
        },
      ]);
      if (updateYaml) {
        // NOTE: no automatic rewrite is performed — the user is pointed at a
        // manual edit instead.
        MessageFormatter.info(
          "Local YAML update: use your editor to change 'type: string' to the new types in your collection YAML files.",
          { prefix: "Execute" }
        );
      }
    }
  }

  // Final summary
  console.log("");
  console.log(chalk.bold("Migration Results"));
  console.log(chalk.gray("─".repeat(40)));
  console.log(`  Succeeded: ${chalk.green(succeeded)}`);
  console.log(`  Failed: ${chalk.red(failed)}`);
  console.log(`  Skipped: ${chalk.yellow(skipped)}`);
  console.log("");

  if (failed > 0) {
    MessageFormatter.info(
      `Checkpoint saved at ${checkpointPath} — rerun to resume failed attributes.`,
      { prefix: "Execute" }
    );
  }

  return { succeeded, failed, skipped };
}
|
|
396
|
+
|
|
397
|
+
// ────────────────────────────────────────────────────────
|
|
398
|
+
// Single attribute migration (9 phases)
|
|
399
|
+
// ────────────────────────────────────────────────────────
|
|
400
|
+
|
|
401
|
+
/** Tuning knobs for migrating a single attribute. */
interface MigrateOneOptions {
  batchSize: number; // rows fetched/written per page during data copies
  batchDelayMs: number; // pause between pages (0 disables the delay)
  keepBackups: boolean; // when true, the temporary backup attribute is retained
}
|
|
406
|
+
|
|
407
|
+
/**
 * Migrate one string attribute to its target type via a checkpointed,
 * resumable sequence:
 *
 *   1. create a temporary backup attribute (original string type, optional)
 *   2. copy data original → backup
 *   3. spot-verify the backup copy
 *   4. snapshot + delete affected indexes, then delete the original attribute
 *   5. recreate the attribute with the target type (optional at first, so
 *      rows without data don't violate a required constraint)
 *   6. copy data backup → new attribute
 *   7. spot-verify the final copy
 *   8. recreate indexes; delete the backup unless opts.keepBackups
 *   9. restore required=true if the original was required, then mark completed
 *
 * Each step is guarded by phaseIndex(cpEntry.phase), so a rerun after a crash
 * or failure resumes at the first unfinished step. advance() persists the
 * checkpoint to disk after every completed phase.
 *
 * @throws Error when a created attribute never reaches "available" status or
 *         when a verification finds mismatched source/target values.
 */
async function migrateOneAttribute(
  adapter: DatabaseAdapter,
  entry: MigrationPlanEntry,
  cpEntry: CheckpointEntry,
  checkpoint: MigrationCheckpoint,
  checkpointPath: string,
  opts: MigrateOneOptions
): Promise<void> {
  const { databaseId, collectionId, attributeKey, targetType, targetSize } =
    entry;
  const backupKey = cpEntry.backupKey;

  // Record phase completion and flush the checkpoint immediately.
  const advance = (phase: CheckpointPhase) => {
    cpEntry.phase = phase;
    checkpoint.lastUpdatedAt = new Date().toISOString();
    saveCheckpoint(checkpoint, checkpointPath);
  };

  // Step 1: Create backup attribute
  if (phaseIndex(cpEntry.phase) < phaseIndex("backup_created")) {
    MessageFormatter.info(`  Creating backup attribute ${backupKey}...`, {
      prefix: "Migrate",
    });
    await tryAwaitWithRetry(() =>
      adapter.createAttribute({
        databaseId,
        tableId: collectionId,
        key: backupKey,
        type: "string", // backup keeps original type
        size: entry.currentSize,
        required: false, // always optional for backup
        array: entry.isArray,
      })
    );
    // Attribute creation is asynchronous server-side; wait until usable.
    const available = await waitForAttribute(
      adapter,
      databaseId,
      collectionId,
      backupKey
    );
    if (!available) throw new Error(`Backup attribute ${backupKey} stuck`);
    advance("backup_created");
  }

  // Step 2: Copy data to backup
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_copied_to_backup")) {
    MessageFormatter.info(`  Copying data to backup ${backupKey}...`, {
      prefix: "Migrate",
    });
    await copyAttributeData(
      adapter,
      databaseId,
      collectionId,
      attributeKey,
      backupKey,
      opts.batchSize,
      opts.batchDelayMs
    );
    advance("data_copied_to_backup");
  }

  // Step 3: Verify backup
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_verified_backup")) {
    await verifyDataCopy(
      adapter,
      databaseId,
      collectionId,
      attributeKey,
      backupKey
    );
    advance("data_verified_backup");
  }

  // Step 4: Delete indexes + original attribute
  if (phaseIndex(cpEntry.phase) < phaseIndex("original_deleted")) {
    // Save and delete affected indexes (definitions are stored in cpEntry
    // so step 8 can recreate them after the type change).
    if (entry.indexesAffected.length > 0) {
      MessageFormatter.info(`  Removing ${entry.indexesAffected.length} affected index(es)...`, {
        prefix: "Migrate",
      });
      await saveAndDeleteIndexes(
        adapter,
        databaseId,
        collectionId,
        entry.indexesAffected,
        cpEntry
      );
      // Persist the stored index definitions before deleting the attribute.
      saveCheckpoint(checkpoint, checkpointPath);
    }

    MessageFormatter.info(`  Deleting original attribute ${attributeKey}...`, {
      prefix: "Migrate",
    });
    await tryAwaitWithRetry(() =>
      adapter.deleteAttribute({
        databaseId,
        tableId: collectionId,
        key: attributeKey,
      })
    );
    await waitForAttributeGone(adapter, databaseId, collectionId, attributeKey);
    advance("original_deleted");
  }

  // Step 5: Create new attribute with target type
  if (phaseIndex(cpEntry.phase) < phaseIndex("new_attr_created")) {
    MessageFormatter.info(
      `  Creating new attribute ${attributeKey} as ${targetType}...`,
      { prefix: "Migrate" }
    );
    const createParams: Record<string, any> = {
      databaseId,
      tableId: collectionId,
      key: attributeKey,
      type: targetType,
      required: false, // create as optional first — data needs to be copied back
      array: entry.isArray,
    };
    // Only varchar carries an explicit size on the new attribute.
    if (targetType === "varchar" && targetSize) {
      createParams.size = targetSize;
    }
    if (entry.hasDefault && entry.defaultValue !== undefined) {
      createParams.default = entry.defaultValue;
    }

    await tryAwaitWithRetry(() => adapter.createAttribute(createParams as any));
    const available = await waitForAttribute(
      adapter,
      databaseId,
      collectionId,
      attributeKey
    );
    if (!available)
      throw new Error(`New attribute ${attributeKey} stuck after creation`);
    advance("new_attr_created");
  }

  // Step 6: Copy data back from backup
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_copied_back")) {
    MessageFormatter.info(`  Copying data back from backup...`, {
      prefix: "Migrate",
    });
    await copyAttributeData(
      adapter,
      databaseId,
      collectionId,
      backupKey,
      attributeKey,
      opts.batchSize,
      opts.batchDelayMs
    );
    advance("data_copied_back");
  }

  // Step 7: Verify final data
  if (phaseIndex(cpEntry.phase) < phaseIndex("data_verified_final")) {
    await verifyDataCopy(
      adapter,
      databaseId,
      collectionId,
      backupKey,
      attributeKey
    );
    advance("data_verified_final");
  }

  // Step 8: Recreate indexes + delete backup
  if (phaseIndex(cpEntry.phase) < phaseIndex("backup_deleted")) {
    // Recreate indexes from the definitions stored in step 4.
    if (cpEntry.storedIndexes.length > 0) {
      MessageFormatter.info(
        `  Recreating ${cpEntry.storedIndexes.length} index(es)...`,
        { prefix: "Migrate" }
      );
      await recreateIndexes(adapter, databaseId, collectionId, cpEntry);
    }

    // Delete backup (unless keepBackups)
    if (!opts.keepBackups) {
      MessageFormatter.info(`  Deleting backup attribute ${backupKey}...`, {
        prefix: "Migrate",
      });
      await tryAwaitWithRetry(() =>
        adapter.deleteAttribute({
          databaseId,
          tableId: collectionId,
          key: backupKey,
        })
      );
      await waitForAttributeGone(
        adapter,
        databaseId,
        collectionId,
        backupKey
      );
    }
    advance("backup_deleted");
  }

  // Step 9: Mark completed
  // If the original attribute was required, update it now (after data is in place)
  if (entry.isRequired) {
    try {
      await tryAwaitWithRetry(() =>
        adapter.updateAttribute({
          databaseId,
          tableId: collectionId,
          key: attributeKey,
          required: true,
        } as any)
      );
    } catch {
      // Non-fatal — attribute is migrated, just not set back to required
      MessageFormatter.info(
        `  Warning: could not set ${attributeKey} back to required`,
        { prefix: "Migrate" }
      );
    }
  }
  advance("completed");
}
|
|
628
|
+
|
|
629
|
+
// ────────────────────────────────────────────────────────
|
|
630
|
+
// Helper: copy attribute data via cursor pagination
|
|
631
|
+
// ────────────────────────────────────────────────────────
|
|
632
|
+
|
|
633
|
+
/**
 * Copy the value of sourceKey into targetKey for every row of a collection,
 * paging with cursor-after pagination.
 *
 * Uses bulkUpsertRows when the adapter supports bulk operations, otherwise
 * falls back to one updateRow call per document. Rows where sourceKey is
 * undefined are left untouched. A progress bar is shown when an initial
 * total count is available.
 *
 * NOTE(review): totalCopied counts every fetched row, including rows skipped
 * because sourceKey was undefined, so the progress bar may overstate writes.
 *
 * @param batchSize    Page size for listRows (also the upsert batch size).
 * @param batchDelayMs Pause between pages; 0 disables the delay.
 */
async function copyAttributeData(
  adapter: DatabaseAdapter,
  databaseId: string,
  collectionId: string,
  sourceKey: string,
  targetKey: string,
  batchSize: number,
  batchDelayMs: number
): Promise<void> {
  let lastId: string | undefined;
  let totalCopied = 0;
  let totalDocs: number | undefined;

  // Get initial count (limit(1) fetch just for the `total` field)
  const countRes = await tryAwaitWithRetry(() =>
    adapter.listRows({
      databaseId,
      tableId: collectionId,
      queries: [Query.limit(1)],
    })
  );
  totalDocs = countRes?.total ?? undefined;
  const progress = totalDocs
    ? ProgressManager.create(`copy-${sourceKey}-${targetKey}`, totalDocs, {
        title: `  Copy ${sourceKey} → ${targetKey}`,
      })
    : undefined;

  while (true) {
    const queries: string[] = [Query.limit(batchSize)];
    if (lastId) queries.push(Query.cursorAfter(lastId));

    const res = await tryAwaitWithRetry(() =>
      adapter.listRows({ databaseId, tableId: collectionId, queries })
    );

    // Adapter responses may expose either `documents` or `rows`.
    const docs = res?.documents || res?.rows || [];
    if (docs.length === 0) break;

    // Batch update: copy sourceKey → targetKey
    if (adapter.supportsBulkOperations() && adapter.bulkUpsertRows) {
      const rows = docs
        .filter((d: any) => d[sourceKey] !== undefined)
        .map((d: any) => ({
          id: d.$id,
          data: { [targetKey]: d[sourceKey] },
        }));

      if (rows.length > 0) {
        await tryAwaitWithRetry(() =>
          adapter.bulkUpsertRows!({
            databaseId,
            tableId: collectionId,
            rows,
          })
        );
      }
    } else {
      // Sequential fallback for adapters without bulk support.
      for (const doc of docs) {
        if (doc[sourceKey] === undefined) continue;
        await tryAwaitWithRetry(() =>
          adapter.updateRow({
            databaseId,
            tableId: collectionId,
            id: doc.$id,
            data: { [targetKey]: doc[sourceKey] },
          })
        );
      }
    }

    totalCopied += docs.length;
    // Advance the cursor to the last row of this page.
    lastId = docs[docs.length - 1].$id;
    progress?.update(totalCopied);

    if (docs.length < batchSize) break; // last page
    if (batchDelayMs > 0) await delay(batchDelayMs);
  }

  progress?.stop();
}
|
|
714
|
+
|
|
715
|
+
// ────────────────────────────────────────────────────────
|
|
716
|
+
// Helper: verify data copy (count + spot check)
|
|
717
|
+
// ────────────────────────────────────────────────────────
|
|
718
|
+
|
|
719
|
+
async function verifyDataCopy(
|
|
720
|
+
adapter: DatabaseAdapter,
|
|
721
|
+
databaseId: string,
|
|
722
|
+
collectionId: string,
|
|
723
|
+
sourceKey: string,
|
|
724
|
+
targetKey: string
|
|
725
|
+
): Promise<void> {
|
|
726
|
+
// Spot-check first 5 documents
|
|
727
|
+
const res = await tryAwaitWithRetry(() =>
|
|
728
|
+
adapter.listRows({
|
|
729
|
+
databaseId,
|
|
730
|
+
tableId: collectionId,
|
|
731
|
+
queries: [Query.limit(5)],
|
|
732
|
+
})
|
|
733
|
+
);
|
|
734
|
+
const docs = res?.documents || res?.rows || [];
|
|
735
|
+
for (const doc of docs) {
|
|
736
|
+
if (doc[sourceKey] === undefined) continue;
|
|
737
|
+
if (doc[sourceKey] !== doc[targetKey]) {
|
|
738
|
+
throw new Error(
|
|
739
|
+
`Verification failed: doc ${doc.$id} has ${sourceKey}=${JSON.stringify(doc[sourceKey])} but ${targetKey}=${JSON.stringify(doc[targetKey])}`
|
|
740
|
+
);
|
|
741
|
+
}
|
|
742
|
+
}
|
|
743
|
+
}
|
|
744
|
+
|
|
745
|
+
// ────────────────────────────────────────────────────────
|
|
746
|
+
// Helper: wait for attribute to become available
|
|
747
|
+
// ────────────────────────────────────────────────────────
|
|
748
|
+
|
|
749
|
+
async function waitForAttribute(
|
|
750
|
+
adapter: DatabaseAdapter,
|
|
751
|
+
databaseId: string,
|
|
752
|
+
collectionId: string,
|
|
753
|
+
key: string,
|
|
754
|
+
maxWaitMs: number = 60_000
|
|
755
|
+
): Promise<boolean> {
|
|
756
|
+
const start = Date.now();
|
|
757
|
+
const checkInterval = 2000;
|
|
758
|
+
|
|
759
|
+
while (Date.now() - start < maxWaitMs) {
|
|
760
|
+
const res = await tryAwaitWithRetry(() =>
|
|
761
|
+
adapter.getTable({ databaseId, tableId: collectionId })
|
|
762
|
+
);
|
|
763
|
+
const attrs: any[] =
|
|
764
|
+
res?.data?.attributes || res?.data?.columns || [];
|
|
765
|
+
const attr = attrs.find((a: any) => a.key === key);
|
|
766
|
+
if (attr) {
|
|
767
|
+
if (attr.status === "available") return true;
|
|
768
|
+
if (attr.status === "failed" || attr.status === "stuck") return false;
|
|
769
|
+
}
|
|
770
|
+
await delay(checkInterval);
|
|
771
|
+
}
|
|
772
|
+
return false;
|
|
773
|
+
}
|
|
774
|
+
|
|
775
|
+
// ────────────────────────────────────────────────────────
|
|
776
|
+
// Helper: wait for attribute to be fully deleted
|
|
777
|
+
// ────────────────────────────────────────────────────────
|
|
778
|
+
|
|
779
|
+
async function waitForAttributeGone(
|
|
780
|
+
adapter: DatabaseAdapter,
|
|
781
|
+
databaseId: string,
|
|
782
|
+
collectionId: string,
|
|
783
|
+
key: string,
|
|
784
|
+
maxWaitMs: number = 60_000
|
|
785
|
+
): Promise<boolean> {
|
|
786
|
+
const start = Date.now();
|
|
787
|
+
const checkInterval = 2000;
|
|
788
|
+
|
|
789
|
+
while (Date.now() - start < maxWaitMs) {
|
|
790
|
+
const res = await tryAwaitWithRetry(() =>
|
|
791
|
+
adapter.getTable({ databaseId, tableId: collectionId })
|
|
792
|
+
);
|
|
793
|
+
const attrs: any[] =
|
|
794
|
+
res?.data?.attributes || res?.data?.columns || [];
|
|
795
|
+
const attr = attrs.find((a: any) => a.key === key);
|
|
796
|
+
if (!attr) return true;
|
|
797
|
+
if (attr.status === "deleting") {
|
|
798
|
+
await delay(checkInterval);
|
|
799
|
+
continue;
|
|
800
|
+
}
|
|
801
|
+
// Still present and not deleting — wait
|
|
802
|
+
await delay(checkInterval);
|
|
803
|
+
}
|
|
804
|
+
return false;
|
|
805
|
+
}
|
|
806
|
+
|
|
807
|
+
// ────────────────────────────────────────────────────────
|
|
808
|
+
// Helper: index management
|
|
809
|
+
// ────────────────────────────────────────────────────────
|
|
810
|
+
|
|
811
|
+
/**
 * Snapshot and delete the given indexes so the underlying attribute can be
 * dropped.
 *
 * Each index definition is appended to cpEntry.storedIndexes (deduped by key)
 * so recreateIndexes can rebuild it later; the caller is responsible for
 * persisting the checkpoint afterwards. Keys in indexKeys that are not found
 * on the server are silently skipped. After issuing all deletes, waits for
 * every key to disappear before returning.
 */
async function saveAndDeleteIndexes(
  adapter: DatabaseAdapter,
  databaseId: string,
  collectionId: string,
  indexKeys: string[],
  cpEntry: CheckpointEntry
): Promise<void> {
  // Fetch current indexes from server
  const res = await tryAwaitWithRetry(() =>
    adapter.listIndexes({ databaseId, tableId: collectionId })
  );
  const allIndexes: any[] = res?.data || [];

  for (const idxKey of indexKeys) {
    const idx = allIndexes.find((i: any) => i.key === idxKey);
    if (!idx) continue;

    // Store definition for recreation (dedupe guards against reruns that
    // reach this step again after a partial failure)
    const alreadyStored = cpEntry.storedIndexes.some(
      (s) => s.key === idxKey
    );
    if (!alreadyStored) {
      cpEntry.storedIndexes.push({
        key: idx.key,
        type: idx.type || "key",
        attributes: idx.attributes || [],
        orders: idx.orders,
      });
    }

    // Delete
    await tryAwaitWithRetry(() =>
      adapter.deleteIndex({ databaseId, tableId: collectionId, key: idxKey })
    );
  }

  // Wait for indexes to be gone (deletion is asynchronous server-side)
  for (const idxKey of indexKeys) {
    await waitForIndexGone(adapter, databaseId, collectionId, idxKey);
  }
}
|
|
852
|
+
|
|
853
|
+
async function recreateIndexes(
|
|
854
|
+
adapter: DatabaseAdapter,
|
|
855
|
+
databaseId: string,
|
|
856
|
+
collectionId: string,
|
|
857
|
+
cpEntry: CheckpointEntry
|
|
858
|
+
): Promise<void> {
|
|
859
|
+
for (const idx of cpEntry.storedIndexes) {
|
|
860
|
+
await tryAwaitWithRetry(() =>
|
|
861
|
+
adapter.createIndex({
|
|
862
|
+
databaseId,
|
|
863
|
+
tableId: collectionId,
|
|
864
|
+
key: idx.key,
|
|
865
|
+
type: idx.type as any,
|
|
866
|
+
attributes: idx.attributes,
|
|
867
|
+
orders: idx.orders,
|
|
868
|
+
})
|
|
869
|
+
);
|
|
870
|
+
// Wait for index to become available
|
|
871
|
+
await waitForIndexAvailable(adapter, databaseId, collectionId, idx.key);
|
|
872
|
+
}
|
|
873
|
+
}
|
|
874
|
+
|
|
875
|
+
async function waitForIndexGone(
|
|
876
|
+
adapter: DatabaseAdapter,
|
|
877
|
+
databaseId: string,
|
|
878
|
+
collectionId: string,
|
|
879
|
+
key: string,
|
|
880
|
+
maxWaitMs: number = 60_000
|
|
881
|
+
): Promise<void> {
|
|
882
|
+
const start = Date.now();
|
|
883
|
+
while (Date.now() - start < maxWaitMs) {
|
|
884
|
+
const res = await tryAwaitWithRetry(() =>
|
|
885
|
+
adapter.listIndexes({ databaseId, tableId: collectionId })
|
|
886
|
+
);
|
|
887
|
+
const indexes: any[] = res?.data || [];
|
|
888
|
+
if (!indexes.find((i: any) => i.key === key)) return;
|
|
889
|
+
await delay(2000);
|
|
890
|
+
}
|
|
891
|
+
}
|
|
892
|
+
|
|
893
|
+
async function waitForIndexAvailable(
|
|
894
|
+
adapter: DatabaseAdapter,
|
|
895
|
+
databaseId: string,
|
|
896
|
+
collectionId: string,
|
|
897
|
+
key: string,
|
|
898
|
+
maxWaitMs: number = 60_000
|
|
899
|
+
): Promise<void> {
|
|
900
|
+
const start = Date.now();
|
|
901
|
+
while (Date.now() - start < maxWaitMs) {
|
|
902
|
+
const res = await tryAwaitWithRetry(() =>
|
|
903
|
+
adapter.listIndexes({ databaseId, tableId: collectionId })
|
|
904
|
+
);
|
|
905
|
+
const indexes: any[] = res?.data || [];
|
|
906
|
+
const idx = indexes.find((i: any) => i.key === key);
|
|
907
|
+
if (idx?.status === "available") return;
|
|
908
|
+
if (idx?.status === "failed") {
|
|
909
|
+
throw new Error(`Index ${key} creation failed`);
|
|
910
|
+
}
|
|
911
|
+
await delay(2000);
|
|
912
|
+
}
|
|
913
|
+
}
|
|
914
|
+
|
|
915
|
+
// ────────────────────────────────────────────────────────
|
|
916
|
+
// Checkpoint management
|
|
917
|
+
// ────────────────────────────────────────────────────────
|
|
918
|
+
|
|
919
|
+
function loadOrCreateCheckpoint(
|
|
920
|
+
checkpointPath: string,
|
|
921
|
+
planFile: string
|
|
922
|
+
): MigrationCheckpoint {
|
|
923
|
+
if (fs.existsSync(checkpointPath)) {
|
|
924
|
+
try {
|
|
925
|
+
const raw = JSON.parse(fs.readFileSync(checkpointPath, "utf8"));
|
|
926
|
+
const parsed = MigrationCheckpointSchema.parse(raw);
|
|
927
|
+
MessageFormatter.info(
|
|
928
|
+
`Resuming from checkpoint: ${checkpointPath}`,
|
|
929
|
+
{ prefix: "Checkpoint" }
|
|
930
|
+
);
|
|
931
|
+
return parsed;
|
|
932
|
+
} catch {
|
|
933
|
+
MessageFormatter.info(
|
|
934
|
+
"Corrupt checkpoint file, creating new one.",
|
|
935
|
+
{ prefix: "Checkpoint" }
|
|
936
|
+
);
|
|
937
|
+
}
|
|
938
|
+
}
|
|
939
|
+
|
|
940
|
+
const now = new Date().toISOString();
|
|
941
|
+
return {
|
|
942
|
+
planFile,
|
|
943
|
+
startedAt: now,
|
|
944
|
+
lastUpdatedAt: now,
|
|
945
|
+
entries: [],
|
|
946
|
+
};
|
|
947
|
+
}
|
|
948
|
+
|
|
949
|
+
function saveCheckpoint(
|
|
950
|
+
checkpoint: MigrationCheckpoint,
|
|
951
|
+
checkpointPath: string
|
|
952
|
+
): void {
|
|
953
|
+
checkpoint.lastUpdatedAt = new Date().toISOString();
|
|
954
|
+
fs.writeFileSync(
|
|
955
|
+
checkpointPath,
|
|
956
|
+
JSON.stringify(checkpoint, null, 2),
|
|
957
|
+
"utf8"
|
|
958
|
+
);
|
|
959
|
+
}
|
|
960
|
+
|
|
961
|
+
function getOrCreateCheckpointEntry(
|
|
962
|
+
checkpoint: MigrationCheckpoint,
|
|
963
|
+
entry: MigrationPlanEntry
|
|
964
|
+
): CheckpointEntry {
|
|
965
|
+
const existing = findCheckpointEntry(checkpoint, entry);
|
|
966
|
+
if (existing) return existing;
|
|
967
|
+
|
|
968
|
+
const cpEntry: CheckpointEntry = {
|
|
969
|
+
databaseId: entry.databaseId,
|
|
970
|
+
collectionId: entry.collectionId,
|
|
971
|
+
attributeKey: entry.attributeKey,
|
|
972
|
+
backupKey: generateBackupKey(entry.attributeKey),
|
|
973
|
+
phase: "pending",
|
|
974
|
+
targetType: entry.targetType,
|
|
975
|
+
targetSize: entry.targetSize,
|
|
976
|
+
storedIndexes: [],
|
|
977
|
+
};
|
|
978
|
+
checkpoint.entries.push(cpEntry);
|
|
979
|
+
return cpEntry;
|
|
980
|
+
}
|
|
981
|
+
|
|
982
|
+
function findCheckpointEntry(
|
|
983
|
+
checkpoint: MigrationCheckpoint,
|
|
984
|
+
entry: MigrationPlanEntry
|
|
985
|
+
): CheckpointEntry | undefined {
|
|
986
|
+
return checkpoint.entries.find(
|
|
987
|
+
(e) =>
|
|
988
|
+
e.databaseId === entry.databaseId &&
|
|
989
|
+
e.collectionId === entry.collectionId &&
|
|
990
|
+
e.attributeKey === entry.attributeKey
|
|
991
|
+
);
|
|
992
|
+
}
|
|
993
|
+
|
|
994
|
+
// ────────────────────────────────────────────────────────
|
|
995
|
+
// Phase ordering for checkpoint resume
|
|
996
|
+
// ────────────────────────────────────────────────────────
|
|
997
|
+
|
|
998
|
+
const PHASE_ORDER: CheckpointPhase[] = [
|
|
999
|
+
"pending",
|
|
1000
|
+
"backup_created",
|
|
1001
|
+
"data_copied_to_backup",
|
|
1002
|
+
"data_verified_backup",
|
|
1003
|
+
"original_deleted",
|
|
1004
|
+
"new_attr_created",
|
|
1005
|
+
"data_copied_back",
|
|
1006
|
+
"data_verified_final",
|
|
1007
|
+
"backup_deleted",
|
|
1008
|
+
"completed",
|
|
1009
|
+
];
|
|
1010
|
+
|
|
1011
|
+
function phaseIndex(phase: CheckpointPhase): number {
|
|
1012
|
+
const idx = PHASE_ORDER.indexOf(phase);
|
|
1013
|
+
return idx >= 0 ? idx : -1;
|
|
1014
|
+
}
|
|
1015
|
+
|
|
1016
|
+
// ────────────────────────────────────────────────────────
|
|
1017
|
+
// Dry run summary
|
|
1018
|
+
// ────────────────────────────────────────────────────────
|
|
1019
|
+
|
|
1020
|
+
function printDryRunSummary(plan: MigrationPlan): void {
|
|
1021
|
+
console.log("");
|
|
1022
|
+
console.log(chalk.bold("Dry Run — What Would Happen:"));
|
|
1023
|
+
console.log(chalk.gray("─".repeat(50)));
|
|
1024
|
+
|
|
1025
|
+
const groups = new Map<string, MigrationPlanEntry[]>();
|
|
1026
|
+
for (const entry of plan.entries) {
|
|
1027
|
+
if (entry.action !== "migrate") continue;
|
|
1028
|
+
const key = `${entry.databaseName}/${entry.collectionName}`;
|
|
1029
|
+
if (!groups.has(key)) groups.set(key, []);
|
|
1030
|
+
groups.get(key)!.push(entry);
|
|
1031
|
+
}
|
|
1032
|
+
|
|
1033
|
+
for (const [groupName, entries] of groups) {
|
|
1034
|
+
console.log(`\n ${chalk.cyan(groupName)}`);
|
|
1035
|
+
for (const e of entries) {
|
|
1036
|
+
const sizeInfo =
|
|
1037
|
+
e.targetType === "varchar" ? ` (size: ${e.targetSize})` : "";
|
|
1038
|
+
const indexInfo =
|
|
1039
|
+
e.indexesAffected.length > 0
|
|
1040
|
+
? ` [indexes: ${e.indexesAffected.join(", ")}]`
|
|
1041
|
+
: "";
|
|
1042
|
+
console.log(
|
|
1043
|
+
` ${e.attributeKey}: string(${e.currentSize}) → ${e.targetType}${sizeInfo}${indexInfo}`
|
|
1044
|
+
);
|
|
1045
|
+
}
|
|
1046
|
+
}
|
|
1047
|
+
|
|
1048
|
+
const skipped = plan.entries.filter((e) => e.action === "skip");
|
|
1049
|
+
if (skipped.length > 0) {
|
|
1050
|
+
console.log(`\n ${chalk.yellow("Skipped:")}`);
|
|
1051
|
+
for (const e of skipped) {
|
|
1052
|
+
console.log(` ${e.attributeKey}: ${e.skipReason || "manual skip"}`);
|
|
1053
|
+
}
|
|
1054
|
+
}
|
|
1055
|
+
console.log("");
|
|
1056
|
+
}
|
|
1057
|
+
|
|
1058
|
+
// ────────────────────────────────────────────────────────
|
|
1059
|
+
// Utility
|
|
1060
|
+
// ────────────────────────────────────────────────────────
|
|
1061
|
+
|
|
1062
|
+
function delay(ms: number): Promise<void> {
|
|
1063
|
+
return new Promise((resolve) => setTimeout(resolve, ms));
|
|
1064
|
+
}
|