appwrite-utils-cli 1.7.6 → 1.7.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. package/SELECTION_DIALOGS.md +146 -0
  2. package/dist/adapters/DatabaseAdapter.d.ts +1 -0
  3. package/dist/adapters/LegacyAdapter.js +15 -3
  4. package/dist/adapters/TablesDBAdapter.js +15 -3
  5. package/dist/cli/commands/databaseCommands.js +90 -23
  6. package/dist/collections/wipeOperations.d.ts +2 -2
  7. package/dist/collections/wipeOperations.js +37 -139
  8. package/dist/main.js +175 -4
  9. package/dist/migrations/appwriteToX.d.ts +27 -2
  10. package/dist/migrations/appwriteToX.js +293 -69
  11. package/dist/migrations/yaml/YamlImportConfigLoader.d.ts +1 -1
  12. package/dist/migrations/yaml/generateImportSchemas.js +23 -8
  13. package/dist/shared/schemaGenerator.js +25 -12
  14. package/dist/shared/selectionDialogs.d.ts +214 -0
  15. package/dist/shared/selectionDialogs.js +516 -0
  16. package/dist/utils/configDiscovery.d.ts +4 -4
  17. package/dist/utils/configDiscovery.js +66 -30
  18. package/dist/utils/yamlConverter.d.ts +1 -0
  19. package/dist/utils/yamlConverter.js +26 -3
  20. package/dist/utilsController.d.ts +6 -1
  21. package/dist/utilsController.js +91 -2
  22. package/package.json +1 -1
  23. package/src/adapters/DatabaseAdapter.ts +2 -1
  24. package/src/adapters/LegacyAdapter.ts +95 -82
  25. package/src/adapters/TablesDBAdapter.ts +62 -47
  26. package/src/cli/commands/databaseCommands.ts +134 -34
  27. package/src/collections/wipeOperations.ts +62 -224
  28. package/src/main.ts +276 -34
  29. package/src/migrations/appwriteToX.ts +385 -90
  30. package/src/migrations/yaml/generateImportSchemas.ts +26 -8
  31. package/src/shared/schemaGenerator.ts +29 -12
  32. package/src/shared/selectionDialogs.ts +716 -0
  33. package/src/utils/configDiscovery.ts +83 -39
  34. package/src/utils/yamlConverter.ts +29 -3
  35. package/src/utilsController.ts +116 -4
@@ -3,6 +3,9 @@ import chalk from "chalk";
  import { join } from "node:path";
  import { MessageFormatter } from "../../shared/messageFormatter.js";
  import { ConfirmationDialogs } from "../../shared/confirmationDialogs.js";
+ import { SelectionDialogs } from "../../shared/selectionDialogs.js";
+ import type { DatabaseSelection, BucketSelection } from "../../shared/selectionDialogs.js";
+ import { logger } from "../../shared/logging.js";
  import { fetchAllDatabases } from "../../databases/methods.js";
  import { listBuckets } from "../../storage/methods.js";
  import { getFunction, downloadLatestFunctionDeployment } from "../../functions/methods.js";
@@ -12,23 +15,41 @@ export const databaseCommands = {
  async syncDb(cli: InteractiveCLI): Promise<void> {
  MessageFormatter.progress("Pushing local configuration to Appwrite...", { prefix: "Database" });

- const databases = await (cli as any).selectDatabases(
- (cli as any).getLocalDatabases(),
- chalk.blue("Select local databases to push:"),
- true
- );
+ try {
+ // Initialize controller
+ await (cli as any).controller!.init();

- if (!databases.length) {
- MessageFormatter.warning("No databases selected. Skipping database sync.", { prefix: "Database" });
- return;
- }
+ // Get available and configured databases
+ const availableDatabases = await fetchAllDatabases((cli as any).controller!.database!);
+ const configuredDatabases = (cli as any).controller!.config?.databases || [];

- try {
- // Loop through each database and prompt for collections specific to that database
- for (const database of databases) {
- MessageFormatter.info(`\nšŸ“¦ Configuring push for database: ${database.name}`, { prefix: "Database" });
+ // Get local collections for selection
+ const localCollections = (cli as any).getLocalCollections();
+
+ // Prompt about existing configuration
+ const { syncExisting, modifyConfiguration } = await SelectionDialogs.promptForExistingConfig(configuredDatabases);
+
+ // Select databases
+ const selectedDatabaseIds = await SelectionDialogs.selectDatabases(
+ availableDatabases,
+ configuredDatabases,
+ { showSelectAll: true, allowNewOnly: !syncExisting }
+ );
+
+ if (selectedDatabaseIds.length === 0) {
+ MessageFormatter.warning("No databases selected. Skipping database sync.", { prefix: "Database" });
+ return;
+ }
+
+ // Select tables/collections for each database using the existing method
+ const tableSelectionsMap = new Map<string, string[]>();
+ const availableTablesMap = new Map<string, any[]>();

- const collections = await (cli as any).selectCollectionsAndTables(
+ for (const databaseId of selectedDatabaseIds) {
+ const database = availableDatabases.find(db => db.$id === databaseId)!;
+
+ // Use the existing selectCollectionsAndTables method
+ const selectedCollections = await (cli as any).selectCollectionsAndTables(
  database,
  (cli as any).controller!.database!,
  chalk.blue(`Select collections/tables to push to "${database.name}":`),
@@ -36,19 +57,93 @@ export const databaseCommands = {
  true // prefer local
  );

- if (collections.length === 0) {
+ // Map selected collections to table IDs
+ const selectedTableIds = selectedCollections.map((c: any) => c.$id || c.id);
+
+ // Store selections
+ tableSelectionsMap.set(databaseId, selectedTableIds);
+ availableTablesMap.set(databaseId, selectedCollections);
+
+ if (selectedCollections.length === 0) {
  MessageFormatter.warning(`No collections selected for database "${database.name}". Skipping.`, { prefix: "Database" });
  continue;
  }
+ }

- // Push selected collections to this specific database
- await (cli as any).controller!.syncDb([database], collections);
- MessageFormatter.success(
- `Pushed ${collections.length} collection(s) to database "${database.name}"`,
- { prefix: "Database" }
- );
+ // Ask if user wants to select buckets
+ const { selectBuckets } = await inquirer.prompt([
+ {
+ type: "confirm",
+ name: "selectBuckets",
+ message: "Do you want to select storage buckets to sync as well?",
+ default: false,
+ },
+ ]);
+
+ let bucketSelections: BucketSelection[] = [];
+
+ if (selectBuckets) {
+ // Get available and configured buckets
+ try {
+ const availableBucketsResponse = await listBuckets((cli as any).controller!.storage!);
+ const availableBuckets = availableBucketsResponse.buckets || [];
+ const configuredBuckets = (cli as any).controller!.config?.buckets || [];
+
+ if (availableBuckets.length === 0) {
+ MessageFormatter.warning("No storage buckets available in remote instance.", { prefix: "Database" });
+ } else {
+ // Select buckets using SelectionDialogs
+ const selectedBucketIds = await SelectionDialogs.selectBucketsForDatabases(
+ selectedDatabaseIds,
+ availableBuckets,
+ configuredBuckets,
+ { showSelectAll: true, groupByDatabase: true }
+ );
+
+ if (selectedBucketIds.length > 0) {
+ // Create BucketSelection objects
+ bucketSelections = SelectionDialogs.createBucketSelection(
+ selectedBucketIds,
+ availableBuckets,
+ configuredBuckets,
+ availableDatabases
+ );
+
+ MessageFormatter.info(`Selected ${bucketSelections.length} storage bucket(s)`, { prefix: "Database" });
+ }
+ }
+ } catch (error) {
+ MessageFormatter.warning("Failed to fetch storage buckets. Continuing with databases only.", { prefix: "Database" });
+ logger.warn("Storage bucket fetch failed during syncDb", { error: error instanceof Error ? error.message : String(error) });
+ }
  }

+ // Create DatabaseSelection objects
+ const databaseSelections = SelectionDialogs.createDatabaseSelection(
+ selectedDatabaseIds,
+ availableDatabases,
+ tableSelectionsMap,
+ configuredDatabases,
+ availableTablesMap
+ );
+
+ // Show confirmation summary
+ const selectionSummary = SelectionDialogs.createSyncSelectionSummary(
+ databaseSelections,
+ bucketSelections
+ );
+
+ const confirmed = await SelectionDialogs.confirmSyncSelection(selectionSummary);
+
+ if (!confirmed) {
+ MessageFormatter.info("Sync operation cancelled by user", { prefix: "Database" });
+ return;
+ }
+
+ // Perform selective sync using the controller
+ MessageFormatter.progress("Starting selective sync...", { prefix: "Database" });
+ await (cli as any).controller!.selectiveSync(databaseSelections, bucketSelections);
+
  MessageFormatter.success("\nāœ… All database configurations pushed successfully!", { prefix: "Database" });

  // Then handle functions if requested
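
Read end to end, the new syncDb replaces the old per-database push loop with a select → confirm → selectiveSync pipeline built on the new SelectionDialogs module. Below is a minimal sketch of that happy path, condensed from the hunk above; the function name pushFlowSketch and the bare controller parameter are assumptions, the per-database table selection loop is skipped (both maps are left empty), and the bucket and error-handling branches are omitted.

    // Sketch only: names and call shapes are taken from this diff, but the
    // surrounding wiring is simplified and is not the published code.
    import { SelectionDialogs } from "../../shared/selectionDialogs.js";
    import { fetchAllDatabases } from "../../databases/methods.js";

    async function pushFlowSketch(controller: any): Promise<void> {
      const availableDatabases = await fetchAllDatabases(controller.database);
      const configuredDatabases = controller.config?.databases || [];

      // Ask whether existing configuration should be re-synced, then pick databases.
      const { syncExisting } = await SelectionDialogs.promptForExistingConfig(configuredDatabases);
      const selectedDatabaseIds = await SelectionDialogs.selectDatabases(
        availableDatabases,
        configuredDatabases,
        { showSelectAll: true, allowNewOnly: !syncExisting }
      );
      if (selectedDatabaseIds.length === 0) return;

      // Per-database table selection is omitted here; see the loop in the hunk above.
      const tableSelectionsMap = new Map<string, string[]>();
      const availableTablesMap = new Map<string, any[]>();

      const databaseSelections = SelectionDialogs.createDatabaseSelection(
        selectedDatabaseIds,
        availableDatabases,
        tableSelectionsMap,
        configuredDatabases,
        availableTablesMap
      );
      const summary = SelectionDialogs.createSyncSelectionSummary(databaseSelections, []);

      // Only push after the user confirms the summary; buckets are skipped in this sketch.
      if (await SelectionDialogs.confirmSyncSelection(summary)) {
        await controller.selectiveSync(databaseSelections, []);
      }
    }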
@@ -104,23 +199,28 @@ export const databaseCommands = {
  (cli as any).controller!.database!
  );

- // Use the controller's synchronizeConfigurations method which handles collections properly
- MessageFormatter.progress("Pulling collections and generating collection files...", { prefix: "Collections" });
- await (cli as any).controller!.synchronizeConfigurations(remoteDatabases);
-
- // Also configure buckets for any new databases
+ // First, prepare the combined database list for bucket configuration
  const localDatabases = (cli as any).controller!.config?.databases || [];
- const updatedConfig = await (cli as any).configureBuckets({
+ const allDatabases = [
+ ...localDatabases,
+ ...remoteDatabases.filter(
+ (rd: any) => !localDatabases.some((ld: any) => ld.name === rd.name)
+ ),
+ ];
+
+ // Configure buckets FIRST to get user selections before writing config
+ MessageFormatter.progress("Configuring storage buckets...", { prefix: "Buckets" });
+ const configWithBuckets = await (cli as any).configureBuckets({
  ...(cli as any).controller!.config!,
- databases: [
- ...localDatabases,
- ...remoteDatabases.filter(
- (rd: any) => !localDatabases.some((ld: any) => ld.name === rd.name)
- ),
- ],
+ databases: allDatabases,
  });

- (cli as any).controller!.config = updatedConfig;
+ // Update controller config with bucket selections
+ (cli as any).controller!.config = configWithBuckets;
+
+ // Now synchronize configurations with the updated config that includes bucket selections
+ MessageFormatter.progress("Pulling collections and generating collection files...", { prefix: "Collections" });
+ await (cli as any).controller!.synchronizeConfigurations(remoteDatabases, configWithBuckets);
  }

  // Then sync functions
@@ -7,7 +7,7 @@ import type { DatabaseAdapter } from "../adapters/DatabaseAdapter.js";
  import { tryAwaitWithRetry } from "../utils/helperFunctions.js";
  import { MessageFormatter } from "../shared/messageFormatter.js";
  import { ProgressManager } from "../shared/progressManager.js";
- import { isRetryableError, isBulkNotSupportedError, isCriticalError } from "../shared/errorUtils.js";
+ import { isRetryableError, isCriticalError } from "../shared/errorUtils.js";
  import { delay } from "../utils/helperFunctions.js";
  import { chunk } from "es-toolkit";
  import pLimit from "p-limit";
@@ -239,8 +239,8 @@ export const wipeAllTables = async (
  };

  /**
- * Optimized streaming deletion of all rows from a table
- * Uses bulk deletion when available, falls back to optimized individual deletion
+ * Optimized deletion of all rows from a table using direct bulk deletion
+ * Uses Query.limit() to delete rows without fetching IDs first
  */
  export const wipeTableRows = async (
  adapter: DatabaseAdapter,
@@ -248,123 +248,78 @@ export const wipeTableRows = async (
  tableId: string
  ): Promise<void> => {
  try {
- // Configuration for optimized deletion
- const FETCH_BATCH_SIZE = 1000; // How many to fetch per query
- const BULK_DELETE_BATCH_SIZE = 500; // How many to bulk delete at once
- const INDIVIDUAL_DELETE_BATCH_SIZE = 200; // For fallback individual deletion
- const MAX_CONCURRENT_OPERATIONS = 10; // Concurrent bulk/individual operations
+ // Check if bulk deletion is available
+ if (!adapter.bulkDeleteRows) {
+ MessageFormatter.error(
+ "Bulk deletion not available for this adapter - wipe operation not supported",
+ new Error("bulkDeleteRows not available"),
+ { prefix: "Wipe" }
+ );
+ throw new Error("Bulk deletion required for wipe operations");
+ }

+ const DELETE_BATCH_SIZE = 250; // How many rows to delete per batch
  let totalDeleted = 0;
- let cursor: string | undefined;
  let hasMoreRows = true;

  MessageFormatter.info("Starting optimized table row deletion...", { prefix: "Wipe" });

- // Create progress tracker (we'll update the total as we discover more rows)
  const progress = ProgressManager.create(
  `delete-${tableId}`,
- 1, // Start with 1, will update as we go
+ 1, // Start with 1, will update as we discover more
  { title: "Deleting table rows" }
  );

  while (hasMoreRows) {
- // Fetch next batch of rows
- const queries = [Query.limit(FETCH_BATCH_SIZE)];
- if (cursor) {
- queries.push(Query.cursorAfter(cursor));
+ try {
+ // Delete next batch using Query.limit() - no fetching needed!
+ const result = await tryAwaitWithRetry(async () =>
+ adapter.bulkDeleteRows!({
+ databaseId,
+ tableId,
+ rowIds: [], // Empty array signals we want to use Query.limit instead
+ batchSize: DELETE_BATCH_SIZE
+ })
+ );
+
+ const deletedCount = (result as any).total || 0;
+
+ if (deletedCount === 0) {
+ hasMoreRows = false;
+ break;
+ }
+
+ totalDeleted += deletedCount;
+ progress.setTotal(totalDeleted + 100); // Estimate more rows exist
+ progress.update(totalDeleted);
+
+ MessageFormatter.progress(
+ `Deleted ${deletedCount} rows (${totalDeleted} total so far)`,
+ { prefix: "Wipe" }
+ );
+
+ // Small delay between batches to be respectful to the API
+ await delay(10);
+
+ } catch (error: any) {
+ const errorMessage = error.message || String(error);
+
+ if (isCriticalError(errorMessage)) {
+ MessageFormatter.error(
+ `Critical error during bulk deletion: ${errorMessage}`,
+ error,
+ { prefix: "Wipe" }
+ );
+ throw error;
+ } else {
+ MessageFormatter.error(
+ `Error during deletion batch: ${errorMessage}`,
+ error,
+ { prefix: "Wipe" }
+ );
+ // Continue trying with next batch
+ }
  }
-
- const response = await adapter.listRows({ databaseId, tableId, queries });
- const rows: any[] = (response as any).rows || [];
-
- if (rows.length === 0) {
- hasMoreRows = false;
- break;
- }
-
- // Update progress total as we discover more rows
- if (rows.length === FETCH_BATCH_SIZE) {
- // There might be more rows, update progress total
- progress.setTotal(totalDeleted + rows.length + 1000); // Estimate more
- }
-
- MessageFormatter.progress(
- `Processing batch: ${rows.length} rows (${totalDeleted + rows.length} total so far)`,
- { prefix: "Wipe" }
- );
-
- // Try to use bulk deletion first, fall back to individual deletion
- const rowIds = rows.map((row: any) => row.$id);
-
- // Check if bulk deletion is available and try it first
- if (adapter.bulkDeleteRows) {
- try {
- // Attempt bulk deletion (available in TablesDB)
- const deletedCount = await tryBulkDeletion(adapter, databaseId, tableId, rowIds, BULK_DELETE_BATCH_SIZE, MAX_CONCURRENT_OPERATIONS);
- totalDeleted += deletedCount;
- progress.update(totalDeleted);
- } catch (bulkError) {
- // Enhanced error handling: categorize the error and decide on fallback strategy
- const errorMessage = bulkError instanceof Error ? bulkError.message : String(bulkError);
-
- if (isRetryableError(errorMessage)) {
- MessageFormatter.progress(
- `Bulk deletion encountered retryable error, retrying with individual deletion for ${rows.length} rows`,
- { prefix: "Wipe" }
- );
- } else if (isBulkNotSupportedError(errorMessage)) {
- MessageFormatter.progress(
- `Bulk deletion not supported by server, switching to individual deletion for ${rows.length} rows`,
- { prefix: "Wipe" }
- );
- } else {
- MessageFormatter.progress(
- `Bulk deletion failed (${errorMessage}), falling back to individual deletion for ${rows.length} rows`,
- { prefix: "Wipe" }
- );
- }
-
- const deletedCount = await tryIndividualDeletion(
- adapter,
- databaseId,
- tableId,
- rows,
- INDIVIDUAL_DELETE_BATCH_SIZE,
- MAX_CONCURRENT_OPERATIONS,
- progress,
- totalDeleted
- );
- totalDeleted += deletedCount;
- }
- } else {
- // Bulk deletion not available, use optimized individual deletion
- MessageFormatter.progress(
- `Using individual deletion for ${rows.length} rows (bulk deletion not available)`,
- { prefix: "Wipe" }
- );
-
- const deletedCount = await tryIndividualDeletion(
- adapter,
- databaseId,
- tableId,
- rows,
- INDIVIDUAL_DELETE_BATCH_SIZE,
- MAX_CONCURRENT_OPERATIONS,
- progress,
- totalDeleted
- );
- totalDeleted += deletedCount;
- }
-
- // Set up cursor for next iteration
- if (rows.length < FETCH_BATCH_SIZE) {
- hasMoreRows = false;
- } else {
- cursor = rows[rows.length - 1].$id;
- }
-
- // Small delay between fetch cycles to be respectful to the API
- await delay(10);
  }

  // Update final progress total
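
The rewrite drops the fetch-then-delete cycle entirely: each iteration calls the adapter's bulkDeleteRows with an empty rowIds array (which, per the diff's comments, the adapter turns into a Query.limit(batchSize) deletion) and stops once a batch reports total === 0. A minimal usage sketch follows; the caller function, import paths, and the checked precondition are assumptions, while wipeTableRows, DatabaseAdapter, and the throw-on-missing-bulkDeleteRows behaviour come from this diff.

    // Sketch only: wipeTableRows and DatabaseAdapter are from this package;
    // the import paths and the surrounding caller are assumed for illustration.
    import { wipeTableRows } from "./collections/wipeOperations.js";
    import type { DatabaseAdapter } from "./adapters/DatabaseAdapter.js";

    async function wipeExample(adapter: DatabaseAdapter, databaseId: string, tableId: string) {
      if (!adapter.bulkDeleteRows) {
        // As of 1.7.8, wipeTableRows throws instead of falling back to per-row deletes.
        throw new Error("This adapter does not support bulk deletion");
      }
      // Deletes in batches of 250 (DELETE_BATCH_SIZE) until a batch returns zero rows.
      await wipeTableRows(adapter, databaseId, tableId);
    }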
@@ -389,120 +344,3 @@ export const wipeTableRows = async (
  throw error;
  }
  };
-
- /**
- * Helper function to attempt bulk deletion of row IDs
- */
- async function tryBulkDeletion(
- adapter: DatabaseAdapter,
- databaseId: string,
- tableId: string,
- rowIds: string[],
- batchSize: number,
- maxConcurrent: number
- ): Promise<number> {
- if (!adapter.bulkDeleteRows) {
- throw new Error("Bulk deletion not available on this adapter");
- }
-
- const limit = pLimit(maxConcurrent);
- const batches = chunk(rowIds, batchSize);
- let successfullyDeleted = 0;
-
- const deletePromises = batches.map((batch) =>
- limit(async () => {
- try {
- const result = await tryAwaitWithRetry(async () =>
- adapter.bulkDeleteRows!({ databaseId, tableId, rowIds: batch })
- );
- successfullyDeleted += batch.length; // Assume success if no error thrown
- } catch (error: any) {
- const errorMessage = error.message || String(error);
-
- // Enhanced error handling for bulk deletion
- if (isCriticalError(errorMessage)) {
- MessageFormatter.error(
- `Critical error in bulk deletion batch: ${errorMessage}`,
- error,
- { prefix: "Wipe" }
- );
- throw error;
- } else {
- // For non-critical errors in bulk deletion, re-throw to trigger fallback
- throw new Error(`Bulk deletion batch failed: ${errorMessage}`);
- }
- }
- })
- );
-
- await Promise.all(deletePromises);
- return successfullyDeleted;
- }
-
- /**
- * Helper function for fallback individual deletion
- */
- async function tryIndividualDeletion(
- adapter: DatabaseAdapter,
- databaseId: string,
- tableId: string,
- rows: any[],
- batchSize: number,
- maxConcurrent: number,
- progress: any,
- baseDeleted: number
- ): Promise<number> {
- const limit = pLimit(maxConcurrent);
- const batches = chunk(rows, batchSize);
- let processedInBatch = 0;
- let successfullyDeleted = 0;
-
- const deletePromises = batches.map((batch) =>
- limit(async () => {
- const batchDeletePromises = batch.map(async (row: any) => {
- try {
- await tryAwaitWithRetry(async () =>
- adapter.deleteRow({ databaseId, tableId, id: row.$id })
- );
- successfullyDeleted++;
- } catch (error: any) {
- const errorMessage = error.message || String(error);
-
- // Enhanced error handling for row deletion
- if (errorMessage.includes("Row with the requested ID could not be found")) {
- // Row already deleted, count as success since it's gone
- successfullyDeleted++;
- } else if (isCriticalError(errorMessage)) {
- // Critical error, log and rethrow to stop operation
- MessageFormatter.error(
- `Critical error deleting row ${row.$id}: ${errorMessage}`,
- error,
- { prefix: "Wipe" }
- );
- throw error;
- } else if (isRetryableError(errorMessage)) {
- // Retryable error, will be handled by tryAwaitWithRetry
- MessageFormatter.progress(
- `Retryable error for row ${row.$id}, will retry`,
- { prefix: "Wipe" }
- );
- } else {
- // Other non-critical errors, log but continue
- MessageFormatter.error(
- `Failed to delete row ${row.$id}: ${errorMessage}`,
- error,
- { prefix: "Wipe" }
- );
- }
- }
- processedInBatch++;
- progress.update(baseDeleted + successfullyDeleted);
- });
-
- await Promise.all(batchDeletePromises);
- })
- );
-
- await Promise.all(deletePromises);
- return successfullyDeleted;
- }