appwrite-utils-cli 1.4.1 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. package/README.md +22 -1
  2. package/dist/adapters/TablesDBAdapter.js +7 -4
  3. package/dist/collections/attributes.d.ts +1 -1
  4. package/dist/collections/attributes.js +42 -7
  5. package/dist/collections/indexes.js +13 -3
  6. package/dist/collections/methods.d.ts +9 -0
  7. package/dist/collections/methods.js +268 -0
  8. package/dist/databases/setup.js +6 -2
  9. package/dist/interactiveCLI.js +2 -1
  10. package/dist/migrations/appwriteToX.d.ts +2 -2
  11. package/dist/migrations/comprehensiveTransfer.js +12 -0
  12. package/dist/migrations/dataLoader.d.ts +5 -5
  13. package/dist/migrations/relationships.d.ts +2 -2
  14. package/dist/shared/jsonSchemaGenerator.d.ts +1 -0
  15. package/dist/shared/jsonSchemaGenerator.js +6 -2
  16. package/dist/shared/operationQueue.js +14 -1
  17. package/dist/shared/schemaGenerator.d.ts +2 -1
  18. package/dist/shared/schemaGenerator.js +61 -78
  19. package/dist/storage/schemas.d.ts +8 -8
  20. package/dist/utils/loadConfigs.js +44 -19
  21. package/dist/utils/schemaStrings.d.ts +2 -1
  22. package/dist/utils/schemaStrings.js +61 -78
  23. package/dist/utils/setupFiles.js +19 -1
  24. package/dist/utils/versionDetection.d.ts +6 -0
  25. package/dist/utils/versionDetection.js +30 -0
  26. package/dist/utilsController.js +32 -5
  27. package/package.json +1 -1
  28. package/src/adapters/TablesDBAdapter.ts +20 -17
  29. package/src/collections/attributes.ts +198 -156
  30. package/src/collections/indexes.ts +36 -28
  31. package/src/collections/methods.ts +292 -19
  32. package/src/databases/setup.ts +11 -7
  33. package/src/interactiveCLI.ts +8 -7
  34. package/src/migrations/comprehensiveTransfer.ts +22 -8
  35. package/src/shared/jsonSchemaGenerator.ts +36 -29
  36. package/src/shared/operationQueue.ts +48 -33
  37. package/src/shared/schemaGenerator.ts +128 -134
  38. package/src/utils/loadConfigs.ts +48 -29
  39. package/src/utils/schemaStrings.ts +124 -130
  40. package/src/utils/setupFiles.ts +21 -5
  41. package/src/utils/versionDetection.ts +48 -21
  42. package/src/utilsController.ts +59 -32
@@ -1,5 +1,6 @@
- import { indexSchema, type Index } from "appwrite-utils";
- import { Databases, IndexType, Query, type Models } from "node-appwrite";
+ import { indexSchema, type Index } from "appwrite-utils";
+ import { Databases, IndexType, Query, type Models } from "node-appwrite";
+ import type { DatabaseAdapter } from "../adapters/DatabaseAdapter.js";
  import { delay, tryAwaitWithRetry } from "../utils/helperFunctions.js";
  import chalk from "chalk";
 
@@ -18,15 +19,15 @@ interface IndexWithStatus {
  /**
  * Wait for index to become available, with retry logic for stuck indexes and exponential backoff
  */
- const waitForIndexAvailable = async (
- db: Databases,
- dbId: string,
- collectionId: string,
- indexKey: string,
- maxWaitTime: number = 60000, // 1 minute
- retryCount: number = 0,
- maxRetries: number = 5
- ): Promise<boolean> => {
+ const waitForIndexAvailable = async (
+ db: Databases | DatabaseAdapter,
+ dbId: string,
+ collectionId: string,
+ indexKey: string,
+ maxWaitTime: number = 60000, // 1 minute
+ retryCount: number = 0,
+ maxRetries: number = 5
+ ): Promise<boolean> => {
  const startTime = Date.now();
  let checkInterval = 2000; // Start with 2 seconds
 
@@ -41,22 +42,29 @@ const waitForIndexAvailable = async (
 
  while (Date.now() - startTime < maxWaitTime) {
  try {
- const indexList = await db.listIndexes(dbId, collectionId);
- const index = indexList.indexes.find(
- (idx: any) => idx.key === indexKey
- ) as IndexWithStatus | undefined;
+ const indexList = await (db instanceof Databases
+ ? db.listIndexes(dbId, collectionId)
+ : (db as DatabaseAdapter).listIndexes({ databaseId: dbId, tableId: collectionId }));
+ const indexes: any[] = (db instanceof Databases)
+ ? (indexList as any).indexes
+ : ((indexList as any).data || (indexList as any).indexes || []);
+ const index = indexes.find((idx: any) => idx.key === indexKey) as IndexWithStatus | undefined;
 
  if (!index) {
  console.log(chalk.red(`Index '${indexKey}' not found`));
  return false;
  }
 
- console.log(chalk.gray(`Index '${indexKey}' status: ${index.status}`));
+ if (db instanceof Databases) {
+ console.log(chalk.gray(`Index '${indexKey}' status: ${(index as any).status}`));
+ } else {
+ console.log(chalk.gray(`Index '${indexKey}' detected (TablesDB)`));
+ }
 
  switch (index.status) {
- case 'available':
- console.log(chalk.green(`✅ Index '${indexKey}' is now available`));
- return true;
+ case 'available':
+ console.log(chalk.green(`✅ Index '${indexKey}' is now available`));
+ return true;
 
  case 'failed':
  console.log(chalk.red(`❌ Index '${indexKey}' failed: ${index.error}`));
@@ -66,9 +74,9 @@ const waitForIndexAvailable = async (
  console.log(chalk.yellow(`⚠️ Index '${indexKey}' is stuck, will retry...`));
  return false;
 
- case 'processing':
- // Continue waiting
- break;
+ case 'processing':
+ // Continue waiting
+ break;
 
  case 'deleting':
  console.log(chalk.yellow(`Index '${indexKey}' is being deleted`));
@@ -79,12 +87,12 @@ const waitForIndexAvailable = async (
  break;
  }
 
- await delay(checkInterval);
- } catch (error) {
- console.log(chalk.red(`Error checking index status: ${error}`));
- return false;
- }
- }
+ await delay(checkInterval);
+ } catch (error) {
+ console.log(chalk.red(`Error checking index status: ${error}`));
+ return false;
+ }
+ }
 
  // Timeout reached
  console.log(chalk.yellow(`⏰ Timeout waiting for index '${indexKey}' (${maxWaitTime}ms)`));
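The hunks above widen `waitForIndexAvailable` to accept either the legacy `Databases` service or a `DatabaseAdapter`, branching on `instanceof` at runtime. A condensed sketch of that branching pattern, using a hypothetical `listIndexesEither` helper and a minimal adapter shape inferred from the calls shown in the diff:

```ts
import { Databases } from "node-appwrite";

// Minimal adapter shape assumed from the adapter calls visible in the hunks above.
interface DatabaseAdapter {
  listIndexes(args: { databaseId: string; tableId: string }): Promise<unknown>;
}

// Hypothetical helper illustrating the instanceof-based branching the diff adds:
// one path for the legacy Databases service, one for a TablesDB adapter.
async function listIndexesEither(
  db: Databases | DatabaseAdapter,
  dbId: string,
  collectionId: string
): Promise<any[]> {
  if (db instanceof Databases) {
    // Legacy API: the result exposes an `indexes` array.
    const res = await db.listIndexes(dbId, collectionId);
    return (res as any).indexes ?? [];
  }
  // Adapter API: the result may wrap the list in `data`, so fall back defensively.
  const res = await db.listIndexes({ databaseId: dbId, tableId: collectionId });
  return (res as any).data ?? (res as any).indexes ?? [];
}
```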
@@ -6,10 +6,10 @@ import {
  Query,
  type Models,
  } from "node-appwrite";
- import type { AppwriteConfig, CollectionCreate, Indexes } from "appwrite-utils";
- import type { DatabaseAdapter } from "../adapters/DatabaseAdapter.js";
- import { getAdapterFromConfig } from "../utils/getClientFromConfig.js";
- import { nameToIdMapping, processQueue, queuedOperations } from "../shared/operationQueue.js";
+ import type { AppwriteConfig, CollectionCreate, Indexes, Attribute } from "appwrite-utils";
+ import type { DatabaseAdapter } from "../adapters/DatabaseAdapter.js";
+ import { getAdapterFromConfig } from "../utils/getClientFromConfig.js";
+ import { nameToIdMapping, processQueue, queuedOperations } from "../shared/operationQueue.js";
  import { createUpdateCollectionAttributes, createUpdateCollectionAttributesWithStatusCheck } from "./attributes.js";
  import { createOrUpdateIndexes, createOrUpdateIndexesWithStatusCheck } from "./indexes.js";
  import { SchemaGenerator } from "../shared/schemaGenerator.js";
@@ -290,11 +290,11 @@ export const wipeDatabase = async (
  return collectionsDeleted;
  };
 
- export const wipeCollection = async (
- database: Databases,
- databaseId: string,
- collectionId: string
- ): Promise<void> => {
+ export const wipeCollection = async (
+ database: Databases,
+ databaseId: string,
+ collectionId: string
+ ): Promise<void> => {
  const collections = await database.listCollections(databaseId, [
  Query.equal("$id", collectionId),
  ]);
@@ -303,8 +303,78 @@ export const wipeCollection = async (
  return;
  }
  const collection = collections.collections[0];
- await wipeDocumentsFromCollection(database, databaseId, collection.$id);
- };
+ await wipeDocumentsFromCollection(database, databaseId, collection.$id);
+ };
+
+ // TablesDB helpers for wiping
+ export const wipeAllTables = async (
+ adapter: DatabaseAdapter,
+ databaseId: string
+ ): Promise<{ tableId: string; tableName: string }[]> => {
+ MessageFormatter.info(`Wiping tables in database: ${databaseId}`, { prefix: 'Wipe' });
+ const res = await adapter.listTables({ databaseId, queries: [Query.limit(500)] });
+ const tables: any[] = (res as any).tables || [];
+ const deleted: { tableId: string; tableName: string }[] = [];
+ const progress = ProgressManager.create(`wipe-db-${databaseId}`, tables.length, { title: 'Deleting tables' });
+ let processed = 0;
+ for (const t of tables) {
+ try {
+ await adapter.deleteTable({ databaseId, tableId: t.$id });
+ deleted.push({ tableId: t.$id, tableName: t.name });
+ } catch (e) {
+ MessageFormatter.error(`Failed deleting table ${t.$id}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Wipe' });
+ }
+ processed++; progress.update(processed);
+ await delay(100);
+ }
+ progress.stop();
+ return deleted;
+ };
+
+ export const wipeTableRows = async (
+ adapter: DatabaseAdapter,
+ databaseId: string,
+ tableId: string
+ ): Promise<void> => {
+ try {
+ const initial = await adapter.listRows({ databaseId, tableId, queries: [Query.limit(1000)] });
+ let rows: any[] = (initial as any).rows || [];
+ let total = rows.length;
+ let cursor = rows.length >= 1000 ? rows[rows.length - 1].$id : undefined;
+ while (cursor) {
+ const resp = await adapter.listRows({ databaseId, tableId, queries: [Query.limit(1000), ...(cursor ? [Query.cursorAfter(cursor)] : [])] });
+ const more: any[] = (resp as any).rows || [];
+ rows.push(...more);
+ total = rows.length;
+ cursor = more.length >= 1000 ? more[more.length - 1].$id : undefined;
+ if (total % 10000 === 0) {
+ MessageFormatter.progress(`Found ${total} rows...`, { prefix: 'Wipe' });
+ }
+ }
+ MessageFormatter.info(`Found ${total} rows to delete`, { prefix: 'Wipe' });
+ if (total === 0) return;
+ const progress = ProgressManager.create(`delete-${tableId}`, total, { title: 'Deleting rows' });
+ let processed = 0;
+ const maxStackSize = 50;
+ const batches = chunk(rows, maxStackSize);
+ for (const batch of batches) {
+ await Promise.all(batch.map(async (row: any) => {
+ try {
+ await adapter.deleteRow({ databaseId, tableId, id: row.$id });
+ } catch (e: any) {
+ // ignore missing rows
+ }
+ processed++; progress.update(processed);
+ }));
+ await delay(50);
+ }
+ progress.stop();
+ MessageFormatter.success(`Completed deletion of ${total} rows from table ${tableId}`, { prefix: 'Wipe' });
+ } catch (error) {
+ MessageFormatter.error(`Error wiping rows from table ${tableId}`, error instanceof Error ? error : new Error(String(error)), { prefix: 'Wipe' });
+ throw error;
+ }
+ };
 
  export const generateSchemas = async (
  config: AppwriteConfig,
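A minimal usage sketch for the new `wipeAllTables` and `wipeTableRows` helpers, assuming they are exported from the built `collections/methods` module and that the adapter comes from `getAdapterFromConfig` as elsewhere in this release; the import paths are assumptions:

```ts
// Import paths are assumptions based on the dist layout listed above; adjust to
// however the package is actually consumed in your project.
import { wipeAllTables, wipeTableRows } from "appwrite-utils-cli/dist/collections/methods.js";
import { getAdapterFromConfig } from "appwrite-utils-cli/dist/utils/getClientFromConfig.js";
import type { AppwriteConfig } from "appwrite-utils";

// Drop every table in a database; the helper returns the ids/names it deleted.
export async function wipeStagingDatabase(config: AppwriteConfig, databaseId: string) {
  const { adapter } = await getAdapterFromConfig(config);
  const deleted = await wipeAllTables(adapter, databaseId);
  console.log(`Deleted ${deleted.length} tables`);
}

// Empty a single table while keeping its columns and indexes in place.
export async function emptyOneTable(config: AppwriteConfig, databaseId: string, tableId: string) {
  const { adapter } = await getAdapterFromConfig(config);
  await wipeTableRows(adapter, databaseId, tableId);
}
```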
@@ -314,13 +384,23 @@ export const generateSchemas = async (
  schemaGenerator.generateSchemas();
  };
 
- export const createOrUpdateCollections = async (
- database: Databases,
- databaseId: string,
- config: AppwriteConfig,
- deletedCollections?: { collectionId: string; collectionName: string }[],
- selectedCollections: Models.Collection[] = []
- ): Promise<void> => {
+ export const createOrUpdateCollections = async (
+ database: Databases,
+ databaseId: string,
+ config: AppwriteConfig,
+ deletedCollections?: { collectionId: string; collectionName: string }[],
+ selectedCollections: Models.Collection[] = []
+ ): Promise<void> => {
+ // If API mode is tablesdb, route to adapter-based implementation
+ try {
+ const { adapter, apiMode } = await getAdapterFromConfig(config);
+ if (apiMode === 'tablesdb') {
+ await createOrUpdateCollectionsViaAdapter(adapter, databaseId, config, deletedCollections, selectedCollections);
+ return;
+ }
+ } catch {
+ // Fallback to legacy path below
+ }
  const collectionsToProcess =
  selectedCollections.length > 0 ? selectedCollections : config.collections;
  if (!collectionsToProcess) {
@@ -472,7 +552,200 @@ export const createOrUpdateCollections = async (
  } else {
  MessageFormatter.info("No queued operations to process", { prefix: "Collections" });
  }
- };
+ };
+
+ // New: Adapter-based implementation for TablesDB
+ export const createOrUpdateCollectionsViaAdapter = async (
+ adapter: DatabaseAdapter,
+ databaseId: string,
+ config: AppwriteConfig,
+ deletedCollections?: { collectionId: string; collectionName: string }[],
+ selectedCollections: Models.Collection[] = []
+ ): Promise<void> => {
+ const collectionsToProcess =
+ selectedCollections.length > 0 ? selectedCollections : (config.collections || []);
+ if (!collectionsToProcess || collectionsToProcess.length === 0) return;
+
+ const usedIds = new Set<string>();
+
+ // Helper: create attributes through adapter
+ const createAttr = async (tableId: string, attr: Attribute) => {
+ const base: any = {
+ databaseId,
+ tableId,
+ key: attr.key,
+ type: (attr as any).type,
+ size: (attr as any).size,
+ required: !!(attr as any).required,
+ default: (attr as any).xdefault,
+ array: !!(attr as any).array,
+ min: (attr as any).min,
+ max: (attr as any).max,
+ elements: (attr as any).elements,
+ encrypt: (attr as any).encrypted,
+ relatedCollection: (attr as any).relatedCollection,
+ relationType: (attr as any).relationType,
+ twoWay: (attr as any).twoWay,
+ twoWayKey: (attr as any).twoWayKey,
+ onDelete: (attr as any).onDelete,
+ side: (attr as any).side,
+ };
+ await adapter.createAttribute(base);
+ await delay(150);
+ };
+
+ // Local queue for unresolved relationships
+ const relQueue: { tableId: string; attr: Attribute }[] = [];
+
+ for (const collection of collectionsToProcess) {
+ const { attributes, indexes, ...collectionData } = collection as any;
+
+ // Prepare permissions as strings (reuse Permission helper)
+ const permissions: string[] = [];
+ if (collection.$permissions && collection.$permissions.length > 0) {
+ for (const p of collection.$permissions as any[]) {
+ if (typeof p === 'string') permissions.push(p);
+ else {
+ switch (p.permission) {
+ case 'read': permissions.push(Permission.read(p.target)); break;
+ case 'create': permissions.push(Permission.create(p.target)); break;
+ case 'update': permissions.push(Permission.update(p.target)); break;
+ case 'delete': permissions.push(Permission.delete(p.target)); break;
+ case 'write': permissions.push(Permission.write(p.target)); break;
+ default: break;
+ }
+ }
+ }
+ }
+
+ // Find existing table by name
+ const list = await adapter.listTables({ databaseId, queries: [Query.equal('name', collectionData.name)] });
+ const items: any[] = (list as any).tables || [];
+ let table = items[0];
+ let tableId: string;
+
+ if (!table) {
+ // Determine ID (prefer provided $id or re-use deleted one)
+ let foundColl = deletedCollections?.find(
+ (coll) => coll.collectionName.toLowerCase().trim().replace(" ", "") === collectionData.name.toLowerCase().trim().replace(" ", "")
+ );
+ if (collectionData.$id) tableId = collectionData.$id;
+ else if (foundColl && !usedIds.has(foundColl.collectionId)) tableId = foundColl.collectionId;
+ else tableId = ID.unique();
+ usedIds.add(tableId);
+
+ const res = await adapter.createTable({
+ databaseId,
+ id: tableId,
+ name: collectionData.name,
+ permissions,
+ documentSecurity: !!collectionData.documentSecurity,
+ enabled: collectionData.enabled !== false
+ });
+ table = (res as any).data || res;
+ nameToIdMapping.set(collectionData.name, tableId);
+ } else {
+ tableId = table.$id;
+ await adapter.updateTable({
+ databaseId,
+ id: tableId,
+ name: collectionData.name,
+ permissions,
+ documentSecurity: !!collectionData.documentSecurity,
+ enabled: collectionData.enabled !== false
+ });
+ }
+
+ // Add small delay after table create/update
+ await delay(250);
+
+ // Create attributes: non-relationship first
+ const nonRel = (attributes || []).filter((a: Attribute) => a.type !== 'relationship');
+ for (const attr of nonRel) {
+ await createAttr(tableId, attr as Attribute);
+ }
+
+ // Relationship attributes — resolve relatedCollection to ID
+ const rels = (attributes || []).filter((a: Attribute) => a.type === 'relationship');
+ for (const attr of rels as any[]) {
+ const relNameOrId = attr.relatedCollection as string | undefined;
+ if (!relNameOrId) continue;
+ let relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
+
+ // If looks like a name (not ULID) and not in cache, try query by name
+ if (!nameToIdMapping.has(relNameOrId)) {
+ try {
+ const relList = await adapter.listTables({ databaseId, queries: [Query.equal('name', relNameOrId)] });
+ const relItems: any[] = (relList as any).tables || [];
+ if (relItems[0]?.$id) {
+ relId = relItems[0].$id;
+ nameToIdMapping.set(relNameOrId, relId);
+ }
+ } catch {}
+ }
+
+ if (relId && typeof relId === 'string') {
+ attr.relatedCollection = relId;
+ await createAttr(tableId, attr as Attribute);
+ } else {
+ // Defer if unresolved
+ relQueue.push({ tableId, attr: attr as Attribute });
+ }
+ }
+
+ // Indexes
+ const idxs = (indexes || []) as any[];
+ for (const idx of idxs) {
+ try {
+ await adapter.createIndex({
+ databaseId,
+ tableId,
+ key: idx.key,
+ type: idx.type,
+ attributes: idx.attributes,
+ orders: idx.orders || []
+ });
+ await delay(150);
+ } catch (e) {
+ MessageFormatter.error(`Failed to create index ${idx.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Indexes' });
+ }
+ }
+ }
+
+ // Process queued relationships once mapping likely populated
+ for (const { tableId, attr } of relQueue) {
+ const relNameOrId = (attr as any).relatedCollection as string | undefined;
+ if (!relNameOrId) continue;
+ const relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
+ if (relId) {
+ (attr as any).relatedCollection = relId;
+ try {
+ await adapter.createAttribute({
+ databaseId,
+ tableId,
+ key: (attr as any).key,
+ type: (attr as any).type,
+ size: (attr as any).size,
+ required: !!(attr as any).required,
+ default: (attr as any).xdefault,
+ array: !!(attr as any).array,
+ min: (attr as any).min,
+ max: (attr as any).max,
+ elements: (attr as any).elements,
+ relatedCollection: relId,
+ relationType: (attr as any).relationType,
+ twoWay: (attr as any).twoWay,
+ twoWayKey: (attr as any).twoWayKey,
+ onDelete: (attr as any).onDelete,
+ side: (attr as any).side
+ });
+ await delay(150);
+ } catch (e) {
+ MessageFormatter.error(`Failed queued relationship ${attr.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Attributes' });
+ }
+ }
+ }
+ };
 
  export const generateMockData = async (
  database: Databases,
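With this change, `createOrUpdateCollections` probes the config via `getAdapterFromConfig` and, when `apiMode` is `'tablesdb'`, delegates to the new `createOrUpdateCollectionsViaAdapter`; any probe failure falls back to the legacy `Databases` path. A hedged caller-side sketch (import paths are assumptions):

```ts
// Import paths are assumptions; the call signature mirrors the diff above.
import { Databases } from "node-appwrite";
import { createOrUpdateCollections } from "appwrite-utils-cli/dist/collections/methods.js";
import type { AppwriteConfig } from "appwrite-utils";

export async function pushCollections(config: AppwriteConfig, databaseId: string) {
  // config.appwriteClient is expected to be initialized, as in setupMigrationDatabase.
  const database = new Databases(config.appwriteClient!);

  // When the config's API mode resolves to 'tablesdb', the call is routed through
  // createOrUpdateCollectionsViaAdapter; otherwise the legacy Databases path runs.
  await createOrUpdateCollections(
    database,
    databaseId,
    config,
    undefined, // deletedCollections
    []         // selectedCollections: empty array means "use config.collections"
  );
}
```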
@@ -111,12 +111,13 @@ export const setupMigrationDatabase = async (config: AppwriteConfig) => {
  console.log("---------------------------------");
  };
 
- export const ensureDatabasesExist = async (config: AppwriteConfig, databasesToEnsure?: Models.Database[]) => {
+ export const ensureDatabasesExist = async (config: AppwriteConfig, databasesToEnsure?: Models.Database[]) => {
  if (!config.appwriteClient) {
  throw new Error("Appwrite client is not initialized in the config");
  }
- const database = new Databases(config.appwriteClient);
- const databasesToCreate = databasesToEnsure || config.databases || [];
+ const database = new Databases(config.appwriteClient);
+ // Work on a shallow copy so we don't mutate caller-provided arrays
+ const databasesToCreate = [...(databasesToEnsure || config.databases || [])];
 
  if (!databasesToCreate.length) {
  console.log("No databases to create");
@@ -130,10 +131,13 @@ export const ensureDatabasesExist = async (config: AppwriteConfig, databasesToEn
  const migrationsDatabase = existingDatabases.databases.find(
  (d) => d.name.toLowerCase().trim().replace(" ", "") === "migrations"
  );
- if (config.useMigrations && existingDatabases.databases.length !== 0 && migrationsDatabase) {
- console.log("Creating all databases including migrations");
- databasesToCreate.push(migrationsDatabase);
- }
+ if (config.useMigrations && existingDatabases.databases.length !== 0 && migrationsDatabase) {
+ console.log("Creating all databases including migrations");
+ // Ensure migrations exists, but do not mutate the caller's array
+ if (!databasesToCreate.some((d) => d.$id === migrationsDatabase.$id)) {
+ databasesToCreate.push(migrationsDatabase);
+ }
+ }
 
  for (const db of databasesToCreate) {
  if (!existingDatabases.databases.some((d) => d.name === db.name)) {
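The `ensureDatabasesExist` hunks replace a direct push onto the caller's array with a shallow copy plus a duplicate check. A small illustration of the difference, using simplified types:

```ts
// Why the copy matters: pushing onto the parameter mutates the array the caller
// handed in, so repeated calls could accumulate the migrations database.
function ensureOld(databasesToEnsure: { $id: string }[], migrations: { $id: string }) {
  const databasesToCreate = databasesToEnsure; // same reference as the caller's array
  databasesToCreate.push(migrations);          // caller's array now has an extra entry
  return databasesToCreate;
}

// The 1.5.1 approach: spread into a fresh array and guard against duplicates.
function ensureNew(databasesToEnsure: { $id: string }[], migrations: { $id: string }) {
  const databasesToCreate = [...databasesToEnsure];
  if (!databasesToCreate.some((d) => d.$id === migrations.$id)) {
    databasesToCreate.push(migrations);
  }
  return databasesToCreate;
}
```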
@@ -1159,13 +1159,14 @@ export class InteractiveCLI {
  return;
  }
 
- const collections = await this.selectCollections(
- databases[0],
- this.controller!.database!,
- chalk.blue("Select local collections to push:"),
- true,
- true // prefer local
- );
+ const collections = await this.selectCollections(
+ databases[0],
+ this.controller!.database!,
+ chalk.blue("Select local collections to push:"),
+ true,
+ true, // prefer local
+ true // filter by selected database
+ );
 
  const { syncFunctions } = await inquirer.prompt([
  {
@@ -16,7 +16,8 @@ import {
  AppwriteException,
  } from "node-appwrite";
  import { InputFile } from "node-appwrite/file";
- import { MessageFormatter } from "../shared/messageFormatter.js";
+ import { MessageFormatter } from "../shared/messageFormatter.js";
+ import { processQueue, queuedOperations } from "../shared/operationQueue.js";
  import { ProgressManager } from "../shared/progressManager.js";
  import { getClient } from "../utils/getClientFromConfig.js";
  import {
@@ -624,13 +625,26 @@ export class ComprehensiveTransfer {
  { prefix: "Transfer" }
  );
  }
- }
- } catch (error) {
- MessageFormatter.error(
- `Failed to create database structure for ${dbId}`,
- error instanceof Error ? error : new Error(String(error)),
- { prefix: "Transfer" }
- );
+ }
+ // After processing all collections' attributes and indexes, process any queued
+ // relationship attributes so dependencies are resolved within this phase.
+ if (queuedOperations.length > 0) {
+ MessageFormatter.info(
+ `Processing ${queuedOperations.length} queued relationship operations`,
+ { prefix: "Transfer" }
+ );
+ await processQueue(this.targetDatabases, dbId);
+ } else {
+ MessageFormatter.info("No queued relationship operations to process", {
+ prefix: "Transfer",
+ });
+ }
+ } catch (error) {
+ MessageFormatter.error(
+ `Failed to create database structure for ${dbId}`,
+ error instanceof Error ? error : new Error(String(error)),
+ { prefix: "Transfer" }
+ );
  throw error;
  }
  }
@@ -33,16 +33,23 @@ export interface JsonSchema {
  definitions?: Record<string, JsonSchemaProperty>;
  }
 
- export class JsonSchemaGenerator {
- private config: AppwriteConfig;
- private appwriteFolderPath: string;
- private relationshipMap = new Map<string, any[]>();
-
- constructor(config: AppwriteConfig, appwriteFolderPath: string) {
- this.config = config;
- this.appwriteFolderPath = appwriteFolderPath;
- this.extractRelationships();
- }
+ export class JsonSchemaGenerator {
+ private config: AppwriteConfig;
+ private appwriteFolderPath: string;
+ private relationshipMap = new Map<string, any[]>();
+
+ constructor(config: AppwriteConfig, appwriteFolderPath: string) {
+ this.config = config;
+ this.appwriteFolderPath = appwriteFolderPath;
+ this.extractRelationships();
+ }
+
+ private resolveCollectionName = (idOrName: string): string => {
+ const col = this.config.collections?.find(
+ (c) => c.$id === (idOrName as any) || c.name === idOrName
+ );
+ return col?.name ?? idOrName;
+ };
 
  private extractRelationships(): void {
  if (!this.config.collections) return;
@@ -51,16 +58,16 @@ export class JsonSchemaGenerator {
  if (!collection.attributes) return;
 
  collection.attributes.forEach((attr) => {
- if (attr.type === "relationship" && attr.relatedCollection) {
- const relationships = this.relationshipMap.get(collection.name) || [];
- relationships.push({
- attributeKey: attr.key,
- relatedCollection: attr.relatedCollection,
- relationType: attr.relationType,
- isArray: attr.relationType === "oneToMany" || attr.relationType === "manyToMany"
- });
- this.relationshipMap.set(collection.name, relationships);
- }
+ if (attr.type === "relationship" && attr.relatedCollection) {
+ const relationships = this.relationshipMap.get(collection.name) || [];
+ relationships.push({
+ attributeKey: attr.key,
+ relatedCollection: this.resolveCollectionName(attr.relatedCollection),
+ relationType: attr.relationType,
+ isArray: attr.relationType === "oneToMany" || attr.relationType === "manyToMany"
+ });
+ this.relationshipMap.set(collection.name, relationships);
+ }
  });
  });
  }
@@ -153,14 +160,14 @@ export class JsonSchemaGenerator {
  }
  break;
 
- case "relationship":
- if (attribute.relatedCollection) {
- // For relationships, reference the related collection schema
- schema.$ref = `#/definitions/${toPascalCase(attribute.relatedCollection)}`;
- } else {
- schema.type = "string";
- }
- break;
+ case "relationship":
+ if (attribute.relatedCollection) {
+ // For relationships, reference the related collection schema
+ schema.$ref = `#/definitions/${toPascalCase(this.resolveCollectionName(attribute.relatedCollection))}`;
+ } else {
+ schema.type = "string";
+ }
+ break;
 
  default:
  schema.type = "string";
@@ -385,4 +392,4 @@ export default jsonSchemas;
 
  return { valid: errors.length === 0, errors };
  }
- }
+ }