appwrite-utils-cli 1.2.6 → 1.2.7

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
@@ -102,9 +102,33 @@ export declare class ComprehensiveTransfer {
  */
  private createCollectionIndexesWithStatusCheck;
  /**
- * Helper method to transfer documents between databases
+ * Helper method to transfer documents between databases using bulk operations with content and permission-based filtering
  */
  private transferDocumentsBetweenDatabases;
+ /**
+ * Fetch target documents by IDs in batches to check existence and permissions
+ */
+ private fetchTargetDocumentsBatch;
+ /**
+ * Transfer documents using bulk operations with proper batch size handling
+ */
+ private transferDocumentsBulk;
+ /**
+ * Direct HTTP implementation of bulk upsert API
+ */
+ private bulkUpsertDocuments;
+ /**
+ * Transfer documents individually with rate limiting
+ */
+ private transferDocumentsIndividual;
+ /**
+ * Update documents individually with content and/or permission changes
+ */
+ private updateDocumentsIndividual;
+ /**
+ * Utility method to chunk arrays
+ */
+ private chunkArray;
  /**
  * Helper method to fetch all teams with pagination
  */
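
Note on the declaration changes above: the new private helpers implement a per-document create/update/skip decision described by their doc comments. The following is a minimal, illustrative TypeScript sketch of that decision only; it is not the package's implementation. The simplified Doc type is a hypothetical stand-in for Models.Document, while objectNeedsUpdate is the appwrite-utils helper imported in the diff below.

import { objectNeedsUpdate } from "appwrite-utils";

// Hypothetical stand-in for Models.Document, for this sketch only.
type Doc = { $id: string; $permissions?: string[]; [key: string]: unknown };

type BatchPlan = { create: Doc[]; update: { doc: Doc; reason: string }[]; skipped: number };

// Mirrors the per-document decision made in transferDocumentsBetweenDatabases below:
// missing in target -> create; content or permissions differ -> update; otherwise skip.
function planBatch(sourceDocs: Doc[], existingById: Map<string, Doc>): BatchPlan {
  const plan: BatchPlan = { create: [], update: [], skipped: 0 };
  for (const doc of sourceDocs) {
    const existing = existingById.get(doc.$id);
    if (!existing) {
      plan.create.push(doc);
      continue;
    }
    const permissionsDiffer =
      JSON.stringify([...(doc.$permissions ?? [])].sort()) !==
      JSON.stringify([...(existing.$permissions ?? [])].sort());
    const contentDiffers = objectNeedsUpdate(existing, doc); // content comparison, excluding system fields
    if (contentDiffers || permissionsDiffer) {
      const reason = contentDiffers && permissionsDiffer
        ? "content and permissions differ"
        : contentDiffers ? "content differs" : "permissions differ";
      plan.update.push({ doc, reason });
    } else {
      plan.skipped++;
    }
  }
  return plan;
}
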
@@ -1,4 +1,4 @@
- import { converterFunctions, tryAwaitWithRetry, parseAttribute } from "appwrite-utils";
+ import { converterFunctions, tryAwaitWithRetry, parseAttribute, objectNeedsUpdate } from "appwrite-utils";
  import { Client, Databases, Storage, Users, Functions, Teams, Query, } from "node-appwrite";
  import { InputFile } from "node-appwrite/file";
  import { MessageFormatter } from "../shared/messageFormatter.js";
@@ -361,6 +361,27 @@ export class ComprehensiveTransfer {
  await this.createBucketWithFallback(bucket);
  MessageFormatter.success(`Created bucket: ${bucket.name}`, { prefix: "Transfer" });
  }
+ else {
+ // Compare bucket permissions and update if needed
+ const sourcePermissions = JSON.stringify(bucket.$permissions?.sort() || []);
+ const targetPermissions = JSON.stringify(existingBucket.$permissions?.sort() || []);
+ if (sourcePermissions !== targetPermissions ||
+ existingBucket.name !== bucket.name ||
+ existingBucket.fileSecurity !== bucket.fileSecurity ||
+ existingBucket.enabled !== bucket.enabled) {
+ MessageFormatter.warning(`Bucket ${bucket.name} exists but has different settings. Updating to match source.`, { prefix: "Transfer" });
+ try {
+ await this.targetStorage.updateBucket(bucket.$id, bucket.name, bucket.$permissions, bucket.fileSecurity, bucket.enabled, bucket.maximumFileSize, bucket.allowedFileExtensions, bucket.compression, bucket.encryption, bucket.antivirus);
+ MessageFormatter.success(`Updated bucket ${bucket.name} to match source`, { prefix: "Transfer" });
+ }
+ catch (updateError) {
+ MessageFormatter.error(`Failed to update bucket ${bucket.name}`, updateError instanceof Error ? updateError : new Error(String(updateError)), { prefix: "Transfer" });
+ }
+ }
+ else {
+ MessageFormatter.info(`Bucket ${bucket.name} already exists with matching settings`, { prefix: "Transfer" });
+ }
+ }
  // Transfer bucket files with enhanced validation
  await this.transferBucketFiles(bucket.$id, bucket.$id);
  this.results.buckets.transferred++;
@@ -481,10 +502,27 @@ export class ComprehensiveTransfer {
  // Process files with rate limiting
  const fileTasks = files.files.map(file => this.fileLimit(async () => {
  try {
- // Check if file already exists
+ // Check if file already exists and compare permissions
+ let existingFile = null;
  try {
- await this.targetStorage.getFile(targetBucketId, file.$id);
- MessageFormatter.info(`File ${file.name} already exists, skipping`, { prefix: "Transfer" });
+ existingFile = await this.targetStorage.getFile(targetBucketId, file.$id);
+ // Compare permissions between source and target file
+ const sourcePermissions = JSON.stringify(file.$permissions?.sort() || []);
+ const targetPermissions = JSON.stringify(existingFile.$permissions?.sort() || []);
+ if (sourcePermissions !== targetPermissions) {
+ MessageFormatter.warning(`File ${file.name} (${file.$id}) exists but has different permissions. Source: ${sourcePermissions}, Target: ${targetPermissions}`, { prefix: "Transfer" });
+ // Update file permissions to match source
+ try {
+ await this.targetStorage.updateFile(targetBucketId, file.$id, file.name, file.$permissions);
+ MessageFormatter.success(`Updated file ${file.name} permissions to match source`, { prefix: "Transfer" });
+ }
+ catch (updateError) {
+ MessageFormatter.error(`Failed to update permissions for file ${file.name}`, updateError instanceof Error ? updateError : new Error(String(updateError)), { prefix: "Transfer" });
+ }
+ }
+ else {
+ MessageFormatter.info(`File ${file.name} already exists with matching permissions, skipping`, { prefix: "Transfer" });
+ }
  return;
  }
  catch (error) {
@@ -702,50 +740,259 @@ export class ComprehensiveTransfer {
  return await createOrUpdateIndexesWithStatusCheck(dbId, databases, collectionId, collection, indexes);
  }
  /**
- * Helper method to transfer documents between databases
+ * Helper method to transfer documents between databases using bulk operations with content and permission-based filtering
  */
  async transferDocumentsBetweenDatabases(sourceDb, targetDb, sourceDbId, targetDbId, sourceCollectionId, targetCollectionId) {
- MessageFormatter.info(`Transferring documents from ${sourceCollectionId} to ${targetCollectionId}`, { prefix: "Transfer" });
+ MessageFormatter.info(`Transferring documents from ${sourceCollectionId} to ${targetCollectionId} with bulk operations, content comparison, and permission filtering`, { prefix: "Transfer" });
  let lastId;
  let totalTransferred = 0;
+ let totalSkipped = 0;
+ let totalUpdated = 0;
+ // Check if bulk operations are supported
+ const supportsBulk = this.options.sourceEndpoint.includes('cloud.appwrite.io') ||
+ this.options.targetEndpoint.includes('cloud.appwrite.io');
+ if (supportsBulk) {
+ MessageFormatter.info(`Using bulk operations for enhanced performance`, { prefix: "Transfer" });
+ }
  while (true) {
- const queries = [Query.limit(50)]; // Smaller batch size for better performance
+ // Fetch source documents in larger batches (1000 instead of 50)
+ const queries = [Query.limit(1000)];
  if (lastId) {
  queries.push(Query.cursorAfter(lastId));
  }
- const documents = await tryAwaitWithRetry(async () => sourceDb.listDocuments(sourceDbId, sourceCollectionId, queries));
- if (documents.documents.length === 0) {
+ const sourceDocuments = await tryAwaitWithRetry(async () => sourceDb.listDocuments(sourceDbId, sourceCollectionId, queries));
+ if (sourceDocuments.documents.length === 0) {
  break;
  }
- // Transfer documents with rate limiting
- const transferTasks = documents.documents.map(doc => this.limit(async () => {
- try {
- // Check if document already exists
- try {
- await targetDb.getDocument(targetDbId, targetCollectionId, doc.$id);
- MessageFormatter.info(`Document ${doc.$id} already exists, skipping`, { prefix: "Transfer" });
- return;
+ MessageFormatter.info(`Processing batch of ${sourceDocuments.documents.length} source documents`, { prefix: "Transfer" });
+ // Extract document IDs from the current batch
+ const sourceDocIds = sourceDocuments.documents.map(doc => doc.$id);
+ // Fetch existing documents from target in a single query
+ const existingTargetDocs = await this.fetchTargetDocumentsBatch(targetDb, targetDbId, targetCollectionId, sourceDocIds);
+ // Create a map for quick lookup of existing documents
+ const existingDocsMap = new Map();
+ existingTargetDocs.forEach(doc => {
+ existingDocsMap.set(doc.$id, doc);
+ });
+ // Filter documents based on existence, content comparison, and permission comparison
+ const documentsToTransfer = [];
+ const documentsToUpdate = [];
+ for (const sourceDoc of sourceDocuments.documents) {
+ const existingTargetDoc = existingDocsMap.get(sourceDoc.$id);
+ if (!existingTargetDoc) {
+ // Document doesn't exist in target, needs to be transferred
+ documentsToTransfer.push(sourceDoc);
+ }
+ else {
+ // Document exists, compare both content and permissions
+ const sourcePermissions = JSON.stringify((sourceDoc.$permissions || []).sort());
+ const targetPermissions = JSON.stringify((existingTargetDoc.$permissions || []).sort());
+ const permissionsDiffer = sourcePermissions !== targetPermissions;
+ // Use objectNeedsUpdate to compare document content (excluding system fields)
+ const contentDiffers = objectNeedsUpdate(existingTargetDoc, sourceDoc);
+ if (contentDiffers && permissionsDiffer) {
+ // Both content and permissions differ
+ documentsToUpdate.push({
+ doc: sourceDoc,
+ targetDoc: existingTargetDoc,
+ reason: "content and permissions differ"
+ });
+ MessageFormatter.info(`Document ${sourceDoc.$id} exists but content and permissions differ - will update`, { prefix: "Transfer" });
  }
- catch (error) {
- // Document doesn't exist, proceed with creation
+ else if (contentDiffers) {
+ // Only content differs
+ documentsToUpdate.push({
+ doc: sourceDoc,
+ targetDoc: existingTargetDoc,
+ reason: "content differs"
+ });
+ MessageFormatter.info(`Document ${sourceDoc.$id} exists but content differs - will update`, { prefix: "Transfer" });
+ }
+ else if (permissionsDiffer) {
+ // Only permissions differ
+ documentsToUpdate.push({
+ doc: sourceDoc,
+ targetDoc: existingTargetDoc,
+ reason: "permissions differ"
+ });
+ MessageFormatter.info(`Document ${sourceDoc.$id} exists but permissions differ - will update`, { prefix: "Transfer" });
+ }
+ else {
+ // Document exists with identical content AND permissions, skip
+ totalSkipped++;
+ MessageFormatter.info(`Document ${sourceDoc.$id} exists with matching content and permissions - skipping`, { prefix: "Transfer" });
  }
- // Create document in target
- const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
- await tryAwaitWithRetry(async () => targetDb.createDocument(targetDbId, targetCollectionId, doc.$id, docData, doc.$permissions));
- totalTransferred++;
- MessageFormatter.success(`Transferred document ${doc.$id}`, { prefix: "Transfer" });
  }
- catch (error) {
- MessageFormatter.error(`Failed to transfer document ${doc.$id}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Transfer" });
+ }
+ MessageFormatter.info(`Batch analysis: ${documentsToTransfer.length} to create, ${documentsToUpdate.length} to update, ${totalSkipped} skipped so far`, { prefix: "Transfer" });
+ // Process new documents with bulk operations if supported and available
+ if (documentsToTransfer.length > 0) {
+ if (supportsBulk && documentsToTransfer.length >= 10) {
+ // Use bulk operations for large batches
+ await this.transferDocumentsBulk(targetDb, targetDbId, targetCollectionId, documentsToTransfer);
+ totalTransferred += documentsToTransfer.length;
+ MessageFormatter.success(`Bulk transferred ${documentsToTransfer.length} new documents`, { prefix: "Transfer" });
  }
- }));
- await Promise.all(transferTasks);
- if (documents.documents.length < 50) {
+ else {
+ // Use individual transfers for smaller batches or non-bulk endpoints
+ const transferCount = await this.transferDocumentsIndividual(targetDb, targetDbId, targetCollectionId, documentsToTransfer);
+ totalTransferred += transferCount;
+ }
+ }
+ // Process document updates (always individual since bulk update with permissions needs special handling)
+ if (documentsToUpdate.length > 0) {
+ const updateCount = await this.updateDocumentsIndividual(targetDb, targetDbId, targetCollectionId, documentsToUpdate);
+ totalUpdated += updateCount;
+ }
+ if (sourceDocuments.documents.length < 1000) {
  break;
  }
- lastId = documents.documents[documents.documents.length - 1].$id;
+ lastId = sourceDocuments.documents[sourceDocuments.documents.length - 1].$id;
+ }
+ MessageFormatter.info(`Transfer complete: ${totalTransferred} new, ${totalUpdated} updated, ${totalSkipped} skipped from ${sourceCollectionId} to ${targetCollectionId}`, { prefix: "Transfer" });
+ }
+ /**
+ * Fetch target documents by IDs in batches to check existence and permissions
+ */
+ async fetchTargetDocumentsBatch(targetDb, targetDbId, targetCollectionId, docIds) {
+ const documents = [];
+ // Split IDs into chunks of 100 for Query.equal limitations
+ const idChunks = this.chunkArray(docIds, 100);
+ for (const chunk of idChunks) {
+ try {
+ const result = await tryAwaitWithRetry(async () => targetDb.listDocuments(targetDbId, targetCollectionId, [
+ Query.equal('$id', chunk),
+ Query.limit(100)
+ ]));
+ documents.push(...result.documents);
+ }
+ catch (error) {
+ // If query fails, fall back to individual gets (less efficient but more reliable)
+ MessageFormatter.warning(`Batch query failed for ${chunk.length} documents, falling back to individual checks`, { prefix: "Transfer" });
+ for (const docId of chunk) {
+ try {
+ const doc = await targetDb.getDocument(targetDbId, targetCollectionId, docId);
+ documents.push(doc);
+ }
+ catch (getError) {
+ // Document doesn't exist, which is fine
+ }
+ }
+ }
+ }
+ return documents;
+ }
+ /**
+ * Transfer documents using bulk operations with proper batch size handling
+ */
+ async transferDocumentsBulk(targetDb, targetDbId, targetCollectionId, documents) {
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
+ return {
+ $id,
+ $permissions,
+ ...docData
+ };
+ });
+ // Process in smaller chunks for bulk operations (1000 for Pro, 100 for Free tier)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+ let processed = false;
+ for (const maxBatchSize of batchSizes) {
+ const documentBatches = this.chunkArray(preparedDocs, maxBatchSize);
+ try {
+ for (const batch of documentBatches) {
+ MessageFormatter.info(`Bulk upserting ${batch.length} documents...`, { prefix: "Transfer" });
+ await this.bulkUpsertDocuments(this.targetClient, targetDbId, targetCollectionId, batch);
+ MessageFormatter.success(`✅ Bulk upserted ${batch.length} documents`, { prefix: "Transfer" });
+ // Add delay between batches to respect rate limits
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await new Promise(resolve => setTimeout(resolve, 200));
+ }
+ }
+ processed = true;
+ break; // Success, exit batch size loop
+ }
+ catch (error) {
+ MessageFormatter.warning(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`, { prefix: "Transfer" });
+ continue; // Try next smaller batch size
+ }
+ }
+ if (!processed) {
+ MessageFormatter.warning(`All bulk operations failed, falling back to individual transfers`, { prefix: "Transfer" });
+ // Fall back to individual transfers
+ await this.transferDocumentsIndividual(targetDb, targetDbId, targetCollectionId, documents);
+ }
+ }
+ /**
+ * Direct HTTP implementation of bulk upsert API
+ */
+ async bulkUpsertDocuments(client, dbId, collectionId, documents) {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+ const response = await fetch(url.toString(), {
+ method: 'PUT',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+ return await response.json();
+ }
+ /**
+ * Transfer documents individually with rate limiting
+ */
+ async transferDocumentsIndividual(targetDb, targetDbId, targetCollectionId, documents) {
+ let successCount = 0;
+ const transferTasks = documents.map(doc => this.limit(async () => {
+ try {
+ const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
+ await tryAwaitWithRetry(async () => targetDb.createDocument(targetDbId, targetCollectionId, doc.$id, docData, doc.$permissions));
+ successCount++;
+ MessageFormatter.success(`Transferred document ${doc.$id}`, { prefix: "Transfer" });
+ }
+ catch (error) {
+ MessageFormatter.error(`Failed to transfer document ${doc.$id}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Transfer" });
+ }
+ }));
+ await Promise.all(transferTasks);
+ return successCount;
+ }
+ /**
+ * Update documents individually with content and/or permission changes
+ */
+ async updateDocumentsIndividual(targetDb, targetDbId, targetCollectionId, documentPairs) {
+ let successCount = 0;
+ const updateTasks = documentPairs.map(({ doc, targetDoc, reason }) => this.limit(async () => {
+ try {
+ const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
+ await tryAwaitWithRetry(async () => targetDb.updateDocument(targetDbId, targetCollectionId, doc.$id, docData, doc.$permissions));
+ successCount++;
+ MessageFormatter.success(`Updated document ${doc.$id} (${reason}) - permissions: [${targetDoc.$permissions?.join(', ')}] → [${doc.$permissions?.join(', ')}]`, { prefix: "Transfer" });
+ }
+ catch (error) {
+ MessageFormatter.error(`Failed to update document ${doc.$id} (${reason})`, error instanceof Error ? error : new Error(String(error)), { prefix: "Transfer" });
+ }
+ }));
+ await Promise.all(updateTasks);
+ return successCount;
+ }
+ /**
+ * Utility method to chunk arrays
+ */
+ chunkArray(array, size) {
+ const chunks = [];
+ for (let i = 0; i < array.length; i += size) {
+ chunks.push(array.slice(i, i + size));
  }
- MessageFormatter.info(`Transferred ${totalTransferred} documents from ${sourceCollectionId} to ${targetCollectionId}`, { prefix: "Transfer" });
+ return chunks;
  }
  /**
  * Helper method to fetch all teams with pagination
@@ -810,10 +1057,27 @@ export class ComprehensiveTransfer {
  // Transfer memberships with rate limiting
  const transferTasks = memberships.map(membership => this.userLimit(async () => {
  try {
- // Check if membership already exists
+ // Check if membership already exists and compare roles
+ let existingMembership = null;
  try {
- await this.targetTeams.getMembership(teamId, membership.$id);
- MessageFormatter.info(`Membership ${membership.$id} already exists, skipping`, { prefix: "Transfer" });
+ existingMembership = await this.targetTeams.getMembership(teamId, membership.$id);
+ // Compare roles between source and target membership
+ const sourceRoles = JSON.stringify(membership.roles?.sort() || []);
+ const targetRoles = JSON.stringify(existingMembership.roles?.sort() || []);
+ if (sourceRoles !== targetRoles) {
+ MessageFormatter.warning(`Membership ${membership.$id} exists but has different roles. Source: ${sourceRoles}, Target: ${targetRoles}`, { prefix: "Transfer" });
+ // Update membership roles to match source
+ try {
+ await this.targetTeams.updateMembership(teamId, membership.$id, membership.roles);
+ MessageFormatter.success(`Updated membership ${membership.$id} roles to match source`, { prefix: "Transfer" });
+ }
+ catch (updateError) {
+ MessageFormatter.error(`Failed to update roles for membership ${membership.$id}`, updateError instanceof Error ? updateError : new Error(String(updateError)), { prefix: "Transfer" });
+ }
+ }
+ else {
+ MessageFormatter.info(`Membership ${membership.$id} already exists with matching roles, skipping`, { prefix: "Transfer" });
+ }
  return;
  }
  catch (error) {
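
For reference, the bulkUpsertDocuments helper added above boils down to a single PUT request against the collection's documents endpoint. Below is a standalone, illustrative sketch of that call, assuming a deployment that exposes the bulk documents API (the diff gates bulk mode behind its cloud.appwrite.io check); the endpoint, project ID, and API key parameters are caller-supplied placeholders, not values from the package.

// Illustrative sketch of the raw HTTP call issued by bulkUpsertDocuments above.
// All argument values are placeholders provided by the caller.
async function bulkUpsert(
  endpoint: string,        // e.g. "https://cloud.appwrite.io/v1" (placeholder)
  projectId: string,
  apiKey: string,
  dbId: string,
  collectionId: string,
  documents: Array<Record<string, unknown>>
): Promise<unknown> {
  const url = `${endpoint}/databases/${dbId}/collections/${collectionId}/documents`;
  const response = await fetch(url, {
    method: "PUT",
    headers: {
      "Content-Type": "application/json",
      "X-Appwrite-Project": projectId,
      "X-Appwrite-Key": apiKey,
    },
    body: JSON.stringify({ documents }),
  });
  if (!response.ok) {
    throw new Error(`Bulk upsert failed with status ${response.status}`);
  }
  return response.json();
}

When this request fails, the code in the diff falls back to per-document createDocument calls via transferDocumentsIndividual, as shown in the hunk above.
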
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "appwrite-utils-cli",
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "1.2.6",
+ "version": "1.2.7",
  "main": "src/main.ts",
  "type": "module",
  "repository": {
@@ -1,4 +1,4 @@
- import { converterFunctions, tryAwaitWithRetry, parseAttribute } from "appwrite-utils";
+ import { converterFunctions, tryAwaitWithRetry, parseAttribute, objectNeedsUpdate } from "appwrite-utils";
  import {
  Client,
  Databases,
@@ -519,6 +519,45 @@ export class ComprehensiveTransfer {
  // Create bucket with fallback strategy for maximumFileSize
  await this.createBucketWithFallback(bucket);
  MessageFormatter.success(`Created bucket: ${bucket.name}`, { prefix: "Transfer" });
+ } else {
+ // Compare bucket permissions and update if needed
+ const sourcePermissions = JSON.stringify(bucket.$permissions?.sort() || []);
+ const targetPermissions = JSON.stringify(existingBucket.$permissions?.sort() || []);
+
+ if (sourcePermissions !== targetPermissions ||
+ existingBucket.name !== bucket.name ||
+ existingBucket.fileSecurity !== bucket.fileSecurity ||
+ existingBucket.enabled !== bucket.enabled) {
+
+ MessageFormatter.warning(
+ `Bucket ${bucket.name} exists but has different settings. Updating to match source.`,
+ { prefix: "Transfer" }
+ );
+
+ try {
+ await this.targetStorage.updateBucket(
+ bucket.$id,
+ bucket.name,
+ bucket.$permissions,
+ bucket.fileSecurity,
+ bucket.enabled,
+ bucket.maximumFileSize,
+ bucket.allowedFileExtensions,
+ bucket.compression as any,
+ bucket.encryption,
+ bucket.antivirus
+ );
+ MessageFormatter.success(`Updated bucket ${bucket.name} to match source`, { prefix: "Transfer" });
+ } catch (updateError) {
+ MessageFormatter.error(
+ `Failed to update bucket ${bucket.name}`,
+ updateError instanceof Error ? updateError : new Error(String(updateError)),
+ { prefix: "Transfer" }
+ );
+ }
+ } else {
+ MessageFormatter.info(`Bucket ${bucket.name} already exists with matching settings`, { prefix: "Transfer" });
+ }
  }

  // Transfer bucket files with enhanced validation
@@ -703,10 +742,40 @@ export class ComprehensiveTransfer {
  const fileTasks = files.files.map(file =>
  this.fileLimit(async () => {
  try {
- // Check if file already exists
+ // Check if file already exists and compare permissions
+ let existingFile: Models.File | null = null;
  try {
- await this.targetStorage.getFile(targetBucketId, file.$id);
- MessageFormatter.info(`File ${file.name} already exists, skipping`, { prefix: "Transfer" });
+ existingFile = await this.targetStorage.getFile(targetBucketId, file.$id);
+
+ // Compare permissions between source and target file
+ const sourcePermissions = JSON.stringify(file.$permissions?.sort() || []);
+ const targetPermissions = JSON.stringify(existingFile.$permissions?.sort() || []);
+
+ if (sourcePermissions !== targetPermissions) {
+ MessageFormatter.warning(
+ `File ${file.name} (${file.$id}) exists but has different permissions. Source: ${sourcePermissions}, Target: ${targetPermissions}`,
+ { prefix: "Transfer" }
+ );
+
+ // Update file permissions to match source
+ try {
+ await this.targetStorage.updateFile(
+ targetBucketId,
+ file.$id,
+ file.name,
+ file.$permissions
+ );
+ MessageFormatter.success(`Updated file ${file.name} permissions to match source`, { prefix: "Transfer" });
+ } catch (updateError) {
+ MessageFormatter.error(
+ `Failed to update permissions for file ${file.name}`,
+ updateError instanceof Error ? updateError : new Error(String(updateError)),
+ { prefix: "Transfer" }
+ );
+ }
+ } else {
+ MessageFormatter.info(`File ${file.name} already exists with matching permissions, skipping`, { prefix: "Transfer" });
+ }
  return;
  } catch (error) {
  // File doesn't exist, proceed with transfer
@@ -998,7 +1067,7 @@ export class ComprehensiveTransfer {
  }

  /**
- * Helper method to transfer documents between databases
+ * Helper method to transfer documents between databases using bulk operations with content and permission-based filtering
  */
  private async transferDocumentsBetweenDatabases(
  sourceDb: Databases,
@@ -1008,69 +1077,407 @@ export class ComprehensiveTransfer {
  sourceCollectionId: string,
  targetCollectionId: string
  ): Promise<void> {
- MessageFormatter.info(`Transferring documents from ${sourceCollectionId} to ${targetCollectionId}`, { prefix: "Transfer" });
+ MessageFormatter.info(`Transferring documents from ${sourceCollectionId} to ${targetCollectionId} with bulk operations, content comparison, and permission filtering`, { prefix: "Transfer" });

  let lastId: string | undefined;
  let totalTransferred = 0;
+ let totalSkipped = 0;
+ let totalUpdated = 0;
+
+ // Check if bulk operations are supported
+ const supportsBulk = this.options.sourceEndpoint.includes('cloud.appwrite.io') ||
+ this.options.targetEndpoint.includes('cloud.appwrite.io');
+
+ if (supportsBulk) {
+ MessageFormatter.info(`Using bulk operations for enhanced performance`, { prefix: "Transfer" });
+ }

  while (true) {
- const queries = [Query.limit(50)]; // Smaller batch size for better performance
+ // Fetch source documents in larger batches (1000 instead of 50)
+ const queries = [Query.limit(1000)];
  if (lastId) {
  queries.push(Query.cursorAfter(lastId));
  }

- const documents = await tryAwaitWithRetry(async () =>
+ const sourceDocuments = await tryAwaitWithRetry(async () =>
  sourceDb.listDocuments(sourceDbId, sourceCollectionId, queries)
  );

- if (documents.documents.length === 0) {
+ if (sourceDocuments.documents.length === 0) {
  break;
  }

- // Transfer documents with rate limiting
- const transferTasks = documents.documents.map(doc =>
- this.limit(async () => {
- try {
- // Check if document already exists
- try {
- await targetDb.getDocument(targetDbId, targetCollectionId, doc.$id);
- MessageFormatter.info(`Document ${doc.$id} already exists, skipping`, { prefix: "Transfer" });
- return;
- } catch (error) {
- // Document doesn't exist, proceed with creation
- }
+ MessageFormatter.info(`Processing batch of ${sourceDocuments.documents.length} source documents`, { prefix: "Transfer" });

- // Create document in target
- const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
-
- await tryAwaitWithRetry(async () =>
- targetDb.createDocument(
- targetDbId,
- targetCollectionId,
- doc.$id,
- docData,
- doc.$permissions
- )
- );
+ // Extract document IDs from the current batch
+ const sourceDocIds = sourceDocuments.documents.map(doc => doc.$id);
+
+ // Fetch existing documents from target in a single query
+ const existingTargetDocs = await this.fetchTargetDocumentsBatch(
+ targetDb,
+ targetDbId,
+ targetCollectionId,
+ sourceDocIds
+ );

- totalTransferred++;
- MessageFormatter.success(`Transferred document ${doc.$id}`, { prefix: "Transfer" });
- } catch (error) {
- MessageFormatter.error(`Failed to transfer document ${doc.$id}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Transfer" });
+ // Create a map for quick lookup of existing documents
+ const existingDocsMap = new Map<string, Models.Document>();
+ existingTargetDocs.forEach(doc => {
+ existingDocsMap.set(doc.$id, doc);
+ });
+
+ // Filter documents based on existence, content comparison, and permission comparison
+ const documentsToTransfer: Models.Document[] = [];
+ const documentsToUpdate: { doc: Models.Document; targetDoc: Models.Document; reason: string }[] = [];
+
+ for (const sourceDoc of sourceDocuments.documents) {
+ const existingTargetDoc = existingDocsMap.get(sourceDoc.$id);
+
+ if (!existingTargetDoc) {
+ // Document doesn't exist in target, needs to be transferred
+ documentsToTransfer.push(sourceDoc);
+ } else {
+ // Document exists, compare both content and permissions
+ const sourcePermissions = JSON.stringify((sourceDoc.$permissions || []).sort());
+ const targetPermissions = JSON.stringify((existingTargetDoc.$permissions || []).sort());
+ const permissionsDiffer = sourcePermissions !== targetPermissions;
+
+ // Use objectNeedsUpdate to compare document content (excluding system fields)
+ const contentDiffers = objectNeedsUpdate(existingTargetDoc, sourceDoc);
+
+ if (contentDiffers && permissionsDiffer) {
+ // Both content and permissions differ
+ documentsToUpdate.push({
+ doc: sourceDoc,
+ targetDoc: existingTargetDoc,
+ reason: "content and permissions differ"
+ });
+ MessageFormatter.info(
+ `Document ${sourceDoc.$id} exists but content and permissions differ - will update`,
+ { prefix: "Transfer" }
+ );
+ } else if (contentDiffers) {
+ // Only content differs
+ documentsToUpdate.push({
+ doc: sourceDoc,
+ targetDoc: existingTargetDoc,
+ reason: "content differs"
+ });
+ MessageFormatter.info(
+ `Document ${sourceDoc.$id} exists but content differs - will update`,
+ { prefix: "Transfer" }
+ );
+ } else if (permissionsDiffer) {
+ // Only permissions differ
+ documentsToUpdate.push({
+ doc: sourceDoc,
+ targetDoc: existingTargetDoc,
+ reason: "permissions differ"
+ });
+ MessageFormatter.info(
+ `Document ${sourceDoc.$id} exists but permissions differ - will update`,
+ { prefix: "Transfer" }
+ );
+ } else {
+ // Document exists with identical content AND permissions, skip
+ totalSkipped++;
+ MessageFormatter.info(`Document ${sourceDoc.$id} exists with matching content and permissions - skipping`, { prefix: "Transfer" });
  }
- })
+ }
+ }
+
+ MessageFormatter.info(
+ `Batch analysis: ${documentsToTransfer.length} to create, ${documentsToUpdate.length} to update, ${totalSkipped} skipped so far`,
+ { prefix: "Transfer" }
  );

- await Promise.all(transferTasks);
+ // Process new documents with bulk operations if supported and available
+ if (documentsToTransfer.length > 0) {
+ if (supportsBulk && documentsToTransfer.length >= 10) {
+ // Use bulk operations for large batches
+ await this.transferDocumentsBulk(
+ targetDb,
+ targetDbId,
+ targetCollectionId,
+ documentsToTransfer
+ );
+ totalTransferred += documentsToTransfer.length;
+ MessageFormatter.success(`Bulk transferred ${documentsToTransfer.length} new documents`, { prefix: "Transfer" });
+ } else {
+ // Use individual transfers for smaller batches or non-bulk endpoints
+ const transferCount = await this.transferDocumentsIndividual(
+ targetDb,
+ targetDbId,
+ targetCollectionId,
+ documentsToTransfer
+ );
+ totalTransferred += transferCount;
+ }
+ }

- if (documents.documents.length < 50) {
+ // Process document updates (always individual since bulk update with permissions needs special handling)
+ if (documentsToUpdate.length > 0) {
+ const updateCount = await this.updateDocumentsIndividual(
+ targetDb,
+ targetDbId,
+ targetCollectionId,
+ documentsToUpdate
+ );
+ totalUpdated += updateCount;
+ }
+
+ if (sourceDocuments.documents.length < 1000) {
  break;
  }

- lastId = documents.documents[documents.documents.length - 1].$id;
+ lastId = sourceDocuments.documents[sourceDocuments.documents.length - 1].$id;
  }

- MessageFormatter.info(`Transferred ${totalTransferred} documents from ${sourceCollectionId} to ${targetCollectionId}`, { prefix: "Transfer" });
+ MessageFormatter.info(
+ `Transfer complete: ${totalTransferred} new, ${totalUpdated} updated, ${totalSkipped} skipped from ${sourceCollectionId} to ${targetCollectionId}`,
+ { prefix: "Transfer" }
+ );
+ }
+
+ /**
+ * Fetch target documents by IDs in batches to check existence and permissions
+ */
+ private async fetchTargetDocumentsBatch(
+ targetDb: Databases,
+ targetDbId: string,
+ targetCollectionId: string,
+ docIds: string[]
+ ): Promise<Models.Document[]> {
+ const documents: Models.Document[] = [];
+
+ // Split IDs into chunks of 100 for Query.equal limitations
+ const idChunks = this.chunkArray(docIds, 100);
+
+ for (const chunk of idChunks) {
+ try {
+ const result = await tryAwaitWithRetry(async () =>
+ targetDb.listDocuments(targetDbId, targetCollectionId, [
+ Query.equal('$id', chunk),
+ Query.limit(100)
+ ])
+ );
+ documents.push(...result.documents);
+ } catch (error) {
+ // If query fails, fall back to individual gets (less efficient but more reliable)
+ MessageFormatter.warning(
+ `Batch query failed for ${chunk.length} documents, falling back to individual checks`,
+ { prefix: "Transfer" }
+ );
+
+ for (const docId of chunk) {
+ try {
+ const doc = await targetDb.getDocument(targetDbId, targetCollectionId, docId);
+ documents.push(doc);
+ } catch (getError) {
+ // Document doesn't exist, which is fine
+ }
+ }
+ }
+ }
+
+ return documents;
+ }
+
+ /**
+ * Transfer documents using bulk operations with proper batch size handling
+ */
+ private async transferDocumentsBulk(
+ targetDb: Databases,
+ targetDbId: string,
+ targetCollectionId: string,
+ documents: Models.Document[]
+ ): Promise<void> {
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
+ return {
+ $id,
+ $permissions,
+ ...docData
+ };
+ });
+
+ // Process in smaller chunks for bulk operations (1000 for Pro, 100 for Free tier)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+ let processed = false;
+
+ for (const maxBatchSize of batchSizes) {
+ const documentBatches = this.chunkArray(preparedDocs, maxBatchSize);
+
+ try {
+ for (const batch of documentBatches) {
+ MessageFormatter.info(`Bulk upserting ${batch.length} documents...`, { prefix: "Transfer" });
+
+ await this.bulkUpsertDocuments(
+ this.targetClient,
+ targetDbId,
+ targetCollectionId,
+ batch
+ );
+
+ MessageFormatter.success(`✅ Bulk upserted ${batch.length} documents`, { prefix: "Transfer" });
+
+ // Add delay between batches to respect rate limits
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await new Promise(resolve => setTimeout(resolve, 200));
+ }
+ }
+
+ processed = true;
+ break; // Success, exit batch size loop
+ } catch (error) {
+ MessageFormatter.warning(
+ `Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`,
+ { prefix: "Transfer" }
+ );
+ continue; // Try next smaller batch size
+ }
+ }
+
+ if (!processed) {
+ MessageFormatter.warning(
+ `All bulk operations failed, falling back to individual transfers`,
+ { prefix: "Transfer" }
+ );
+
+ // Fall back to individual transfers
+ await this.transferDocumentsIndividual(targetDb, targetDbId, targetCollectionId, documents);
+ }
+ }
+
+ /**
+ * Direct HTTP implementation of bulk upsert API
+ */
+ private async bulkUpsertDocuments(
+ client: any,
+ dbId: string,
+ collectionId: string,
+ documents: any[]
+ ): Promise<any> {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+
+ const response = await fetch(url.toString(), {
+ method: 'PUT',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+
+ if (!response.ok) {
+ const errorData: any = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+
+ return await response.json();
+ }
+
+ /**
+ * Transfer documents individually with rate limiting
+ */
+ private async transferDocumentsIndividual(
+ targetDb: Databases,
+ targetDbId: string,
+ targetCollectionId: string,
+ documents: Models.Document[]
+ ): Promise<number> {
+ let successCount = 0;
+
+ const transferTasks = documents.map(doc =>
+ this.limit(async () => {
+ try {
+ const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
+
+ await tryAwaitWithRetry(async () =>
+ targetDb.createDocument(
+ targetDbId,
+ targetCollectionId,
+ doc.$id,
+ docData,
+ doc.$permissions
+ )
+ );
+
+ successCount++;
+ MessageFormatter.success(`Transferred document ${doc.$id}`, { prefix: "Transfer" });
+ } catch (error) {
+ MessageFormatter.error(
+ `Failed to transfer document ${doc.$id}`,
+ error instanceof Error ? error : new Error(String(error)),
+ { prefix: "Transfer" }
+ );
+ }
+ })
+ );
+
+ await Promise.all(transferTasks);
+ return successCount;
+ }
+
+ /**
+ * Update documents individually with content and/or permission changes
+ */
+ private async updateDocumentsIndividual(
+ targetDb: Databases,
+ targetDbId: string,
+ targetCollectionId: string,
+ documentPairs: { doc: Models.Document; targetDoc: Models.Document; reason: string }[]
+ ): Promise<number> {
+ let successCount = 0;
+
+ const updateTasks = documentPairs.map(({ doc, targetDoc, reason }) =>
+ this.limit(async () => {
+ try {
+ const { $id, $createdAt, $updatedAt, $permissions, $databaseId, $collectionId, ...docData } = doc;
+
+ await tryAwaitWithRetry(async () =>
+ targetDb.updateDocument(
+ targetDbId,
+ targetCollectionId,
+ doc.$id,
+ docData,
+ doc.$permissions
+ )
+ );
+
+ successCount++;
+ MessageFormatter.success(
+ `Updated document ${doc.$id} (${reason}) - permissions: [${targetDoc.$permissions?.join(', ')}] → [${doc.$permissions?.join(', ')}]`,
+ { prefix: "Transfer" }
+ );
+ } catch (error) {
+ MessageFormatter.error(
+ `Failed to update document ${doc.$id} (${reason})`,
+ error instanceof Error ? error : new Error(String(error)),
+ { prefix: "Transfer" }
+ );
+ }
+ })
+ );
+
+ await Promise.all(updateTasks);
+ return successCount;
+ }
+
+ /**
+ * Utility method to chunk arrays
+ */
+ private chunkArray<T>(array: T[], size: number): T[][] {
+ const chunks: T[][] = [];
+ for (let i = 0; i < array.length; i += size) {
+ chunks.push(array.slice(i, i + size));
+ }
+ return chunks;
  }

  /**
@@ -1160,10 +1567,39 @@ export class ComprehensiveTransfer {
  const transferTasks = memberships.map(membership =>
  this.userLimit(async () => { // Use userLimit for team operations (more sensitive)
  try {
- // Check if membership already exists
+ // Check if membership already exists and compare roles
+ let existingMembership: Models.Membership | null = null;
  try {
- await this.targetTeams.getMembership(teamId, membership.$id);
- MessageFormatter.info(`Membership ${membership.$id} already exists, skipping`, { prefix: "Transfer" });
+ existingMembership = await this.targetTeams.getMembership(teamId, membership.$id);
+
+ // Compare roles between source and target membership
+ const sourceRoles = JSON.stringify(membership.roles?.sort() || []);
+ const targetRoles = JSON.stringify(existingMembership.roles?.sort() || []);
+
+ if (sourceRoles !== targetRoles) {
+ MessageFormatter.warning(
+ `Membership ${membership.$id} exists but has different roles. Source: ${sourceRoles}, Target: ${targetRoles}`,
+ { prefix: "Transfer" }
+ );
+
+ // Update membership roles to match source
+ try {
+ await this.targetTeams.updateMembership(
+ teamId,
+ membership.$id,
+ membership.roles
+ );
+ MessageFormatter.success(`Updated membership ${membership.$id} roles to match source`, { prefix: "Transfer" });
+ } catch (updateError) {
+ MessageFormatter.error(
+ `Failed to update roles for membership ${membership.$id}`,
+ updateError instanceof Error ? updateError : new Error(String(updateError)),
+ { prefix: "Transfer" }
+ );
+ }
+ } else {
+ MessageFormatter.info(`Membership ${membership.$id} already exists with matching roles, skipping`, { prefix: "Transfer" });
+ }
  return;
  } catch (error) {
  // Membership doesn't exist, proceed with creation