appwrite-utils-cli 1.2.0 → 1.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -413,9 +413,53 @@ const supportsBulkOperations = (endpoint) => {
  return endpoint.includes('cloud.appwrite.io');
  };
  /**
- * Enhanced bulk document creation using the new bulk upsert API for cloud.appwrite.io
+ * Direct HTTP implementation of bulk upsert API
  */
- const transferDocumentsBulkUpsert = async (db, dbId, collectionId, documents, maxBatchSize = 1000) => {
+ const bulkUpsertDocuments = async (client, dbId, collectionId, documents) => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+ const response = await fetch(url.toString(), {
+ method: 'PUT',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+ return await response.json();
+ };
+ /**
+ * Direct HTTP implementation of bulk create API
+ */
+ const bulkCreateDocuments = async (client, dbId, collectionId, documents) => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+ const response = await fetch(url.toString(), {
+ method: 'POST',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk create failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+ return await response.json();
+ };
+ /**
+ * Enhanced bulk document creation using direct HTTP calls
+ */
+ const transferDocumentsBulkUpsert = async (client, dbId, collectionId, documents, maxBatchSize = 1000) => {
  let successful = 0;
  let failed = 0;
  // Prepare documents for bulk upsert
@@ -426,7 +470,6 @@ const transferDocumentsBulkUpsert = async (db, dbId, collectionId, documents, ma
  delete toCreateObject.$createdAt;
  delete toCreateObject.$updatedAt;
  // Keep $id and $permissions for upsert functionality
- // Appwrite bulk API expects $permissions to be preserved
  return toCreateObject;
  });
  // Process in batches based on plan limits
@@ -434,8 +477,8 @@ const transferDocumentsBulkUpsert = async (db, dbId, collectionId, documents, ma
  for (const batch of documentBatches) {
  console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
  try {
- // Try bulk upsert with the highest batch size first
- const result = await db.upsertDocuments(dbId, collectionId, batch);
+ // Try bulk upsert with direct HTTP call
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, batch);
  successful += result.documents?.length || batch.length;
  console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
  }
@@ -446,13 +489,14 @@ const transferDocumentsBulkUpsert = async (db, dbId, collectionId, documents, ma
  const smallerBatches = chunk(batch, 100);
  for (const smallBatch of smallerBatches) {
  try {
- const result = await db.upsertDocuments(dbId, collectionId, smallBatch);
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, smallBatch);
  successful += result.documents?.length || smallBatch.length;
  console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
  }
  catch (smallBatchError) {
  console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
  // Fall back to individual document transfer for this batch
+ const db = new Databases(client);
  const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, smallBatch.map((doc, index) => ({
  ...doc,
  $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
@@ -467,6 +511,7 @@ const transferDocumentsBulkUpsert = async (db, dbId, collectionId, documents, ma
  }
  else {
  // Fall back to individual document transfer
+ const db = new Databases(client);
  const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, batch.map((doc, index) => ({
  ...doc,
  $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
@@ -529,15 +574,15 @@ const transferDocumentBatchWithRetryFallback = async (db, dbId, collectionId, do
  /**
  * Enhanced batch document transfer with fault tolerance and bulk API support
  */
- const transferDocumentBatchWithRetry = async (db, endpoint, dbId, collectionId, documents, batchSize = 10) => {
+ const transferDocumentBatchWithRetry = async (db, client, dbId, collectionId, documents, batchSize = 10) => {
  // Check if we can use bulk operations
- if (supportsBulkOperations(endpoint)) {
+ if (supportsBulkOperations(client.config.endpoint)) {
  console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
  // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
  const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
  for (const maxBatchSize of batchSizes) {
  try {
- return await transferDocumentsBulkUpsert(db, dbId, collectionId, documents, maxBatchSize);
+ return await transferDocumentsBulkUpsert(client, dbId, collectionId, documents, maxBatchSize);
  }
  catch (error) {
  console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
@@ -574,7 +619,7 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint
  break;
  }
  console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));
- const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, endpoint, toDbId, toCollId, fromCollDocs.documents);
+ const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, client, toDbId, toCollId, fromCollDocs.documents);
  totalDocumentsProcessed += fromCollDocs.documents.length;
  totalSuccessful += successful;
  totalFailed += failed;
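
The new helpers above are module-internal, but their call shape follows directly from the diff: a PUT (upsert) or POST (create) to /databases/{dbId}/collections/{collectionId}/documents with a JSON body of { documents: [...] }, authenticated with the client's own endpoint, project, and API key. A minimal TypeScript sketch of driving the upsert helper, assuming a node-appwrite Client whose internal config exposes those values (as the new code relies on); the database, collection, and document values below are placeholders, not package code:

// Illustrative sketch only; bulkUpsertDocuments is assumed to be in scope from the module above.
import { Client } from "node-appwrite";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1") // bulk operations are only attempted for cloud.appwrite.io
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");

// Issues PUT /databases/{dbId}/collections/{collectionId}/documents with { documents: [...] }.
const result = await bulkUpsertDocuments(client, "mainDb", "posts", [
  { $id: "post-1", title: "Hello", $permissions: [] },
  { $id: "post-2", title: "World", $permissions: [] },
]);
console.log(`Upserted ${result.documents?.length ?? 0} documents`);
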
@@ -74,6 +74,10 @@ export declare class ComprehensiveTransfer {
  * Helper method to fetch all collections from a database
  */
  private fetchAllCollections;
+ /**
+ * Helper method to fetch all buckets with pagination
+ */
+ private fetchAllBuckets;
  /**
  * Helper method to parse attribute objects (simplified version of parseAttribute)
  */
@@ -272,21 +272,22 @@ export class ComprehensiveTransfer {
  async transferAllBuckets() {
  MessageFormatter.info("Starting bucket transfer phase", { prefix: "Transfer" });
  try {
- const sourceBuckets = await this.sourceStorage.listBuckets();
- const targetBuckets = await this.targetStorage.listBuckets();
+ // Get all buckets from source with pagination
+ const allSourceBuckets = await this.fetchAllBuckets(this.sourceStorage);
+ const allTargetBuckets = await this.fetchAllBuckets(this.targetStorage);
  if (this.options.dryRun) {
  let totalFiles = 0;
- for (const bucket of sourceBuckets.buckets) {
+ for (const bucket of allSourceBuckets) {
  const files = await this.sourceStorage.listFiles(bucket.$id, [Query.limit(1)]);
  totalFiles += files.total;
  }
- MessageFormatter.info(`DRY RUN: Would transfer ${sourceBuckets.buckets.length} buckets with ${totalFiles} files`, { prefix: "Transfer" });
+ MessageFormatter.info(`DRY RUN: Would transfer ${allSourceBuckets.length} buckets with ${totalFiles} files`, { prefix: "Transfer" });
  return;
  }
- const transferTasks = sourceBuckets.buckets.map(bucket => this.limit(async () => {
+ const transferTasks = allSourceBuckets.map(bucket => this.limit(async () => {
  try {
  // Check if bucket exists in target
- const existingBucket = targetBuckets.buckets.find(tb => tb.$id === bucket.$id);
+ const existingBucket = allTargetBuckets.find(tb => tb.$id === bucket.$id);
  if (!existingBucket) {
  // Create bucket in target
  await this.targetStorage.createBucket(bucket.$id, bucket.name, bucket.$permissions, bucket.fileSecurity, bucket.enabled, bucket.maximumFileSize, bucket.allowedFileExtensions, bucket.compression, bucket.encryption, bucket.antivirus);
@@ -480,6 +481,29 @@ export class ComprehensiveTransfer {
  }
  return collections;
  }
+ /**
+ * Helper method to fetch all buckets with pagination
+ */
+ async fetchAllBuckets(storage) {
+ const buckets = [];
+ let lastId;
+ while (true) {
+ const queries = [Query.limit(100)];
+ if (lastId) {
+ queries.push(Query.cursorAfter(lastId));
+ }
+ const result = await tryAwaitWithRetry(async () => storage.listBuckets(queries));
+ if (result.buckets.length === 0) {
+ break;
+ }
+ buckets.push(...result.buckets);
+ if (result.buckets.length < 100) {
+ break;
+ }
+ lastId = result.buckets[result.buckets.length - 1].$id;
+ }
+ return buckets;
+ }
  /**
  * Helper method to parse attribute objects (simplified version of parseAttribute)
  */
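
The new fetchAllBuckets helper pages through storage.listBuckets 100 at a time, using Query.cursorAfter on the last bucket's $id until an empty or short page signals the end. The same cursor pattern generalizes to any Appwrite list endpoint; here is a minimal TypeScript sketch applying it to the files in a bucket (fetchAllFiles is a hypothetical name for illustration, not part of the package):

// Cursor-based pagination over files, mirroring the bucket helper above.
import { Storage, Query, Models } from "node-appwrite";

async function fetchAllFiles(storage: Storage, bucketId: string): Promise<Models.File[]> {
  const files: Models.File[] = [];
  let lastId: string | undefined;
  while (true) {
    const queries = [Query.limit(100)];
    if (lastId) {
      queries.push(Query.cursorAfter(lastId)); // resume after the last file we saw
    }
    const page = await storage.listFiles(bucketId, queries);
    if (page.files.length === 0) break;
    files.push(...page.files);
    if (page.files.length < 100) break; // short page means there is nothing left
    lastId = page.files[page.files.length - 1].$id;
  }
  return files;
}
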
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "appwrite-utils-cli",
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "1.2.0",
+ "version": "1.2.2",
  "main": "src/main.ts",
  "type": "module",
  "repository": {
@@ -43,7 +43,7 @@
  "js-yaml": "^4.1.0",
  "luxon": "^3.6.1",
  "nanostores": "^0.10.3",
- "node-appwrite": "^17",
+ "node-appwrite": "^16",
  "p-limit": "^6.2.0",
  "tar": "^7.4.3",
  "tsx": "^4.20.3",
@@ -677,10 +677,74 @@ const supportsBulkOperations = (endpoint: string): boolean => {
  };
 
  /**
- * Enhanced bulk document creation using the new bulk upsert API for cloud.appwrite.io
+ * Direct HTTP implementation of bulk upsert API
+ */
+ const bulkUpsertDocuments = async (
+ client: any,
+ dbId: string,
+ collectionId: string,
+ documents: any[]
+ ): Promise<any> => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+
+ const response = await fetch(url.toString(), {
+ method: 'PUT',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+
+ if (!response.ok) {
+ const errorData: any = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+
+ return await response.json();
+ };
+
+ /**
+ * Direct HTTP implementation of bulk create API
+ */
+ const bulkCreateDocuments = async (
+ client: any,
+ dbId: string,
+ collectionId: string,
+ documents: any[]
+ ): Promise<any> => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+
+ const response = await fetch(url.toString(), {
+ method: 'POST',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+
+ if (!response.ok) {
+ const errorData: any = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk create failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+
+ return await response.json();
+ };
+
+ /**
+ * Enhanced bulk document creation using direct HTTP calls
  */
  const transferDocumentsBulkUpsert = async (
- db: Databases,
+ client: any,
  dbId: string,
  collectionId: string,
  documents: any[],
@@ -698,7 +762,6 @@ const transferDocumentsBulkUpsert = async (
  delete toCreateObject.$updatedAt;
 
  // Keep $id and $permissions for upsert functionality
- // Appwrite bulk API expects $permissions to be preserved
  return toCreateObject;
  });
 
@@ -709,8 +772,8 @@ const transferDocumentsBulkUpsert = async (
  console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
 
  try {
- // Try bulk upsert with the highest batch size first
- const result = await (db as any).upsertDocuments(dbId, collectionId, batch);
+ // Try bulk upsert with direct HTTP call
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, batch);
  successful += result.documents?.length || batch.length;
  console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
 
@@ -723,13 +786,14 @@ const transferDocumentsBulkUpsert = async (
 
  for (const smallBatch of smallerBatches) {
  try {
- const result = await (db as any).upsertDocuments(dbId, collectionId, smallBatch);
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, smallBatch);
  successful += result.documents?.length || smallBatch.length;
  console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
  } catch (smallBatchError: any) {
  console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
 
  // Fall back to individual document transfer for this batch
+ const db = new Databases(client);
  const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
  db, dbId, collectionId, smallBatch.map((doc, index) => ({
  ...doc,
@@ -746,6 +810,7 @@ const transferDocumentsBulkUpsert = async (
  }
  } else {
  // Fall back to individual document transfer
+ const db = new Databases(client);
  const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
  db, dbId, collectionId, batch.map((doc, index) => ({
  ...doc,
@@ -836,14 +901,14 @@ const transferDocumentBatchWithRetryFallback = async (
  */
  const transferDocumentBatchWithRetry = async (
  db: Databases,
- endpoint: string,
+ client: any,
  dbId: string,
  collectionId: string,
  documents: any[],
  batchSize: number = 10
  ): Promise<{ successful: number; failed: number }> => {
  // Check if we can use bulk operations
- if (supportsBulkOperations(endpoint)) {
+ if (supportsBulkOperations(client.config.endpoint)) {
  console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
 
  // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
@@ -851,7 +916,7 @@ const transferDocumentBatchWithRetry = async (
 
  for (const maxBatchSize of batchSizes) {
  try {
- return await transferDocumentsBulkUpsert(db, dbId, collectionId, documents, maxBatchSize);
+ return await transferDocumentsBulkUpsert(client, dbId, collectionId, documents, maxBatchSize);
  } catch (error: any) {
  console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
  continue;
@@ -911,7 +976,7 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
 
  const { successful, failed } = await transferDocumentBatchWithRetry(
  remoteDb,
- endpoint,
+ client,
  toDbId,
  toCollId,
  fromCollDocs.documents
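
In 1.2.2 the retry wrapper takes the Client itself instead of a bare endpoint string, so it can both detect bulk support from client.config.endpoint and hand the client down to the direct-HTTP helpers. A minimal TypeScript sketch of the updated call shape, assuming the functions above are in scope and using placeholder database, collection, and document values:

// Illustrative only: driving transferDocumentBatchWithRetry with the 1.2.2 signature.
import { Client, Databases } from "node-appwrite";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1") // bulk upsert path is taken only for cloud.appwrite.io
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");
const remoteDb = new Databases(client);

const documents: any[] = [/* documents fetched from the source collection */];
const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, client, "targetDb", "targetCollection", documents);
console.log(`Transferred ${successful} documents, ${failed} failed`);
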
@@ -408,24 +408,25 @@ export class ComprehensiveTransfer {
  MessageFormatter.info("Starting bucket transfer phase", { prefix: "Transfer" });
 
  try {
- const sourceBuckets = await this.sourceStorage.listBuckets();
- const targetBuckets = await this.targetStorage.listBuckets();
+ // Get all buckets from source with pagination
+ const allSourceBuckets = await this.fetchAllBuckets(this.sourceStorage);
+ const allTargetBuckets = await this.fetchAllBuckets(this.targetStorage);
 
  if (this.options.dryRun) {
  let totalFiles = 0;
- for (const bucket of sourceBuckets.buckets) {
+ for (const bucket of allSourceBuckets) {
  const files = await this.sourceStorage.listFiles(bucket.$id, [Query.limit(1)]);
  totalFiles += files.total;
  }
- MessageFormatter.info(`DRY RUN: Would transfer ${sourceBuckets.buckets.length} buckets with ${totalFiles} files`, { prefix: "Transfer" });
+ MessageFormatter.info(`DRY RUN: Would transfer ${allSourceBuckets.length} buckets with ${totalFiles} files`, { prefix: "Transfer" });
  return;
  }
 
- const transferTasks = sourceBuckets.buckets.map(bucket =>
+ const transferTasks = allSourceBuckets.map(bucket =>
  this.limit(async () => {
  try {
  // Check if bucket exists in target
- const existingBucket = targetBuckets.buckets.find(tb => tb.$id === bucket.$id);
+ const existingBucket = allTargetBuckets.find(tb => tb.$id === bucket.$id);
 
  if (!existingBucket) {
  // Create bucket in target
@@ -676,6 +677,37 @@ export class ComprehensiveTransfer {
  return collections;
  }
 
+ /**
+ * Helper method to fetch all buckets with pagination
+ */
+ private async fetchAllBuckets(storage: Storage): Promise<Models.Bucket[]> {
+ const buckets: Models.Bucket[] = [];
+ let lastId: string | undefined;
+
+ while (true) {
+ const queries = [Query.limit(100)];
+ if (lastId) {
+ queries.push(Query.cursorAfter(lastId));
+ }
+
+ const result = await tryAwaitWithRetry(async () => storage.listBuckets(queries));
+
+ if (result.buckets.length === 0) {
+ break;
+ }
+
+ buckets.push(...result.buckets);
+
+ if (result.buckets.length < 100) {
+ break;
+ }
+
+ lastId = result.buckets[result.buckets.length - 1].$id;
+ }
+
+ return buckets;
+ }
+
  /**
  * Helper method to parse attribute objects (simplified version of parseAttribute)
  */