appwrite-utils-cli 1.1.4 → 1.2.1

This diff compares the contents of the two publicly released package versions as published to their public registry. It is provided for informational purposes only.
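The headline changes in 1.2.1, as read from the hunks below: node-appwrite is bumped from ^14.2.0 to ^16; document transfers gain a bulk-upsert fast path for cloud.appwrite.io endpoints (falling back to per-document transfers elsewhere or on error); the transfer loop now fetches 1000 documents per page instead of 50; the trailing template/specification arguments are dropped from the functions.create() call sites; and several @ts-expect-error suppressions are removed in favor of explicit any casts.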
@@ -218,9 +218,7 @@ export const createOrUpdateAttribute = async (db, dbId, collection, attribute) =
  const updateEnabled = true;
  let finalAttribute = attribute;
  try {
- const collectionAttr = collection.attributes.find(
- // @ts-expect-error
- (attr) => attr.key === attribute.key);
+ const collectionAttr = collection.attributes.find((attr) => attr.key === attribute.key);
  foundAttribute = parseAttribute(collectionAttr);
  // console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
  }
@@ -407,9 +407,131 @@ const transferDocumentWithRetry = async (db, dbId, collectionId, documentId, doc
  }
  };
  /**
- * Enhanced batch document transfer with fault tolerance
+ * Check if endpoint supports bulk operations (cloud.appwrite.io)
  */
- const transferDocumentBatchWithRetry = async (db, dbId, collectionId, documents, batchSize = 10) => {
+ const supportsBulkOperations = (endpoint) => {
+ return endpoint.includes('cloud.appwrite.io');
+ };
+ /**
+ * Direct HTTP implementation of bulk upsert API
+ */
+ const bulkUpsertDocuments = async (client, dbId, collectionId, documents) => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+ const response = await fetch(url.toString(), {
+ method: 'PUT',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+ return await response.json();
+ };
+ /**
+ * Direct HTTP implementation of bulk create API
+ */
+ const bulkCreateDocuments = async (client, dbId, collectionId, documents) => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+ const response = await fetch(url.toString(), {
+ method: 'POST',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+ if (!response.ok) {
+ const errorData = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk create failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+ return await response.json();
+ };
+ /**
+ * Enhanced bulk document creation using direct HTTP calls
+ */
+ const transferDocumentsBulkUpsert = async (client, dbId, collectionId, documents, maxBatchSize = 1000) => {
+ let successful = 0;
+ let failed = 0;
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const toCreateObject = { ...doc };
+ delete toCreateObject.$databaseId;
+ delete toCreateObject.$collectionId;
+ delete toCreateObject.$createdAt;
+ delete toCreateObject.$updatedAt;
+ // Keep $id and $permissions for upsert functionality
+ return toCreateObject;
+ });
+ // Process in batches based on plan limits
+ const documentBatches = chunk(preparedDocs, maxBatchSize);
+ for (const batch of documentBatches) {
+ console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
+ try {
+ // Try bulk upsert with direct HTTP call
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, batch);
+ successful += result.documents?.length || batch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
+ // If bulk upsert fails, try with smaller batch size (Pro plan limit)
+ if (maxBatchSize > 100) {
+ const smallerBatches = chunk(batch, 100);
+ for (const smallBatch of smallerBatches) {
+ try {
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, smallBatch);
+ successful += result.documents?.length || smallBatch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
+ }
+ catch (smallBatchError) {
+ console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
+ // Fall back to individual document transfer for this batch
+ const db = new Databases(client);
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, smallBatch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
+ })));
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ // Add delay between batches
+ await delay(200);
+ }
+ }
+ else {
+ // Fall back to individual document transfer
+ const db = new Databases(client);
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, batch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
+ })));
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ }
+ // Add delay between major batches
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await delay(500);
+ }
+ }
+ return { successful, failed };
+ };
+ /**
+ * Fallback batch document transfer with individual retry logic
+ */
+ const transferDocumentBatchWithRetryFallback = async (db, dbId, collectionId, documents, batchSize = 10) => {
  let successful = 0;
  let failed = 0;
  // Process documents in smaller batches to avoid overwhelming the server
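For orientation, here is a hypothetical usage sketch of the new bulk path (TypeScript). `supportsBulkOperations` and `transferDocumentsBulkUpsert` are the module-internal helpers added above, so this only illustrates the call shape; the endpoint, project ID, API key, and sample documents are placeholders:

```typescript
// Hypothetical usage of the bulk-upsert transfer path added in this diff.
// Assumes a server-side API key with write access to the target collection.
import { Client } from "node-appwrite";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1") // bulk path is gated on this host
  .setProject("<projectId>")
  .setKey("<apiKey>");

const documents = [
  { $id: "doc-1", $permissions: [], title: "hello" },
  { $id: "doc-2", $permissions: [], title: "world" },
];

if (supportsBulkOperations("https://cloud.appwrite.io/v1")) {
  // Upserts in batches of up to 1000, shrinking to 100 and finally to
  // per-document transfers if the server rejects a batch.
  const { successful, failed } = await transferDocumentsBulkUpsert(
    client, "<dbId>", "<collectionId>", documents, 1000
  );
  console.log(`${successful} upserted, ${failed} failed`);
}
```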
@@ -449,6 +571,30 @@ const transferDocumentBatchWithRetry = async (db, dbId, collectionId, documents,
  }
  return { successful, failed };
  };
+ /**
+ * Enhanced batch document transfer with fault tolerance and bulk API support
+ */
+ const transferDocumentBatchWithRetry = async (db, client, dbId, collectionId, documents, batchSize = 10) => {
+ // Check if we can use bulk operations
+ if (supportsBulkOperations(client.config.endpoint)) {
+ console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
+ // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+ for (const maxBatchSize of batchSizes) {
+ try {
+ return await transferDocumentsBulkUpsert(client, dbId, collectionId, documents, maxBatchSize);
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
+ continue;
+ }
+ }
+ // If all bulk operations fail, fall back to individual transfers
+ console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
+ }
+ // Fall back to individual document transfer
+ return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
+ };
  export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint, projectId, apiKey, fromDbId, toDbId, fromCollId, toCollId) => {
  console.log(chalk.blue(`Starting enhanced document transfer from ${fromCollId} to ${toCollId}...`));
  const client = new Client()
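One thing to note in the wrapper above: the comment mentions the Scale plan limit of 2500, but the `batchSizes` ladder as shipped only tries 1000 (Pro) and 100 (Free) before dropping to per-document transfers.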
@@ -459,11 +605,11 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint
  let totalDocumentsProcessed = 0;
  let totalSuccessful = 0;
  let totalFailed = 0;
- // Fetch documents in batches
+ // Fetch documents in larger batches (1000 at a time)
  let hasMoreDocuments = true;
  let lastDocumentId;
  while (hasMoreDocuments) {
- const queries = [Query.limit(50)];
+ const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
  if (lastDocumentId) {
  queries.push(Query.cursorAfter(lastDocumentId));
  }
@@ -472,13 +618,13 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint
  hasMoreDocuments = false;
  break;
  }
- console.log(chalk.blue(`Processing ${fromCollDocs.documents.length} documents...`));
- const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, toDbId, toCollId, fromCollDocs.documents);
+ console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));
+ const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, client, toDbId, toCollId, fromCollDocs.documents);
  totalDocumentsProcessed += fromCollDocs.documents.length;
  totalSuccessful += successful;
  totalFailed += failed;
  // Check if we have more documents to process
- if (fromCollDocs.documents.length < 50) {
+ if (fromCollDocs.documents.length < 1000) {
  hasMoreDocuments = false;
  }
  else {
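The fetch loop above now pages with a 1000-document window. A self-contained sketch of that cursor pattern, using node-appwrite's `Databases.listDocuments` with `Query.limit` and `Query.cursorAfter` (endpoint and IDs are placeholders):

```typescript
// Cursor-based paging as used by the transfer loop in this diff:
// fetch up to 1000 documents, then continue from the last $id seen.
import { Client, Databases, Query } from "node-appwrite";

const client = new Client()
  .setEndpoint("<endpoint>")
  .setProject("<projectId>")
  .setKey("<apiKey>");
const db = new Databases(client);

let lastDocumentId: string | undefined;
let hasMore = true;
while (hasMore) {
  const queries = [Query.limit(1000)];
  if (lastDocumentId) queries.push(Query.cursorAfter(lastDocumentId));
  const page = await db.listDocuments("<dbId>", "<collectionId>", queries);
  // ...hand page.documents to transferDocumentBatchWithRetry here...
  if (page.documents.length < 1000) {
    hasMore = false; // short page means we reached the end
  } else {
    lastDocumentId = page.documents[page.documents.length - 1].$id;
  }
}
```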
@@ -58,7 +58,7 @@ export const deleteFunction = async (client, functionId) => {
  };
  export const createFunction = async (client, functionConfig) => {
  const functions = new Functions(client);
- const functionResponse = await functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute, functionConfig.events, functionConfig.schedule, functionConfig.timeout, functionConfig.enabled, functionConfig.logging, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes, functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory, functionConfig.templateRepository, functionConfig.templateOwner, functionConfig.templateRootDirectory, functionConfig.templateVersion, functionConfig.specification);
+ const functionResponse = await functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute, functionConfig.events, functionConfig.schedule, functionConfig.timeout, functionConfig.enabled, functionConfig.logging, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes, functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory);
  return functionResponse;
  };
  export const updateFunctionSpecifications = async (client, functionId, specification) => {
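The trailing `templateRepository`, `templateOwner`, `templateRootDirectory`, `templateVersion`, and `specification` arguments are dropped from this call (and from the matching `FunctionManager` and TypeScript-source call sites further down), which lines up with the `node-appwrite` ^14 → ^16 bump in package.json; whether v16's `functions.create` still accepts those trailing parameters is not shown in this diff.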
@@ -79,9 +79,7 @@ export const createOrUpdateAttribute = async (db, dbId, collection, attribute, o
  let foundAttribute;
  let finalAttribute = attribute;
  try {
- const collectionAttr = collection.attributes.find(
- // @ts-expect-error - Appwrite type issues
- (attr) => attr.key === attribute.key);
+ const collectionAttr = collection.attributes.find((attr) => attr.key === attribute.key);
  foundAttribute = parseAttribute(collectionAttr);
  if (verbose) {
  console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
@@ -235,9 +233,7 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  const configAttributes = collectionConfig.attributes || [];
  const configAttributeKeys = new Set(configAttributes.map(attr => attr.key));
  // Find attributes that exist in the database but not in the config
- const obsoleteAttributes = collection.attributes.filter(
- // @ts-expect-error - Appwrite type issues
- (attr) => !configAttributeKeys.has(attr.key));
+ const obsoleteAttributes = collection.attributes.filter((attr) => !configAttributeKeys.has(attr.key));
  if (obsoleteAttributes.length === 0) {
  return;
  }
@@ -248,13 +244,11 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  const queuedOperation = {
  type: "attribute",
  collectionId: collection.$id,
- // @ts-expect-error - Appwrite type issues
  attribute: { key: attr.key, type: "delete" },
  collection,
  };
  const executeOperation = async () => {
  await attributeLimit(() => tryAwaitWithRetry(async () => {
- // @ts-expect-error - Appwrite type issues
  await db.deleteAttribute(dbId, collection.$id, attr.key);
  }));
  };
@@ -266,7 +260,6 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  await delay(250);
  }
  if (verbose) {
- // @ts-expect-error - Appwrite type issues
  console.log(chalk.gray(`🗑️ Deleted obsolete attribute ${attr.key}`));
  }
  }
@@ -200,7 +200,7 @@ export class FunctionManager {
  console.log(chalk.blue(`Creating function: ${functionConfig.name}`));
  }
  return await tryAwaitWithRetry(async () => {
- return await this.functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute || [], functionConfig.events || [], functionConfig.schedule || "", functionConfig.timeout || 15, functionConfig.enabled !== false, functionConfig.logging !== false, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes || [], functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory, functionConfig.templateRepository, functionConfig.templateOwner, functionConfig.templateRootDirectory, functionConfig.templateVersion, functionConfig.specification);
+ return await this.functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute || [], functionConfig.events || [], functionConfig.schedule || "", functionConfig.timeout || 15, functionConfig.enabled !== false, functionConfig.logging !== false, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes || [], functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory);
  });
  }
  async updateFunction(functionConfig, options = {}) {
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "appwrite-utils-cli",
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "1.1.4",
+ "version": "1.2.1",
  "main": "src/main.ts",
  "type": "module",
  "repository": {
@@ -43,7 +43,7 @@
  "js-yaml": "^4.1.0",
  "luxon": "^3.6.1",
  "nanostores": "^0.10.3",
- "node-appwrite": "^14.2.0",
+ "node-appwrite": "^16",
  "p-limit": "^6.2.0",
  "tar": "^7.4.3",
  "tsx": "^4.20.3",
@@ -363,8 +363,7 @@ export const createOrUpdateAttribute = async (
  let finalAttribute: any = attribute;
  try {
  const collectionAttr = collection.attributes.find(
- // @ts-expect-error
- (attr) => attr.key === attribute.key
+ (attr: any) => attr.key === attribute.key
  ) as unknown as any;
  foundAttribute = parseAttribute(collectionAttr);
  // console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
@@ -670,9 +670,172 @@ const transferDocumentWithRetry = async (
  };

  /**
- * Enhanced batch document transfer with fault tolerance
+ * Check if endpoint supports bulk operations (cloud.appwrite.io)
  */
- const transferDocumentBatchWithRetry = async (
+ const supportsBulkOperations = (endpoint: string): boolean => {
+ return endpoint.includes('cloud.appwrite.io');
+ };
+
+ /**
+ * Direct HTTP implementation of bulk upsert API
+ */
+ const bulkUpsertDocuments = async (
+ client: any,
+ dbId: string,
+ collectionId: string,
+ documents: any[]
+ ): Promise<any> => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+
+ const response = await fetch(url.toString(), {
+ method: 'PUT',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+
+ if (!response.ok) {
+ const errorData: any = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+
+ return await response.json();
+ };
+
+ /**
+ * Direct HTTP implementation of bulk create API
+ */
+ const bulkCreateDocuments = async (
+ client: any,
+ dbId: string,
+ collectionId: string,
+ documents: any[]
+ ): Promise<any> => {
+ const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
+ const url = new URL(client.config.endpoint + apiPath);
+
+ const headers = {
+ 'Content-Type': 'application/json',
+ 'X-Appwrite-Project': client.config.project,
+ 'X-Appwrite-Key': client.config.key
+ };
+
+ const response = await fetch(url.toString(), {
+ method: 'POST',
+ headers,
+ body: JSON.stringify({ documents })
+ });
+
+ if (!response.ok) {
+ const errorData: any = await response.json().catch(() => ({ message: 'Unknown error' }));
+ throw new Error(`Bulk create failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
+ }
+
+ return await response.json();
+ };
+
+ /**
+ * Enhanced bulk document creation using direct HTTP calls
+ */
+ const transferDocumentsBulkUpsert = async (
+ client: any,
+ dbId: string,
+ collectionId: string,
+ documents: any[],
+ maxBatchSize: number = 1000
+ ): Promise<{ successful: number; failed: number }> => {
+ let successful = 0;
+ let failed = 0;
+
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const toCreateObject: any = { ...doc };
+ delete toCreateObject.$databaseId;
+ delete toCreateObject.$collectionId;
+ delete toCreateObject.$createdAt;
+ delete toCreateObject.$updatedAt;
+
+ // Keep $id and $permissions for upsert functionality
+ return toCreateObject;
+ });
+
+ // Process in batches based on plan limits
+ const documentBatches = chunk(preparedDocs, maxBatchSize);
+
+ for (const batch of documentBatches) {
+ console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
+
+ try {
+ // Try bulk upsert with direct HTTP call
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, batch);
+ successful += result.documents?.length || batch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
+
+ } catch (error: any) {
+ console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
+
+ // If bulk upsert fails, try with smaller batch size (Pro plan limit)
+ if (maxBatchSize > 100) {
+ const smallerBatches = chunk(batch, 100);
+
+ for (const smallBatch of smallerBatches) {
+ try {
+ const result = await bulkUpsertDocuments(client, dbId, collectionId, smallBatch);
+ successful += result.documents?.length || smallBatch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
+ } catch (smallBatchError: any) {
+ console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
+
+ // Fall back to individual document transfer for this batch
+ const db = new Databases(client);
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
+ db, dbId, collectionId, smallBatch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
+ }))
+ );
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+
+ // Add delay between batches
+ await delay(200);
+ }
+ } else {
+ // Fall back to individual document transfer
+ const db = new Databases(client);
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
+ db, dbId, collectionId, batch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
+ }))
+ );
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ }
+
+ // Add delay between major batches
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await delay(500);
+ }
+ }
+
+ return { successful, failed };
+ };
+
+ /**
+ * Fallback batch document transfer with individual retry logic
+ */
+ const transferDocumentBatchWithRetryFallback = async (
  db: Databases,
  dbId: string,
  collectionId: string,
@@ -733,6 +896,41 @@ const transferDocumentBatchWithRetry = async (
  return { successful, failed };
  };

+ /**
+ * Enhanced batch document transfer with fault tolerance and bulk API support
+ */
+ const transferDocumentBatchWithRetry = async (
+ db: Databases,
+ client: any,
+ dbId: string,
+ collectionId: string,
+ documents: any[],
+ batchSize: number = 10
+ ): Promise<{ successful: number; failed: number }> => {
+ // Check if we can use bulk operations
+ if (supportsBulkOperations(client.config.endpoint)) {
+ console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
+
+ // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+
+ for (const maxBatchSize of batchSizes) {
+ try {
+ return await transferDocumentsBulkUpsert(client, dbId, collectionId, documents, maxBatchSize);
+ } catch (error: any) {
+ console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
+ continue;
+ }
+ }
+
+ // If all bulk operations fail, fall back to individual transfers
+ console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
+ }
+
+ // Fall back to individual document transfer
+ return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
+ };
+
  export const transferDocumentsBetweenDbsLocalToRemote = async (
  localDb: Databases,
  endpoint: string,
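Worth noting: `supportsBulkOperations` only checks for the substring 'cloud.appwrite.io', so self-hosted Appwrite instances always take the `transferDocumentBatchWithRetryFallback` per-document path, regardless of whether the server build supports the bulk endpoints.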
@@ -755,12 +953,12 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  let totalSuccessful = 0;
  let totalFailed = 0;

- // Fetch documents in batches
+ // Fetch documents in larger batches (1000 at a time)
  let hasMoreDocuments = true;
  let lastDocumentId: string | undefined;

  while (hasMoreDocuments) {
- const queries = [Query.limit(50)];
+ const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
  if (lastDocumentId) {
  queries.push(Query.cursorAfter(lastDocumentId));
  }
@@ -774,10 +972,11 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  break;
  }

- console.log(chalk.blue(`Processing ${fromCollDocs.documents.length} documents...`));
+ console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));

  const { successful, failed } = await transferDocumentBatchWithRetry(
  remoteDb,
+ client,
  toDbId,
  toCollId,
  fromCollDocs.documents
@@ -788,7 +987,7 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  totalFailed += failed;

  // Check if we have more documents to process
- if (fromCollDocs.documents.length < 50) {
+ if (fromCollDocs.documents.length < 1000) {
  hasMoreDocuments = false;
  } else {
  lastDocumentId = fromCollDocs.documents[fromCollDocs.documents.length - 1].$id;
@@ -113,12 +113,7 @@ export const createFunction = async (
  functionConfig.providerRepositoryId,
  functionConfig.providerBranch,
  functionConfig.providerSilentMode,
- functionConfig.providerRootDirectory,
- functionConfig.templateRepository,
- functionConfig.templateOwner,
- functionConfig.templateRootDirectory,
- functionConfig.templateVersion,
- functionConfig.specification
+ functionConfig.providerRootDirectory
  );
  return functionResponse;
  };
@@ -114,8 +114,7 @@ export const createOrUpdateAttribute = async (

  try {
  const collectionAttr = collection.attributes.find(
- // @ts-expect-error - Appwrite type issues
- (attr) => attr.key === attribute.key
+ (attr: any) => attr.key === attribute.key
  ) as unknown as any;
  foundAttribute = parseAttribute(collectionAttr);

@@ -384,8 +383,7 @@ export const deleteObsoleteAttributes = async (

  // Find attributes that exist in the database but not in the config
  const obsoleteAttributes = collection.attributes.filter(
- // @ts-expect-error - Appwrite type issues
- (attr) => !configAttributeKeys.has(attr.key)
+ (attr: any) => !configAttributeKeys.has(attr.key)
  );

  if (obsoleteAttributes.length === 0) {
@@ -400,16 +398,14 @@ export const deleteObsoleteAttributes = async (
  const queuedOperation: QueuedOperation = {
  type: "attribute",
  collectionId: collection.$id,
- // @ts-expect-error - Appwrite type issues
- attribute: { key: attr.key, type: "delete" } as Attribute,
+ attribute: { key: (attr as any).key, type: "delete" } as unknown as Attribute,
  collection,
  };

  const executeOperation = async () => {
  await attributeLimit(() =>
  tryAwaitWithRetry(async () => {
- // @ts-expect-error - Appwrite type issues
- await db.deleteAttribute(dbId, collection.$id, attr.key);
+ await db.deleteAttribute(dbId, collection.$id, (attr as any).key);
  })
  );
  };
@@ -422,8 +418,7 @@ export const deleteObsoleteAttributes = async (
  }

  if (verbose) {
- // @ts-expect-error - Appwrite type issues
- console.log(chalk.gray(`🗑️ Deleted obsolete attribute ${attr.key}`));
+ console.log(chalk.gray(`🗑️ Deleted obsolete attribute ${(attr as any).key}`));
  }
  }
  };
@@ -310,12 +310,7 @@ export class FunctionManager {
  functionConfig.providerRepositoryId,
  functionConfig.providerBranch,
  functionConfig.providerSilentMode,
- functionConfig.providerRootDirectory,
- functionConfig.templateRepository,
- functionConfig.templateOwner,
- functionConfig.templateRootDirectory,
- functionConfig.templateVersion,
- functionConfig.specification
+ functionConfig.providerRootDirectory
  );
  });
  }