appwrite-utils-cli 1.1.3 → 1.2.0

@@ -218,9 +218,7 @@ export const createOrUpdateAttribute = async (db, dbId, collection, attribute) =
  const updateEnabled = true;
  let finalAttribute = attribute;
  try {
- const collectionAttr = collection.attributes.find(
- // @ts-expect-error
- (attr) => attr.key === attribute.key);
+ const collectionAttr = collection.attributes.find((attr) => attr.key === attribute.key);
  foundAttribute = parseAttribute(collectionAttr);
  // console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
  }
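
Note: the `@ts-expect-error` suppressions dropped here and in the mirrored hunks below are presumably no longer needed under node-appwrite ^17's typings; the TypeScript sources later in this diff make the same move explicit by switching to `(attr: any)` casts instead.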
@@ -459,17 +457,53 @@ export const createUpdateCollectionAttributesWithStatusCheck = async (db, dbId,
  await delay(500); // Longer delay for deletions
  }
  }
- // Create attributes ONE BY ONE with proper status checking and persistent retry logic
- console.log(chalk.blue(`Creating ${attributes.length} attributes sequentially with status monitoring...`));
+ // First, get fresh collection data and determine which attributes actually need processing
+ console.log(chalk.blue(`Analyzing ${attributes.length} attributes to determine which need processing...`));
  let currentCollection = collection;
- let attributesToProcess = [...attributes];
+ try {
+ currentCollection = await db.getCollection(dbId, collection.$id);
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Warning: Could not refresh collection data: ${error}`));
+ }
+ const existingAttributesMap = new Map();
+ try {
+ // @ts-expect-error
+ const parsedAttributes = currentCollection.attributes.map((attr) => parseAttribute(attr));
+ parsedAttributes.forEach(attr => existingAttributesMap.set(attr.key, attr));
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Warning: Could not parse existing attributes: ${error}`));
+ }
+ // Filter to only attributes that need processing (new or changed)
+ const attributesToProcess = attributes.filter(attribute => {
+ const existing = existingAttributesMap.get(attribute.key);
+ if (!existing) {
+ console.log(chalk.blue(`āž• New attribute: ${attribute.key}`));
+ return true;
+ }
+ const needsUpdate = !attributesSame(existing, attribute);
+ if (needsUpdate) {
+ console.log(chalk.blue(`šŸ”„ Changed attribute: ${attribute.key}`));
+ }
+ else {
+ console.log(chalk.gray(`āœ… Unchanged attribute: ${attribute.key} (skipping)`));
+ }
+ return needsUpdate;
+ });
+ if (attributesToProcess.length === 0) {
+ console.log(chalk.green(`āœ… All ${attributes.length} attributes are already up to date for collection: ${collection.name}`));
+ return true;
+ }
+ console.log(chalk.blue(`Creating ${attributesToProcess.length} attributes sequentially with status monitoring...`));
+ let remainingAttributes = [...attributesToProcess];
  let overallRetryCount = 0;
  const maxOverallRetries = 3;
- while (attributesToProcess.length > 0 && overallRetryCount < maxOverallRetries) {
- const remainingAttributes = [...attributesToProcess];
- attributesToProcess = []; // Reset for next iteration
- console.log(chalk.blue(`\n=== Attempt ${overallRetryCount + 1}/${maxOverallRetries} - Processing ${remainingAttributes.length} attributes ===`));
- for (const attribute of remainingAttributes) {
+ while (remainingAttributes.length > 0 && overallRetryCount < maxOverallRetries) {
+ const attributesToProcessThisRound = [...remainingAttributes];
+ remainingAttributes = []; // Reset for next iteration
+ console.log(chalk.blue(`\n=== Attempt ${overallRetryCount + 1}/${maxOverallRetries} - Processing ${attributesToProcessThisRound.length} attributes ===`));
+ for (const attribute of attributesToProcessThisRound) {
  console.log(chalk.blue(`\n--- Processing attribute: ${attribute.key} ---`));
  const success = await createOrUpdateAttributeWithStatusCheck(db, dbId, currentCollection, attribute);
  if (success) {
@@ -486,11 +520,11 @@ export const createUpdateCollectionAttributesWithStatusCheck = async (db, dbId,
  }
  else {
  console.log(chalk.red(`āŒ Failed to create attribute: ${attribute.key}, will retry in next round`));
- attributesToProcess.push(attribute); // Add back to retry list
+ remainingAttributes.push(attribute); // Add back to retry list
  }
  }
- if (attributesToProcess.length === 0) {
- console.log(chalk.green(`\nāœ… Successfully created all ${attributes.length} attributes for collection: ${collection.name}`));
+ if (remainingAttributes.length === 0) {
+ console.log(chalk.green(`\nāœ… Successfully created all ${attributesToProcess.length} attributes for collection: ${collection.name}`));
  return true;
  }
  overallRetryCount++;
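
The new pre-pass makes attribute creation idempotent: it refreshes the collection, parses what already exists, and only sends attributes that are new or differ from the config, so reruns no longer hammer unchanged attributes. For context, a minimal sketch of the kind of structural comparison `attributesSame` implies — this helper is illustrative, not the package's actual implementation:

    // Hypothetical comparison over schema-relevant fields only, ignoring
    // server-side metadata such as status or error strings.
    interface AttributeLike {
      key: string;
      type: string;
      required?: boolean;
      array?: boolean;
      size?: number;
      default?: unknown;
    }

    const attributesLookSame = (a: AttributeLike, b: AttributeLike): boolean =>
      a.key === b.key &&
      a.type === b.type &&
      (a.required ?? false) === (b.required ?? false) &&
      (a.array ?? false) === (b.array ?? false) &&
      (a.size ?? null) === (b.size ?? null) &&
      JSON.stringify(a.default ?? null) === JSON.stringify(b.default ?? null);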
@@ -69,7 +69,16 @@ retryCount = 0, maxRetries = 5) => {
  export const createOrUpdateIndexWithStatusCheck = async (dbId, db, collectionId, collection, index, retryCount = 0, maxRetries = 5) => {
  console.log(chalk.blue(`Creating/updating index '${index.key}' (attempt ${retryCount + 1}/${maxRetries + 1})`));
  try {
- // First, try to create/update the index using existing logic
+ // First, validate that all required attributes exist
+ const freshCollection = await db.getCollection(dbId, collectionId);
+ const existingAttributeKeys = freshCollection.attributes.map((attr) => attr.key);
+ const missingAttributes = index.attributes.filter(attr => !existingAttributeKeys.includes(attr));
+ if (missingAttributes.length > 0) {
+ console.log(chalk.red(`āŒ Index '${index.key}' cannot be created: missing attributes [${missingAttributes.join(', ')}]`));
+ console.log(chalk.red(`Available attributes: [${existingAttributeKeys.join(', ')}]`));
+ return false; // Don't retry if attributes are missing
+ }
+ // Try to create/update the index using existing logic
  await createOrUpdateIndex(dbId, db, collectionId, index);
  // Now wait for the index to become available
  const success = await waitForIndexAvailable(db, dbId, collectionId, index.key, 60000, // 1 minute timeout
@@ -89,7 +98,16 @@ export const createOrUpdateIndexWithStatusCheck = async (dbId, db, collectionId,
  return false;
  }
  catch (error) {
- console.log(chalk.red(`Error creating index '${index.key}': ${error}`));
+ const errorMessage = error instanceof Error ? error.message : String(error);
+ console.log(chalk.red(`Error creating index '${index.key}': ${errorMessage}`));
+ // Check if this is a permanent error that shouldn't be retried
+ if (errorMessage.includes('not found') ||
+ errorMessage.includes('missing') ||
+ errorMessage.includes('does not exist') ||
+ errorMessage.includes('attribute') && errorMessage.includes('not found')) {
+ console.log(chalk.red(`āŒ Index '${index.key}' has permanent error - not retrying`));
+ return false;
+ }
  if (retryCount < maxRetries) {
  console.log(chalk.yellow(`Retrying index '${index.key}' due to error...`));
  // Wait a bit before retry
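
Two guards now bracket index creation: a pre-flight check that every attribute the index references actually exists, and message-based detection of permanent errors so the retry loop bails out early. One caveat worth noting: `&&` binds tighter than `||`, so the final `errorMessage.includes('attribute') && errorMessage.includes('not found')` clause is already subsumed by the first `'not found'` test. A sketch of the same classification factored into a helper (hypothetical name, not in the package):

    // Treat schema-shaped errors as permanent so the caller stops retrying;
    // everything else is assumed transient.
    const isPermanentIndexError = (message: string): boolean =>
      ["not found", "missing", "does not exist"].some((marker) =>
        message.includes(marker));

    // Usage inside the catch block:
    // if (isPermanentIndexError(errorMessage)) return false;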
@@ -407,9 +407,86 @@ const transferDocumentWithRetry = async (db, dbId, collectionId, documentId, doc
  }
  };
  /**
- * Enhanced batch document transfer with fault tolerance
+ * Check if endpoint supports bulk operations (cloud.appwrite.io)
  */
- const transferDocumentBatchWithRetry = async (db, dbId, collectionId, documents, batchSize = 10) => {
+ const supportsBulkOperations = (endpoint) => {
+ return endpoint.includes('cloud.appwrite.io');
+ };
+ /**
+ * Enhanced bulk document creation using the new bulk upsert API for cloud.appwrite.io
+ */
+ const transferDocumentsBulkUpsert = async (db, dbId, collectionId, documents, maxBatchSize = 1000) => {
+ let successful = 0;
+ let failed = 0;
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const toCreateObject = { ...doc };
+ delete toCreateObject.$databaseId;
+ delete toCreateObject.$collectionId;
+ delete toCreateObject.$createdAt;
+ delete toCreateObject.$updatedAt;
+ // Keep $id and $permissions for upsert functionality
+ // Appwrite bulk API expects $permissions to be preserved
+ return toCreateObject;
+ });
+ // Process in batches based on plan limits
+ const documentBatches = chunk(preparedDocs, maxBatchSize);
+ for (const batch of documentBatches) {
+ console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
+ try {
+ // Try bulk upsert with the highest batch size first
+ const result = await db.upsertDocuments(dbId, collectionId, batch);
+ successful += result.documents?.length || batch.length;
+ console.log(chalk.green(`āœ… Bulk upserted ${result.documents?.length || batch.length} documents`));
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
+ // If bulk upsert fails, try with smaller batch size (Pro plan limit)
+ if (maxBatchSize > 100) {
+ const smallerBatches = chunk(batch, 100);
+ for (const smallBatch of smallerBatches) {
+ try {
+ const result = await db.upsertDocuments(dbId, collectionId, smallBatch);
+ successful += result.documents?.length || smallBatch.length;
+ console.log(chalk.green(`āœ… Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
+ }
+ catch (smallBatchError) {
+ console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
+ // Fall back to individual document transfer for this batch
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, smallBatch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
+ })));
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ // Add delay between batches
+ await delay(200);
+ }
+ }
+ else {
+ // Fall back to individual document transfer
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, batch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
+ })));
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ }
+ // Add delay between major batches
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await delay(500);
+ }
+ }
+ return { successful, failed };
+ };
+ /**
+ * Fallback batch document transfer with individual retry logic
+ */
+ const transferDocumentBatchWithRetryFallback = async (db, dbId, collectionId, documents, batchSize = 10) => {
  let successful = 0;
  let failed = 0;
  // Process documents in smaller batches to avoid overwhelming the server
@@ -449,6 +526,30 @@ const transferDocumentBatchWithRetry = async (db, dbId, collectionId, documents,
  }
  return { successful, failed };
  };
+ /**
+ * Enhanced batch document transfer with fault tolerance and bulk API support
+ */
+ const transferDocumentBatchWithRetry = async (db, endpoint, dbId, collectionId, documents, batchSize = 10) => {
+ // Check if we can use bulk operations
+ if (supportsBulkOperations(endpoint)) {
+ console.log(chalk.green(`šŸš€ Using bulk upsert API for faster document transfer`));
+ // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+ for (const maxBatchSize of batchSizes) {
+ try {
+ return await transferDocumentsBulkUpsert(db, dbId, collectionId, documents, maxBatchSize);
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
+ continue;
+ }
+ }
+ // If all bulk operations fail, fall back to individual transfers
+ console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
+ }
+ // Fall back to individual document transfer
+ return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
+ };
  export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint, projectId, apiKey, fromDbId, toDbId, fromCollId, toCollId) => {
  console.log(chalk.blue(`Starting enhanced document transfer from ${fromCollId} to ${toCollId}...`));
  const client = new Client()
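
Taken together, these two hunks route transfers against cloud.appwrite.io through `upsertDocuments` in batches of 1000, degrading to 100 and finally to per-document transfer; self-hosted endpoints skip straight to the fallback. A hedged usage sketch of the underlying bulk call — all IDs and keys are placeholders, and `upsertDocuments` is assumed to be available as in node-appwrite ^17, mirroring the `(db as any)` cast the TypeScript source below uses:

    import { Client, Databases } from "node-appwrite";

    const client = new Client()
      .setEndpoint("https://cloud.appwrite.io/v1") // bulk operations are gated on this host
      .setProject("<project-id>")
      .setKey("<api-key>");
    const db = new Databases(client);

    // Upsert by $id: existing documents are updated, missing ones created.
    const result = await (db as any).upsertDocuments("<db-id>", "<collection-id>", [
      { $id: "doc-1", title: "hello" },
      { $id: "doc-2", title: "world" },
    ]);
    console.log(`Upserted ${result.documents?.length ?? 0} documents`);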
@@ -459,11 +560,11 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint
  let totalDocumentsProcessed = 0;
  let totalSuccessful = 0;
  let totalFailed = 0;
- // Fetch documents in batches
+ // Fetch documents in larger batches (1000 at a time)
  let hasMoreDocuments = true;
  let lastDocumentId;
  while (hasMoreDocuments) {
- const queries = [Query.limit(50)];
+ const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
  if (lastDocumentId) {
  queries.push(Query.cursorAfter(lastDocumentId));
  }
@@ -472,13 +573,13 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint
  hasMoreDocuments = false;
  break;
  }
- console.log(chalk.blue(`Processing ${fromCollDocs.documents.length} documents...`));
- const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, toDbId, toCollId, fromCollDocs.documents);
+ console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));
+ const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, endpoint, toDbId, toCollId, fromCollDocs.documents);
  totalDocumentsProcessed += fromCollDocs.documents.length;
  totalSuccessful += successful;
  totalFailed += failed;
  // Check if we have more documents to process
- if (fromCollDocs.documents.length < 50) {
+ if (fromCollDocs.documents.length < 1000) {
  hasMoreDocuments = false;
  }
  else {
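
The fetch side is structurally unchanged — standard Appwrite cursor pagination — only the page size moves from 50 to 1000 to feed the bulk path. Condensed, the pattern the loop implements looks like this (illustrative sketch, not the package's code):

    import { Databases, Query } from "node-appwrite";

    const PAGE_SIZE = 1000; // matches the new Query.limit above

    // Walk a collection with cursor pagination; a short page means we're done.
    async function* iterateDocuments(db: Databases, dbId: string, collId: string) {
      let cursor: string | undefined;
      while (true) {
        const queries = [Query.limit(PAGE_SIZE)];
        if (cursor) queries.push(Query.cursorAfter(cursor));
        const page = await db.listDocuments(dbId, collId, queries);
        if (page.documents.length === 0) break;
        yield* page.documents;
        if (page.documents.length < PAGE_SIZE) break;
        cursor = page.documents[page.documents.length - 1].$id;
      }
    }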
@@ -58,7 +58,7 @@ export const deleteFunction = async (client, functionId) => {
  };
  export const createFunction = async (client, functionConfig) => {
  const functions = new Functions(client);
- const functionResponse = await functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute, functionConfig.events, functionConfig.schedule, functionConfig.timeout, functionConfig.enabled, functionConfig.logging, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes, functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory, functionConfig.templateRepository, functionConfig.templateOwner, functionConfig.templateRootDirectory, functionConfig.templateVersion, functionConfig.specification);
+ const functionResponse = await functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute, functionConfig.events, functionConfig.schedule, functionConfig.timeout, functionConfig.enabled, functionConfig.logging, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes, functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory);
  return functionResponse;
  };
  export const updateFunctionSpecifications = async (client, functionId, specification) => {
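
Note: this drops the trailing templateRepository, templateOwner, templateRootDirectory, templateVersion, and specification arguments from `functions.create`. The same trim appears in `FunctionManager` and in the TypeScript sources below, presumably to match the narrower `functions.create` signature in node-appwrite ^17 (see the dependency bump from ^14.2.0 in package.json further down); anything that previously passed a function template or specification through this path no longer does.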
@@ -79,9 +79,7 @@ export const createOrUpdateAttribute = async (db, dbId, collection, attribute, o
  let foundAttribute;
  let finalAttribute = attribute;
  try {
- const collectionAttr = collection.attributes.find(
- // @ts-expect-error - Appwrite type issues
- (attr) => attr.key === attribute.key);
+ const collectionAttr = collection.attributes.find((attr) => attr.key === attribute.key);
  foundAttribute = parseAttribute(collectionAttr);
  if (verbose) {
  console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
@@ -235,9 +233,7 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  const configAttributes = collectionConfig.attributes || [];
  const configAttributeKeys = new Set(configAttributes.map(attr => attr.key));
  // Find attributes that exist in the database but not in the config
- const obsoleteAttributes = collection.attributes.filter(
- // @ts-expect-error - Appwrite type issues
- (attr) => !configAttributeKeys.has(attr.key));
+ const obsoleteAttributes = collection.attributes.filter((attr) => !configAttributeKeys.has(attr.key));
  if (obsoleteAttributes.length === 0) {
  return;
  }
@@ -248,13 +244,11 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  const queuedOperation = {
  type: "attribute",
  collectionId: collection.$id,
- // @ts-expect-error - Appwrite type issues
  attribute: { key: attr.key, type: "delete" },
  collection,
  };
  const executeOperation = async () => {
  await attributeLimit(() => tryAwaitWithRetry(async () => {
- // @ts-expect-error - Appwrite type issues
  await db.deleteAttribute(dbId, collection.$id, attr.key);
  }));
  };
@@ -266,7 +260,6 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  await delay(250);
  }
  if (verbose) {
- // @ts-expect-error - Appwrite type issues
  console.log(chalk.gray(`šŸ—‘ļø Deleted obsolete attribute ${attr.key}`));
  }
  }
@@ -200,7 +200,7 @@ export class FunctionManager {
  console.log(chalk.blue(`Creating function: ${functionConfig.name}`));
  }
  return await tryAwaitWithRetry(async () => {
- return await this.functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute || [], functionConfig.events || [], functionConfig.schedule || "", functionConfig.timeout || 15, functionConfig.enabled !== false, functionConfig.logging !== false, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes || [], functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory, functionConfig.templateRepository, functionConfig.templateOwner, functionConfig.templateRootDirectory, functionConfig.templateVersion, functionConfig.specification);
+ return await this.functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute || [], functionConfig.events || [], functionConfig.schedule || "", functionConfig.timeout || 15, functionConfig.enabled !== false, functionConfig.logging !== false, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes || [], functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory);
  });
  }
  async updateFunction(functionConfig, options = {}) {
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "appwrite-utils-cli",
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "1.1.3",
+ "version": "1.2.0",
  "main": "src/main.ts",
  "type": "module",
  "repository": {
@@ -43,7 +43,7 @@
  "js-yaml": "^4.1.0",
  "luxon": "^3.6.1",
  "nanostores": "^0.10.3",
- "node-appwrite": "^14.2.0",
+ "node-appwrite": "^17",
  "p-limit": "^6.2.0",
  "tar": "^7.4.3",
  "tsx": "^4.20.3",
@@ -363,8 +363,7 @@ export const createOrUpdateAttribute = async (
  let finalAttribute: any = attribute;
  try {
  const collectionAttr = collection.attributes.find(
- // @ts-expect-error
- (attr) => attr.key === attribute.key
+ (attr: any) => attr.key === attribute.key
  ) as unknown as any;
  foundAttribute = parseAttribute(collectionAttr);
  // console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
@@ -849,21 +848,60 @@ export const createUpdateCollectionAttributesWithStatusCheck = async (
  }
  }

- // Create attributes ONE BY ONE with proper status checking and persistent retry logic
- console.log(chalk.blue(`Creating ${attributes.length} attributes sequentially with status monitoring...`));
+ // First, get fresh collection data and determine which attributes actually need processing
+ console.log(chalk.blue(`Analyzing ${attributes.length} attributes to determine which need processing...`));

  let currentCollection = collection;
- let attributesToProcess = [...attributes];
+ try {
+ currentCollection = await db.getCollection(dbId, collection.$id);
+ } catch (error) {
+ console.log(chalk.yellow(`Warning: Could not refresh collection data: ${error}`));
+ }
+
+ const existingAttributesMap = new Map<string, Attribute>();
+ try {
+ // @ts-expect-error
+ const parsedAttributes = currentCollection.attributes.map((attr) => parseAttribute(attr));
+ parsedAttributes.forEach(attr => existingAttributesMap.set(attr.key, attr));
+ } catch (error) {
+ console.log(chalk.yellow(`Warning: Could not parse existing attributes: ${error}`));
+ }
+
+ // Filter to only attributes that need processing (new or changed)
+ const attributesToProcess = attributes.filter(attribute => {
+ const existing = existingAttributesMap.get(attribute.key);
+ if (!existing) {
+ console.log(chalk.blue(`āž• New attribute: ${attribute.key}`));
+ return true;
+ }
+
+ const needsUpdate = !attributesSame(existing, attribute);
+ if (needsUpdate) {
+ console.log(chalk.blue(`šŸ”„ Changed attribute: ${attribute.key}`));
+ } else {
+ console.log(chalk.gray(`āœ… Unchanged attribute: ${attribute.key} (skipping)`));
+ }
+ return needsUpdate;
+ });
+
+ if (attributesToProcess.length === 0) {
+ console.log(chalk.green(`āœ… All ${attributes.length} attributes are already up to date for collection: ${collection.name}`));
+ return true;
+ }
+
+ console.log(chalk.blue(`Creating ${attributesToProcess.length} attributes sequentially with status monitoring...`));
+
+ let remainingAttributes = [...attributesToProcess];
  let overallRetryCount = 0;
  const maxOverallRetries = 3;

- while (attributesToProcess.length > 0 && overallRetryCount < maxOverallRetries) {
- const remainingAttributes = [...attributesToProcess];
- attributesToProcess = []; // Reset for next iteration
+ while (remainingAttributes.length > 0 && overallRetryCount < maxOverallRetries) {
+ const attributesToProcessThisRound = [...remainingAttributes];
+ remainingAttributes = []; // Reset for next iteration

- console.log(chalk.blue(`\n=== Attempt ${overallRetryCount + 1}/${maxOverallRetries} - Processing ${remainingAttributes.length} attributes ===`));
+ console.log(chalk.blue(`\n=== Attempt ${overallRetryCount + 1}/${maxOverallRetries} - Processing ${attributesToProcessThisRound.length} attributes ===`));

- for (const attribute of remainingAttributes) {
+ for (const attribute of attributesToProcessThisRound) {
  console.log(chalk.blue(`\n--- Processing attribute: ${attribute.key} ---`));

  const success = await createOrUpdateAttributeWithStatusCheck(
@@ -887,12 +925,12 @@ export const createUpdateCollectionAttributesWithStatusCheck = async (
  await delay(1000);
  } else {
  console.log(chalk.red(`āŒ Failed to create attribute: ${attribute.key}, will retry in next round`));
- attributesToProcess.push(attribute); // Add back to retry list
+ remainingAttributes.push(attribute); // Add back to retry list
  }
  }

- if (attributesToProcess.length === 0) {
- console.log(chalk.green(`\nāœ… Successfully created all ${attributes.length} attributes for collection: ${collection.name}`));
+ if (remainingAttributes.length === 0) {
+ console.log(chalk.green(`\nāœ… Successfully created all ${attributesToProcess.length} attributes for collection: ${collection.name}`));
  return true;
  }

@@ -114,7 +114,19 @@ export const createOrUpdateIndexWithStatusCheck = async (
  console.log(chalk.blue(`Creating/updating index '${index.key}' (attempt ${retryCount + 1}/${maxRetries + 1})`));

  try {
- // First, try to create/update the index using existing logic
+ // First, validate that all required attributes exist
+ const freshCollection = await db.getCollection(dbId, collectionId);
+ const existingAttributeKeys = freshCollection.attributes.map((attr: any) => attr.key);
+
+ const missingAttributes = index.attributes.filter(attr => !existingAttributeKeys.includes(attr));
+
+ if (missingAttributes.length > 0) {
+ console.log(chalk.red(`āŒ Index '${index.key}' cannot be created: missing attributes [${missingAttributes.join(', ')}]`));
+ console.log(chalk.red(`Available attributes: [${existingAttributeKeys.join(', ')}]`));
+ return false; // Don't retry if attributes are missing
+ }
+
+ // Try to create/update the index using existing logic
  await createOrUpdateIndex(dbId, db, collectionId, index);

  // Now wait for the index to become available
@@ -155,7 +167,17 @@ export const createOrUpdateIndexWithStatusCheck = async (
  return false;

  } catch (error) {
- console.log(chalk.red(`Error creating index '${index.key}': ${error}`));
+ const errorMessage = error instanceof Error ? error.message : String(error);
+ console.log(chalk.red(`Error creating index '${index.key}': ${errorMessage}`));
+
+ // Check if this is a permanent error that shouldn't be retried
+ if (errorMessage.includes('not found') ||
+ errorMessage.includes('missing') ||
+ errorMessage.includes('does not exist') ||
+ errorMessage.includes('attribute') && errorMessage.includes('not found')) {
+ console.log(chalk.red(`āŒ Index '${index.key}' has permanent error - not retrying`));
+ return false;
+ }

  if (retryCount < maxRetries) {
  console.log(chalk.yellow(`Retrying index '${index.key}' due to error...`));
@@ -670,9 +670,107 @@ const transferDocumentWithRetry = async (
  };

  /**
- * Enhanced batch document transfer with fault tolerance
+ * Check if endpoint supports bulk operations (cloud.appwrite.io)
  */
- const transferDocumentBatchWithRetry = async (
+ const supportsBulkOperations = (endpoint: string): boolean => {
+ return endpoint.includes('cloud.appwrite.io');
+ };
+
+ /**
+ * Enhanced bulk document creation using the new bulk upsert API for cloud.appwrite.io
+ */
+ const transferDocumentsBulkUpsert = async (
+ db: Databases,
+ dbId: string,
+ collectionId: string,
+ documents: any[],
+ maxBatchSize: number = 1000
+ ): Promise<{ successful: number; failed: number }> => {
+ let successful = 0;
+ let failed = 0;
+
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const toCreateObject: any = { ...doc };
+ delete toCreateObject.$databaseId;
+ delete toCreateObject.$collectionId;
+ delete toCreateObject.$createdAt;
+ delete toCreateObject.$updatedAt;
+
+ // Keep $id and $permissions for upsert functionality
+ // Appwrite bulk API expects $permissions to be preserved
+ return toCreateObject;
+ });
+
+ // Process in batches based on plan limits
+ const documentBatches = chunk(preparedDocs, maxBatchSize);
+
+ for (const batch of documentBatches) {
+ console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
+
+ try {
+ // Try bulk upsert with the highest batch size first
+ const result = await (db as any).upsertDocuments(dbId, collectionId, batch);
+ successful += result.documents?.length || batch.length;
+ console.log(chalk.green(`āœ… Bulk upserted ${result.documents?.length || batch.length} documents`));
+
+ } catch (error: any) {
+ console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
+
+ // If bulk upsert fails, try with smaller batch size (Pro plan limit)
+ if (maxBatchSize > 100) {
+ const smallerBatches = chunk(batch, 100);
+
+ for (const smallBatch of smallerBatches) {
+ try {
+ const result = await (db as any).upsertDocuments(dbId, collectionId, smallBatch);
+ successful += result.documents?.length || smallBatch.length;
+ console.log(chalk.green(`āœ… Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
+ } catch (smallBatchError: any) {
+ console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
+
+ // Fall back to individual document transfer for this batch
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
+ db, dbId, collectionId, smallBatch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
+ }))
+ );
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+
+ // Add delay between batches
+ await delay(200);
+ }
+ } else {
+ // Fall back to individual document transfer
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
+ db, dbId, collectionId, batch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
+ }))
+ );
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ }
+
+ // Add delay between major batches
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await delay(500);
+ }
+ }
+
+ return { successful, failed };
+ };
+
+ /**
+ * Fallback batch document transfer with individual retry logic
+ */
+ const transferDocumentBatchWithRetryFallback = async (
  db: Databases,
  dbId: string,
  collectionId: string,
@@ -733,6 +831,41 @@ const transferDocumentBatchWithRetry = async (
  return { successful, failed };
  };

+ /**
+ * Enhanced batch document transfer with fault tolerance and bulk API support
+ */
+ const transferDocumentBatchWithRetry = async (
+ db: Databases,
+ endpoint: string,
+ dbId: string,
+ collectionId: string,
+ documents: any[],
+ batchSize: number = 10
+ ): Promise<{ successful: number; failed: number }> => {
+ // Check if we can use bulk operations
+ if (supportsBulkOperations(endpoint)) {
+ console.log(chalk.green(`šŸš€ Using bulk upsert API for faster document transfer`));
+
+ // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+
+ for (const maxBatchSize of batchSizes) {
+ try {
+ return await transferDocumentsBulkUpsert(db, dbId, collectionId, documents, maxBatchSize);
+ } catch (error: any) {
+ console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
+ continue;
+ }
+ }
+
+ // If all bulk operations fail, fall back to individual transfers
+ console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
+ }
+
+ // Fall back to individual document transfer
+ return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
+ };
+
  export const transferDocumentsBetweenDbsLocalToRemote = async (
  localDb: Databases,
  endpoint: string,
@@ -755,12 +888,12 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  let totalSuccessful = 0;
  let totalFailed = 0;

- // Fetch documents in batches
+ // Fetch documents in larger batches (1000 at a time)
  let hasMoreDocuments = true;
  let lastDocumentId: string | undefined;

  while (hasMoreDocuments) {
- const queries = [Query.limit(50)];
+ const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
  if (lastDocumentId) {
  queries.push(Query.cursorAfter(lastDocumentId));
  }
@@ -774,10 +907,11 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  break;
  }

- console.log(chalk.blue(`Processing ${fromCollDocs.documents.length} documents...`));
+ console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));

  const { successful, failed } = await transferDocumentBatchWithRetry(
  remoteDb,
+ endpoint,
  toDbId,
  toCollId,
  fromCollDocs.documents
@@ -788,7 +922,7 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  totalFailed += failed;

  // Check if we have more documents to process
- if (fromCollDocs.documents.length < 50) {
+ if (fromCollDocs.documents.length < 1000) {
  hasMoreDocuments = false;
  } else {
  lastDocumentId = fromCollDocs.documents[fromCollDocs.documents.length - 1].$id;
@@ -113,12 +113,7 @@ export const createFunction = async (
  functionConfig.providerRepositoryId,
  functionConfig.providerBranch,
  functionConfig.providerSilentMode,
- functionConfig.providerRootDirectory,
- functionConfig.templateRepository,
- functionConfig.templateOwner,
- functionConfig.templateRootDirectory,
- functionConfig.templateVersion,
- functionConfig.specification
+ functionConfig.providerRootDirectory
  );
  return functionResponse;
  };
@@ -114,8 +114,7 @@ export const createOrUpdateAttribute = async (

  try {
  const collectionAttr = collection.attributes.find(
- // @ts-expect-error - Appwrite type issues
- (attr) => attr.key === attribute.key
+ (attr: any) => attr.key === attribute.key
  ) as unknown as any;
  foundAttribute = parseAttribute(collectionAttr);

@@ -384,8 +383,7 @@ export const deleteObsoleteAttributes = async (

  // Find attributes that exist in the database but not in the config
  const obsoleteAttributes = collection.attributes.filter(
- // @ts-expect-error - Appwrite type issues
- (attr) => !configAttributeKeys.has(attr.key)
+ (attr: any) => !configAttributeKeys.has(attr.key)
  );

  if (obsoleteAttributes.length === 0) {
@@ -400,16 +398,14 @@ export const deleteObsoleteAttributes = async (
  const queuedOperation: QueuedOperation = {
  type: "attribute",
  collectionId: collection.$id,
- // @ts-expect-error - Appwrite type issues
- attribute: { key: attr.key, type: "delete" } as Attribute,
+ attribute: { key: (attr as any).key, type: "delete" } as unknown as Attribute,
  collection,
  };

  const executeOperation = async () => {
  await attributeLimit(() =>
  tryAwaitWithRetry(async () => {
- // @ts-expect-error - Appwrite type issues
- await db.deleteAttribute(dbId, collection.$id, attr.key);
+ await db.deleteAttribute(dbId, collection.$id, (attr as any).key);
  })
  );
  };
@@ -422,8 +418,7 @@ export const deleteObsoleteAttributes = async (
  }

  if (verbose) {
- // @ts-expect-error - Appwrite type issues
- console.log(chalk.gray(`šŸ—‘ļø Deleted obsolete attribute ${attr.key}`));
+ console.log(chalk.gray(`šŸ—‘ļø Deleted obsolete attribute ${(attr as any).key}`));
  }
  }
  };
@@ -310,12 +310,7 @@ export class FunctionManager {
  functionConfig.providerRepositoryId,
  functionConfig.providerBranch,
  functionConfig.providerSilentMode,
- functionConfig.providerRootDirectory,
- functionConfig.templateRepository,
- functionConfig.templateOwner,
- functionConfig.templateRootDirectory,
- functionConfig.templateVersion,
- functionConfig.specification
+ functionConfig.providerRootDirectory
  );
  });
  }