appwrite-utils-cli 1.1.4 → 1.2.0

This diff shows the changes between publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only.
@@ -218,9 +218,7 @@ export const createOrUpdateAttribute = async (db, dbId, collection, attribute) =
  const updateEnabled = true;
  let finalAttribute = attribute;
  try {
- const collectionAttr = collection.attributes.find(
- // @ts-expect-error
- (attr) => attr.key === attribute.key);
+ const collectionAttr = collection.attributes.find((attr) => attr.key === attribute.key);
  foundAttribute = parseAttribute(collectionAttr);
  // console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
  }
@@ -407,9 +407,86 @@ const transferDocumentWithRetry = async (db, dbId, collectionId, documentId, doc
  }
  };
  /**
- * Enhanced batch document transfer with fault tolerance
+ * Check if endpoint supports bulk operations (cloud.appwrite.io)
  */
- const transferDocumentBatchWithRetry = async (db, dbId, collectionId, documents, batchSize = 10) => {
+ const supportsBulkOperations = (endpoint) => {
+ return endpoint.includes('cloud.appwrite.io');
+ };
+ /**
+ * Enhanced bulk document creation using the new bulk upsert API for cloud.appwrite.io
+ */
+ const transferDocumentsBulkUpsert = async (db, dbId, collectionId, documents, maxBatchSize = 1000) => {
+ let successful = 0;
+ let failed = 0;
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const toCreateObject = { ...doc };
+ delete toCreateObject.$databaseId;
+ delete toCreateObject.$collectionId;
+ delete toCreateObject.$createdAt;
+ delete toCreateObject.$updatedAt;
+ // Keep $id and $permissions for upsert functionality
+ // Appwrite bulk API expects $permissions to be preserved
+ return toCreateObject;
+ });
+ // Process in batches based on plan limits
+ const documentBatches = chunk(preparedDocs, maxBatchSize);
+ for (const batch of documentBatches) {
+ console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
+ try {
+ // Try bulk upsert with the highest batch size first
+ const result = await db.upsertDocuments(dbId, collectionId, batch);
+ successful += result.documents?.length || batch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
+ // If bulk upsert fails, try with smaller batch size (Pro plan limit)
+ if (maxBatchSize > 100) {
+ const smallerBatches = chunk(batch, 100);
+ for (const smallBatch of smallerBatches) {
+ try {
+ const result = await db.upsertDocuments(dbId, collectionId, smallBatch);
+ successful += result.documents?.length || smallBatch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
+ }
+ catch (smallBatchError) {
+ console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
+ // Fall back to individual document transfer for this batch
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, smallBatch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
+ })));
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ // Add delay between batches
+ await delay(200);
+ }
+ }
+ else {
+ // Fall back to individual document transfer
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, batch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
+ })));
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ }
+ // Add delay between major batches
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await delay(500);
+ }
+ }
+ return { successful, failed };
+ };
+ /**
+ * Fallback batch document transfer with individual retry logic
+ */
+ const transferDocumentBatchWithRetryFallback = async (db, dbId, collectionId, documents, batchSize = 10) => {
  let successful = 0;
  let failed = 0;
  // Process documents in smaller batches to avoid overwhelming the server
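Note: the new path above relies on the bulk document API exposed by newer Appwrite server SDKs. A minimal, self-contained sketch of the same ladder, assuming node-appwrite v17's `Databases.upsertDocuments(databaseId, collectionId, documents)` bulk method (the same call the diff makes); `chunk` mirrors the helper the package uses, everything else is illustrative:

```ts
import { Databases } from "node-appwrite";

// Illustrative stand-in for the package's chunk() helper.
const chunk = <T>(items: T[], size: number): T[][] => {
  const out: T[][] = [];
  for (let i = 0; i < items.length; i += size) out.push(items.slice(i, i + size));
  return out;
};

async function bulkUpsertWithFallback(
  db: Databases,
  dbId: string,
  collectionId: string,
  docs: Record<string, unknown>[],
  maxBatchSize = 1000, // ~Pro-plan bulk limit in the diff; 100 is the smaller fallback
): Promise<{ successful: number; failed: number }> {
  let successful = 0;
  let failed = 0;
  for (const batch of chunk(docs, maxBatchSize)) {
    try {
      const res = await (db as any).upsertDocuments(dbId, collectionId, batch);
      successful += res.documents?.length ?? batch.length;
    } catch {
      // Retry the same documents in smaller slices before counting failures.
      for (const small of chunk(batch, 100)) {
        try {
          const res = await (db as any).upsertDocuments(dbId, collectionId, small);
          successful += res.documents?.length ?? small.length;
        } catch {
          failed += small.length; // the real code falls back to per-document writes here
        }
      }
    }
  }
  return { successful, failed };
}
```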
@@ -449,6 +526,30 @@ const transferDocumentBatchWithRetry = async (db, dbId, collectionId, documents,
  }
  return { successful, failed };
  };
+ /**
+ * Enhanced batch document transfer with fault tolerance and bulk API support
+ */
+ const transferDocumentBatchWithRetry = async (db, endpoint, dbId, collectionId, documents, batchSize = 10) => {
+ // Check if we can use bulk operations
+ if (supportsBulkOperations(endpoint)) {
+ console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
+ // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+ for (const maxBatchSize of batchSizes) {
+ try {
+ return await transferDocumentsBulkUpsert(db, dbId, collectionId, documents, maxBatchSize);
+ }
+ catch (error) {
+ console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
+ continue;
+ }
+ }
+ // If all bulk operations fail, fall back to individual transfers
+ console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
+ }
+ // Fall back to individual document transfer
+ return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
+ };
  export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint, projectId, apiKey, fromDbId, toDbId, fromCollId, toCollId) => {
  console.log(chalk.blue(`Starting enhanced document transfer from ${fromCollId} to ${toCollId}...`));
  const client = new Client()
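Note: the gate for the bulk path is just a substring check on the configured endpoint, so self-hosted instances keep the per-document route. The check in isolation, with example inputs (identifiers mirror the diff; the URLs are illustrative):

```ts
// Endpoint gate as in the diff: bulk upserts are only attempted against
// Appwrite Cloud; any other endpoint takes the per-document fallback.
const supportsBulkOperations = (endpoint: string): boolean =>
  endpoint.includes("cloud.appwrite.io");

supportsBulkOperations("https://cloud.appwrite.io/v1");    // true  -> bulk path
supportsBulkOperations("https://appwrite.example.com/v1"); // false -> fallback
```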
@@ -459,11 +560,11 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint
  let totalDocumentsProcessed = 0;
  let totalSuccessful = 0;
  let totalFailed = 0;
- // Fetch documents in batches
+ // Fetch documents in larger batches (1000 at a time)
  let hasMoreDocuments = true;
  let lastDocumentId;
  while (hasMoreDocuments) {
- const queries = [Query.limit(50)];
+ const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
  if (lastDocumentId) {
  queries.push(Query.cursorAfter(lastDocumentId));
  }
@@ -472,13 +573,13 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint
  hasMoreDocuments = false;
  break;
  }
- console.log(chalk.blue(`Processing ${fromCollDocs.documents.length} documents...`));
- const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, toDbId, toCollId, fromCollDocs.documents);
+ console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));
+ const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, endpoint, toDbId, toCollId, fromCollDocs.documents);
  totalDocumentsProcessed += fromCollDocs.documents.length;
  totalSuccessful += successful;
  totalFailed += failed;
  // Check if we have more documents to process
- if (fromCollDocs.documents.length < 50) {
+ if (fromCollDocs.documents.length < 1000) {
  hasMoreDocuments = false;
  }
  else {
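Note: both fetch loops in this release use cursor pagination, now with 1000-document pages instead of 50. A condensed sketch of that pattern using node-appwrite's `Query.limit`/`Query.cursorAfter` helpers (the page-handling callback is illustrative):

```ts
import { Databases, Query } from "node-appwrite";

// Cursor pagination as used by the transfer loop: fetch pages of up to 1000
// documents, advance the cursor past the last document, stop on a short page.
async function forEachPage(
  db: Databases,
  dbId: string,
  collId: string,
  onPage: (docs: any[]) => Promise<void>,
): Promise<void> {
  let lastDocumentId: string | undefined;
  for (;;) {
    const queries = [Query.limit(1000)];
    if (lastDocumentId) queries.push(Query.cursorAfter(lastDocumentId));
    const page = await db.listDocuments(dbId, collId, queries);
    if (page.documents.length === 0) break;
    await onPage(page.documents);
    if (page.documents.length < 1000) break;
    lastDocumentId = page.documents[page.documents.length - 1].$id;
  }
}
```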
@@ -58,7 +58,7 @@ export const deleteFunction = async (client, functionId) => {
  };
  export const createFunction = async (client, functionConfig) => {
  const functions = new Functions(client);
- const functionResponse = await functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute, functionConfig.events, functionConfig.schedule, functionConfig.timeout, functionConfig.enabled, functionConfig.logging, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes, functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory, functionConfig.templateRepository, functionConfig.templateOwner, functionConfig.templateRootDirectory, functionConfig.templateVersion, functionConfig.specification);
+ const functionResponse = await functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute, functionConfig.events, functionConfig.schedule, functionConfig.timeout, functionConfig.enabled, functionConfig.logging, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes, functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory);
  return functionResponse;
  };
  export const updateFunctionSpecifications = async (client, functionId, specification) => {
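Note: the five trailing arguments (templateRepository through specification) are dropped from the create call, presumably to match the node-appwrite v17 signature. Since `updateFunctionSpecifications` survives in the surrounding context, a caller that still needs a specification could apply it in a second step; a hedged, hypothetical sketch:

```ts
// Hypothetical two-step flow: create the function without the removed
// template/specification arguments, then set the spec separately via the
// updateFunctionSpecifications helper visible in this file's context.
const fn = await createFunction(client, functionConfig);
if (functionConfig.specification) {
  await updateFunctionSpecifications(client, fn.$id, functionConfig.specification);
}
```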
@@ -79,9 +79,7 @@ export const createOrUpdateAttribute = async (db, dbId, collection, attribute, o
  let foundAttribute;
  let finalAttribute = attribute;
  try {
- const collectionAttr = collection.attributes.find(
- // @ts-expect-error - Appwrite type issues
- (attr) => attr.key === attribute.key);
+ const collectionAttr = collection.attributes.find((attr) => attr.key === attribute.key);
  foundAttribute = parseAttribute(collectionAttr);
  if (verbose) {
  console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
@@ -235,9 +233,7 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  const configAttributes = collectionConfig.attributes || [];
  const configAttributeKeys = new Set(configAttributes.map(attr => attr.key));
  // Find attributes that exist in the database but not in the config
- const obsoleteAttributes = collection.attributes.filter(
- // @ts-expect-error - Appwrite type issues
- (attr) => !configAttributeKeys.has(attr.key));
+ const obsoleteAttributes = collection.attributes.filter((attr) => !configAttributeKeys.has(attr.key));
  if (obsoleteAttributes.length === 0) {
  return;
  }
@@ -248,13 +244,11 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  const queuedOperation = {
  type: "attribute",
  collectionId: collection.$id,
- // @ts-expect-error - Appwrite type issues
  attribute: { key: attr.key, type: "delete" },
  collection,
  };
  const executeOperation = async () => {
  await attributeLimit(() => tryAwaitWithRetry(async () => {
- // @ts-expect-error - Appwrite type issues
  await db.deleteAttribute(dbId, collection.$id, attr.key);
  }));
  };
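Note: each queued deletion runs through a concurrency limiter and a retry wrapper. A self-contained sketch of that pipeline, assuming `attributeLimit` is a p-limit instance (p-limit appears in the package's dependency list) and that `tryAwaitWithRetry` re-invokes its thunk on transient errors; the limit value and retry count here are assumptions:

```ts
import pLimit from "p-limit";

// p-limit instance capping concurrent schema mutations (limit value assumed).
const attributeLimit = pLimit(1);

// Simplified stand-in for the package's tryAwaitWithRetry helper.
async function tryAwaitWithRetry<T>(fn: () => Promise<T>, attempts = 3): Promise<T> {
  let lastErr: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (err) {
      lastErr = err; // transient failure; try again
    }
  }
  throw lastErr;
}

// Usage shape, mirroring the diff (db, dbId, collection, attr from context):
// await attributeLimit(() =>
//   tryAwaitWithRetry(() => db.deleteAttribute(dbId, collection.$id, attr.key)),
// );
```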
@@ -266,7 +260,6 @@ export const deleteObsoleteAttributes = async (db, dbId, collection, collectionC
  await delay(250);
  }
  if (verbose) {
- // @ts-expect-error - Appwrite type issues
  console.log(chalk.gray(`🗑️ Deleted obsolete attribute ${attr.key}`));
  }
  }
@@ -200,7 +200,7 @@ export class FunctionManager {
  console.log(chalk.blue(`Creating function: ${functionConfig.name}`));
  }
  return await tryAwaitWithRetry(async () => {
- return await this.functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute || [], functionConfig.events || [], functionConfig.schedule || "", functionConfig.timeout || 15, functionConfig.enabled !== false, functionConfig.logging !== false, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes || [], functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory, functionConfig.templateRepository, functionConfig.templateOwner, functionConfig.templateRootDirectory, functionConfig.templateVersion, functionConfig.specification);
+ return await this.functions.create(functionConfig.$id, functionConfig.name, functionConfig.runtime, functionConfig.execute || [], functionConfig.events || [], functionConfig.schedule || "", functionConfig.timeout || 15, functionConfig.enabled !== false, functionConfig.logging !== false, functionConfig.entrypoint, functionConfig.commands, functionConfig.scopes || [], functionConfig.installationId, functionConfig.providerRepositoryId, functionConfig.providerBranch, functionConfig.providerSilentMode, functionConfig.providerRootDirectory);
  });
  }
  async updateFunction(functionConfig, options = {}) {
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "appwrite-utils-cli",
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "1.1.4",
+ "version": "1.2.0",
  "main": "src/main.ts",
  "type": "module",
  "repository": {
@@ -43,7 +43,7 @@
  "js-yaml": "^4.1.0",
  "luxon": "^3.6.1",
  "nanostores": "^0.10.3",
- "node-appwrite": "^14.2.0",
+ "node-appwrite": "^17",
  "p-limit": "^6.2.0",
  "tar": "^7.4.3",
  "tsx": "^4.20.3",
@@ -363,8 +363,7 @@ export const createOrUpdateAttribute = async (
  let finalAttribute: any = attribute;
  try {
  const collectionAttr = collection.attributes.find(
- // @ts-expect-error
- (attr) => attr.key === attribute.key
+ (attr: any) => attr.key === attribute.key
  ) as unknown as any;
  foundAttribute = parseAttribute(collectionAttr);
  // console.log(`Found attribute: ${JSON.stringify(foundAttribute)}`);
@@ -670,9 +670,107 @@ const transferDocumentWithRetry = async (
  };

  /**
- * Enhanced batch document transfer with fault tolerance
+ * Check if endpoint supports bulk operations (cloud.appwrite.io)
  */
- const transferDocumentBatchWithRetry = async (
+ const supportsBulkOperations = (endpoint: string): boolean => {
+ return endpoint.includes('cloud.appwrite.io');
+ };
+
+ /**
+ * Enhanced bulk document creation using the new bulk upsert API for cloud.appwrite.io
+ */
+ const transferDocumentsBulkUpsert = async (
+ db: Databases,
+ dbId: string,
+ collectionId: string,
+ documents: any[],
+ maxBatchSize: number = 1000
+ ): Promise<{ successful: number; failed: number }> => {
+ let successful = 0;
+ let failed = 0;
+
+ // Prepare documents for bulk upsert
+ const preparedDocs = documents.map(doc => {
+ const toCreateObject: any = { ...doc };
+ delete toCreateObject.$databaseId;
+ delete toCreateObject.$collectionId;
+ delete toCreateObject.$createdAt;
+ delete toCreateObject.$updatedAt;
+
+ // Keep $id and $permissions for upsert functionality
+ // Appwrite bulk API expects $permissions to be preserved
+ return toCreateObject;
+ });
+
+ // Process in batches based on plan limits
+ const documentBatches = chunk(preparedDocs, maxBatchSize);
+
+ for (const batch of documentBatches) {
+ console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
+
+ try {
+ // Try bulk upsert with the highest batch size first
+ const result = await (db as any).upsertDocuments(dbId, collectionId, batch);
+ successful += result.documents?.length || batch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
+
+ } catch (error: any) {
+ console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
+
+ // If bulk upsert fails, try with smaller batch size (Pro plan limit)
+ if (maxBatchSize > 100) {
+ const smallerBatches = chunk(batch, 100);
+
+ for (const smallBatch of smallerBatches) {
+ try {
+ const result = await (db as any).upsertDocuments(dbId, collectionId, smallBatch);
+ successful += result.documents?.length || smallBatch.length;
+ console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
+ } catch (smallBatchError: any) {
+ console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
+
+ // Fall back to individual document transfer for this batch
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
+ db, dbId, collectionId, smallBatch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
+ }))
+ );
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+
+ // Add delay between batches
+ await delay(200);
+ }
+ } else {
+ // Fall back to individual document transfer
+ const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
+ db, dbId, collectionId, batch.map((doc, index) => ({
+ ...doc,
+ $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
+ $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
+ }))
+ );
+ successful += indivSuccessful;
+ failed += indivFailed;
+ }
+ }
+
+ // Add delay between major batches
+ if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
+ await delay(500);
+ }
+ }
+
+ return { successful, failed };
+ };
+
+ /**
+ * Fallback batch document transfer with individual retry logic
+ */
+ const transferDocumentBatchWithRetryFallback = async (
  db: Databases,
  dbId: string,
  collectionId: string,
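Note: before upserting, each document is stripped of server-managed metadata while `$id` and `$permissions` are kept, so the write targets the same document with the same ACL. That preparation step in isolation (a sketch; the type alias is illustrative):

```ts
// Document preparation as in the diff: drop metadata the API regenerates,
// keep $id and $permissions so the upsert hits the same document and ACL.
type AnyDoc = Record<string, unknown>;

const prepareForUpsert = (doc: AnyDoc): AnyDoc => {
  const { $databaseId, $collectionId, $createdAt, $updatedAt, ...rest } = doc as any;
  return rest; // retains $id, $permissions, and all data attributes
};
```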
@@ -733,6 +831,41 @@ const transferDocumentBatchWithRetry = async (
  return { successful, failed };
  };

+ /**
+ * Enhanced batch document transfer with fault tolerance and bulk API support
+ */
+ const transferDocumentBatchWithRetry = async (
+ db: Databases,
+ endpoint: string,
+ dbId: string,
+ collectionId: string,
+ documents: any[],
+ batchSize: number = 10
+ ): Promise<{ successful: number; failed: number }> => {
+ // Check if we can use bulk operations
+ if (supportsBulkOperations(endpoint)) {
+ console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
+
+ // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
+ const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
+
+ for (const maxBatchSize of batchSizes) {
+ try {
+ return await transferDocumentsBulkUpsert(db, dbId, collectionId, documents, maxBatchSize);
+ } catch (error: any) {
+ console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
+ continue;
+ }
+ }
+
+ // If all bulk operations fail, fall back to individual transfers
+ console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
+ }
+
+ // Fall back to individual document transfer
+ return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
+ };
+
  export const transferDocumentsBetweenDbsLocalToRemote = async (
  localDb: Databases,
  endpoint: string,
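Note: the wrapper tries descending bulk batch sizes before dropping to per-document writes (its comment mentions a 2500-document Scale tier, though only 1000 and 100 are attempted). The ladder factored out as a generic helper, for illustration only; the name and shape are hypothetical:

```ts
// Hypothetical generic form of the ladder in transferDocumentBatchWithRetry:
// try each bulk batch size in order, falling through on failure, and run the
// per-document fallback only after every bulk size has failed.
async function withDescendingBatchSizes<T>(
  sizes: number[],                        // e.g. [1000, 100] as in the diff
  tryBulk: (size: number) => Promise<T>,
  fallback: () => Promise<T>,
): Promise<T> {
  for (const size of sizes) {
    try {
      return await tryBulk(size);
    } catch {
      // this size failed; try the next, smaller one
    }
  }
  return fallback();
}
```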
@@ -755,12 +888,12 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  let totalSuccessful = 0;
  let totalFailed = 0;

- // Fetch documents in batches
+ // Fetch documents in larger batches (1000 at a time)
  let hasMoreDocuments = true;
  let lastDocumentId: string | undefined;

  while (hasMoreDocuments) {
- const queries = [Query.limit(50)];
+ const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
  if (lastDocumentId) {
  queries.push(Query.cursorAfter(lastDocumentId));
  }
@@ -774,10 +907,11 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  break;
  }

- console.log(chalk.blue(`Processing ${fromCollDocs.documents.length} documents...`));
+ console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));

  const { successful, failed } = await transferDocumentBatchWithRetry(
  remoteDb,
+ endpoint,
  toDbId,
  toCollId,
  fromCollDocs.documents
@@ -788,7 +922,7 @@ export const transferDocumentsBetweenDbsLocalToRemote = async (
  totalFailed += failed;

  // Check if we have more documents to process
- if (fromCollDocs.documents.length < 50) {
+ if (fromCollDocs.documents.length < 1000) {
  hasMoreDocuments = false;
  } else {
  lastDocumentId = fromCollDocs.documents[fromCollDocs.documents.length - 1].$id;
@@ -113,12 +113,7 @@ export const createFunction = async (
  functionConfig.providerRepositoryId,
  functionConfig.providerBranch,
  functionConfig.providerSilentMode,
- functionConfig.providerRootDirectory,
- functionConfig.templateRepository,
- functionConfig.templateOwner,
- functionConfig.templateRootDirectory,
- functionConfig.templateVersion,
- functionConfig.specification
+ functionConfig.providerRootDirectory
  );
  return functionResponse;
  };
@@ -114,8 +114,7 @@ export const createOrUpdateAttribute = async (

  try {
  const collectionAttr = collection.attributes.find(
- // @ts-expect-error - Appwrite type issues
- (attr) => attr.key === attribute.key
+ (attr: any) => attr.key === attribute.key
  ) as unknown as any;
  foundAttribute = parseAttribute(collectionAttr);
@@ -384,8 +383,7 @@ export const deleteObsoleteAttributes = async (

  // Find attributes that exist in the database but not in the config
  const obsoleteAttributes = collection.attributes.filter(
- // @ts-expect-error - Appwrite type issues
- (attr) => !configAttributeKeys.has(attr.key)
+ (attr: any) => !configAttributeKeys.has(attr.key)
  );

  if (obsoleteAttributes.length === 0) {
@@ -400,16 +398,14 @@ export const deleteObsoleteAttributes = async (
  const queuedOperation: QueuedOperation = {
  type: "attribute",
  collectionId: collection.$id,
- // @ts-expect-error - Appwrite type issues
- attribute: { key: attr.key, type: "delete" } as Attribute,
+ attribute: { key: (attr as any).key, type: "delete" } as unknown as Attribute,
  collection,
  };

  const executeOperation = async () => {
  await attributeLimit(() =>
  tryAwaitWithRetry(async () => {
- // @ts-expect-error - Appwrite type issues
- await db.deleteAttribute(dbId, collection.$id, attr.key);
+ await db.deleteAttribute(dbId, collection.$id, (attr as any).key);
  })
  );
  };
@@ -422,8 +418,7 @@ export const deleteObsoleteAttributes = async (
  }

  if (verbose) {
- // @ts-expect-error - Appwrite type issues
- console.log(chalk.gray(`🗑️ Deleted obsolete attribute ${attr.key}`));
+ console.log(chalk.gray(`🗑️ Deleted obsolete attribute ${(attr as any).key}`));
  }
  }
  };
@@ -310,12 +310,7 @@ export class FunctionManager {
  functionConfig.providerRepositoryId,
  functionConfig.providerBranch,
  functionConfig.providerSilentMode,
- functionConfig.providerRootDirectory,
- functionConfig.templateRepository,
- functionConfig.templateOwner,
- functionConfig.templateRootDirectory,
- functionConfig.templateVersion,
- functionConfig.specification
+ functionConfig.providerRootDirectory
  );
  });
  }