appwrite-utils-cli 0.0.286 → 0.9.2

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their public registries.
Files changed (109)
  1. package/README.md +162 -96
  2. package/dist/collections/attributes.d.ts +4 -0
  3. package/dist/collections/attributes.js +224 -0
  4. package/dist/collections/indexes.d.ts +4 -0
  5. package/dist/collections/indexes.js +27 -0
  6. package/dist/collections/methods.d.ts +16 -0
  7. package/dist/collections/methods.js +216 -0
  8. package/dist/databases/methods.d.ts +6 -0
  9. package/dist/databases/methods.js +33 -0
  10. package/dist/interactiveCLI.d.ts +19 -0
  11. package/dist/interactiveCLI.js +555 -0
  12. package/dist/main.js +224 -62
  13. package/dist/migrations/afterImportActions.js +37 -40
  14. package/dist/migrations/appwriteToX.d.ts +26 -25
  15. package/dist/migrations/appwriteToX.js +42 -6
  16. package/dist/migrations/attributes.js +21 -20
  17. package/dist/migrations/backup.d.ts +93 -87
  18. package/dist/migrations/collections.d.ts +6 -0
  19. package/dist/migrations/collections.js +149 -20
  20. package/dist/migrations/converters.d.ts +2 -18
  21. package/dist/migrations/converters.js +13 -2
  22. package/dist/migrations/dataLoader.d.ts +276 -161
  23. package/dist/migrations/dataLoader.js +535 -292
  24. package/dist/migrations/databases.js +8 -2
  25. package/dist/migrations/helper.d.ts +3 -0
  26. package/dist/migrations/helper.js +21 -0
  27. package/dist/migrations/importController.d.ts +5 -2
  28. package/dist/migrations/importController.js +125 -88
  29. package/dist/migrations/importDataActions.d.ts +9 -1
  30. package/dist/migrations/importDataActions.js +15 -3
  31. package/dist/migrations/indexes.js +3 -2
  32. package/dist/migrations/logging.js +20 -8
  33. package/dist/migrations/migrationHelper.d.ts +9 -4
  34. package/dist/migrations/migrationHelper.js +6 -5
  35. package/dist/migrations/openapi.d.ts +1 -1
  36. package/dist/migrations/openapi.js +33 -18
  37. package/dist/migrations/queue.js +3 -2
  38. package/dist/migrations/relationships.d.ts +2 -2
  39. package/dist/migrations/schemaStrings.js +53 -41
  40. package/dist/migrations/setupDatabase.d.ts +2 -4
  41. package/dist/migrations/setupDatabase.js +24 -105
  42. package/dist/migrations/storage.d.ts +3 -1
  43. package/dist/migrations/storage.js +110 -16
  44. package/dist/migrations/transfer.d.ts +30 -0
  45. package/dist/migrations/transfer.js +337 -0
  46. package/dist/migrations/users.d.ts +2 -1
  47. package/dist/migrations/users.js +78 -43
  48. package/dist/schemas/authUser.d.ts +2 -2
  49. package/dist/storage/methods.d.ts +15 -0
  50. package/dist/storage/methods.js +207 -0
  51. package/dist/storage/schemas.d.ts +687 -0
  52. package/dist/storage/schemas.js +175 -0
  53. package/dist/utils/getClientFromConfig.d.ts +4 -0
  54. package/dist/utils/getClientFromConfig.js +16 -0
  55. package/dist/utils/helperFunctions.d.ts +11 -1
  56. package/dist/utils/helperFunctions.js +38 -0
  57. package/dist/utils/retryFailedPromises.d.ts +2 -0
  58. package/dist/utils/retryFailedPromises.js +21 -0
  59. package/dist/utils/schemaStrings.d.ts +13 -0
  60. package/dist/utils/schemaStrings.js +403 -0
  61. package/dist/utils/setupFiles.js +110 -61
  62. package/dist/utilsController.d.ts +40 -22
  63. package/dist/utilsController.js +164 -84
  64. package/package.json +13 -15
  65. package/src/collections/attributes.ts +483 -0
  66. package/src/collections/indexes.ts +53 -0
  67. package/src/collections/methods.ts +331 -0
  68. package/src/databases/methods.ts +47 -0
  69. package/src/init.ts +64 -64
  70. package/src/interactiveCLI.ts +767 -0
  71. package/src/main.ts +289 -83
  72. package/src/migrations/afterImportActions.ts +553 -490
  73. package/src/migrations/appwriteToX.ts +237 -174
  74. package/src/migrations/attributes.ts +483 -422
  75. package/src/migrations/backup.ts +205 -205
  76. package/src/migrations/collections.ts +545 -300
  77. package/src/migrations/converters.ts +161 -150
  78. package/src/migrations/dataLoader.ts +1615 -1304
  79. package/src/migrations/databases.ts +44 -25
  80. package/src/migrations/dbHelpers.ts +92 -92
  81. package/src/migrations/helper.ts +40 -0
  82. package/src/migrations/importController.ts +448 -384
  83. package/src/migrations/importDataActions.ts +315 -307
  84. package/src/migrations/indexes.ts +40 -37
  85. package/src/migrations/logging.ts +29 -16
  86. package/src/migrations/migrationHelper.ts +207 -201
  87. package/src/migrations/openapi.ts +83 -70
  88. package/src/migrations/queue.ts +118 -119
  89. package/src/migrations/relationships.ts +324 -324
  90. package/src/migrations/schemaStrings.ts +472 -460
  91. package/src/migrations/setupDatabase.ts +118 -219
  92. package/src/migrations/storage.ts +538 -358
  93. package/src/migrations/transfer.ts +608 -0
  94. package/src/migrations/users.ts +362 -285
  95. package/src/migrations/validationRules.ts +63 -63
  96. package/src/schemas/authUser.ts +23 -23
  97. package/src/setup.ts +8 -8
  98. package/src/storage/methods.ts +371 -0
  99. package/src/storage/schemas.ts +205 -0
  100. package/src/types.ts +9 -9
  101. package/src/utils/getClientFromConfig.ts +17 -0
  102. package/src/utils/helperFunctions.ts +181 -127
  103. package/src/utils/index.ts +2 -2
  104. package/src/utils/loadConfigs.ts +59 -59
  105. package/src/utils/retryFailedPromises.ts +27 -0
  106. package/src/utils/schemaStrings.ts +473 -0
  107. package/src/utils/setupFiles.ts +228 -182
  108. package/src/utilsController.ts +325 -194
  109. package/tsconfig.json +37 -37
package/dist/main.js CHANGED
@@ -1,67 +1,229 @@
  #!/usr/bin/env node
- import { program } from "commander";
+ import yargs from "yargs";
+ import {} from "yargs";
+ import { hideBin } from "yargs/helpers";
+ import { InteractiveCLI } from "./interactiveCLI.js";
  import { UtilsController } from "./utilsController.js";
- // Setup the main CLI program
- program
- .version("1.0.0")
- .description("Utility CLI for Appwrite configurations and operations")
- .option("--endpoint <endpoint>", "Set the Appwrite endpoint", undefined)
- .option("--project <project>", "Set the Appwrite project ID", undefined)
- .option("--key <key>", "Set the Appwrite API key", undefined)
- .option("--backup", "Perform a backup before executing the command", false)
- .option("--dev", "Run in development environment", false)
- .option("--prod", "Run in production environment", false)
- .option("--staging", "Run in staging environment", false)
- .option("--sync", "Synchronize configurations", false)
- .option("--wipe", "Wipe databases", false)
- .option("--wipe-docs", "Wipe documents", false)
- .option("--wipe-users", "Wipe users", false)
- .option("--generate", "Generate schemas", false)
- .option("--import", "Import data", false)
- .option("--write-data", "Write data to file", false)
- .option("-h, --help", "Display help for command", false);
- program.on("--help", () => {
- console.log("");
- console.log("Examples:");
- console.log(" $ npx appwrite-utils-cli appwrite-migrate --sync --endpoint https://appwrite.example.com --project 123456 --key 7890");
- console.log(" $ npx appwrite-utils-cli appwrite-migrate --sync --dev --backup");
- console.log(" $ npx appwrite-utils-cli appwrite-migrate --wipe --wipe-docs --wipe-users --dev");
- console.log(" $ npx appwrite-utils-cli appwrite-migrate --generate --import --write-data --dev");
- console.log(" $ npx appwrite-utils-cli appwrite-migrate --sync --generate --import --write-data --dev --backup");
- console.log(" $ npx appwrite-utils-cli appwrite-create");
- console.log("For more information, visit https://github.com/zachhandley/appwrite-utils");
- console.log("");
- });
- // Parse and handle options
- program.action(async (options) => {
- const currentUserDir = process.cwd();
- const controller = new UtilsController(currentUserDir);
- try {
- // Convert Commander options to the format expected by UtilsController
- const setupOptions = {
- sync: options.sync,
- runProd: options.prod,
- runStaging: options.staging,
- runDev: options.dev,
- doBackup: options.backup,
- wipeDatabases: options.wipe,
- wipeDocumentStorage: options.wipeDocs,
- wipeUsers: options.wipeUsers,
- generateSchemas: options.generate,
- generateMockData: false, // Assuming this needs to be set based on other conditions
- importData: options.import,
- checkDuplicates: false, // Assuming this needs to be set based on other conditions
- shouldWriteFile: options.writeData,
- endpoint: options.endpoint,
- project: options.project,
- key: options.key,
- };
- console.log("Running operation...", setupOptions);
- await controller.run(setupOptions);
- console.log("Operation completed successfully.");
+ import { Databases, Storage } from "node-appwrite";
+ import { getClient } from "./utils/getClientFromConfig.js";
+ const argv = yargs(hideBin(process.argv))
+ .option("it", {
+ alias: ["interactive", "i"],
+ type: "boolean",
+ description: "Run in interactive mode",
+ })
+ .option("dbIds", {
+ type: "string",
+ description: "Comma-separated list of database IDs to operate on",
+ })
+ .option("collectionIds", {
+ alias: ["collIds"],
+ type: "string",
+ description: "Comma-separated list of collection IDs to operate on",
+ })
+ .option("bucketIds", {
+ type: "string",
+ description: "Comma-separated list of bucket IDs to operate on",
+ })
+ .option("wipe", {
+ choices: ["all", "docs", "users"],
+ description: "Wipe data (all: everything, docs: only documents, users: only user data)",
+ })
+ .option("generate", {
+ type: "boolean",
+ description: "Generate TypeScript schemas from database schemas",
+ })
+ .option("import", {
+ type: "boolean",
+ description: "Import data into your databases",
+ })
+ .option("backup", {
+ type: "boolean",
+ description: "Perform a backup of your databases",
+ })
+ .option("writeData", {
+ type: "boolean",
+ description: "Write converted imported data to file",
+ })
+ .option("push", {
+ type: "boolean",
+ description: "Push your local Appwrite config to your configured Appwrite Project",
+ })
+ .option("sync", {
+ type: "boolean",
+ description: "Synchronize by pulling your Appwrite config from your configured Appwrite Project",
+ })
+ .option("endpoint", {
+ type: "string",
+ description: "Set the Appwrite endpoint",
+ })
+ .option("projectId", {
+ type: "string",
+ description: "Set the Appwrite project ID",
+ })
+ .option("apiKey", {
+ type: "string",
+ description: "Set the Appwrite API key",
+ })
+ .option("transfer", {
+ type: "boolean",
+ description: "Transfer data between databases or collections",
+ })
+ .option("fromDbId", {
+ alias: ["fromDb"],
+ type: "string",
+ description: "Set the source database ID for transfer",
+ })
+ .option("toDbId", {
+ alias: ["toDb"],
+ type: "string",
+ description: "Set the destination database ID for transfer",
+ })
+ .option("fromCollectionId", {
+ alias: ["fromCollId", "fromColl"],
+ type: "string",
+ description: "Set the source collection ID for transfer",
+ })
+ .option("toCollectionId", {
+ alias: ["toCollId", "toColl"],
+ type: "string",
+ description: "Set the destination collection ID for transfer",
+ })
+ .option("fromBucketId", {
+ alias: ["fromBucket"],
+ type: "string",
+ description: "Set the source bucket ID for transfer",
+ })
+ .option("toBucketId", {
+ alias: ["toBucket"],
+ type: "string",
+ description: "Set the destination bucket ID for transfer",
+ })
+ .option("remoteEndpoint", {
+ type: "string",
+ description: "Set the remote Appwrite endpoint for transfers",
+ })
+ .option("remoteProjectId", {
+ type: "string",
+ description: "Set the remote Appwrite project ID for transfers",
+ })
+ .option("remoteApiKey", {
+ type: "string",
+ description: "Set the remote Appwrite API key for transfers",
+ })
+ .help()
+ .parse();
+ async function main() {
+ const parsedArgv = (await argv);
+ const controller = new UtilsController(process.cwd());
+ await controller.init();
+ if (parsedArgv.it) {
+ const cli = new InteractiveCLI(process.cwd(), controller);
+ await cli.run();
  }
- catch (error) {
- console.error("Error during operation:", error);
+ else {
+ // Handle non-interactive mode with the new options
+ const options = {
+ databases: parsedArgv.dbIds
+ ? await controller.getDatabasesByIds(parsedArgv.dbIds.replace(" ", "").split(","))
+ : undefined,
+ collections: parsedArgv.collectionIds
+ ? parsedArgv.collectionIds.replace(" ", "").split(",")
+ : undefined,
+ doBackup: parsedArgv.backup,
+ wipeDatabase: parsedArgv.wipe === "all" || parsedArgv.wipe === "docs",
+ wipeDocumentStorage: parsedArgv.wipe === "all",
+ wipeUsers: parsedArgv.wipe === "all" || parsedArgv.wipe === "users",
+ generateSchemas: parsedArgv.generate,
+ importData: parsedArgv.import,
+ checkDuplicates: false,
+ shouldWriteFile: parsedArgv.writeData,
+ };
+ if (parsedArgv.push) {
+ await controller.syncDb();
+ }
+ if (options.wipeDatabase ||
+ options.wipeDocumentStorage ||
+ options.wipeUsers) {
+ if (options.wipeDatabase) {
+ for (const db of options.databases || []) {
+ await controller.wipeDatabase(db);
+ }
+ }
+ if (options.wipeDocumentStorage && parsedArgv.bucketIds) {
+ for (const bucketId of parsedArgv.bucketIds.split(",")) {
+ await controller.wipeDocumentStorage(bucketId);
+ }
+ }
+ if (options.wipeUsers) {
+ await controller.wipeUsers();
+ }
+ }
+ if (options.doBackup) {
+ for (const db of options.databases || []) {
+ await controller.backupDatabase(db);
+ }
+ }
+ if (options.generateSchemas) {
+ await controller.generateSchemas();
+ }
+ if (options.importData) {
+ await controller.importData(options);
+ }
+ if (parsedArgv.transfer) {
+ const isRemote = !!parsedArgv.remoteEndpoint;
+ const fromDb = await controller.getDatabasesByIds([parsedArgv.fromDbId]);
+ let toDb;
+ let targetDatabases;
+ let targetStorage;
+ if (isRemote) {
+ if (!parsedArgv.remoteEndpoint ||
+ !parsedArgv.remoteProjectId ||
+ !parsedArgv.remoteApiKey) {
+ throw new Error("Remote transfer details are missing");
+ }
+ const remoteClient = getClient(parsedArgv.remoteEndpoint, parsedArgv.remoteProjectId, parsedArgv.remoteApiKey);
+ targetDatabases = new Databases(remoteClient);
+ targetStorage = new Storage(remoteClient);
+ const remoteDbs = await targetDatabases.list();
+ toDb = remoteDbs.databases.find((db) => db.$id === parsedArgv.toDbId);
+ }
+ else {
+ toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId]))[0];
+ }
+ if (!fromDb[0] || !toDb) {
+ throw new Error("Source or target database not found");
+ }
+ let sourceBucket, targetBucket;
+ if (parsedArgv.fromBucketId) {
+ sourceBucket = await controller.storage?.getBucket(parsedArgv.fromBucketId);
+ }
+ if (parsedArgv.toBucketId) {
+ if (isRemote) {
+ targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
+ }
+ else {
+ targetBucket = await controller.storage?.getBucket(parsedArgv.toBucketId);
+ }
+ }
+ const transferOptions = {
+ isRemote,
+ fromDb: fromDb[0],
+ targetDb: toDb,
+ transferEndpoint: parsedArgv.remoteEndpoint,
+ transferProject: parsedArgv.remoteProjectId,
+ transferKey: parsedArgv.remoteApiKey,
+ sourceBucket: sourceBucket,
+ targetBucket: targetBucket,
+ };
+ await controller.transferData(transferOptions);
+ }
+ if (parsedArgv.sync) {
+ await controller.synchronizeConfigurations(options.databases);
+ }
  }
+ }
+ main().catch((error) => {
+ console.error("An error occurred:", error);
+ process.exit(1);
  });
- program.parse(process.argv);
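
Note on the hunk above: the rewritten entry point drops commander's one-flag-per-action style and folds the three old wipe flags into a single yargs `--wipe` choice, which `main()` then fans out into the booleans the controller expects. The following is a small, self-contained TypeScript sketch of just that mapping as it appears in the diff; `WipeChoice`, `WipeFlags`, and `mapWipeChoice` are illustrative names and not part of the published package.

// Sketch only: how the "--wipe" choice maps onto the controller options built in main() above.
type WipeChoice = "all" | "docs" | "users" | undefined;

interface WipeFlags {
  wipeDatabase: boolean;
  wipeDocumentStorage: boolean;
  wipeUsers: boolean;
}

function mapWipeChoice(wipe: WipeChoice): WipeFlags {
  return {
    // "all" and "docs" both wipe database documents
    wipeDatabase: wipe === "all" || wipe === "docs",
    // only "all" also clears document storage (buckets)
    wipeDocumentStorage: wipe === "all",
    // "all" and "users" clear user data
    wipeUsers: wipe === "all" || wipe === "users",
  };
}

// e.g. `--wipe docs` yields { wipeDatabase: true, wipeDocumentStorage: false, wipeUsers: false }
console.log(mapWipeChoice("docs"));

In the diff these booleans then gate `controller.wipeDatabase`, `controller.wipeDocumentStorage`, and `controller.wipeUsers` respectively, so a partial wipe never touches buckets or users unless `all` was requested.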
package/dist/migrations/afterImportActions.js CHANGED
@@ -1,9 +1,10 @@
- import { Databases, Storage, InputFile, Query, ID, Client, } from "node-appwrite";
+ import { Databases, Storage, Query, ID, Client, Compression, } from "node-appwrite";
+ import { InputFile } from "node-appwrite/file";
  import path from "path";
  import fs from "fs";
  import os from "os";
  import { logger } from "./logging.js";
- import {} from "appwrite-utils";
+ import { tryAwaitWithRetry, } from "appwrite-utils";
  export const getDatabaseFromConfig = (config) => {
  if (!config.appwriteClient) {
  config.appwriteClient = new Client()
@@ -26,7 +27,7 @@ export const afterImportActions = {
  updateCreatedDocument: async (config, dbId, collId, docId, data) => {
  try {
  const db = getDatabaseFromConfig(config);
- await db.updateDocument(dbId, collId, docId, data);
+ await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, docId, data));
  }
  catch (error) {
  console.error("Error updating document: ", error);
@@ -35,11 +36,11 @@ export const afterImportActions = {
  checkAndUpdateFieldInDocument: async (config, dbId, collId, docId, fieldName, oldFieldValue, newFieldValue) => {
  try {
  const db = getDatabaseFromConfig(config);
- const doc = await db.getDocument(dbId, collId, docId);
+ const doc = await tryAwaitWithRetry(async () => await db.getDocument(dbId, collId, docId));
  if (doc[fieldName] == oldFieldValue) {
- await db.updateDocument(dbId, collId, docId, {
+ await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, docId, {
  [fieldName]: newFieldValue,
- });
+ }));
  }
  }
  catch (error) {
@@ -50,10 +51,10 @@ export const afterImportActions = {
  const db = getDatabaseFromConfig(config);
  // Helper function to find a collection ID by name or return the ID if given
  const findCollectionId = async (collectionIdentifier) => {
- const collectionsPulled = await db.listCollections(dbId, [
+ const collectionsPulled = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
  Query.limit(25),
  Query.equal("name", collectionIdentifier),
- ]);
+ ]));
  if (collectionsPulled.total > 0) {
  return collectionsPulled.collections[0].$id;
  }
@@ -71,9 +72,9 @@ export const afterImportActions = {
  const valueToSet = otherDoc[otherFieldName];
  if (valueToSet) {
  // Update the target document
- await db.updateDocument(dbId, targetCollectionId, docId, {
+ await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, {
  [fieldName]: valueToSet,
- });
+ }));
  }
  console.log(`Field ${fieldName} updated successfully in document ${docId}.`);
  }
@@ -89,17 +90,17 @@ export const afterImportActions = {
  const db = getDatabaseFromConfig(config);
  // Helper function to find a collection ID by name or return the ID if given
  const findCollectionId = async (collectionIdentifier) => {
- const collections = await db.listCollections(dbId, [
+ const collections = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
  Query.equal("name", collectionIdentifier),
  Query.limit(1),
- ]);
+ ]));
  return collections.total > 0
  ? collections.collections[0].$id
  : collectionIdentifier;
  };
  // Function to check if the target field is an array
  const isTargetFieldArray = async (collectionId, fieldName) => {
- const collection = await db.getCollection(dbId, collectionId);
+ const collection = await tryAwaitWithRetry(async () => await db.getCollection(dbId, collectionId));
  const attribute = collection.attributes.find((attr) => attr.key === fieldName);
  // @ts-ignore
  return attribute?.array === true;
@@ -119,7 +120,7 @@ export const afterImportActions = {
  queries.push(Query.cursorAfter(cursor));
  }
  queries.push(Query.limit(docLimit));
- const response = await db.listDocuments(dbId, otherCollectionId, queries);
+ const response = await tryAwaitWithRetry(async () => await db.listDocuments(dbId, otherCollectionId, queries));
  const documents = response.documents;
  if (documents.length === 0 || documents.length < docLimit) {
  return documents;
@@ -134,7 +135,7 @@ export const afterImportActions = {
  const updatePayload = targetFieldIsArray
  ? { [fieldName]: documentIds }
  : { [fieldName]: documentIds[0] };
- await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+ await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, updatePayload));
  console.log(`Field ${fieldName} updated successfully in document ${docId} with ${documentIds.length} document IDs.`);
  }
  }
@@ -145,10 +146,10 @@ export const afterImportActions = {
  setTargetFieldFromOtherCollectionDocumentsByMatchingField: async (config, dbId, collIdOrName, docId, fieldName, otherCollIdOrName, matchingFieldName, matchingFieldValue, targetField) => {
  const db = getDatabaseFromConfig(config);
  const findCollectionId = async (collectionIdentifier) => {
- const collections = await db.listCollections(dbId, [
+ const collections = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
  Query.equal("name", collectionIdentifier),
  Query.limit(1),
- ]);
+ ]));
  return collections.total > 0
  ? collections.collections[0].$id
  : collectionIdentifier;
@@ -172,7 +173,7 @@ export const afterImportActions = {
  if (cursor) {
  queries.push(Query.cursorAfter(cursor));
  }
- const response = await db.listDocuments(dbId, otherCollectionId, queries);
+ const response = await tryAwaitWithRetry(async () => await db.listDocuments(dbId, otherCollectionId, queries));
  const documents = response.documents;
  if (documents.length === 0 || documents.length < docLimit) {
  return documents;
@@ -188,7 +189,7 @@ export const afterImportActions = {
  const updatePayload = targetFieldIsArray
  ? { [fieldName]: targetFieldValues }
  : { [fieldName]: targetFieldValues[0] };
- await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+ await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, updatePayload));
  console.log(`Field ${fieldName} updated successfully in document ${docId} with values from field ${targetField}.`);
  }
  }
@@ -199,22 +200,20 @@ export const afterImportActions = {
  createOrGetBucket: async (config, bucketName, bucketId, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus) => {
  try {
  const storage = getStorageFromConfig(config);
- const bucket = await storage.listBuckets([
- Query.equal("name", bucketName),
- ]);
+ const bucket = await tryAwaitWithRetry(async () => await storage.listBuckets([Query.equal("name", bucketName)]));
  if (bucket.buckets.length > 0) {
  return bucket.buckets[0];
  }
  else if (bucketId) {
  try {
- return await storage.getBucket(bucketId);
+ return await tryAwaitWithRetry(async () => await storage.getBucket(bucketId));
  }
  catch (error) {
- return await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus);
+ return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression ? Compression.Gzip : undefined, encryption, antivirus));
  }
  }
  else {
- return await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus);
+ return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression ? Compression.Gzip : undefined, encryption, antivirus));
  }
  }
  catch (error) {
@@ -225,12 +224,16 @@ export const afterImportActions = {
  try {
  const db = getDatabaseFromConfig(config);
  const storage = getStorageFromConfig(config);
- const collection = await db.getCollection(dbId, collId);
+ const collection = await tryAwaitWithRetry(async () => await db.getCollection(dbId, collId));
  const attributes = collection.attributes;
  const attribute = attributes.find((a) => a.key === fieldName);
  // console.log(
  // `Processing field ${fieldName} in collection ${collId} for document ${docId} in database ${dbId} in bucket ${bucketId} with path ${filePath} and name ${fileName}...`
  // );
+ if (filePath.length === 0 || fileName.length === 0) {
+ console.error(`File path or name is empty for field ${fieldName} in collection ${collId}, skipping...`);
+ return;
+ }
  let isArray = false;
  if (!attribute) {
  console.log(`Field ${fieldName} not found in collection ${collId}, weird, skipping...`);
@@ -242,7 +245,7 @@ export const afterImportActions = {
  // Define a helper function to check if a value is a URL
  const isUrl = (value) => typeof value === "string" &&
  (value.startsWith("http://") || value.startsWith("https://"));
- const doc = await db.getDocument(dbId, collId, docId);
+ const doc = await tryAwaitWithRetry(async () => await db.getDocument(dbId, collId, docId));
  const existingFieldValue = doc[fieldName];
  // Handle the case where the field is an array
  let updateData = isArray ? [] : "";
@@ -255,7 +258,7 @@
  const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "appwrite_tmp"));
  const tempFilePath = path.join(tempDir, fileName);
  // Download the file using fetch
- const response = await fetch(filePath);
+ const response = await tryAwaitWithRetry(async () => await fetch(filePath));
  if (!response.ok)
  console.error(`Failed to fetch ${filePath}: ${response.statusText} for document ${docId} with field ${fieldName}`);
  // Use arrayBuffer if buffer is not available
@@ -265,7 +268,7 @@
  // Create InputFile from the downloaded file
  const inputFile = InputFile.fromPath(tempFilePath, fileName);
  // Use the full file name (with extension) for creating the file
- const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+ const file = await tryAwaitWithRetry(async () => await storage.createFile(bucketId, ID.unique(), inputFile));
  console.log("Created file from URL: ", file.$id);
  // After uploading, adjust the updateData based on whether the field is an array or not
  if (isArray) {
@@ -274,15 +277,9 @@
  else {
  updateData = file.$id; // Set the new file ID
  }
- await db.updateDocument(dbId, collId, doc.$id, {
+ await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, doc.$id, {
  [fieldName]: updateData,
- });
- // console.log(
- // "Updating document with file: ",
- // doc.$id,
- // `${fieldName}: `,
- // updateData
- // );
+ }));
  // If the file was downloaded, delete it after uploading
  fs.unlinkSync(tempFilePath);
  }
@@ -295,16 +292,16 @@
  }
  const pathToFile = path.join(filePath, fileFullName);
  const inputFile = InputFile.fromPath(pathToFile, fileName);
- const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+ const file = await tryAwaitWithRetry(async () => await storage.createFile(bucketId, ID.unique(), inputFile));
  if (isArray) {
  updateData = [...updateData, file.$id]; // Append the new file ID
  }
  else {
  updateData = file.$id; // Set the new file ID
  }
- await db.updateDocument(dbId, collId, doc.$id, {
+ tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, doc.$id, {
  [fieldName]: updateData,
- });
+ }));
  console.log("Created file from path: ", file.$id);
  }
  }
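
Note on the file above: nearly every change wraps an Appwrite SDK call in `tryAwaitWithRetry`, newly imported from appwrite-utils. Its implementation is not part of this diff; the TypeScript sketch below only illustrates the call pattern the wrapped sites rely on (an async thunk retried a few times). The function name carries a "Sketch" suffix, and the retry count and delay are assumptions, not the appwrite-utils defaults.

// Illustrative sketch of a retry helper matching the call sites above,
// e.g. tryAwaitWithRetry(async () => await db.updateDocument(...)).
async function tryAwaitWithRetrySketch<T>(
  fn: () => Promise<T>,
  attempts = 3,
  delayMs = 500
): Promise<T> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= attempts; attempt++) {
    try {
      // Run the wrapped SDK call; return immediately on success
      return await fn();
    } catch (error) {
      lastError = error;
      if (attempt < attempts) {
        // Simple fixed delay between attempts; the real helper may back off differently
        await new Promise((resolve) => setTimeout(resolve, delayMs));
      }
    }
  }
  // All attempts failed: surface the last error to the caller
  throw lastError;
}

// Usage mirroring the diff:
// const doc = await tryAwaitWithRetrySketch(async () => await db.getDocument(dbId, collId, docId));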