appwrite-utils-cli 0.0.285 → 0.9.0
This diff compares the contents of two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- package/README.md +122 -96
- package/dist/collections/attributes.d.ts +4 -0
- package/dist/collections/attributes.js +224 -0
- package/dist/collections/indexes.d.ts +4 -0
- package/dist/collections/indexes.js +27 -0
- package/dist/collections/methods.d.ts +16 -0
- package/dist/collections/methods.js +216 -0
- package/dist/databases/methods.d.ts +6 -0
- package/dist/databases/methods.js +33 -0
- package/dist/interactiveCLI.d.ts +19 -0
- package/dist/interactiveCLI.js +555 -0
- package/dist/main.js +227 -62
- package/dist/migrations/afterImportActions.js +37 -40
- package/dist/migrations/appwriteToX.d.ts +26 -25
- package/dist/migrations/appwriteToX.js +42 -6
- package/dist/migrations/attributes.js +21 -20
- package/dist/migrations/backup.d.ts +93 -87
- package/dist/migrations/collections.d.ts +6 -0
- package/dist/migrations/collections.js +149 -20
- package/dist/migrations/converters.d.ts +2 -18
- package/dist/migrations/converters.js +13 -2
- package/dist/migrations/dataLoader.d.ts +276 -161
- package/dist/migrations/dataLoader.js +535 -292
- package/dist/migrations/databases.js +8 -2
- package/dist/migrations/helper.d.ts +3 -0
- package/dist/migrations/helper.js +21 -0
- package/dist/migrations/importController.d.ts +5 -2
- package/dist/migrations/importController.js +125 -88
- package/dist/migrations/importDataActions.d.ts +9 -1
- package/dist/migrations/importDataActions.js +15 -3
- package/dist/migrations/indexes.js +3 -2
- package/dist/migrations/logging.js +20 -8
- package/dist/migrations/migrationHelper.d.ts +9 -4
- package/dist/migrations/migrationHelper.js +6 -5
- package/dist/migrations/openapi.d.ts +1 -1
- package/dist/migrations/openapi.js +33 -18
- package/dist/migrations/queue.js +3 -2
- package/dist/migrations/relationships.d.ts +2 -2
- package/dist/migrations/schemaStrings.js +53 -41
- package/dist/migrations/setupDatabase.d.ts +2 -4
- package/dist/migrations/setupDatabase.js +24 -105
- package/dist/migrations/storage.d.ts +3 -1
- package/dist/migrations/storage.js +110 -16
- package/dist/migrations/transfer.d.ts +30 -0
- package/dist/migrations/transfer.js +337 -0
- package/dist/migrations/users.d.ts +2 -1
- package/dist/migrations/users.js +78 -43
- package/dist/schemas/authUser.d.ts +2 -2
- package/dist/storage/methods.d.ts +15 -0
- package/dist/storage/methods.js +207 -0
- package/dist/storage/schemas.d.ts +687 -0
- package/dist/storage/schemas.js +175 -0
- package/dist/utils/getClientFromConfig.d.ts +4 -0
- package/dist/utils/getClientFromConfig.js +16 -0
- package/dist/utils/helperFunctions.d.ts +11 -1
- package/dist/utils/helperFunctions.js +38 -0
- package/dist/utils/retryFailedPromises.d.ts +2 -0
- package/dist/utils/retryFailedPromises.js +21 -0
- package/dist/utils/schemaStrings.d.ts +13 -0
- package/dist/utils/schemaStrings.js +403 -0
- package/dist/utils/setupFiles.js +110 -61
- package/dist/utilsController.d.ts +40 -22
- package/dist/utilsController.js +164 -84
- package/package.json +13 -15
- package/src/collections/attributes.ts +483 -0
- package/src/collections/indexes.ts +53 -0
- package/src/collections/methods.ts +331 -0
- package/src/databases/methods.ts +47 -0
- package/src/init.ts +64 -64
- package/src/interactiveCLI.ts +767 -0
- package/src/main.ts +292 -83
- package/src/migrations/afterImportActions.ts +553 -490
- package/src/migrations/appwriteToX.ts +237 -174
- package/src/migrations/attributes.ts +483 -422
- package/src/migrations/backup.ts +205 -205
- package/src/migrations/collections.ts +545 -300
- package/src/migrations/converters.ts +161 -150
- package/src/migrations/dataLoader.ts +1615 -1304
- package/src/migrations/databases.ts +44 -25
- package/src/migrations/dbHelpers.ts +92 -92
- package/src/migrations/helper.ts +40 -0
- package/src/migrations/importController.ts +448 -384
- package/src/migrations/importDataActions.ts +315 -307
- package/src/migrations/indexes.ts +40 -37
- package/src/migrations/logging.ts +29 -16
- package/src/migrations/migrationHelper.ts +207 -201
- package/src/migrations/openapi.ts +83 -70
- package/src/migrations/queue.ts +118 -119
- package/src/migrations/relationships.ts +324 -324
- package/src/migrations/schemaStrings.ts +472 -460
- package/src/migrations/setupDatabase.ts +118 -219
- package/src/migrations/storage.ts +538 -358
- package/src/migrations/transfer.ts +608 -0
- package/src/migrations/users.ts +362 -285
- package/src/migrations/validationRules.ts +63 -63
- package/src/schemas/authUser.ts +23 -23
- package/src/setup.ts +8 -8
- package/src/storage/methods.ts +371 -0
- package/src/storage/schemas.ts +205 -0
- package/src/types.ts +9 -9
- package/src/utils/getClientFromConfig.ts +17 -0
- package/src/utils/helperFunctions.ts +181 -127
- package/src/utils/index.ts +2 -2
- package/src/utils/loadConfigs.ts +59 -59
- package/src/utils/retryFailedPromises.ts +27 -0
- package/src/utils/schemaStrings.ts +473 -0
- package/src/utils/setupFiles.ts +228 -182
- package/src/utilsController.ts +325 -194
- package/tsconfig.json +37 -37
package/dist/main.js
CHANGED

```diff
@@ -1,67 +1,232 @@
 #!/usr/bin/env node
-import
+import yargs from "yargs";
+import {} from "yargs";
+import { hideBin } from "yargs/helpers";
+import { InteractiveCLI } from "./interactiveCLI.js";
 import { UtilsController } from "./utilsController.js";
-[old lines 4-61: removed code not preserved in this diff view]
+import { Databases, Storage } from "node-appwrite";
+import { getClient } from "./utils/getClientFromConfig.js";
+const argv = yargs(hideBin(process.argv))
+    .command("migrate", "Run Appwrite migrations", (yargs) => {
+    return yargs
+        .option("it", {
+        alias: ["interactive", "i"],
+        type: "boolean",
+        description: "Run in interactive mode",
+    })
+        .option("dbIds", {
+        type: "string",
+        description: "Comma-separated list of database IDs to operate on",
+    })
+        .option("collectionIds", {
+        alias: ["collIds"],
+        type: "string",
+        description: "Comma-separated list of collection IDs to operate on",
+    })
+        .option("bucketIds", {
+        type: "string",
+        description: "Comma-separated list of bucket IDs to operate on",
+    })
+        .option("wipe", {
+        choices: ["all", "docs", "users"],
+        description: "Wipe data (all: everything, docs: only documents, users: only user data)",
+    })
+        .option("generate", {
+        type: "boolean",
+        description: "Generate TypeScript schemas from database schemas",
+    })
+        .option("import", {
+        type: "boolean",
+        description: "Import data into your databases",
+    })
+        .option("backup", {
+        type: "boolean",
+        description: "Perform a backup of your databases",
+    })
+        .option("writeData", {
+        type: "boolean",
+        description: "Write converted imported data to file",
+    })
+        .option("push", {
+        type: "boolean",
+        description: "Push your local Appwrite config to your configured Appwrite Project",
+    })
+        .option("sync", {
+        type: "boolean",
+        description: "Synchronize by pulling your Appwrite config from your configured Appwrite Project",
+    })
+        .option("endpoint", {
+        type: "string",
+        description: "Set the Appwrite endpoint",
+    })
+        .option("projectId", {
+        type: "string",
+        description: "Set the Appwrite project ID",
+    })
+        .option("apiKey", {
+        type: "string",
+        description: "Set the Appwrite API key",
+    })
+        .option("transfer", {
+        type: "boolean",
+        description: "Transfer data between databases or collections",
+    })
+        .option("fromDbId", {
+        alias: ["fromDb"],
+        type: "string",
+        description: "Set the source database ID for transfer",
+    })
+        .option("toDbId", {
+        alias: ["toDb"],
+        type: "string",
+        description: "Set the destination database ID for transfer",
+    })
+        .option("fromCollectionId", {
+        alias: ["fromCollId", "fromColl"],
+        type: "string",
+        description: "Set the source collection ID for transfer",
+    })
+        .option("toCollectionId", {
+        alias: ["toCollId", "toColl"],
+        type: "string",
+        description: "Set the destination collection ID for transfer",
+    })
+        .option("fromBucketId", {
+        alias: ["fromBucket"],
+        type: "string",
+        description: "Set the source bucket ID for transfer",
+    })
+        .option("toBucketId", {
+        alias: ["toBucket"],
+        type: "string",
+        description: "Set the destination bucket ID for transfer",
+    })
+        .option("remoteEndpoint", {
+        type: "string",
+        description: "Set the remote Appwrite endpoint for transfers",
+    })
+        .option("remoteProjectId", {
+        type: "string",
+        description: "Set the remote Appwrite project ID for transfers",
+    })
+        .option("remoteApiKey", {
+        type: "string",
+        description: "Set the remote Appwrite API key for transfers",
+    });
+})
+    .help()
+    .parse();
+async function main() {
+    const parsedArgv = (await argv);
+    const controller = new UtilsController(process.cwd());
+    await controller.init();
+    if (parsedArgv.it) {
+        const cli = new InteractiveCLI(process.cwd(), controller);
+        await cli.run();
     }
-[old lines 63-64: removed code not preserved in this diff view]
+    else {
+        // Handle non-interactive mode with the new options
+        const options = {
+            databases: parsedArgv.dbIds
+                ? await controller.getDatabasesByIds(parsedArgv.dbIds.replace(" ", "").split(","))
+                : undefined,
+            collections: parsedArgv.collectionIds
+                ? parsedArgv.collectionIds.replace(" ", "").split(",")
+                : undefined,
+            doBackup: parsedArgv.backup,
+            wipeDatabase: parsedArgv.wipe === "all" || parsedArgv.wipe === "docs",
+            wipeDocumentStorage: parsedArgv.wipe === "all",
+            wipeUsers: parsedArgv.wipe === "all" || parsedArgv.wipe === "users",
+            generateSchemas: parsedArgv.generate,
+            importData: parsedArgv.import,
+            checkDuplicates: false,
+            shouldWriteFile: parsedArgv.writeData,
+        };
+        if (parsedArgv.push) {
+            await controller.syncDb();
+        }
+        if (options.wipeDatabase ||
+            options.wipeDocumentStorage ||
+            options.wipeUsers) {
+            if (options.wipeDatabase) {
+                for (const db of options.databases || []) {
+                    await controller.wipeDatabase(db);
+                }
+            }
+            if (options.wipeDocumentStorage && parsedArgv.bucketIds) {
+                for (const bucketId of parsedArgv.bucketIds.split(",")) {
+                    await controller.wipeDocumentStorage(bucketId);
+                }
+            }
+            if (options.wipeUsers) {
+                await controller.wipeUsers();
+            }
+        }
+        if (options.doBackup) {
+            for (const db of options.databases || []) {
+                await controller.backupDatabase(db);
+            }
+        }
+        if (options.generateSchemas) {
+            await controller.generateSchemas();
+        }
+        if (options.importData) {
+            await controller.importData(options);
+        }
+        if (parsedArgv.transfer) {
+            const isRemote = !!parsedArgv.remoteEndpoint;
+            const fromDb = await controller.getDatabasesByIds([parsedArgv.fromDbId]);
+            let toDb;
+            let targetDatabases;
+            let targetStorage;
+            if (isRemote) {
+                if (!parsedArgv.remoteEndpoint ||
+                    !parsedArgv.remoteProjectId ||
+                    !parsedArgv.remoteApiKey) {
+                    throw new Error("Remote transfer details are missing");
+                }
+                const remoteClient = getClient(parsedArgv.remoteEndpoint, parsedArgv.remoteProjectId, parsedArgv.remoteApiKey);
+                targetDatabases = new Databases(remoteClient);
+                targetStorage = new Storage(remoteClient);
+                const remoteDbs = await targetDatabases.list();
+                toDb = remoteDbs.databases.find((db) => db.$id === parsedArgv.toDbId);
+            }
+            else {
+                toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId]))[0];
+            }
+            if (!fromDb[0] || !toDb) {
+                throw new Error("Source or target database not found");
+            }
+            let sourceBucket, targetBucket;
+            if (parsedArgv.fromBucketId) {
+                sourceBucket = await controller.storage?.getBucket(parsedArgv.fromBucketId);
+            }
+            if (parsedArgv.toBucketId) {
+                if (isRemote) {
+                    targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
+                }
+                else {
+                    targetBucket = await controller.storage?.getBucket(parsedArgv.toBucketId);
+                }
+            }
+            const transferOptions = {
+                isRemote,
+                fromDb: fromDb[0],
+                targetDb: toDb,
+                transferEndpoint: parsedArgv.remoteEndpoint,
+                transferProject: parsedArgv.remoteProjectId,
+                transferKey: parsedArgv.remoteApiKey,
+                sourceBucket: sourceBucket,
+                targetBucket: targetBucket,
+            };
+            await controller.transferData(transferOptions);
+        }
+        if (parsedArgv.sync) {
+            await controller.synchronizeConfigurations(options.databases);
+        }
     }
+}
+main().catch((error) => {
+    console.error("An error occurred:", error);
+    process.exit(1);
 });
-program.parse(process.argv);
```
package/dist/migrations/afterImportActions.js
CHANGED

```diff
@@ -1,9 +1,10 @@
-import { Databases, Storage,
+import { Databases, Storage, Query, ID, Client, Compression, } from "node-appwrite";
+import { InputFile } from "node-appwrite/file";
 import path from "path";
 import fs from "fs";
 import os from "os";
 import { logger } from "./logging.js";
-import {} from "appwrite-utils";
+import { tryAwaitWithRetry, } from "appwrite-utils";
 export const getDatabaseFromConfig = (config) => {
     if (!config.appwriteClient) {
         config.appwriteClient = new Client()
@@ -26,7 +27,7 @@ export const afterImportActions = {
     updateCreatedDocument: async (config, dbId, collId, docId, data) => {
         try {
             const db = getDatabaseFromConfig(config);
-            await db.updateDocument(dbId, collId, docId, data);
+            await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, docId, data));
         }
         catch (error) {
             console.error("Error updating document: ", error);
@@ -35,11 +36,11 @@ export const afterImportActions = {
     checkAndUpdateFieldInDocument: async (config, dbId, collId, docId, fieldName, oldFieldValue, newFieldValue) => {
         try {
             const db = getDatabaseFromConfig(config);
-            const doc = await db.getDocument(dbId, collId, docId);
+            const doc = await tryAwaitWithRetry(async () => await db.getDocument(dbId, collId, docId));
             if (doc[fieldName] == oldFieldValue) {
-                await db.updateDocument(dbId, collId, docId, {
+                await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, docId, {
                     [fieldName]: newFieldValue,
-                });
+                }));
             }
         }
         catch (error) {
@@ -50,10 +51,10 @@ export const afterImportActions = {
         const db = getDatabaseFromConfig(config);
         // Helper function to find a collection ID by name or return the ID if given
         const findCollectionId = async (collectionIdentifier) => {
-            const collectionsPulled = await db.listCollections(dbId, [
+            const collectionsPulled = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
                 Query.limit(25),
                 Query.equal("name", collectionIdentifier),
-            ]);
+            ]));
             if (collectionsPulled.total > 0) {
                 return collectionsPulled.collections[0].$id;
             }
@@ -71,9 +72,9 @@ export const afterImportActions = {
             const valueToSet = otherDoc[otherFieldName];
             if (valueToSet) {
                 // Update the target document
-                await db.updateDocument(dbId, targetCollectionId, docId, {
+                await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, {
                     [fieldName]: valueToSet,
-                });
+                }));
             }
             console.log(`Field ${fieldName} updated successfully in document ${docId}.`);
         }
@@ -89,17 +90,17 @@ export const afterImportActions = {
         const db = getDatabaseFromConfig(config);
         // Helper function to find a collection ID by name or return the ID if given
         const findCollectionId = async (collectionIdentifier) => {
-            const collections = await db.listCollections(dbId, [
+            const collections = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
                 Query.equal("name", collectionIdentifier),
                 Query.limit(1),
-            ]);
+            ]));
             return collections.total > 0
                 ? collections.collections[0].$id
                 : collectionIdentifier;
         };
         // Function to check if the target field is an array
         const isTargetFieldArray = async (collectionId, fieldName) => {
-            const collection = await db.getCollection(dbId, collectionId);
+            const collection = await tryAwaitWithRetry(async () => await db.getCollection(dbId, collectionId));
             const attribute = collection.attributes.find((attr) => attr.key === fieldName);
             // @ts-ignore
             return attribute?.array === true;
@@ -119,7 +120,7 @@ export const afterImportActions = {
                 queries.push(Query.cursorAfter(cursor));
             }
             queries.push(Query.limit(docLimit));
-            const response = await db.listDocuments(dbId, otherCollectionId, queries);
+            const response = await tryAwaitWithRetry(async () => await db.listDocuments(dbId, otherCollectionId, queries));
             const documents = response.documents;
             if (documents.length === 0 || documents.length < docLimit) {
                 return documents;
@@ -134,7 +135,7 @@ export const afterImportActions = {
             const updatePayload = targetFieldIsArray
                 ? { [fieldName]: documentIds }
                 : { [fieldName]: documentIds[0] };
-            await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+            await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, updatePayload));
             console.log(`Field ${fieldName} updated successfully in document ${docId} with ${documentIds.length} document IDs.`);
         }
     }
@@ -145,10 +146,10 @@ export const afterImportActions = {
     setTargetFieldFromOtherCollectionDocumentsByMatchingField: async (config, dbId, collIdOrName, docId, fieldName, otherCollIdOrName, matchingFieldName, matchingFieldValue, targetField) => {
         const db = getDatabaseFromConfig(config);
         const findCollectionId = async (collectionIdentifier) => {
-            const collections = await db.listCollections(dbId, [
+            const collections = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
                 Query.equal("name", collectionIdentifier),
                 Query.limit(1),
-            ]);
+            ]));
             return collections.total > 0
                 ? collections.collections[0].$id
                 : collectionIdentifier;
@@ -172,7 +173,7 @@ export const afterImportActions = {
             if (cursor) {
                 queries.push(Query.cursorAfter(cursor));
             }
-            const response = await db.listDocuments(dbId, otherCollectionId, queries);
+            const response = await tryAwaitWithRetry(async () => await db.listDocuments(dbId, otherCollectionId, queries));
             const documents = response.documents;
             if (documents.length === 0 || documents.length < docLimit) {
                 return documents;
@@ -188,7 +189,7 @@ export const afterImportActions = {
             const updatePayload = targetFieldIsArray
                 ? { [fieldName]: targetFieldValues }
                 : { [fieldName]: targetFieldValues[0] };
-            await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+            await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, updatePayload));
             console.log(`Field ${fieldName} updated successfully in document ${docId} with values from field ${targetField}.`);
         }
     }
@@ -199,22 +200,20 @@ export const afterImportActions = {
     createOrGetBucket: async (config, bucketName, bucketId, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus) => {
         try {
             const storage = getStorageFromConfig(config);
-            const bucket = await storage.listBuckets([
-                Query.equal("name", bucketName),
-            ]);
+            const bucket = await tryAwaitWithRetry(async () => await storage.listBuckets([Query.equal("name", bucketName)]));
             if (bucket.buckets.length > 0) {
                 return bucket.buckets[0];
             }
             else if (bucketId) {
                 try {
-                    return await storage.getBucket(bucketId);
+                    return await tryAwaitWithRetry(async () => await storage.getBucket(bucketId));
                 }
                 catch (error) {
-                    return await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus);
+                    return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression ? Compression.Gzip : undefined, encryption, antivirus));
                 }
             }
             else {
-                return await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus);
+                return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression ? Compression.Gzip : undefined, encryption, antivirus));
             }
         }
         catch (error) {
@@ -225,12 +224,16 @@ export const afterImportActions = {
         try {
             const db = getDatabaseFromConfig(config);
             const storage = getStorageFromConfig(config);
-            const collection = await db.getCollection(dbId, collId);
+            const collection = await tryAwaitWithRetry(async () => await db.getCollection(dbId, collId));
             const attributes = collection.attributes;
             const attribute = attributes.find((a) => a.key === fieldName);
             // console.log(
             //   `Processing field ${fieldName} in collection ${collId} for document ${docId} in database ${dbId} in bucket ${bucketId} with path ${filePath} and name ${fileName}...`
             // );
+            if (filePath.length === 0 || fileName.length === 0) {
+                console.error(`File path or name is empty for field ${fieldName} in collection ${collId}, skipping...`);
+                return;
+            }
             let isArray = false;
             if (!attribute) {
                 console.log(`Field ${fieldName} not found in collection ${collId}, weird, skipping...`);
@@ -242,7 +245,7 @@ export const afterImportActions = {
             // Define a helper function to check if a value is a URL
             const isUrl = (value) => typeof value === "string" &&
                 (value.startsWith("http://") || value.startsWith("https://"));
-            const doc = await db.getDocument(dbId, collId, docId);
+            const doc = await tryAwaitWithRetry(async () => await db.getDocument(dbId, collId, docId));
             const existingFieldValue = doc[fieldName];
             // Handle the case where the field is an array
             let updateData = isArray ? [] : "";
@@ -255,7 +258,7 @@ export const afterImportActions = {
                 const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "appwrite_tmp"));
                 const tempFilePath = path.join(tempDir, fileName);
                 // Download the file using fetch
-                const response = await fetch(filePath);
+                const response = await tryAwaitWithRetry(async () => await fetch(filePath));
                 if (!response.ok)
                     console.error(`Failed to fetch ${filePath}: ${response.statusText} for document ${docId} with field ${fieldName}`);
                 // Use arrayBuffer if buffer is not available
@@ -265,7 +268,7 @@ export const afterImportActions = {
                 // Create InputFile from the downloaded file
                 const inputFile = InputFile.fromPath(tempFilePath, fileName);
                 // Use the full file name (with extension) for creating the file
-                const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+                const file = await tryAwaitWithRetry(async () => await storage.createFile(bucketId, ID.unique(), inputFile));
                 console.log("Created file from URL: ", file.$id);
                 // After uploading, adjust the updateData based on whether the field is an array or not
                 if (isArray) {
@@ -274,15 +277,9 @@ export const afterImportActions = {
                 else {
                     updateData = file.$id; // Set the new file ID
                 }
-                await db.updateDocument(dbId, collId, doc.$id, {
+                await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, doc.$id, {
                     [fieldName]: updateData,
-                });
-                // console.log(
-                //   "Updating document with file: ",
-                //   doc.$id,
-                //   `${fieldName}: `,
-                //   updateData
-                // );
+                }));
                 // If the file was downloaded, delete it after uploading
                 fs.unlinkSync(tempFilePath);
             }
@@ -295,16 +292,16 @@ export const afterImportActions = {
             }
             const pathToFile = path.join(filePath, fileFullName);
             const inputFile = InputFile.fromPath(pathToFile, fileName);
-            const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+            const file = await tryAwaitWithRetry(async () => await storage.createFile(bucketId, ID.unique(), inputFile));
             if (isArray) {
                 updateData = [...updateData, file.$id]; // Append the new file ID
             }
             else {
                 updateData = file.$id; // Set the new file ID
             }
-            await db.updateDocument(dbId, collId, doc.$id, {
+            tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, doc.$id, {
                 [fieldName]: updateData,
-            });
+            }));
             console.log("Created file from path: ", file.$id);
         }
     }
```