appwrite-utils-cli 0.0.286 → 0.9.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/README.md +162 -96
  2. package/dist/collections/attributes.d.ts +4 -0
  3. package/dist/collections/attributes.js +224 -0
  4. package/dist/collections/indexes.d.ts +4 -0
  5. package/dist/collections/indexes.js +27 -0
  6. package/dist/collections/methods.d.ts +16 -0
  7. package/dist/collections/methods.js +216 -0
  8. package/dist/databases/methods.d.ts +6 -0
  9. package/dist/databases/methods.js +33 -0
  10. package/dist/interactiveCLI.d.ts +19 -0
  11. package/dist/interactiveCLI.js +555 -0
  12. package/dist/main.js +224 -62
  13. package/dist/migrations/afterImportActions.js +37 -40
  14. package/dist/migrations/appwriteToX.d.ts +26 -25
  15. package/dist/migrations/appwriteToX.js +42 -6
  16. package/dist/migrations/attributes.js +21 -20
  17. package/dist/migrations/backup.d.ts +93 -87
  18. package/dist/migrations/collections.d.ts +6 -0
  19. package/dist/migrations/collections.js +149 -20
  20. package/dist/migrations/converters.d.ts +2 -18
  21. package/dist/migrations/converters.js +13 -2
  22. package/dist/migrations/dataLoader.d.ts +276 -161
  23. package/dist/migrations/dataLoader.js +535 -292
  24. package/dist/migrations/databases.js +8 -2
  25. package/dist/migrations/helper.d.ts +3 -0
  26. package/dist/migrations/helper.js +21 -0
  27. package/dist/migrations/importController.d.ts +5 -2
  28. package/dist/migrations/importController.js +125 -88
  29. package/dist/migrations/importDataActions.d.ts +9 -1
  30. package/dist/migrations/importDataActions.js +15 -3
  31. package/dist/migrations/indexes.js +3 -2
  32. package/dist/migrations/logging.js +20 -8
  33. package/dist/migrations/migrationHelper.d.ts +9 -4
  34. package/dist/migrations/migrationHelper.js +6 -5
  35. package/dist/migrations/openapi.d.ts +1 -1
  36. package/dist/migrations/openapi.js +33 -18
  37. package/dist/migrations/queue.js +3 -2
  38. package/dist/migrations/relationships.d.ts +2 -2
  39. package/dist/migrations/schemaStrings.js +53 -41
  40. package/dist/migrations/setupDatabase.d.ts +2 -4
  41. package/dist/migrations/setupDatabase.js +24 -105
  42. package/dist/migrations/storage.d.ts +3 -1
  43. package/dist/migrations/storage.js +110 -16
  44. package/dist/migrations/transfer.d.ts +30 -0
  45. package/dist/migrations/transfer.js +337 -0
  46. package/dist/migrations/users.d.ts +2 -1
  47. package/dist/migrations/users.js +78 -43
  48. package/dist/schemas/authUser.d.ts +2 -2
  49. package/dist/storage/methods.d.ts +15 -0
  50. package/dist/storage/methods.js +207 -0
  51. package/dist/storage/schemas.d.ts +687 -0
  52. package/dist/storage/schemas.js +175 -0
  53. package/dist/utils/getClientFromConfig.d.ts +4 -0
  54. package/dist/utils/getClientFromConfig.js +16 -0
  55. package/dist/utils/helperFunctions.d.ts +11 -1
  56. package/dist/utils/helperFunctions.js +38 -0
  57. package/dist/utils/retryFailedPromises.d.ts +2 -0
  58. package/dist/utils/retryFailedPromises.js +21 -0
  59. package/dist/utils/schemaStrings.d.ts +13 -0
  60. package/dist/utils/schemaStrings.js +403 -0
  61. package/dist/utils/setupFiles.js +110 -61
  62. package/dist/utilsController.d.ts +40 -22
  63. package/dist/utilsController.js +164 -84
  64. package/package.json +13 -15
  65. package/src/collections/attributes.ts +483 -0
  66. package/src/collections/indexes.ts +53 -0
  67. package/src/collections/methods.ts +331 -0
  68. package/src/databases/methods.ts +47 -0
  69. package/src/init.ts +64 -64
  70. package/src/interactiveCLI.ts +767 -0
  71. package/src/main.ts +289 -83
  72. package/src/migrations/afterImportActions.ts +553 -490
  73. package/src/migrations/appwriteToX.ts +237 -174
  74. package/src/migrations/attributes.ts +483 -422
  75. package/src/migrations/backup.ts +205 -205
  76. package/src/migrations/collections.ts +545 -300
  77. package/src/migrations/converters.ts +161 -150
  78. package/src/migrations/dataLoader.ts +1615 -1304
  79. package/src/migrations/databases.ts +44 -25
  80. package/src/migrations/dbHelpers.ts +92 -92
  81. package/src/migrations/helper.ts +40 -0
  82. package/src/migrations/importController.ts +448 -384
  83. package/src/migrations/importDataActions.ts +315 -307
  84. package/src/migrations/indexes.ts +40 -37
  85. package/src/migrations/logging.ts +29 -16
  86. package/src/migrations/migrationHelper.ts +207 -201
  87. package/src/migrations/openapi.ts +83 -70
  88. package/src/migrations/queue.ts +118 -119
  89. package/src/migrations/relationships.ts +324 -324
  90. package/src/migrations/schemaStrings.ts +472 -460
  91. package/src/migrations/setupDatabase.ts +118 -219
  92. package/src/migrations/storage.ts +538 -358
  93. package/src/migrations/transfer.ts +608 -0
  94. package/src/migrations/users.ts +362 -285
  95. package/src/migrations/validationRules.ts +63 -63
  96. package/src/schemas/authUser.ts +23 -23
  97. package/src/setup.ts +8 -8
  98. package/src/storage/methods.ts +371 -0
  99. package/src/storage/schemas.ts +205 -0
  100. package/src/types.ts +9 -9
  101. package/src/utils/getClientFromConfig.ts +17 -0
  102. package/src/utils/helperFunctions.ts +181 -127
  103. package/src/utils/index.ts +2 -2
  104. package/src/utils/loadConfigs.ts +59 -59
  105. package/src/utils/retryFailedPromises.ts +27 -0
  106. package/src/utils/schemaStrings.ts +473 -0
  107. package/src/utils/setupFiles.ts +228 -182
  108. package/src/utilsController.ts +325 -194
  109. package/tsconfig.json +37 -37
@@ -1,358 +1,538 @@
1
- import {
2
- Storage,
3
- Databases,
4
- Query,
5
- InputFile,
6
- type Models,
7
- ID,
8
- Permission,
9
- } from "node-appwrite";
10
- import { type OperationCreate, type BackupCreate } from "./backup.js";
11
- import { splitIntoBatches } from "./migrationHelper.js";
12
- import type { AppwriteConfig } from "appwrite-utils";
13
-
14
- export const logOperation = async (
15
- db: Databases,
16
- dbId: string,
17
- operationDetails: OperationCreate,
18
- operationId?: string
19
- ): Promise<Models.Document> => {
20
- try {
21
- let operation;
22
- if (operationId) {
23
- // Update existing operation log
24
- operation = await db.updateDocument(
25
- "migrations",
26
- "currentOperations",
27
- operationId,
28
- operationDetails
29
- );
30
- } else {
31
- // Create new operation log
32
- operation = await db.createDocument(
33
- "migrations",
34
- "currentOperations",
35
- ID.unique(),
36
- operationDetails
37
- );
38
- }
39
- console.log(`Operation logged: ${operation.$id}`);
40
- return operation;
41
- } catch (error) {
42
- console.error(`Error logging operation: ${error}`);
43
- throw error;
44
- }
45
- };
46
-
47
- export const initOrGetBackupStorage = async (storage: Storage) => {
48
- try {
49
- const backupStorage = await storage.getBucket("backupStorage");
50
- return backupStorage;
51
- } catch (e) {
52
- // ID backupStorage
53
- // Name Backups Storage
54
- const backupStorage = await storage.createBucket(
55
- "backupStorage",
56
- "Backups Storage"
57
- );
58
- return backupStorage;
59
- }
60
- };
61
-
62
- export const initOrGetDocumentStorage = async (
63
- storage: Storage,
64
- config: AppwriteConfig,
65
- dbName: string
66
- ) => {
67
- try {
68
- await storage.getBucket(
69
- `${config.documentBucketId}_${dbName.toLowerCase().replace(" ", "")}`
70
- );
71
- } catch (e) {
72
- // ID documentStorage
73
- // Name Document Storage
74
- const documentStorage = await storage.createBucket(
75
- `${config.documentBucketId}_${dbName.toLowerCase().replace(" ", "")}`,
76
- `Document Storage ${dbName}`,
77
- [
78
- Permission.read("any"),
79
- Permission.create("users"),
80
- Permission.update("users"),
81
- Permission.delete("users"),
82
- ]
83
- );
84
- return documentStorage;
85
- }
86
- };
87
-
88
- export const wipeDocumentStorage = async (
89
- storage: Storage,
90
- config: AppwriteConfig,
91
- dbName: string
92
- ): Promise<void> => {
93
- const bucketId = `${config.documentBucketId
94
- .toLowerCase()
95
- .replace(" ", "")}_${dbName.toLowerCase().replace(" ", "")}`;
96
- console.log(`Wiping storage for bucket ID: ${bucketId}`);
97
- let moreFiles = true;
98
- let lastFileId: string | undefined;
99
- const allFiles: string[] = [];
100
- while (moreFiles) {
101
- const queries = [Query.limit(100)]; // Adjust the limit as needed
102
- if (lastFileId) {
103
- queries.push(Query.cursorAfter(lastFileId));
104
- }
105
- const filesPulled = await storage.listFiles(bucketId, queries);
106
- if (filesPulled.files.length === 0) {
107
- console.log("No files found, done!");
108
- moreFiles = false;
109
- break;
110
- } else if (filesPulled.files.length > 0) {
111
- const fileIds = filesPulled.files.map((file) => file.$id);
112
- allFiles.push(...fileIds);
113
- }
114
- moreFiles = filesPulled.files.length === 100; // Adjust based on the limit
115
- if (moreFiles) {
116
- lastFileId = filesPulled.files[filesPulled.files.length - 1].$id;
117
- }
118
- }
119
-
120
- for (const fileId of allFiles) {
121
- console.log(`Deleting file: ${fileId}`);
122
- await storage.deleteFile(bucketId, fileId);
123
- }
124
- console.log(`All files in bucket ${bucketId} have been deleted.`);
125
- };
126
-
127
- async function retryFailedPromises(
128
- batch: Promise<Models.Document>[],
129
- maxRetries = 3
130
- ): Promise<PromiseSettledResult<Models.Document>[]> {
131
- const results = await Promise.allSettled(batch);
132
- const toRetry: Promise<any>[] = [];
133
-
134
- results.forEach((result, index) => {
135
- if (result.status === "rejected") {
136
- console.error("Promise rejected with reason:", result.reason);
137
- if (maxRetries > 0) {
138
- toRetry.push(batch[index]);
139
- }
140
- }
141
- });
142
-
143
- if (toRetry.length > 0) {
144
- console.log(`Retrying ${toRetry.length} promises`);
145
- return retryFailedPromises(toRetry, maxRetries - 1);
146
- } else {
147
- return results
148
- .filter((result) => result.status === "fulfilled")
149
- .map((result) => result);
150
- }
151
- }
152
-
153
- export const backupDatabase = async (
154
- database: Databases,
155
- databaseId: string,
156
- storage: Storage
157
- ): Promise<void> => {
158
- console.log("---------------------------------");
159
- console.log("Starting Database Backup of " + databaseId);
160
- console.log("---------------------------------");
161
- let data: BackupCreate = {
162
- database: "",
163
- collections: [],
164
- documents: [],
165
- };
166
-
167
- const backupOperation = await logOperation(database, databaseId, {
168
- operationType: "backup",
169
- collectionId: "",
170
- data: "Starting backup...",
171
- progress: 0,
172
- total: 100, // This will be dynamically updated later
173
- error: "",
174
- status: "in_progress",
175
- });
176
-
177
- // Fetch and backup the database details
178
- let db: Models.Database;
179
- try {
180
- db = await database.get(databaseId);
181
- } catch (e) {
182
- console.error(`Error fetching database: ${e}`);
183
- await logOperation(
184
- database,
185
- databaseId,
186
- {
187
- operationType: "backup",
188
- collectionId: "",
189
- data: "Error fetching database, skipping...",
190
- progress: 0,
191
- total: 100, // This will be dynamically updated later
192
- error: `Error fetching database: ${e}`,
193
- status: "error",
194
- },
195
- backupOperation.$id
196
- );
197
- return;
198
- }
199
- data.database = JSON.stringify(db);
200
-
201
- // Initialize pagination for collections
202
- let lastCollectionId = "";
203
- let moreCollections = true;
204
- let progress = 0;
205
- let total = 0; // Initialize total to 0, will be updated dynamically
206
-
207
- while (moreCollections) {
208
- const collectionResponse = await database.listCollections(databaseId, [
209
- Query.limit(500), // Adjust the limit as needed
210
- ...(lastCollectionId ? [Query.cursorAfter(lastCollectionId)] : []),
211
- ]);
212
-
213
- total += collectionResponse.collections.length; // Update total with number of collections
214
-
215
- for (const {
216
- $id: collectionId,
217
- name: collectionName,
218
- } of collectionResponse.collections) {
219
- let collectionDocumentCount = 0; // Initialize document count for the current collection
220
- try {
221
- const collection = await database.getCollection(
222
- databaseId,
223
- collectionId
224
- );
225
- progress++;
226
- data.collections.push(JSON.stringify(collection));
227
-
228
- // Initialize pagination for documents within the current collection
229
- let lastDocumentId = "";
230
- let moreDocuments = true;
231
-
232
- while (moreDocuments) {
233
- const documentResponse = await database.listDocuments(
234
- databaseId,
235
- collectionId,
236
- [
237
- Query.limit(500), // Adjust the limit as needed
238
- ...(lastDocumentId ? [Query.cursorAfter(lastDocumentId)] : []),
239
- ]
240
- );
241
-
242
- total += documentResponse.documents.length; // Update total with number of documents
243
- collectionDocumentCount += documentResponse.documents.length; // Update document count for the current collection
244
- let documentPromises: Promise<Models.Document>[] = [];
245
- for (const { $id: documentId } of documentResponse.documents) {
246
- documentPromises.push(
247
- database.getDocument(databaseId, collectionId, documentId)
248
- );
249
- }
250
- const promiseBatches = splitIntoBatches(documentPromises);
251
- const documentsPulled = [];
252
- for (const batch of promiseBatches) {
253
- const successfulDocuments = await retryFailedPromises(batch);
254
- documentsPulled.push(...successfulDocuments);
255
- }
256
- const documents = documentsPulled;
257
- data.documents.push({
258
- collectionId: collectionId,
259
- data: JSON.stringify(documents),
260
- });
261
- progress += documents.length;
262
-
263
- console.log(
264
- `Collection ${collectionName} backed up ${collectionDocumentCount} documents (so far)`
265
- );
266
-
267
- // Update the operation log with the current progress
268
- await logOperation(
269
- database,
270
- databaseId,
271
- {
272
- operationType: "backup",
273
- collectionId: collectionId,
274
- data: `Still backing up, ${data.collections.length} collections so far`,
275
- progress: progress,
276
- total: total,
277
- error: "",
278
- status: "in_progress",
279
- },
280
- backupOperation.$id
281
- );
282
-
283
- // Check if there are more documents to fetch
284
- moreDocuments = documentResponse.documents.length === 500;
285
- if (moreDocuments) {
286
- lastDocumentId =
287
- documentResponse.documents[documentResponse.documents.length - 1]
288
- .$id;
289
- }
290
- }
291
- console.log(
292
- `Collection ${collectionName} backed up with ${collectionDocumentCount} documents.`
293
- );
294
- } catch (error) {
295
- console.log(
296
- `Collection ${collectionName} must not exist, continuing...`
297
- );
298
- continue;
299
- }
300
- }
301
-
302
- // Check if there are more collections to fetch
303
- moreCollections = collectionResponse.collections.length === 500;
304
- if (moreCollections) {
305
- lastCollectionId =
306
- collectionResponse.collections[
307
- collectionResponse.collections.length - 1
308
- ].$id;
309
- }
310
- }
311
-
312
- // Update the backup operation with the current progress and total
313
- await logOperation(
314
- database,
315
- databaseId,
316
- {
317
- operationType: "backup",
318
- collectionId: "",
319
- data: `Still backing up, ${data.collections.length} collections so far`,
320
- progress: progress,
321
- total: total,
322
- error: "",
323
- status: "in_progress",
324
- },
325
- backupOperation.$id
326
- );
327
-
328
- // Create the backup with the accumulated data
329
- const bucket = await initOrGetBackupStorage(storage);
330
- const inputFile = InputFile.fromPlainText(
331
- JSON.stringify(data),
332
- `${new Date().toISOString()}-${databaseId}.json`
333
- );
334
- const fileCreated = await storage.createFile(
335
- bucket.$id,
336
- ID.unique(),
337
- inputFile
338
- );
339
-
340
- // Final update to the backup operation marking it as completed
341
- await logOperation(
342
- database,
343
- databaseId,
344
- {
345
- operationType: "backup",
346
- collectionId: "",
347
- data: fileCreated.$id,
348
- progress: 100,
349
- total: total, // Ensure the total reflects the actual total processed
350
- error: "",
351
- status: "completed",
352
- },
353
- backupOperation.$id
354
- );
355
- console.log("---------------------------------");
356
- console.log("Database Backup Complete");
357
- console.log("---------------------------------");
358
- };
1
+ import {
2
+ Storage,
3
+ Databases,
4
+ Query,
5
+ type Models,
6
+ ID,
7
+ Permission,
8
+ } from "node-appwrite";
9
+ import { InputFile } from "node-appwrite/file";
10
+ import { type OperationCreate, type BackupCreate } from "./backup.js";
11
+ import { splitIntoBatches } from "./migrationHelper.js";
12
+ import type { AppwriteConfig } from "appwrite-utils";
13
+ import {
14
+ getAppwriteClient,
15
+ tryAwaitWithRetry,
16
+ } from "../utils/helperFunctions.js";
17
+
18
+ export const logOperation = async (
19
+ db: Databases,
20
+ dbId: string,
21
+ operationDetails: OperationCreate,
22
+ operationId?: string
23
+ ): Promise<Models.Document> => {
24
+ try {
25
+ let operation;
26
+ if (operationId) {
27
+ // Update existing operation log
28
+ operation = await tryAwaitWithRetry(
29
+ async () =>
30
+ await db.updateDocument(
31
+ "migrations",
32
+ "currentOperations",
33
+ operationId,
34
+ operationDetails
35
+ )
36
+ );
37
+ } else {
38
+ // Create new operation log
39
+ operation = await db.createDocument(
40
+ "migrations",
41
+ "currentOperations",
42
+ ID.unique(),
43
+ operationDetails
44
+ );
45
+ }
46
+ console.log(`Operation logged: ${operation.$id}`);
47
+ return operation;
48
+ } catch (error) {
49
+ console.error(`Error logging operation: ${error}`);
50
+ throw error;
51
+ }
52
+ };
53
+
54
+ export const initOrGetBackupStorage = async (storage: Storage) => {
55
+ try {
56
+ const backupStorage = await tryAwaitWithRetry(
57
+ async () => await storage.getBucket("backupStorage")
58
+ );
59
+ return backupStorage;
60
+ } catch (e) {
61
+ // ID backupStorage
62
+ // Name Backups Storage
63
+ const backupStorage = await tryAwaitWithRetry(
64
+ async () => await storage.createBucket("backupStorage", "Backups Storage")
65
+ );
66
+ return backupStorage;
67
+ }
68
+ };
69
+
70
+ export const initOrGetDocumentStorage = async (
71
+ storage: Storage,
72
+ config: AppwriteConfig,
73
+ dbId: string,
74
+ bucketName?: string
75
+ ) => {
76
+ try {
77
+ await tryAwaitWithRetry(
78
+ async () =>
79
+ await storage.getBucket(
80
+ bucketName ??
81
+ `${config.documentBucketId}_${dbId.toLowerCase().replace(" ", "")}`
82
+ )
83
+ );
84
+ } catch (e) {
85
+ // ID documentStorage
86
+ // Name Document Storage
87
+ const documentStorage = await tryAwaitWithRetry(
88
+ async () =>
89
+ await storage.createBucket(
90
+ `${config.documentBucketId}_${dbId.toLowerCase().replace(" ", "")}`,
91
+ `${dbId} Storage`,
92
+ [
93
+ Permission.read("any"),
94
+ Permission.create("users"),
95
+ Permission.update("users"),
96
+ Permission.delete("users"),
97
+ ]
98
+ )
99
+ );
100
+ return documentStorage;
101
+ }
102
+ };
103
+
104
/**
 * Deletes every file in the per-database document bucket.
 *
 * The bucket ID is derived as
 * `${documentBucketId lowercased}_${dbName lowercased}` with the first space
 * removed from each part.
 *
 * NOTE(review): `.replace(" ", "")` removes only the FIRST space — values with
 * multiple spaces keep the rest; confirm this matches bucket creation.
 * NOTE(review): initOrGetDocumentStorage does NOT lowercase
 * `config.documentBucketId` when building the ID, so for mixed-case configs
 * the ID computed here may not match the bucket that was created — verify.
 *
 * @param storage - Appwrite Storage service.
 * @param config - Config providing `documentBucketId`.
 * @param dbName - Database name used to derive the bucket ID.
 */
export const wipeDocumentStorage = async (
  storage: Storage,
  config: AppwriteConfig,
  dbName: string
): Promise<void> => {
  const bucketId = `${config.documentBucketId
    .toLowerCase()
    .replace(" ", "")}_${dbName.toLowerCase().replace(" ", "")}`;
  console.log(`Wiping storage for bucket ID: ${bucketId}`);
  let moreFiles = true;
  let lastFileId: string | undefined;
  const allFiles: string[] = [];
  // Phase 1: collect every file ID via cursor pagination, 100 per page.
  while (moreFiles) {
    const queries = [Query.limit(100)]; // Adjust the limit as needed
    if (lastFileId) {
      queries.push(Query.cursorAfter(lastFileId));
    }
    const filesPulled = await tryAwaitWithRetry(
      async () => await storage.listFiles(bucketId, queries)
    );
    if (filesPulled.files.length === 0) {
      console.log("No files found, done!");
      moreFiles = false;
      break;
    } else if (filesPulled.files.length > 0) {
      const fileIds = filesPulled.files.map((file) => file.$id);
      allFiles.push(...fileIds);
    }
    // A full page implies there may be more; a short page ends the loop.
    moreFiles = filesPulled.files.length === 100; // Adjust based on the limit
    if (moreFiles) {
      lastFileId = filesPulled.files[filesPulled.files.length - 1].$id;
    }
  }

  // Phase 2: delete sequentially — one request at a time, each retried.
  for (const fileId of allFiles) {
    console.log(`Deleting file: ${fileId}`);
    await tryAwaitWithRetry(
      async () => await storage.deleteFile(bucketId, fileId)
    );
  }
  console.log(`All files in bucket ${bucketId} have been deleted.`);
};
146
+
147
+ async function retryFailedPromises(
148
+ batch: Promise<Models.Document>[],
149
+ maxRetries = 3
150
+ ): Promise<PromiseSettledResult<Models.Document>[]> {
151
+ const results = await Promise.allSettled(batch);
152
+ const toRetry: Promise<any>[] = [];
153
+
154
+ results.forEach((result, index) => {
155
+ if (result.status === "rejected") {
156
+ console.error("Promise rejected with reason:", result.reason);
157
+ if (maxRetries > 0) {
158
+ toRetry.push(batch[index]);
159
+ }
160
+ }
161
+ });
162
+
163
+ if (toRetry.length > 0) {
164
+ console.log(`Retrying ${toRetry.length} promises`);
165
+ return retryFailedPromises(toRetry, maxRetries - 1);
166
+ } else {
167
+ return results
168
+ .filter((result) => result.status === "fulfilled")
169
+ .map((result) => result);
170
+ }
171
+ }
172
+
173
/**
 * Backs up an entire Appwrite database — its metadata, every collection's
 * schema, and every document — into one JSON file written to the
 * "backupStorage" bucket, while recording progress in the migrations
 * operation log.
 *
 * NOTE(review): the serialized `documents` entries are arrays of
 * PromiseSettledResult wrappers ({ status, value }) rather than raw documents,
 * because `retryFailedPromises` returns settled results — confirm the restore
 * path expects that shape.
 * NOTE(review): each document is re-fetched individually via `getDocument`
 * even though `listDocuments` already returned document payloads — presumably
 * to get full payloads; verify this is intentional before optimizing.
 *
 * @param database - Databases service for the source project.
 * @param databaseId - ID of the database to back up.
 * @param storage - Storage service used to write the backup file.
 */
export const backupDatabase = async (
  database: Databases,
  databaseId: string,
  storage: Storage
): Promise<void> => {
  console.log("---------------------------------");
  console.log("Starting Database Backup of " + databaseId);
  console.log("---------------------------------");
  // Accumulates everything that will be serialized into the backup file.
  let data: BackupCreate = {
    database: "",
    collections: [],
    documents: [],
  };

  // Open an operation-log entry; its ID is reused for all progress updates.
  const backupOperation = await logOperation(database, databaseId, {
    operationType: "backup",
    collectionId: "",
    data: "Starting backup...",
    progress: 0,
    total: 100, // This will be dynamically updated later
    error: "",
    status: "in_progress",
  });

  // Fetch and backup the database details
  let db: Models.Database;
  try {
    db = await tryAwaitWithRetry(async () => await database.get(databaseId));
  } catch (e) {
    // Database unreachable: mark the operation as errored and bail out.
    console.error(`Error fetching database: ${e}`);
    await logOperation(
      database,
      databaseId,
      {
        operationType: "backup",
        collectionId: "",
        data: "Error fetching database, skipping...",
        progress: 0,
        total: 100, // This will be dynamically updated later
        error: `Error fetching database: ${e}`,
        status: "error",
      },
      backupOperation.$id
    );
    return;
  }
  data.database = JSON.stringify(db);

  // Initialize pagination for collections
  let lastCollectionId = "";
  let moreCollections = true;
  let progress = 0;
  let total = 0; // Initialize total to 0, will be updated dynamically

  // Outer loop: page through collections, 500 per page.
  while (moreCollections) {
    const collectionResponse = await tryAwaitWithRetry(
      async () =>
        await database.listCollections(databaseId, [
          Query.limit(500), // Adjust the limit as needed
          ...(lastCollectionId ? [Query.cursorAfter(lastCollectionId)] : []),
        ])
    );

    total += collectionResponse.collections.length; // Update total with number of collections

    for (const {
      $id: collectionId,
      name: collectionName,
    } of collectionResponse.collections) {
      let collectionDocumentCount = 0; // Initialize document count for the current collection
      try {
        const collection = await tryAwaitWithRetry(
          async () => await database.getCollection(databaseId, collectionId)
        );
        progress++;
        data.collections.push(JSON.stringify(collection));

        // Initialize pagination for documents within the current collection
        let lastDocumentId = "";
        let moreDocuments = true;

        // Inner loop: page through this collection's documents, 500 per page.
        while (moreDocuments) {
          const documentResponse = await tryAwaitWithRetry(
            async () =>
              await database.listDocuments(databaseId, collectionId, [
                Query.limit(500), // Adjust the limit as needed
                ...(lastDocumentId ? [Query.cursorAfter(lastDocumentId)] : []),
              ])
          );

          total += documentResponse.documents.length; // Update total with number of documents
          collectionDocumentCount += documentResponse.documents.length; // Update document count for the current collection
          // Kick off one getDocument per listed ID; batched below.
          let documentPromises: Promise<Models.Document>[] = [];
          for (const { $id: documentId } of documentResponse.documents) {
            documentPromises.push(
              database.getDocument(databaseId, collectionId, documentId)
            );
          }
          const promiseBatches = splitIntoBatches(documentPromises);
          const documentsPulled = [];
          for (const batch of promiseBatches) {
            // Returns settled results; rejected fetches are dropped (see note).
            const successfulDocuments = await retryFailedPromises(batch);
            documentsPulled.push(...successfulDocuments);
          }
          const documents = documentsPulled;
          data.documents.push({
            collectionId: collectionId,
            data: JSON.stringify(documents),
          });
          progress += documents.length;

          console.log(
            `Collection ${collectionName} backed up ${collectionDocumentCount} documents (so far)`
          );

          // Update the operation log with the current progress
          await logOperation(
            database,
            databaseId,
            {
              operationType: "backup",
              collectionId: collectionId,
              data: `Still backing up, ${data.collections.length} collections so far`,
              progress: progress,
              total: total,
              error: "",
              status: "in_progress",
            },
            backupOperation.$id
          );

          // Check if there are more documents to fetch
          moreDocuments = documentResponse.documents.length === 500;
          if (moreDocuments) {
            lastDocumentId =
              documentResponse.documents[documentResponse.documents.length - 1]
                .$id;
          }
        }
        console.log(
          `Collection ${collectionName} backed up with ${collectionDocumentCount} documents.`
        );
      } catch (error) {
        // Any failure for this collection (including getCollection) skips it.
        console.log(
          `Collection ${collectionName} must not exist, continuing...`
        );
        continue;
      }
    }

    // Check if there are more collections to fetch
    moreCollections = collectionResponse.collections.length === 500;
    if (moreCollections) {
      lastCollectionId =
        collectionResponse.collections[
          collectionResponse.collections.length - 1
        ].$id;
    }
  }

  // Update the backup operation with the current progress and total
  await logOperation(
    database,
    databaseId,
    {
      operationType: "backup",
      collectionId: "",
      data: `Still backing up, ${data.collections.length} collections so far`,
      progress: progress,
      total: total,
      error: "",
      status: "in_progress",
    },
    backupOperation.$id
  );

  // Create the backup with the accumulated data
  const bucket = await initOrGetBackupStorage(storage);
  const inputFile = InputFile.fromPlainText(
    JSON.stringify(data),
    `${new Date().toISOString()}-${databaseId}.json`
  );
  const fileCreated = await storage.createFile(
    bucket.$id,
    ID.unique(),
    inputFile
  );

  // Final update to the backup operation marking it as completed
  await logOperation(
    database,
    databaseId,
    {
      operationType: "backup",
      collectionId: "",
      data: fileCreated.$id,
      progress: 100,
      total: total, // Ensure the total reflects the actual total processed
      error: "",
      status: "completed",
    },
    backupOperation.$id
  );
  console.log("---------------------------------");
  console.log("Database Backup Complete");
  console.log("---------------------------------");
};
380
+
381
+ export const transferStorageLocalToLocal = async (
382
+ storage: Storage,
383
+ fromBucketId: string,
384
+ toBucketId: string
385
+ ) => {
386
+ console.log(`Transferring files from ${fromBucketId} to ${toBucketId}`);
387
+ let lastFileId: string | undefined;
388
+ let fromFiles = await tryAwaitWithRetry(
389
+ async () => await storage.listFiles(fromBucketId, [Query.limit(100)])
390
+ );
391
+ const allFromFiles = fromFiles.files;
392
+ let numberOfFiles = 0;
393
+
394
+ const downloadFileWithRetry = async (bucketId: string, fileId: string) => {
395
+ let attempts = 3;
396
+ while (attempts > 0) {
397
+ try {
398
+ return await storage.getFileDownload(bucketId, fileId);
399
+ } catch (error) {
400
+ console.error(`Error downloading file ${fileId}: ${error}`);
401
+ attempts--;
402
+ if (attempts === 0) throw error;
403
+ }
404
+ }
405
+ };
406
+
407
+ if (fromFiles.files.length < 100) {
408
+ for (const file of allFromFiles) {
409
+ const fileData = await tryAwaitWithRetry(
410
+ async () => await downloadFileWithRetry(file.bucketId, file.$id)
411
+ );
412
+ if (!fileData) {
413
+ console.error(`Error downloading file ${file.$id}`);
414
+ continue;
415
+ }
416
+ const fileToCreate = InputFile.fromBuffer(
417
+ new Uint8Array(fileData),
418
+ file.name
419
+ );
420
+ console.log(`Creating file: ${file.name}`);
421
+ tryAwaitWithRetry(
422
+ async () =>
423
+ await storage.createFile(
424
+ toBucketId,
425
+ file.$id,
426
+ fileToCreate,
427
+ file.$permissions
428
+ )
429
+ );
430
+ numberOfFiles++;
431
+ }
432
+ } else {
433
+ lastFileId = fromFiles.files[fromFiles.files.length - 1].$id;
434
+ while (lastFileId) {
435
+ const files = await tryAwaitWithRetry(
436
+ async () =>
437
+ await storage.listFiles(fromBucketId, [
438
+ Query.limit(100),
439
+ Query.cursorAfter(lastFileId!),
440
+ ])
441
+ );
442
+ allFromFiles.push(...files.files);
443
+ if (files.files.length < 100) {
444
+ lastFileId = undefined;
445
+ } else {
446
+ lastFileId = files.files[files.files.length - 1].$id;
447
+ }
448
+ }
449
+ for (const file of allFromFiles) {
450
+ const fileData = await tryAwaitWithRetry(
451
+ async () => await downloadFileWithRetry(file.bucketId, file.$id)
452
+ );
453
+ if (!fileData) {
454
+ console.error(`Error downloading file ${file.$id}`);
455
+ continue;
456
+ }
457
+ const fileToCreate = InputFile.fromBuffer(
458
+ new Uint8Array(fileData),
459
+ file.name
460
+ );
461
+ await tryAwaitWithRetry(
462
+ async () =>
463
+ await storage.createFile(
464
+ toBucketId,
465
+ file.$id,
466
+ fileToCreate,
467
+ file.$permissions
468
+ )
469
+ );
470
+ numberOfFiles++;
471
+ }
472
+ }
473
+
474
+ console.log(
475
+ `Transferred ${numberOfFiles} files from ${fromBucketId} to ${toBucketId}`
476
+ );
477
+ };
478
+
479
+ export const transferStorageLocalToRemote = async (
480
+ localStorage: Storage,
481
+ endpoint: string,
482
+ projectId: string,
483
+ apiKey: string,
484
+ fromBucketId: string,
485
+ toBucketId: string
486
+ ) => {
487
+ console.log(
488
+ `Transferring files from current storage ${fromBucketId} to ${endpoint} bucket ${toBucketId}`
489
+ );
490
+ const client = getAppwriteClient(endpoint, apiKey, projectId);
491
+ const remoteStorage = new Storage(client);
492
+ let numberOfFiles = 0;
493
+ let lastFileId: string | undefined;
494
+ let fromFiles = await tryAwaitWithRetry(
495
+ async () => await localStorage.listFiles(fromBucketId, [Query.limit(100)])
496
+ );
497
+ const allFromFiles = fromFiles.files;
498
+ if (fromFiles.files.length === 100) {
499
+ lastFileId = fromFiles.files[fromFiles.files.length - 1].$id;
500
+ while (lastFileId) {
501
+ const files = await tryAwaitWithRetry(
502
+ async () =>
503
+ await localStorage.listFiles(fromBucketId, [
504
+ Query.limit(100),
505
+ Query.cursorAfter(lastFileId!),
506
+ ])
507
+ );
508
+ allFromFiles.push(...files.files);
509
+ if (files.files.length < 100) {
510
+ break;
511
+ }
512
+ lastFileId = files.files[files.files.length - 1].$id;
513
+ }
514
+ }
515
+
516
+ for (const file of allFromFiles) {
517
+ const fileData = await tryAwaitWithRetry(
518
+ async () => await localStorage.getFileDownload(file.bucketId, file.$id)
519
+ );
520
+ const fileToCreate = InputFile.fromBuffer(
521
+ new Uint8Array(fileData),
522
+ file.name
523
+ );
524
+ await tryAwaitWithRetry(
525
+ async () =>
526
+ await remoteStorage.createFile(
527
+ toBucketId,
528
+ file.$id,
529
+ fileToCreate,
530
+ file.$permissions
531
+ )
532
+ );
533
+ numberOfFiles++;
534
+ }
535
+ console.log(
536
+ `Transferred ${numberOfFiles} files from ${fromBucketId} to ${toBucketId}`
537
+ );
538
+ };