appwrite-utils-cli 0.9.2 → 0.9.4

This diff shows the publicly released contents of the two package versions as they appear in their public registries, and is provided for informational purposes only.
@@ -1,371 +1,371 @@
-import {
-  Compression,
-  Databases,
-  Permission,
-  Query,
-  Role,
-  Storage,
-  type Models,
-} from "node-appwrite";
-import { tryAwaitWithRetry, type AppwriteConfig } from "appwrite-utils";
-import { getClientFromConfig } from "../utils/getClientFromConfig.js";
-import { ulid } from "ulidx";
-import type { BackupCreate } from "./schemas.js";
-import { logOperation } from "../migrations/helper.js";
-import { splitIntoBatches } from "../migrations/migrationHelper.js";
-import { retryFailedPromises } from "../utils/retryFailedPromises.js";
-import { InputFile } from "node-appwrite/file";
-
-export const getStorage = (config: AppwriteConfig) => {
-  const client = getClientFromConfig(config);
-  return new Storage(client!);
-};
-
-export const listBuckets = async (
-  storage: Storage,
-  queries?: string[],
-  search?: string
-) => {
-  return await storage.listBuckets(queries, search);
-};
-
-export const getBucket = async (storage: Storage, bucketId: string) => {
-  return await storage.getBucket(bucketId);
-};
-
-export const createBucket = async (
-  storage: Storage,
-  bucket: Omit<Models.Bucket, "$id" | "$createdAt" | "$updatedAt">,
-  bucketId?: string
-) => {
-  return await storage.createBucket(
-    bucketId ?? ulid(),
-    bucket.name,
-    bucket.$permissions,
-    bucket.fileSecurity,
-    bucket.enabled,
-    bucket.maximumFileSize,
-    bucket.allowedFileExtensions,
-    bucket.compression as Compression,
-    bucket.encryption,
-    bucket.antivirus
-  );
-};
-
-export const updateBucket = async (
-  storage: Storage,
-  bucket: Models.Bucket,
-  bucketId: string
-) => {
-  return await storage.updateBucket(
-    bucketId,
-    bucket.name,
-    bucket.$permissions,
-    bucket.fileSecurity,
-    bucket.enabled,
-    bucket.maximumFileSize,
-    bucket.allowedFileExtensions,
-    bucket.compression as Compression,
-    bucket.encryption,
-    bucket.antivirus
-  );
-};
-
-export const deleteBucket = async (storage: Storage, bucketId: string) => {
-  return await storage.deleteBucket(bucketId);
-};
-
-export const getFile = async (
-  storage: Storage,
-  bucketId: string,
-  fileId: string
-) => {
-  return await storage.getFile(bucketId, fileId);
-};
-
-export const listFiles = async (
-  storage: Storage,
-  bucketId: string,
-  queries?: string[],
-  search?: string
-) => {
-  return await storage.listFiles(bucketId, queries, search);
-};
-
-export const deleteFile = async (
-  storage: Storage,
-  bucketId: string,
-  fileId: string
-) => {
-  return await storage.deleteFile(bucketId, fileId);
-};
-
-export const wipeDocumentStorage = async (
-  storage: Storage,
-  bucketId: string
-): Promise<void> => {
-  console.log(`Wiping storage for bucket ID: ${bucketId}`);
-  let moreFiles = true;
-  let lastFileId: string | undefined;
-  const allFiles: string[] = [];
-  while (moreFiles) {
-    const queries = [Query.limit(100)]; // Adjust the limit as needed
-    if (lastFileId) {
-      queries.push(Query.cursorAfter(lastFileId));
-    }
-    const filesPulled = await tryAwaitWithRetry(
-      async () => await storage.listFiles(bucketId, queries)
-    );
-    if (filesPulled.files.length === 0) {
-      console.log("No files found, done!");
-      moreFiles = false;
-      break;
-    } else if (filesPulled.files.length > 0) {
-      const fileIds = filesPulled.files.map((file) => file.$id);
-      allFiles.push(...fileIds);
-    }
-    moreFiles = filesPulled.files.length === 100; // Adjust based on the limit
-    if (moreFiles) {
-      lastFileId = filesPulled.files[filesPulled.files.length - 1].$id;
-    }
-  }
-
-  for (const fileId of allFiles) {
-    console.log(`Deleting file: ${fileId}`);
-    await tryAwaitWithRetry(
-      async () => await storage.deleteFile(bucketId, fileId)
-    );
-  }
-  console.log(`All files in bucket ${bucketId} have been deleted.`);
-};
-
-export const initOrGetDocumentStorage = async (
-  storage: Storage,
-  config: AppwriteConfig,
-  dbId: string,
-  bucketName?: string
-) => {
-  const bucketId =
-    bucketName ??
-    `${config.documentBucketId}_${dbId.toLowerCase().replace(" ", "")}`;
-  try {
-    return await tryAwaitWithRetry(
-      async () => await storage.getBucket(bucketId)
-    );
-  } catch (e) {
-    return await tryAwaitWithRetry(
-      async () =>
-        await storage.createBucket(bucketId, `${dbId} Storage`, [
-          Permission.read(Role.any()),
-          Permission.create(Role.users()),
-          Permission.update(Role.users()),
-          Permission.delete(Role.users()),
-        ])
-    );
-  }
-};
-
-export const initOrGetBackupStorage = async (
-  config: AppwriteConfig,
-  storage: Storage
-) => {
-  try {
-    return await tryAwaitWithRetry(
-      async () => await storage.getBucket("backup")
-    );
-  } catch (e) {
-    return await initOrGetDocumentStorage(
-      storage,
-      config,
-      "backups",
-      "Database Backups"
-    );
-  }
-};
-
-export const backupDatabase = async (
-  config: AppwriteConfig,
-  database: Databases,
-  databaseId: string,
-  storage: Storage
-): Promise<void> => {
-  console.log("---------------------------------");
-  console.log("Starting Database Backup of " + databaseId);
-  console.log("---------------------------------");
-
-  let data: BackupCreate = {
-    database: "",
-    collections: [],
-    documents: [],
-  };
-
-  const backupOperation = await logOperation(database, databaseId, {
-    operationType: "backup",
-    collectionId: "",
-    data: "Starting backup...",
-    progress: 0,
-    total: 100,
-    error: "",
-    status: "in_progress",
-  });
-
-  try {
-    const db = await tryAwaitWithRetry(
-      async () => await database.get(databaseId)
-    );
-    data.database = JSON.stringify(db);
-
-    let lastCollectionId = "";
-    let moreCollections = true;
-    let progress = 0;
-    let total = 0;
-
-    while (moreCollections) {
-      const collectionResponse = await tryAwaitWithRetry(
-        async () =>
-          await database.listCollections(databaseId, [
-            Query.limit(500),
-            ...(lastCollectionId ? [Query.cursorAfter(lastCollectionId)] : []),
-          ])
-      );
-
-      total += collectionResponse.collections.length;
-
-      for (const {
-        $id: collectionId,
-        name: collectionName,
-      } of collectionResponse.collections) {
-        try {
-          const collection = await tryAwaitWithRetry(
-            async () => await database.getCollection(databaseId, collectionId)
-          );
-          progress++;
-          data.collections.push(JSON.stringify(collection));
-
-          let lastDocumentId = "";
-          let moreDocuments = true;
-          let collectionDocumentCount = 0;
-
-          while (moreDocuments) {
-            const documentResponse = await tryAwaitWithRetry(
-              async () =>
-                await database.listDocuments(databaseId, collectionId, [
-                  Query.limit(500),
-                  ...(lastDocumentId
-                    ? [Query.cursorAfter(lastDocumentId)]
-                    : []),
-                ])
-            );
-
-            total += documentResponse.documents.length;
-            collectionDocumentCount += documentResponse.documents.length;
-
-            const documentPromises = documentResponse.documents.map(
-              ({ $id: documentId }) =>
-                database.getDocument(databaseId, collectionId, documentId)
-            );
-
-            const promiseBatches = splitIntoBatches(documentPromises);
-            const documentsPulled = [];
-            for (const batch of promiseBatches) {
-              const successfulDocuments = await retryFailedPromises(batch);
-              documentsPulled.push(...successfulDocuments);
-            }
-
-            data.documents.push({
-              collectionId: collectionId,
-              data: JSON.stringify(documentsPulled),
-            });
-            progress += documentsPulled.length;
-
-            await logOperation(
-              database,
-              databaseId,
-              {
-                operationType: "backup",
-                collectionId: collectionId,
-                data: `Backing up, ${data.collections.length} collections so far`,
-                progress: progress,
-                total: total,
-                error: "",
-                status: "in_progress",
-              },
-              backupOperation.$id
-            );
-
-            moreDocuments = documentResponse.documents.length === 500;
-            if (moreDocuments) {
-              lastDocumentId =
-                documentResponse.documents[
-                  documentResponse.documents.length - 1
-                ].$id;
-            }
-          }
-
-          console.log(
-            `Collection ${collectionName} backed up with ${collectionDocumentCount} documents.`
-          );
-        } catch (error) {
-          console.log(
-            `Collection ${collectionName} must not exist, continuing...`
-          );
-          continue;
-        }
-      }
-
-      moreCollections = collectionResponse.collections.length === 500;
-      if (moreCollections) {
-        lastCollectionId =
-          collectionResponse.collections[
-            collectionResponse.collections.length - 1
-          ].$id;
-      }
-    }
-
-    const bucket = await initOrGetDocumentStorage(storage, config, databaseId);
-    const inputFile = InputFile.fromPlainText(
-      JSON.stringify(data),
-      `${new Date().toISOString()}-${databaseId}.json`
-    );
-    const fileCreated = await storage.createFile(
-      bucket!.$id,
-      ulid(),
-      inputFile
-    );
-
-    await logOperation(
-      database,
-      databaseId,
-      {
-        operationType: "backup",
-        collectionId: "",
-        data: fileCreated.$id,
-        progress: 100,
-        total: total,
-        error: "",
-        status: "completed",
-      },
-      backupOperation.$id
-    );
-
-    console.log("---------------------------------");
-    console.log("Database Backup Complete");
-    console.log("---------------------------------");
-  } catch (error) {
-    console.error("Error during backup:", error);
-    await logOperation(
-      database,
-      databaseId,
-      {
-        operationType: "backup",
-        collectionId: "",
-        data: "Backup failed",
-        progress: 0,
-        total: 100,
-        error: String(error),
-        status: "error",
-      },
-      backupOperation.$id
-    );
-  }
-};
+import {
+  Compression,
+  Databases,
+  Permission,
+  Query,
+  Role,
+  Storage,
+  type Models,
+} from "node-appwrite";
+import { tryAwaitWithRetry, type AppwriteConfig } from "appwrite-utils";
+import { getClientFromConfig } from "../utils/getClientFromConfig.js";
+import { ulid } from "ulidx";
+import type { BackupCreate } from "./schemas.js";
+import { logOperation } from "../migrations/helper.js";
+import { splitIntoBatches } from "../migrations/migrationHelper.js";
+import { retryFailedPromises } from "../utils/retryFailedPromises.js";
+import { InputFile } from "node-appwrite/file";
+
+export const getStorage = (config: AppwriteConfig) => {
+  const client = getClientFromConfig(config);
+  return new Storage(client!);
+};
+
+export const listBuckets = async (
+  storage: Storage,
+  queries?: string[],
+  search?: string
+) => {
+  return await storage.listBuckets(queries, search);
+};
+
+export const getBucket = async (storage: Storage, bucketId: string) => {
+  return await storage.getBucket(bucketId);
+};
+
+export const createBucket = async (
+  storage: Storage,
+  bucket: Omit<Models.Bucket, "$id" | "$createdAt" | "$updatedAt">,
+  bucketId?: string
+) => {
+  return await storage.createBucket(
+    bucketId ?? ulid(),
+    bucket.name,
+    bucket.$permissions,
+    bucket.fileSecurity,
+    bucket.enabled,
+    bucket.maximumFileSize,
+    bucket.allowedFileExtensions,
+    bucket.compression as Compression,
+    bucket.encryption,
+    bucket.antivirus
+  );
+};
+
+export const updateBucket = async (
+  storage: Storage,
+  bucket: Models.Bucket,
+  bucketId: string
+) => {
+  return await storage.updateBucket(
+    bucketId,
+    bucket.name,
+    bucket.$permissions,
+    bucket.fileSecurity,
+    bucket.enabled,
+    bucket.maximumFileSize,
+    bucket.allowedFileExtensions,
+    bucket.compression as Compression,
+    bucket.encryption,
+    bucket.antivirus
+  );
+};
+
+export const deleteBucket = async (storage: Storage, bucketId: string) => {
+  return await storage.deleteBucket(bucketId);
+};
+
+export const getFile = async (
+  storage: Storage,
+  bucketId: string,
+  fileId: string
+) => {
+  return await storage.getFile(bucketId, fileId);
+};
+
+export const listFiles = async (
+  storage: Storage,
+  bucketId: string,
+  queries?: string[],
+  search?: string
+) => {
+  return await storage.listFiles(bucketId, queries, search);
+};
+
+export const deleteFile = async (
+  storage: Storage,
+  bucketId: string,
+  fileId: string
+) => {
+  return await storage.deleteFile(bucketId, fileId);
+};
+
+export const wipeDocumentStorage = async (
+  storage: Storage,
+  bucketId: string
+): Promise<void> => {
+  console.log(`Wiping storage for bucket ID: ${bucketId}`);
+  let moreFiles = true;
+  let lastFileId: string | undefined;
+  const allFiles: string[] = [];
+  while (moreFiles) {
+    const queries = [Query.limit(100)]; // Adjust the limit as needed
+    if (lastFileId) {
+      queries.push(Query.cursorAfter(lastFileId));
+    }
+    const filesPulled = await tryAwaitWithRetry(
+      async () => await storage.listFiles(bucketId, queries)
+    );
+    if (filesPulled.files.length === 0) {
+      console.log("No files found, done!");
+      moreFiles = false;
+      break;
+    } else if (filesPulled.files.length > 0) {
+      const fileIds = filesPulled.files.map((file) => file.$id);
+      allFiles.push(...fileIds);
+    }
+    moreFiles = filesPulled.files.length === 100; // Adjust based on the limit
+    if (moreFiles) {
+      lastFileId = filesPulled.files[filesPulled.files.length - 1].$id;
+    }
+  }
+
+  for (const fileId of allFiles) {
+    console.log(`Deleting file: ${fileId}`);
+    await tryAwaitWithRetry(
+      async () => await storage.deleteFile(bucketId, fileId)
+    );
+  }
+  console.log(`All files in bucket ${bucketId} have been deleted.`);
+};
+
+export const initOrGetDocumentStorage = async (
+  storage: Storage,
+  config: AppwriteConfig,
+  dbId: string,
+  bucketName?: string
+) => {
+  const bucketId =
+    bucketName ??
+    `${config.documentBucketId}_${dbId.toLowerCase().replace(" ", "")}`;
+  try {
+    return await tryAwaitWithRetry(
+      async () => await storage.getBucket(bucketId)
+    );
+  } catch (e) {
+    return await tryAwaitWithRetry(
+      async () =>
+        await storage.createBucket(bucketId, `${dbId} Storage`, [
+          Permission.read(Role.any()),
+          Permission.create(Role.users()),
+          Permission.update(Role.users()),
+          Permission.delete(Role.users()),
+        ])
+    );
+  }
+};
+
+export const initOrGetBackupStorage = async (
+  config: AppwriteConfig,
+  storage: Storage
+) => {
+  try {
+    return await tryAwaitWithRetry(
+      async () => await storage.getBucket("backup")
+    );
+  } catch (e) {
+    return await initOrGetDocumentStorage(
+      storage,
+      config,
+      "backups",
+      "Database Backups"
+    );
+  }
+};
+
+export const backupDatabase = async (
+  config: AppwriteConfig,
+  database: Databases,
+  databaseId: string,
+  storage: Storage
+): Promise<void> => {
+  console.log("---------------------------------");
+  console.log("Starting Database Backup of " + databaseId);
+  console.log("---------------------------------");
+
+  let data: BackupCreate = {
+    database: "",
+    collections: [],
+    documents: [],
+  };
+
+  const backupOperation = await logOperation(database, databaseId, {
+    operationType: "backup",
+    collectionId: "",
+    data: "Starting backup...",
+    progress: 0,
+    total: 100,
+    error: "",
+    status: "in_progress",
+  });
+
+  try {
+    const db = await tryAwaitWithRetry(
+      async () => await database.get(databaseId)
+    );
+    data.database = JSON.stringify(db);
+
+    let lastCollectionId = "";
+    let moreCollections = true;
+    let progress = 0;
+    let total = 0;
+
+    while (moreCollections) {
+      const collectionResponse = await tryAwaitWithRetry(
+        async () =>
+          await database.listCollections(databaseId, [
+            Query.limit(500),
+            ...(lastCollectionId ? [Query.cursorAfter(lastCollectionId)] : []),
+          ])
+      );
+
+      total += collectionResponse.collections.length;
+
+      for (const {
+        $id: collectionId,
+        name: collectionName,
+      } of collectionResponse.collections) {
+        try {
+          const collection = await tryAwaitWithRetry(
+            async () => await database.getCollection(databaseId, collectionId)
+          );
+          progress++;
+          data.collections.push(JSON.stringify(collection));
+
+          let lastDocumentId = "";
+          let moreDocuments = true;
+          let collectionDocumentCount = 0;
+
+          while (moreDocuments) {
+            const documentResponse = await tryAwaitWithRetry(
+              async () =>
+                await database.listDocuments(databaseId, collectionId, [
+                  Query.limit(500),
+                  ...(lastDocumentId
+                    ? [Query.cursorAfter(lastDocumentId)]
+                    : []),
+                ])
+            );
+
+            total += documentResponse.documents.length;
+            collectionDocumentCount += documentResponse.documents.length;
+
+            const documentPromises = documentResponse.documents.map(
+              ({ $id: documentId }) =>
+                database.getDocument(databaseId, collectionId, documentId)
+            );
+
+            const promiseBatches = splitIntoBatches(documentPromises);
+            const documentsPulled = [];
+            for (const batch of promiseBatches) {
+              const successfulDocuments = await retryFailedPromises(batch);
+              documentsPulled.push(...successfulDocuments);
+            }
+
+            data.documents.push({
+              collectionId: collectionId,
+              data: JSON.stringify(documentsPulled),
+            });
+            progress += documentsPulled.length;
+
+            await logOperation(
+              database,
+              databaseId,
+              {
+                operationType: "backup",
+                collectionId: collectionId,
+                data: `Backing up, ${data.collections.length} collections so far`,
+                progress: progress,
+                total: total,
+                error: "",
+                status: "in_progress",
+              },
+              backupOperation.$id
+            );
+
+            moreDocuments = documentResponse.documents.length === 500;
+            if (moreDocuments) {
+              lastDocumentId =
+                documentResponse.documents[
+                  documentResponse.documents.length - 1
+                ].$id;
+            }
+          }
+
+          console.log(
+            `Collection ${collectionName} backed up with ${collectionDocumentCount} documents.`
+          );
+        } catch (error) {
+          console.log(
+            `Collection ${collectionName} must not exist, continuing...`
+          );
+          continue;
+        }
+      }
+
+      moreCollections = collectionResponse.collections.length === 500;
+      if (moreCollections) {
+        lastCollectionId =
+          collectionResponse.collections[
+            collectionResponse.collections.length - 1
+          ].$id;
+      }
+    }
+
+    const bucket = await initOrGetDocumentStorage(storage, config, databaseId);
+    const inputFile = InputFile.fromPlainText(
+      JSON.stringify(data),
+      `${new Date().toISOString()}-${databaseId}.json`
+    );
+    const fileCreated = await storage.createFile(
+      bucket!.$id,
+      ulid(),
+      inputFile
+    );
+
+    await logOperation(
+      database,
+      databaseId,
+      {
+        operationType: "backup",
+        collectionId: "",
+        data: fileCreated.$id,
+        progress: 100,
+        total: total,
+        error: "",
+        status: "completed",
+      },
+      backupOperation.$id
+    );
+
+    console.log("---------------------------------");
+    console.log("Database Backup Complete");
+    console.log("---------------------------------");
+  } catch (error) {
+    console.error("Error during backup:", error);
+    await logOperation(
+      database,
+      databaseId,
+      {
+        operationType: "backup",
+        collectionId: "",
+        data: "Backup failed",
+        progress: 0,
+        total: 100,
+        error: String(error),
+        status: "error",
+      },
+      backupOperation.$id
+    );
+  }
+};
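
For context, the exported helpers in the diffed module can be wired together roughly as follows. This is a usage sketch, not part of the package diff: the "./storage.js" import path, the runBackup wrapper name, and the way the AppwriteConfig is obtained are assumptions for illustration; only the function signatures shown above are taken from the source.

// Hypothetical usage sketch: back up one database using the helpers above.
import { Databases } from "node-appwrite";
import type { AppwriteConfig } from "appwrite-utils";
import { getClientFromConfig } from "../utils/getClientFromConfig.js";
import { getStorage, backupDatabase } from "./storage.js"; // module path assumed

export const runBackup = async (config: AppwriteConfig, databaseId: string) => {
  const client = getClientFromConfig(config); // node-appwrite Client built from the CLI config
  const databases = new Databases(client!);   // Databases service for the same project
  const storage = getStorage(config);         // Storage service, as exported above
  // Snapshots the database to JSON, stores it in the document bucket, and logs progress.
  await backupDatabase(config, databases, databaseId, storage);
};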