appwrite-utils-cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/README.md +80 -0
  2. package/dist/main.d.ts +2 -0
  3. package/dist/main.js +74 -0
  4. package/dist/migrations/afterImportActions.d.ts +12 -0
  5. package/dist/migrations/afterImportActions.js +196 -0
  6. package/dist/migrations/attributes.d.ts +4 -0
  7. package/dist/migrations/attributes.js +158 -0
  8. package/dist/migrations/backup.d.ts +621 -0
  9. package/dist/migrations/backup.js +159 -0
  10. package/dist/migrations/collections.d.ts +16 -0
  11. package/dist/migrations/collections.js +207 -0
  12. package/dist/migrations/converters.d.ts +179 -0
  13. package/dist/migrations/converters.js +575 -0
  14. package/dist/migrations/dbHelpers.d.ts +5 -0
  15. package/dist/migrations/dbHelpers.js +54 -0
  16. package/dist/migrations/importController.d.ts +44 -0
  17. package/dist/migrations/importController.js +312 -0
  18. package/dist/migrations/importDataActions.d.ts +44 -0
  19. package/dist/migrations/importDataActions.js +219 -0
  20. package/dist/migrations/indexes.d.ts +4 -0
  21. package/dist/migrations/indexes.js +18 -0
  22. package/dist/migrations/logging.d.ts +2 -0
  23. package/dist/migrations/logging.js +14 -0
  24. package/dist/migrations/migrationHelper.d.ts +18 -0
  25. package/dist/migrations/migrationHelper.js +66 -0
  26. package/dist/migrations/queue.d.ts +13 -0
  27. package/dist/migrations/queue.js +79 -0
  28. package/dist/migrations/relationships.d.ts +90 -0
  29. package/dist/migrations/relationships.js +209 -0
  30. package/dist/migrations/schema.d.ts +3142 -0
  31. package/dist/migrations/schema.js +485 -0
  32. package/dist/migrations/schemaStrings.d.ts +12 -0
  33. package/dist/migrations/schemaStrings.js +261 -0
  34. package/dist/migrations/setupDatabase.d.ts +7 -0
  35. package/dist/migrations/setupDatabase.js +151 -0
  36. package/dist/migrations/storage.d.ts +8 -0
  37. package/dist/migrations/storage.js +241 -0
  38. package/dist/migrations/users.d.ts +11 -0
  39. package/dist/migrations/users.js +114 -0
  40. package/dist/migrations/validationRules.d.ts +43 -0
  41. package/dist/migrations/validationRules.js +42 -0
  42. package/dist/schemas/authUser.d.ts +62 -0
  43. package/dist/schemas/authUser.js +17 -0
  44. package/dist/setup.d.ts +2 -0
  45. package/dist/setup.js +5 -0
  46. package/dist/types.d.ts +9 -0
  47. package/dist/types.js +5 -0
  48. package/dist/utils/configSchema.json +742 -0
  49. package/dist/utils/helperFunctions.d.ts +34 -0
  50. package/dist/utils/helperFunctions.js +72 -0
  51. package/dist/utils/index.d.ts +2 -0
  52. package/dist/utils/index.js +2 -0
  53. package/dist/utils/setupFiles.d.ts +2 -0
  54. package/dist/utils/setupFiles.js +276 -0
  55. package/dist/utilsController.d.ts +30 -0
  56. package/dist/utilsController.js +106 -0
  57. package/package.json +34 -0
  58. package/src/main.ts +77 -0
  59. package/src/migrations/afterImportActions.ts +300 -0
  60. package/src/migrations/attributes.ts +315 -0
  61. package/src/migrations/backup.ts +189 -0
  62. package/src/migrations/collections.ts +303 -0
  63. package/src/migrations/converters.ts +628 -0
  64. package/src/migrations/dbHelpers.ts +89 -0
  65. package/src/migrations/importController.ts +509 -0
  66. package/src/migrations/importDataActions.ts +313 -0
  67. package/src/migrations/indexes.ts +37 -0
  68. package/src/migrations/logging.ts +15 -0
  69. package/src/migrations/migrationHelper.ts +100 -0
  70. package/src/migrations/queue.ts +119 -0
  71. package/src/migrations/relationships.ts +336 -0
  72. package/src/migrations/schema.ts +590 -0
  73. package/src/migrations/schemaStrings.ts +310 -0
  74. package/src/migrations/setupDatabase.ts +219 -0
  75. package/src/migrations/storage.ts +351 -0
  76. package/src/migrations/users.ts +148 -0
  77. package/src/migrations/validationRules.ts +63 -0
  78. package/src/schemas/authUser.ts +23 -0
  79. package/src/setup.ts +8 -0
  80. package/src/types.ts +14 -0
  81. package/src/utils/configSchema.json +742 -0
  82. package/src/utils/helperFunctions.ts +111 -0
  83. package/src/utils/index.ts +2 -0
  84. package/src/utils/setupFiles.ts +295 -0
  85. package/src/utilsController.ts +173 -0
  86. package/tsconfig.json +37 -0
@@ -0,0 +1,261 @@
+ import { toCamelCase, toPascalCase } from "../utils/index.js";
+ import { z } from "zod";
+ import fs from "fs";
+ import path from "path";
+ export class SchemaGenerator {
+     relationshipMap = new Map();
+     config;
+     appwriteFolderPath;
+     constructor(config, appwriteFolderPath) {
+         this.config = config;
+         this.appwriteFolderPath = appwriteFolderPath;
+         this.extractRelationships();
+     }
+     extractRelationships() {
+         this.config.collections.forEach((collection) => {
+             collection.attributes.forEach((attr) => {
+                 if (attr.type === "relationship" && attr.twoWay && attr.twoWayKey) {
+                     const relationshipAttr = attr;
+                     console.log(`Extracting relationship: ${attr.key}`);
+                     let isArrayParent = false;
+                     let isArrayChild = false;
+                     switch (relationshipAttr.relationType) {
+                         case "oneToMany":
+                             isArrayParent = true;
+                             isArrayChild = false;
+                             break;
+                         case "manyToMany":
+                             isArrayParent = true;
+                             isArrayChild = true;
+                             break;
+                         case "oneToOne":
+                             isArrayParent = false;
+                             isArrayChild = false;
+                             break;
+                         case "manyToOne":
+                             isArrayParent = false;
+                             isArrayChild = true;
+                             break;
+                         default:
+                             break;
+                     }
+                     this.addRelationship(collection.name, relationshipAttr.relatedCollection, attr.key, relationshipAttr.twoWayKey, isArrayParent, isArrayChild);
+                 }
+             });
+         });
+     }
+     addRelationship(parentCollection, childCollection, parentKey, childKey, isArrayParent, isArrayChild) {
+         const relationshipsChild = this.relationshipMap.get(childCollection) || [];
+         const relationshipsParent = this.relationshipMap.get(parentCollection) || [];
+         relationshipsParent.push({
+             parentCollection,
+             childCollection,
+             parentKey,
+             childKey,
+             isArray: isArrayParent,
+             isChild: false,
+         });
+         relationshipsChild.push({
+             parentCollection,
+             childCollection,
+             parentKey,
+             childKey,
+             isArray: isArrayChild,
+             isChild: true,
+         });
+         this.relationshipMap.set(childCollection, relationshipsChild);
+         this.relationshipMap.set(parentCollection, relationshipsParent);
+     }
+     generateSchemas() {
+         this.config.collections.forEach((collection) => {
+             const schemaString = this.createSchemaString(collection.name, collection.attributes);
+             const camelCaseName = toCamelCase(collection.name);
+             const schemaPath = path.join(this.appwriteFolderPath, "schemas", `${camelCaseName}.ts`);
+             fs.writeFileSync(schemaPath, schemaString, { encoding: "utf-8" });
+             console.log(`Schema written to ${schemaPath}`);
+         });
+     }
+     createSchemaString = (name, attributes) => {
+         const pascalName = toPascalCase(name);
+         let imports = `import { z } from "zod";\n`;
+         // Use the relationshipMap to find related collections
+         const relationshipDetails = this.relationshipMap.get(name) || [];
+         const relatedCollections = relationshipDetails.map((detail) => {
+             const relatedCollectionName = detail.isChild
+                 ? detail.parentCollection
+                 : detail.childCollection;
+             const key = detail.isChild ? detail.childKey : detail.parentKey;
+             const isArray = detail.isArray ? "array" : "";
+             return [relatedCollectionName, key, isArray];
+         });
+         console.log(relatedCollections);
+         let relatedTypes = "";
+         let relatedTypesLazy = "";
+         let curNum = 0;
+         let maxNum = relatedCollections.length;
+         relatedCollections.forEach((relatedCollection) => {
+             const relatedPascalName = toPascalCase(relatedCollection[0]);
+             const relatedCamelName = toCamelCase(relatedCollection[0]);
+             curNum++;
+             let endNameTypes = relatedPascalName;
+             let endNameLazy = `${relatedPascalName}Schema`;
+             if (relatedCollection[2] === "array") {
+                 endNameTypes += "[]";
+                 endNameLazy += ".array()";
+             }
+             endNameLazy += ".nullish()";
+             imports += `import { ${relatedPascalName}Schema, type ${relatedPascalName} } from "./${relatedCamelName}";\n`;
+             relatedTypes += `${relatedCollection[1]}?: ${endNameTypes};\n`;
+             if (relatedTypes.length > 0 && curNum !== maxNum) {
+                 relatedTypes += " ";
+             }
+             relatedTypesLazy += `${relatedCollection[1]}: z.lazy(() => ${endNameLazy}),\n`;
+             if (relatedTypesLazy.length > 0 && curNum !== maxNum) {
+                 relatedTypesLazy += " ";
+             }
+         });
+         let schemaString = `${imports}\n\n`;
+         schemaString += `export const ${pascalName}SchemaBase = z.object({\n`;
+         schemaString += ` $id: z.string().optional(),\n`;
+         schemaString += ` $createdAt: z.date().or(z.string()).optional(),\n`;
+         schemaString += ` $updatedAt: z.date().or(z.string()).optional(),\n`;
+         for (const attribute of attributes) {
+             if (attribute.type === "relationship") {
+                 continue;
+             }
+             schemaString += ` ${attribute.key}: ${this.typeToZod(attribute)},\n`;
+         }
+         schemaString += `});\n\n`;
+         schemaString += `export type ${pascalName}Base = z.infer<typeof ${pascalName}SchemaBase>`;
+         if (relatedTypes.length > 0) {
+             schemaString += ` & {\n ${relatedTypes}};\n\n`;
+         }
+         else {
+             schemaString += `;\n\n`;
+         }
+         schemaString += `export const ${pascalName}Schema: z.ZodType<${pascalName}Base> = ${pascalName}SchemaBase`;
+         if (relatedTypes.length > 0) {
+             schemaString += `.extend({\n ${relatedTypesLazy}});\n\n`;
+         }
+         else {
+             schemaString += `;\n`;
+         }
+         schemaString += `export type ${pascalName} = z.infer<typeof ${pascalName}Schema>;\n\n`;
+         return schemaString;
+     };
+     typeToZod = (attribute) => {
+         let baseSchemaCode = "";
+         switch (attribute.type) {
+             case "string":
+                 baseSchemaCode = "z.string()";
+                 if (attribute.size) {
+                     baseSchemaCode += `.max(${attribute.size}, "Maximum length of ${attribute.size} characters exceeded")`;
+                 }
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default("${attribute.xdefault}")`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "integer":
+                 baseSchemaCode = "z.number().int()";
+                 if (attribute.min !== undefined) {
+                     baseSchemaCode += `.min(${attribute.min}, "Minimum value of ${attribute.min} not met")`;
+                 }
+                 if (attribute.max !== undefined) {
+                     baseSchemaCode += `.max(${attribute.max}, "Maximum value of ${attribute.max} exceeded")`;
+                 }
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default(${attribute.xdefault})`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "float":
+                 baseSchemaCode = "z.number()";
+                 if (attribute.min !== undefined) {
+                     baseSchemaCode += `.min(${attribute.min}, "Minimum value of ${attribute.min} not met")`;
+                 }
+                 if (attribute.max !== undefined) {
+                     baseSchemaCode += `.max(${attribute.max}, "Maximum value of ${attribute.max} exceeded")`;
+                 }
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default(${attribute.xdefault})`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "boolean":
+                 baseSchemaCode = "z.boolean()";
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default(${attribute.xdefault})`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "datetime":
+                 baseSchemaCode = "z.date()";
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default(new Date("${attribute.xdefault}"))`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "email":
+                 baseSchemaCode = "z.string().email()";
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default("${attribute.xdefault}")`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "ip":
+                 baseSchemaCode = "z.string()"; // Add custom validation as needed
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default("${attribute.xdefault}")`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "url":
+                 baseSchemaCode = "z.string().url()";
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default("${attribute.xdefault}")`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "enum":
+                 baseSchemaCode = `z.enum([${attribute.elements
+                     .map((element) => `"${element}"`)
+                     .join(", ")}])`;
+                 if (attribute.xdefault !== undefined) {
+                     baseSchemaCode += `.default("${attribute.xdefault}")`;
+                 }
+                 if (!attribute.required && !attribute.array) {
+                     baseSchemaCode += ".nullish()";
+                 }
+                 break;
+             case "relationship":
+                 break;
+             default:
+                 baseSchemaCode = "z.any()";
+         }
+         // Handle arrays
+         if (attribute.array) {
+             baseSchemaCode = `z.array(${baseSchemaCode})`;
+         }
+         if (attribute.array && !attribute.required) {
+             baseSchemaCode += ".nullish()";
+         }
+         return baseSchemaCode;
+     };
+ }
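For orientation, here is a minimal sketch of how this generator might be driven from user code. The config object is a hypothetical, hand-written fragment of the AppwriteConfig shape the constructor expects (real configs come from the package's own config files), the import path assumes the compiled dist layout shown in the file list above, and the schemas/ directory under the target folder is assumed to already exist.

import { SchemaGenerator } from "appwrite-utils-cli/dist/migrations/schemaStrings.js";

// Hypothetical two-collection config with a two-way oneToMany relationship.
const config = {
    collections: [
        {
            name: "Posts",
            attributes: [
                { key: "title", type: "string", size: 255, required: true },
                {
                    key: "comments",
                    type: "relationship",
                    relatedCollection: "Comments",
                    relationType: "oneToMany",
                    twoWay: true,
                    twoWayKey: "post",
                },
            ],
        },
        { name: "Comments", attributes: [{ key: "body", type: "string", required: true }] },
    ],
};

// Writes one Zod schema file per collection (e.g. ./appwrite/schemas/posts.ts),
// with lazy, nullish relationship fields: comments on Posts, post on Comments.
const generator = new SchemaGenerator(config, "./appwrite");
generator.generateSchemas();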
@@ -0,0 +1,7 @@
+ import { Databases, Storage } from "node-appwrite";
+ import { type AppwriteConfig } from "./schema.js";
+ import type { SetupOptions } from "../utilsController.js";
+ export declare const setupMigrationDatabase: (config: AppwriteConfig) => Promise<void>;
+ export declare const ensureDatabasesExist: (config: AppwriteConfig) => Promise<void>;
+ export declare const wipeOtherDatabases: (database: Databases, config: AppwriteConfig) => Promise<void>;
+ export declare const startSetup: (database: Databases, storage: Storage, config: AppwriteConfig, setupOptions: SetupOptions, appwriteFolderPath: string) => Promise<void>;
@@ -0,0 +1,151 @@
+ import { Databases, ID, Query, Storage } from "node-appwrite";
+ import { createOrUpdateAttribute } from "./attributes.js";
+ import { createOrUpdateCollections, generateSchemas, wipeDatabase, } from "./collections.js";
+ import { getMigrationCollectionSchemas } from "./backup.js";
+ import { areCollectionNamesSame, toCamelCase } from "../utils/index.js";
+ import { backupDatabase, initOrGetBackupStorage, initOrGetDocumentStorage, wipeDocumentStorage, } from "./storage.js";
+ import {} from "./schema.js";
+ import { nameToIdMapping } from "./queue.js";
+ import { UsersController } from "./users.js";
+ export const setupMigrationDatabase = async (config) => {
+     // Create the migrations database if needed
+     console.log("---------------------------------");
+     console.log("Starting Migrations Setup");
+     console.log("---------------------------------");
+     const database = new Databases(config.appwriteClient);
+     let db = null;
+     const dbCollections = [];
+     const migrationCollectionsSetup = getMigrationCollectionSchemas();
+     try {
+         db = await database.get("migrations");
+         console.log("Migrations database found");
+     }
+     catch (e) {
+         db = await database.create("migrations", "Migrations", true);
+         console.log("Migrations database created");
+     }
+     if (db) {
+         const collectionsPulled = await database.listCollections(db.$id, [
+             Query.limit(25),
+         ]);
+         dbCollections.push(...collectionsPulled.collections);
+     }
+     console.log(`Collections in migrations database: ${dbCollections.length}`);
+     // Iterate over each key in the migrationCollectionsSetup object
+     for (const [collectionName, { collection, attributes }] of Object.entries(migrationCollectionsSetup)) {
+         const collectionId = toCamelCase(collectionName); // Convert name to toCamelCase for the ID
+         let collectionFound = null;
+         try {
+             collectionFound = await database.getCollection(db.$id, collectionId);
+         }
+         catch (e) {
+             console.log(`Collection not found: ${collectionId}`);
+         }
+         if (!collectionFound) {
+             // Create the collection with the provided configuration
+             collectionFound = await database.createCollection(db.$id, collectionId, collectionName, undefined, collection.documentSecurity, collection.enabled);
+         }
+         for (const attribute of attributes) {
+             await createOrUpdateAttribute(database, db.$id, collectionFound, attribute);
+         }
+     }
+     console.log("---------------------------------");
+     console.log("Migrations Setup Complete");
+     console.log("---------------------------------");
+ };
+ export const ensureDatabasesExist = async (config) => {
+     const database = new Databases(config.appwriteClient);
+     const databasesToEnsure = config.databases;
+     databasesToEnsure.push({
+         $id: "migrations",
+         name: "Migrations",
+     });
+     const dbNames = databasesToEnsure.map((db) => db.name);
+     const existingDatabases = await database.list([Query.equal("name", dbNames)]);
+     for (const db of databasesToEnsure) {
+         if (!existingDatabases.databases.some((d) => d.name === db.name)) {
+             await database.create(db.$id || ID.unique(), db.name, true);
+             console.log(`${db.name} database created`);
+         }
+     }
+ };
+ export const wipeOtherDatabases = async (database, config) => {
+     const databasesToKeep = config.databases.map((db) => db.name.toLowerCase().trim().replace(" ", ""));
+     databasesToKeep.push("migrations");
+     console.log(`Databases to keep: ${databasesToKeep.join(", ")}`);
+     const allDatabases = await database.list([Query.limit(500)]);
+     for (const db of allDatabases.databases) {
+         if (!databasesToKeep.includes(db.name.toLowerCase().trim().replace(" ", ""))) {
+             await database.delete(db.$id);
+             console.log(`Deleted database: ${db.name}`);
+         }
+     }
+ };
+ export const startSetup = async (database, storage, config, setupOptions, appwriteFolderPath) => {
+     await setupMigrationDatabase(config);
+     if (config.enableBackups) {
+         await initOrGetBackupStorage(storage);
+         if (setupOptions.wipeDocumentStorage) {
+             if (setupOptions.runProd) {
+                 await initOrGetDocumentStorage(storage, config, config.databases[0].name);
+                 await wipeDocumentStorage(storage, config, config.databases[0].name);
+             }
+             if (setupOptions.runStaging) {
+                 await initOrGetDocumentStorage(storage, config, config.databases[1].name);
+                 await wipeDocumentStorage(storage, config, config.databases[1].name);
+             }
+             if (setupOptions.runDev) {
+                 await initOrGetDocumentStorage(storage, config, config.databases[2].name);
+                 await wipeDocumentStorage(storage, config, config.databases[2].name);
+             }
+         }
+     }
+     if (config.enableWipeOtherDatabases) {
+         await wipeOtherDatabases(database, config);
+     }
+     if (setupOptions.wipeUsers) {
+         const usersController = new UsersController(config, database);
+         console.log("Wiping users");
+         await usersController.wipeUsers();
+         console.log("Users wiped");
+     }
+     await ensureDatabasesExist(config);
+     const databaseNames = config.databases.map((db) => db.name);
+     // Move to here so it always runs if it's set to true
+     if (setupOptions.generateSchemas) {
+         await generateSchemas(config, appwriteFolderPath);
+     }
+     for (const db of config.databases) {
+         // Determine if the current database should be processed based on the setup options
+         const processDatabase = (setupOptions.runProd &&
+             areCollectionNamesSame(db.name, databaseNames[0])) ||
+             (setupOptions.runStaging &&
+                 areCollectionNamesSame(db.name, databaseNames[1])) ||
+             (setupOptions.runDev &&
+                 areCollectionNamesSame(db.name, databaseNames[2]));
+         if (!processDatabase) {
+             continue;
+         }
+         else {
+             await initOrGetDocumentStorage(storage, config, db.name);
+         }
+         console.log(`---------------------------------`);
+         console.log(`Starting setup for database: ${db.name}`);
+         console.log(`---------------------------------`);
+         let deletedCollections;
+         if (setupOptions.wipeDatabases && processDatabase) {
+             if (config.enableBackups && setupOptions.doBackup) {
+                 await backupDatabase(database, db.$id, storage);
+             }
+             deletedCollections = await wipeDatabase(database, db.$id);
+         }
+         if (processDatabase) {
+             await createOrUpdateCollections(database, db.$id, config, deletedCollections);
+         }
+         deletedCollections = undefined;
+         nameToIdMapping.clear();
+         console.log(`---------------------------------`);
+         console.log(`Finished setup for database: ${db.name}`);
+         console.log(`---------------------------------`);
+     }
+ };
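A hedged sketch of how startSetup might be invoked directly, assuming the compiled dist import path, an already-parsed AppwriteConfig (with appwriteClient set and databases listed in prod, staging, dev order, since the code above indexes 0, 1 and 2), and only the SetupOptions fields visible in this file; the full SetupOptions type is declared in utilsController.d.ts.

import { Client, Databases, Storage } from "node-appwrite";
import type { AppwriteConfig } from "appwrite-utils-cli/dist/migrations/schema.js";
import { startSetup } from "appwrite-utils-cli/dist/migrations/setupDatabase.js";

// Hypothetical wiring; normally main.ts / utilsController.ts builds these for you.
const client = new Client()
    .setEndpoint("https://cloud.appwrite.io/v1") // assumed endpoint
    .setProject("<projectId>")
    .setKey("<apiKey>");

const database = new Databases(client);
const storage = new Storage(client);

// Assumed to come from the tool's own config loader, with config.appwriteClient = client.
declare const config: AppwriteConfig;

const setupOptions = {
    runProd: false,
    runStaging: false,
    runDev: true,
    doBackup: true,
    wipeDatabases: false,
    wipeDocumentStorage: false,
    wipeUsers: false,
    generateSchemas: true,
};

await startSetup(database, storage, config, setupOptions, "./appwrite");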
@@ -0,0 +1,8 @@
+ import { Storage, Databases, type Models } from "node-appwrite";
+ import { type OperationCreate } from "./backup.js";
+ import type { AppwriteConfig } from "./schema.js";
+ export declare const logOperation: (db: Databases, dbId: string, operationDetails: OperationCreate, operationId?: string) => Promise<Models.Document>;
+ export declare const initOrGetBackupStorage: (storage: Storage) => Promise<Models.Bucket>;
+ export declare const initOrGetDocumentStorage: (storage: Storage, config: AppwriteConfig, dbName: string) => Promise<Models.Bucket | undefined>;
+ export declare const wipeDocumentStorage: (storage: Storage, config: AppwriteConfig, dbName: string) => Promise<void>;
+ export declare const backupDatabase: (database: Databases, databaseId: string, storage: Storage) => Promise<void>;
@@ -0,0 +1,241 @@
+ import { Storage, Databases, Query, InputFile, ID, } from "node-appwrite";
+ import {} from "./backup.js";
+ import { splitIntoBatches } from "./migrationHelper.js";
+ export const logOperation = async (db, dbId, operationDetails, operationId) => {
+     try {
+         let operation;
+         if (operationId) {
+             // Update existing operation log
+             operation = await db.updateDocument("migrations", "currentOperations", operationId, operationDetails);
+         }
+         else {
+             // Create new operation log
+             operation = await db.createDocument("migrations", "currentOperations", ID.unique(), operationDetails);
+         }
+         console.log(`Operation logged: ${operation.$id}`);
+         return operation;
+     }
+     catch (error) {
+         console.error(`Error logging operation: ${error}`);
+         throw error;
+     }
+ };
+ export const initOrGetBackupStorage = async (storage) => {
+     try {
+         const backupStorage = await storage.getBucket("backupStorage");
+         return backupStorage;
+     }
+     catch (e) {
+         // ID backupStorage
+         // Name Backups Storage
+         const backupStorage = await storage.createBucket("backupStorage", "Backups Storage");
+         return backupStorage;
+     }
+ };
+ export const initOrGetDocumentStorage = async (storage, config, dbName) => {
+     try {
+         await storage.getBucket(`${config.documentBucketId}_${dbName.toLowerCase().replace(" ", "")}`);
+     }
+     catch (e) {
+         // ID documentStorage
+         // Name Document Storage
+         const documentStorage = await storage.createBucket(`${config.documentBucketId}_${dbName.toLowerCase().replace(" ", "")}`, "Document Storage");
+         return documentStorage;
+     }
+ };
+ export const wipeDocumentStorage = async (storage, config, dbName) => {
+     const bucketId = `${config.documentBucketId
+         .toLowerCase()
+         .replace(" ", "")}_${dbName.toLowerCase().replace(" ", "")}`;
+     console.log(`Wiping storage for bucket ID: ${bucketId}`);
+     let moreFiles = true;
+     let lastFileId;
+     const allFiles = [];
+     while (moreFiles) {
+         const queries = [Query.limit(100)]; // Adjust the limit as needed
+         if (lastFileId) {
+             queries.push(Query.cursorAfter(lastFileId));
+         }
+         const filesPulled = await storage.listFiles(bucketId, queries);
+         if (filesPulled.files.length === 0) {
+             console.log("No files found, done!");
+             moreFiles = false;
+             break;
+         }
+         else if (filesPulled.files.length > 0) {
+             const fileIds = filesPulled.files.map((file) => file.$id);
+             allFiles.push(...fileIds);
+         }
+         moreFiles = filesPulled.files.length > 100; // Adjust based on the limit
+         if (moreFiles) {
+             lastFileId = filesPulled.files[filesPulled.files.length - 1].$id;
+         }
+     }
+     for (const fileId of allFiles) {
+         console.log(`Deleting file: ${fileId}`);
+         await storage.deleteFile(bucketId, fileId);
+     }
+     console.log(`All files in bucket ${bucketId} have been deleted.`);
+ };
+ async function retryFailedPromises(batch, maxRetries = 3) {
+     const results = await Promise.allSettled(batch);
+     const toRetry = [];
+     results.forEach((result, index) => {
+         if (result.status === "rejected") {
+             console.error("Promise rejected with reason:", result.reason);
+             if (maxRetries > 0) {
+                 toRetry.push(batch[index]);
+             }
+         }
+     });
+     if (toRetry.length > 0) {
+         console.log(`Retrying ${toRetry.length} promises`);
+         return retryFailedPromises(toRetry, maxRetries - 1);
+     }
+     else {
+         return results
+             .filter((result) => result.status === "fulfilled")
+             .map((result) => result);
+     }
+ }
+ export const backupDatabase = async (database, databaseId, storage) => {
+     console.log("---------------------------------");
+     console.log("Starting Database Backup of " + databaseId);
+     console.log("---------------------------------");
+     let data = {
+         database: "",
+         collections: [],
+         documents: [],
+     };
+     const backupOperation = await logOperation(database, databaseId, {
+         operationType: "backup",
+         collectionId: "",
+         data: "Starting backup...",
+         progress: 0,
+         total: 100, // This will be dynamically updated later
+         error: "",
+         status: "in_progress",
+     });
+     // Fetch and backup the database details
+     let db;
+     try {
+         db = await database.get(databaseId);
+     }
+     catch (e) {
+         console.error(`Error fetching database: ${e}`);
+         await logOperation(database, databaseId, {
+             operationType: "backup",
+             collectionId: "",
+             data: "Error fetching database, skipping...",
+             progress: 0,
+             total: 100, // This will be dynamically updated later
+             error: `Error fetching database: ${e}`,
+             status: "error",
+         }, backupOperation.$id);
+         return;
+     }
+     data.database = JSON.stringify(db);
+     // Initialize pagination for collections
+     let lastCollectionId = "";
+     let moreCollections = true;
+     let progress = 0;
+     let total = 0; // Initialize total to 0, will be updated dynamically
+     while (moreCollections) {
+         const collectionResponse = await database.listCollections(databaseId, [
+             Query.limit(500), // Adjust the limit as needed
+             ...(lastCollectionId ? [Query.cursorAfter(lastCollectionId)] : []),
+         ]);
+         total += collectionResponse.collections.length; // Update total with number of collections
+         for (const { $id: collectionId, name: collectionName, } of collectionResponse.collections) {
+             let collectionDocumentCount = 0; // Initialize document count for the current collection
+             try {
+                 const collection = await database.getCollection(databaseId, collectionId);
+                 progress++;
+                 data.collections.push(JSON.stringify(collection));
+                 // Initialize pagination for documents within the current collection
+                 let lastDocumentId = "";
+                 let moreDocuments = true;
+                 while (moreDocuments) {
+                     const documentResponse = await database.listDocuments(databaseId, collectionId, [
+                         Query.limit(500), // Adjust the limit as needed
+                         ...(lastDocumentId ? [Query.cursorAfter(lastDocumentId)] : []),
+                     ]);
+                     total += documentResponse.documents.length; // Update total with number of documents
+                     collectionDocumentCount += documentResponse.documents.length; // Update document count for the current collection
+                     let documentPromises = [];
+                     for (const { $id: documentId } of documentResponse.documents) {
+                         documentPromises.push(database.getDocument(databaseId, collectionId, documentId));
+                     }
+                     const promiseBatches = splitIntoBatches(documentPromises);
+                     const documentsPulled = [];
+                     for (const batch of promiseBatches) {
+                         const successfulDocuments = await retryFailedPromises(batch);
+                         documentsPulled.push(...successfulDocuments);
+                     }
+                     const documents = documentsPulled;
+                     data.documents.push({
+                         collectionId: collectionId,
+                         data: JSON.stringify(documents),
+                     });
+                     progress += documents.length;
+                     console.log(`Collection ${collectionName} backed up ${collectionDocumentCount} documents (so far)`);
+                     // Update the operation log with the current progress
+                     await logOperation(database, databaseId, {
+                         operationType: "backup",
+                         collectionId: collectionId,
+                         data: `Still backing up, ${data.collections.length} collections so far`,
+                         progress: progress,
+                         total: total,
+                         error: "",
+                         status: "in_progress",
+                     }, backupOperation.$id);
+                     // Check if there are more documents to fetch
+                     moreDocuments = documentResponse.documents.length === 500;
+                     if (moreDocuments) {
+                         lastDocumentId =
+                             documentResponse.documents[documentResponse.documents.length - 1]
+                                 .$id;
+                     }
+                 }
+                 console.log(`Collection ${collectionName} backed up with ${collectionDocumentCount} documents.`);
+             }
+             catch (error) {
+                 console.log(`Collection ${collectionName} must not exist, continuing...`);
+                 continue;
+             }
+         }
+         // Check if there are more collections to fetch
+         moreCollections = collectionResponse.collections.length === 500;
+         if (moreCollections) {
+             lastCollectionId =
+                 collectionResponse.collections[collectionResponse.collections.length - 1].$id;
+         }
+     }
+     // Update the backup operation with the current progress and total
+     await logOperation(database, databaseId, {
+         operationType: "backup",
+         collectionId: "",
+         data: `Still backing up, ${data.collections.length} collections so far`,
+         progress: progress,
+         total: total,
+         error: "",
+         status: "in_progress",
+     }, backupOperation.$id);
+     // Create the backup with the accumulated data
+     const bucket = await initOrGetBackupStorage(storage);
+     const inputFile = InputFile.fromPlainText(JSON.stringify(data), `${new Date().toISOString()}-${databaseId}.json`);
+     const fileCreated = await storage.createFile(bucket.$id, ID.unique(), inputFile);
+     // Final update to the backup operation marking it as completed
+     await logOperation(database, databaseId, {
+         operationType: "backup",
+         collectionId: "",
+         data: fileCreated.$id,
+         progress: 100,
+         total: total, // Ensure the total reflects the actual total processed
+         error: "",
+         status: "completed",
+     }, backupOperation.$id);
+     console.log("---------------------------------");
+     console.log("Database Backup Complete");
+     console.log("---------------------------------");
+ };
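To make the stored format concrete: backupDatabase serializes the database record, its collections, and their documents into a single JSON file named `<ISO timestamp>-<databaseId>.json` in the backupStorage bucket. A direct call might look like the following sketch; the client wiring is as in the earlier startSetup example, and the "dev" database ID is hypothetical.

import { Client, Databases, Storage } from "node-appwrite";
import { backupDatabase } from "appwrite-utils-cli/dist/migrations/storage.js";

// Assumed to be configured with endpoint, project, and API key as shown earlier.
declare const client: Client;

const database = new Databases(client);
const storage = new Storage(client);

// Pages through every collection and document in the "dev" database,
// logging progress to migrations/currentOperations along the way.
await backupDatabase(database, "dev", storage);

// The uploaded file is roughly:
// {
//   "database": "<stringified database>",
//   "collections": ["<stringified collection>", ...],
//   "documents": [{ "collectionId": "...", "data": "<stringified document array>" }, ...]
// }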
@@ -0,0 +1,11 @@
+ import type { AppwriteConfig } from "./schema.js";
+ import { Databases, type Models } from "node-appwrite";
+ import { type AuthUserCreate } from "../schemas/authUser.js";
+ export declare class UsersController {
+     private config;
+     private users;
+     static userFields: string[];
+     constructor(config: AppwriteConfig, db: Databases);
+     wipeUsers(): Promise<void>;
+     createUserAndReturn(item: AuthUserCreate): Promise<Models.User<Models.Preferences>>;
+ }
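A short, hedged sketch of how this controller might be used on its own, assuming the compiled dist import path; the exact AuthUserCreate fields are defined in schemas/authUser.ts, so the user object below is only illustrative.

import { Client, Databases } from "node-appwrite";
import type { AppwriteConfig } from "appwrite-utils-cli/dist/migrations/schema.js";
import { UsersController } from "appwrite-utils-cli/dist/migrations/users.js";

declare const client: Client; // assumed to be configured with endpoint, project, and API key
declare const config: AppwriteConfig; // assumed to come from the tool's config loader

const usersController = new UsersController(config, new Databases(client));

// Remove all existing users (the same call startSetup makes when wipeUsers is set).
await usersController.wipeUsers();

// Create a user and get the created Models.User back; field names are illustrative only.
const user = await usersController.createUserAndReturn({
    email: "jane@example.com",
    password: "changeme123",
    name: "Jane Doe",
});
console.log(`Created user ${user.$id}`);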