appwrite-utils-cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. package/README.md +80 -0
  2. package/dist/main.d.ts +2 -0
  3. package/dist/main.js +74 -0
  4. package/dist/migrations/afterImportActions.d.ts +12 -0
  5. package/dist/migrations/afterImportActions.js +196 -0
  6. package/dist/migrations/attributes.d.ts +4 -0
  7. package/dist/migrations/attributes.js +158 -0
  8. package/dist/migrations/backup.d.ts +621 -0
  9. package/dist/migrations/backup.js +159 -0
  10. package/dist/migrations/collections.d.ts +16 -0
  11. package/dist/migrations/collections.js +207 -0
  12. package/dist/migrations/converters.d.ts +179 -0
  13. package/dist/migrations/converters.js +575 -0
  14. package/dist/migrations/dbHelpers.d.ts +5 -0
  15. package/dist/migrations/dbHelpers.js +54 -0
  16. package/dist/migrations/importController.d.ts +44 -0
  17. package/dist/migrations/importController.js +312 -0
  18. package/dist/migrations/importDataActions.d.ts +44 -0
  19. package/dist/migrations/importDataActions.js +219 -0
  20. package/dist/migrations/indexes.d.ts +4 -0
  21. package/dist/migrations/indexes.js +18 -0
  22. package/dist/migrations/logging.d.ts +2 -0
  23. package/dist/migrations/logging.js +14 -0
  24. package/dist/migrations/migrationHelper.d.ts +18 -0
  25. package/dist/migrations/migrationHelper.js +66 -0
  26. package/dist/migrations/queue.d.ts +13 -0
  27. package/dist/migrations/queue.js +79 -0
  28. package/dist/migrations/relationships.d.ts +90 -0
  29. package/dist/migrations/relationships.js +209 -0
  30. package/dist/migrations/schema.d.ts +3142 -0
  31. package/dist/migrations/schema.js +485 -0
  32. package/dist/migrations/schemaStrings.d.ts +12 -0
  33. package/dist/migrations/schemaStrings.js +261 -0
  34. package/dist/migrations/setupDatabase.d.ts +7 -0
  35. package/dist/migrations/setupDatabase.js +151 -0
  36. package/dist/migrations/storage.d.ts +8 -0
  37. package/dist/migrations/storage.js +241 -0
  38. package/dist/migrations/users.d.ts +11 -0
  39. package/dist/migrations/users.js +114 -0
  40. package/dist/migrations/validationRules.d.ts +43 -0
  41. package/dist/migrations/validationRules.js +42 -0
  42. package/dist/schemas/authUser.d.ts +62 -0
  43. package/dist/schemas/authUser.js +17 -0
  44. package/dist/setup.d.ts +2 -0
  45. package/dist/setup.js +5 -0
  46. package/dist/types.d.ts +9 -0
  47. package/dist/types.js +5 -0
  48. package/dist/utils/configSchema.json +742 -0
  49. package/dist/utils/helperFunctions.d.ts +34 -0
  50. package/dist/utils/helperFunctions.js +72 -0
  51. package/dist/utils/index.d.ts +2 -0
  52. package/dist/utils/index.js +2 -0
  53. package/dist/utils/setupFiles.d.ts +2 -0
  54. package/dist/utils/setupFiles.js +276 -0
  55. package/dist/utilsController.d.ts +30 -0
  56. package/dist/utilsController.js +106 -0
  57. package/package.json +34 -0
  58. package/src/main.ts +77 -0
  59. package/src/migrations/afterImportActions.ts +300 -0
  60. package/src/migrations/attributes.ts +315 -0
  61. package/src/migrations/backup.ts +189 -0
  62. package/src/migrations/collections.ts +303 -0
  63. package/src/migrations/converters.ts +628 -0
  64. package/src/migrations/dbHelpers.ts +89 -0
  65. package/src/migrations/importController.ts +509 -0
  66. package/src/migrations/importDataActions.ts +313 -0
  67. package/src/migrations/indexes.ts +37 -0
  68. package/src/migrations/logging.ts +15 -0
  69. package/src/migrations/migrationHelper.ts +100 -0
  70. package/src/migrations/queue.ts +119 -0
  71. package/src/migrations/relationships.ts +336 -0
  72. package/src/migrations/schema.ts +590 -0
  73. package/src/migrations/schemaStrings.ts +310 -0
  74. package/src/migrations/setupDatabase.ts +219 -0
  75. package/src/migrations/storage.ts +351 -0
  76. package/src/migrations/users.ts +148 -0
  77. package/src/migrations/validationRules.ts +63 -0
  78. package/src/schemas/authUser.ts +23 -0
  79. package/src/setup.ts +8 -0
  80. package/src/types.ts +14 -0
  81. package/src/utils/configSchema.json +742 -0
  82. package/src/utils/helperFunctions.ts +111 -0
  83. package/src/utils/index.ts +2 -0
  84. package/src/utils/setupFiles.ts +295 -0
  85. package/src/utilsController.ts +173 -0
  86. package/tsconfig.json +37 -0
@@ -0,0 +1,66 @@
1
+ import { ID, Query } from "node-appwrite";
2
+ import { OperationSchema } from "./backup.js";
3
/**
 * Finds an in-progress migration operation for the given collection and
 * operation type, or creates a fresh one if none exists.
 *
 * @param {Databases} database - Appwrite Databases service instance.
 * @param {string} collectionId - ID of the collection the operation targets.
 * @param {string} operationType - Kind of operation (e.g. an import step).
 * @returns {Promise<object>} The operation document, validated by OperationSchema.
 */
export const findOrCreateOperation = async (database, collectionId, operationType) => {
    // Look for an existing, still-running operation for this collection/type pair.
    const existing = await database.listDocuments("migrations", "currentOperations", [
        Query.equal("collectionId", collectionId),
        Query.equal("operationType", operationType),
        Query.equal("status", "in_progress"),
    ]);
    if (existing.documents.length > 0) {
        // Reuse the first match; at most one in-progress operation is expected.
        return OperationSchema.parse(existing.documents[0]);
    }
    // Nothing in flight yet — create a new operation document with empty progress.
    const created = await database.createDocument("migrations", "currentOperations", ID.unique(), {
        operationType,
        collectionId,
        status: "in_progress",
        batches: [],
        progress: 0,
        total: 0,
        error: "",
    });
    return OperationSchema.parse(created);
};
29
/**
 * Applies a partial update to an existing operation document.
 *
 * @param {Databases} database - Appwrite Databases service instance.
 * @param {string} operationId - $id of the operation document to update.
 * @param {object} updateFields - Fields to merge into the document.
 * @returns {Promise<void>}
 */
export const updateOperation = async (database, operationId, updateFields) => {
    const databaseId = "migrations";
    const collectionId = "currentOperations";
    await database.updateDocument(databaseId, collectionId, operationId, updateFields);
};
32
// Actual max 1073741824; keep a small safety margin below the hard limit.
export const maxDataLength = 1073741820;
// Maximum number of items allowed in a single batch.
export const maxBatchItems = 100;
/**
 * Splits an array of items into batches constrained by both a serialized-size
 * budget (maxDataLength, measured as JSON.stringify length) and an item-count
 * cap (maxBatchItems).
 *
 * Fix over the previous version: when the very first item of a batch already
 * exceeded the size budget, an empty batch was pushed into the result; empty
 * batches are now never emitted.
 *
 * @param {any[]} data - Items to partition.
 * @returns {any[][]} Non-empty batches preserving the original item order.
 */
export const splitIntoBatches = (data) => {
    const batches = [];
    let currentBatch = [];
    let currentBatchLength = 0;
    data.forEach((item, index) => {
        const itemLength = JSON.stringify(item).length;
        if (itemLength > maxDataLength) {
            // Oversized items cannot fit any batch; surface them for inspection.
            console.log(item, `Large item found at index ${index} with length ${itemLength}:`);
        }
        // Start a new batch when this item would overflow the size or count limit.
        if (currentBatchLength + itemLength >= maxDataLength ||
            currentBatch.length >= maxBatchItems) {
            if (currentBatch.length > 0) {
                batches.push(currentBatch); // never emit an empty batch
            }
            currentBatch = [item];
            currentBatchLength = itemLength;
        }
        else {
            // Otherwise, add the item to the current batch.
            currentBatch.push(item);
            currentBatchLength += itemLength;
        }
    });
    // Flush the trailing batch if it holds anything.
    if (currentBatch.length > 0) {
        batches.push(currentBatch);
    }
    return batches;
};
@@ -0,0 +1,13 @@
1
import { type Databases, type Models } from "node-appwrite";
import type { Attribute } from "./schema.js";
/**
 * A deferred schema operation, queued until the collection it targets
 * (or a collection it depends on) can be resolved.
 */
export interface QueuedOperation {
    /** Only attribute create/update operations are queued at present. */
    type: "attribute";
    /** ID of the collection the attribute belongs to, when already known. */
    collectionId?: string;
    /** The attribute definition to create or update. */
    attribute?: Attribute;
    /** Resolved collection model, when available. */
    collection?: Models.Collection;
    /** Collection names this operation waits on (e.g. relationship targets). */
    dependencies?: string[];
}
/** FIFO list of operations awaiting processing by processQueue. */
export declare const queuedOperations: QueuedOperation[];
/** Cache mapping collection names to their resolved collection IDs. */
export declare const nameToIdMapping: Map<string, string>;
/** Appends an operation to the queue. */
export declare const enqueueOperation: (operation: QueuedOperation) => void;
/** Drains the queue for a database, resolving collections and applying attributes. */
export declare const processQueue: (db: Databases, dbId: string) => Promise<void>;
@@ -0,0 +1,79 @@
1
+ import { Query } from "node-appwrite";
2
+ import { createOrUpdateAttribute } from "./attributes.js";
3
+ import _ from "lodash";
4
+ import { fetchAndCacheCollectionByName } from "./collections.js";
5
// Queue of attribute operations that could not be applied immediately;
// drained later by processQueue once target collections can be resolved.
export const queuedOperations = [];
// Cache of collection name -> collection ID lookups, shared across steps.
export const nameToIdMapping = new Map();
// Appends an operation to the queue for deferred processing.
export const enqueueOperation = (operation) => {
    queuedOperations.push(operation);
};
10
/**
 * Drains queuedOperations for the given database, attempting to resolve each
 * operation's target collection and apply its attribute.
 *
 * Resolution order for relationship attributes: explicit collectionId first,
 * then the attribute's relatedCollection by name, then each declared
 * dependency by name. Non-relationship operations resolve by collectionId only.
 *
 * NOTE(review): both outcome branches below remove the operation from the
 * queue (whether processed or not), so the queue is always empty after one
 * pass; the outer `while (progress)` retry loop and the final "unresolved
 * operations" report appear unreachable — confirm intended retry semantics.
 *
 * @param {Databases} db - Appwrite Databases service.
 * @param {string} dbId - Target database ID.
 * @returns {Promise<void>}
 */
export const processQueue = async (db, dbId) => {
    console.log("---------------------------------");
    console.log(`Starting Queue processing of ${dbId}`);
    console.log("---------------------------------");
    let progress = true;
    while (progress) {
        progress = false;
        console.log("Processing queued operations:");
        for (let i = 0; i < queuedOperations.length; i++) {
            const operation = queuedOperations[i];
            let collectionFound;
            // Handle relationship attribute operations
            if (operation.attribute?.type === "relationship") {
                // Attempt to resolve the collection directly if collectionId is specified
                if (operation.collectionId) {
                    console.log(`\tFetching collection by ID: ${operation.collectionId}`);
                    try {
                        collectionFound = await db.getCollection(dbId, operation.collectionId);
                    }
                    catch (e) {
                        // Lookup failure is expected here; fall through to name-based resolution.
                        console.log(`\tCollection not found by ID: ${operation.collectionId}`);
                    }
                }
                // Attempt to resolve related collection if specified and not already found
                if (!collectionFound && operation.attribute?.relatedCollection) {
                    collectionFound = await fetchAndCacheCollectionByName(db, dbId, operation.attribute.relatedCollection);
                }
                // Handle dependencies if collection still not found
                if (!collectionFound) {
                    for (const dep of operation.dependencies || []) {
                        collectionFound = await fetchAndCacheCollectionByName(db, dbId, dep);
                        if (collectionFound)
                            break; // Break early if collection is found
                    }
                }
            }
            else if (operation.collectionId) {
                // Handle non-relationship operations with a specified collectionId
                console.log(`\tFetching collection by ID: ${operation.collectionId}`);
                try {
                    collectionFound = await db.getCollection(dbId, operation.collectionId);
                }
                catch (e) {
                    console.log(`\tCollection not found by ID: ${operation.collectionId}`);
                }
            }
            // Process the operation if the collection is found
            if (collectionFound && operation.attribute) {
                console.log(`\tProcessing attribute: ${operation.attribute.key} for collection ID: ${collectionFound.$id}`);
                await createOrUpdateAttribute(db, dbId, collectionFound, operation.attribute);
                queuedOperations.splice(i, 1);
                i--; // Adjust index since we're modifying the array
                progress = true;
            }
            else {
                // Unresolvable right now — dropped rather than retried (see NOTE above).
                console.error(`\tCollection not found for operation, removing from queue: ${JSON.stringify(operation)}`);
                queuedOperations.splice(i, 1);
                i--; // Adjust index since we're modifying the array
            }
        }
        console.log(`\tFinished processing queued operations`);
    }
    if (queuedOperations.length > 0) {
        console.error("Unresolved operations remain due to unmet dependencies.");
        console.log(queuedOperations);
    }
    console.log("---------------------------------");
    console.log(`Queue processing complete for ${dbId}`);
    console.log("---------------------------------");
};
@@ -0,0 +1,90 @@
1
import { Databases } from "node-appwrite";
import type { AppwriteConfig } from "./schema.js";
/**
 * Scans the config and returns a map from collection name to the relationship
 * attributes that involve it. Each relationship attribute appears under both
 * the declaring collection and its relatedCollection. The value type is the
 * full union of supported attribute shapes (inferred from the Zod schema).
 */
export declare const findCollectionsWithRelationships: (config: AppwriteConfig) => Map<string, ({
    key: string;
    type: "string";
    error: string;
    required: boolean;
    array: boolean;
    size: number;
    xdefault?: string | null | undefined;
    encrypted?: boolean | undefined;
} | {
    key: string;
    type: "integer";
    error: string;
    required: boolean;
    array: boolean;
    min?: number | undefined;
    max?: number | undefined;
    xdefault?: number | null | undefined;
} | {
    key: string;
    type: "float";
    error: string;
    required: boolean;
    array: boolean;
    min?: number | undefined;
    max?: number | undefined;
    xdefault?: number | null | undefined;
} | {
    key: string;
    type: "boolean";
    error: string;
    required: boolean;
    array: boolean;
    xdefault?: boolean | null | undefined;
} | {
    key: string;
    type: "datetime";
    error: string;
    required: boolean;
    array: boolean;
    xdefault?: string | null | undefined;
} | {
    key: string;
    type: "email";
    error: string;
    required: boolean;
    array: boolean;
    xdefault?: string | null | undefined;
} | {
    key: string;
    type: "ip";
    error: string;
    required: boolean;
    array: boolean;
    xdefault?: string | null | undefined;
} | {
    key: string;
    type: "url";
    error: string;
    required: boolean;
    array: boolean;
    xdefault?: string | null | undefined;
} | {
    key: string;
    type: "enum";
    error: string;
    required: boolean;
    array: boolean;
    elements: string[];
    xdefault?: string | null | undefined;
} | {
    key: string;
    type: "relationship";
    error: string;
    required: boolean;
    relatedCollection: string;
    relationType: "oneToMany" | "manyToOne" | "oneToOne" | "manyToMany";
    twoWay: boolean;
    twoWayKey: string;
    onDelete: "setNull" | "cascade" | "restrict";
    side: "parent" | "child";
    array?: boolean | undefined;
    importMapping?: {
        originalIdField: string;
        targetField?: string | undefined;
    } | undefined;
})[]>;
/** Resolves importMapping-based relationships and updates documents accordingly. */
export declare function resolveAndUpdateRelationships(dbId: string, database: Databases, config: AppwriteConfig): Promise<void>;
@@ -0,0 +1,209 @@
1
+ import { Databases, Query } from "node-appwrite";
2
+ import { fetchAllCollections } from "./collections.js";
3
/**
 * Builds a map of collection name -> relationship attributes touching it.
 * Each relationship attribute is registered under both the collection that
 * declares it and the collection it points at (relatedCollection), so either
 * side can be looked up during relationship resolution.
 *
 * @param {AppwriteConfig} config - Parsed appwrite configuration.
 * @returns {Map<string, object[]>} Name-to-relationship-attributes lookup.
 */
export const findCollectionsWithRelationships = (config) => {
    const relationshipsByCollection = new Map();
    // Register an attribute under a collection name, creating the bucket lazily.
    const register = (name, attribute) => {
        if (!relationshipsByCollection.has(name)) {
            relationshipsByCollection.set(name, []);
        }
        relationshipsByCollection.get(name).push(attribute);
    };
    for (const collection of config.collections) {
        for (const attribute of collection.attributes ?? []) {
            if (attribute.type === "relationship") {
                register(collection.name, attribute);
                register(attribute.relatedCollection, attribute);
            }
        }
    }
    return relationshipsByCollection;
};
24
/**
 * Fetches every document in a collection using cursor-based pagination.
 *
 * Improvement: stops as soon as a page comes back smaller than the page size,
 * avoiding the previous version's extra empty-page round-trip at the end.
 *
 * @param {string} dbId - Database ID.
 * @param {Databases} database - Appwrite Databases service.
 * @param {string} collectionId - Collection to read from.
 * @returns {Promise<object[]>} All documents in the collection.
 */
async function fetchAllDocuments(dbId, database, collectionId) {
    const pageSize = 100; // Adjust based on the maximum limit your database allows
    let allDocuments = [];
    let after; // Pagination cursor: $id of the last document seen
    while (true) {
        const response = await database.listDocuments(dbId, collectionId, [
            Query.limit(pageSize),
            ...(after ? [Query.cursorAfter(after)] : []),
        ]);
        allDocuments = allDocuments.concat(response.documents);
        // A short (or empty) page means we've reached the end.
        if (response.documents.length < pageSize) {
            break;
        }
        after = response.documents[response.documents.length - 1].$id; // Prepare for the next page
    }
    return allDocuments;
}
40
/**
 * Resolves importMapping-based relationships for every collection in the
 * database and writes the linked document references back.
 *
 * @param {string} dbId - Target database ID.
 * @param {Databases} database - Appwrite Databases service.
 * @param {AppwriteConfig} config - Configuration describing the relationships.
 * @returns {Promise<void>}
 */
export async function resolveAndUpdateRelationships(dbId, database, config) {
    const collections = await fetchAllCollections(dbId, database);
    const relationshipsByName = findCollectionsWithRelationships(config);
    // Collections are handled one at a time to keep request volume predictable.
    for (const collection of collections) {
        console.log(`Processing collection: ${collection.name} (${collection.$id})`);
        const relAttributes = relationshipsByName.get(collection.name);
        if (!relAttributes) {
            console.log(`No mapping found for collection: ${collection.name}, skipping...`);
            continue;
        }
        await processCollection(dbId, database, collection, relAttributes);
    }
    console.log(`Completed relationship resolution and update for database ID: ${dbId}`);
}
55
/**
 * Resolves and applies relationship updates for a single collection,
 * working through its documents in fixed-size rounds.
 *
 * @param {string} dbId - Database ID.
 * @param {Databases} database - Appwrite Databases service.
 * @param {object} collection - Collection model being processed.
 * @param {object[]} relAttributeMap - Relationship attributes for it.
 * @returns {Promise<void>}
 */
async function processCollection(dbId, database, collection, relAttributeMap) {
    const ROUND_SIZE = 10; // documents prepared/updated per round
    const allDocuments = await fetchAllDocuments(dbId, database, collection.$id);
    console.log(`Fetched ${allDocuments.length} documents from collection: ${collection.name}`);
    for (let start = 0; start < allDocuments.length; start += ROUND_SIZE) {
        const docRound = allDocuments.slice(start, start + ROUND_SIZE);
        const updates = await prepareDocumentUpdates(database, dbId, collection.name, docRound, relAttributeMap);
        // Apply this round's updates before preparing the next one.
        await executeUpdatesInBatches(dbId, database, updates);
    }
}
66
/**
 * Finds documents in a related collection whose `targetKey` attribute holds
 * the given original ID (equality for scalar attributes, containment when the
 * attribute is an array).
 *
 * Bug fix: the previous version returned `undefined` whenever
 * `response.total > 0` — i.e. exactly when matches existed — so it could
 * never return any documents. Matches are now returned.
 *
 * @param {Databases} database - Appwrite Databases service.
 * @param {string} dbId - Database ID.
 * @param {string} relatedCollectionId - Collection to search in.
 * @param {string} targetKey - Attribute that stores the original ID.
 * @param {string} originalId - Value to look for.
 * @returns {Promise<object[] | undefined>} Matching documents, or undefined
 *   when the collection/attribute is missing or nothing matches.
 */
async function findDocumentsByOriginalId(database, dbId, relatedCollectionId, targetKey, originalId) {
    const collection = await database.listCollections(dbId, [
        Query.equal("$id", relatedCollectionId),
    ]);
    if (collection.total === 0) {
        console.log(`Collection ${relatedCollectionId} doesn't exist, skipping...`);
        return undefined;
    }
    const targetAttr = collection.collections[0].attributes.find(
    // @ts-ignore
    (attr) => attr.key === targetKey);
    if (!targetAttr) {
        console.log(`Attribute ${targetKey} not found in collection ${relatedCollectionId}, skipping...`);
        return undefined;
    }
    // Array attributes need a containment query; scalars use direct equality.
    const queries = [];
    if (targetAttr.array) {
        // @ts-ignore
        queries.push(Query.contains(targetKey, originalId));
    }
    else {
        queries.push(Query.equal(targetKey, originalId));
    }
    const response = await database.listDocuments(dbId, relatedCollectionId, [
        ...queries,
        Query.limit(500), // Adjust the limit based on your needs or implement pagination
    ]);
    return response.documents.length > 0 ? response.documents : undefined;
}
103
/**
 * Builds the list of relationship-reference updates for a batch of documents
 * in `collectionName`, based on each relationship attribute's importMapping
 * (originalIdField / targetField). Documents in the related collection are
 * matched by original ID; single-reference relations (oneToOne/manyToOne)
 * receive one $id, multi-reference relations receive a de-duplicated array.
 *
 * @param {Databases} database - Appwrite Databases service.
 * @param {string} dbId - Database ID.
 * @param {string} collectionName - Name of the collection being updated.
 * @param {object[]} documents - Documents to inspect.
 * @param {object[]} relationships - Relationship attributes for this collection.
 * @returns {Promise<{collectionId: string, documentId: string, updatePayload: object}[]>}
 *   Update descriptors consumed by executeUpdatesInBatches.
 */
async function prepareDocumentUpdates(database, dbId, collectionName, documents, relationships) {
    console.log(`Preparing updates for collection: ${collectionName}`);
    const updates = [];
    // Resolve this collection's ID from its name; bail out if it doesn't exist.
    const thisCollection = (await database.listCollections(dbId, [Query.equal("name", collectionName)])).collections[0];
    const thisCollectionId = thisCollection?.$id;
    if (!thisCollectionId) {
        console.log(`No collection found with name: ${collectionName}`);
        return [];
    }
    // Function to process a batch of documents
    const processDocumentBatch = async (docBatch) => {
        for (const doc of docBatch) {
            let updatePayload = {};
            for (const rel of relationships) {
                // Check if the relationship has importMapping defined
                if (!rel.importMapping) {
                    continue;
                }
                // Skip if not dealing with the parent side of a two-way relationship
                if (rel.twoWay && rel.side !== "parent") {
                    console.log("Not processing child side of two-way relationship, skipping...");
                    continue;
                }
                const isSingleReference = rel.relationType === "oneToOne" || rel.relationType === "manyToOne";
                const originalIdField = rel.importMapping.originalIdField;
                const targetField = rel.importMapping.targetField || originalIdField; // Use originalIdField if targetField is not specified
                const originalId = doc[originalIdField];
                if (!originalId) {
                    continue; // Skip if the document doesn't have the original ID field
                }
                // Resolve the related collection's ID from its configured name.
                const collection = await database.listCollections(dbId, [
                    Query.equal("name", rel.relatedCollection),
                ]);
                if (collection.total === 0) {
                    console.log(`Collection ${rel.relatedCollection} doesn't exist, skipping...`);
                    continue; // Skip if the related collection doesn't exist
                }
                const relatedCollectionId = collection.collections[0].$id;
                // Find documents in the related collection that match the original ID
                const foundDocuments = await findDocumentsByOriginalId(database, dbId, relatedCollectionId, targetField, originalId);
                if (foundDocuments && foundDocuments.length > 0) {
                    const relationshipKey = rel.key;
                    // Existing references may be a single related document or an array.
                    const existingRefs = doc[relationshipKey] || [];
                    let existingRefIds = [];
                    if (Array.isArray(existingRefs)) {
                        // @ts-ignore
                        existingRefIds = existingRefs.map((ref) => ref.$id);
                    }
                    else if (existingRefs) {
                        // @ts-ignore
                        existingRefIds = [existingRefs.$id];
                    }
                    const newRefs = foundDocuments.map((fd) => fd.$id);
                    const allRefs = [...new Set([...existingRefIds, ...newRefs])]; // Combine and remove duplicates
                    // Update logic based on the relationship cardinality
                    updatePayload[relationshipKey] = isSingleReference
                        ? newRefs[0] || existingRefIds[0]
                        : allRefs;
                }
            }
            // Only emit an update when at least one relationship was resolved.
            if (Object.keys(updatePayload).length > 0) {
                updates.push({
                    collectionId: thisCollectionId,
                    documentId: doc.$id,
                    updatePayload: updatePayload,
                });
            }
        }
    };
    // Process documents in batches
    await processInBatches(documents, 25, processDocumentBatch);
    return updates;
}
176
/**
 * Runs `processFunction` over `items` in slices of `batchSize`, keeping at
 * most `maxParallelBatches` batch promises in flight at once.
 *
 * Bug fix: the previous implementation tried to prune completed promises with
 * `activeBatchPromises.filter((p) => p !== Promise.race(...))`. Promise.race
 * returns a brand-new promise, so the filter never removed anything and the
 * loop stalled forever once more than maxParallelBatches batches were needed.
 * Completed promises now remove themselves from the in-flight set.
 *
 * @param {any[]} items - Items to process.
 * @param {number} batchSize - Number of items per batch.
 * @param {(batch: any[]) => Promise<void>} processFunction - Batch handler.
 * @returns {Promise<void>} Resolves when every batch has been processed.
 */
async function processInBatches(items, batchSize, processFunction) {
    const maxParallelBatches = 25; // Adjust this value to control the number of parallel batches
    const inFlight = new Set();
    for (let index = 0; index < items.length; index += batchSize) {
        const batch = items.slice(index, index + batchSize);
        // Track the batch promise and drop it from the set once it settles.
        const batchPromise = processFunction(batch).finally(() => {
            inFlight.delete(batchPromise);
        });
        inFlight.add(batchPromise);
        // Throttle: wait for a slot to free up before launching the next batch.
        if (inFlight.size >= maxParallelBatches) {
            await Promise.race(inFlight);
        }
    }
    // After launching all batches, wait for the stragglers to finish.
    await Promise.all(inFlight);
}
198
/**
 * Applies document updates in parallel chunks, logging (but not rethrowing)
 * any individual update failure so the remaining updates still run.
 *
 * @param {string} dbId - Database ID.
 * @param {Databases} database - Appwrite Databases service.
 * @param {{collectionId: string, documentId: string, updatePayload: object}[]} updates
 * @returns {Promise<void>}
 */
async function executeUpdatesInBatches(dbId, database, updates) {
    const batchSize = 25; // Adjust based on your rate limit and performance testing
    for (let offset = 0; offset < updates.length; offset += batchSize) {
        const chunk = updates.slice(offset, offset + batchSize);
        const applyUpdate = async (update) => {
            try {
                await database.updateDocument(dbId, update.collectionId, update.documentId, update.updatePayload);
            }
            catch (error) {
                // Best-effort: report the failure and continue with the rest.
                console.error("Error updating document: ", error);
                console.error("Document ID: ", update.documentId, "Collection ID: ", update.collectionId, "Document update payload: ", JSON.stringify(update.updatePayload, undefined, 4));
            }
        };
        await Promise.all(chunk.map(applyUpdate));
    }
}