appwrite-utils-cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +80 -0
- package/dist/main.d.ts +2 -0
- package/dist/main.js +74 -0
- package/dist/migrations/afterImportActions.d.ts +12 -0
- package/dist/migrations/afterImportActions.js +196 -0
- package/dist/migrations/attributes.d.ts +4 -0
- package/dist/migrations/attributes.js +158 -0
- package/dist/migrations/backup.d.ts +621 -0
- package/dist/migrations/backup.js +159 -0
- package/dist/migrations/collections.d.ts +16 -0
- package/dist/migrations/collections.js +207 -0
- package/dist/migrations/converters.d.ts +179 -0
- package/dist/migrations/converters.js +575 -0
- package/dist/migrations/dbHelpers.d.ts +5 -0
- package/dist/migrations/dbHelpers.js +54 -0
- package/dist/migrations/importController.d.ts +44 -0
- package/dist/migrations/importController.js +312 -0
- package/dist/migrations/importDataActions.d.ts +44 -0
- package/dist/migrations/importDataActions.js +219 -0
- package/dist/migrations/indexes.d.ts +4 -0
- package/dist/migrations/indexes.js +18 -0
- package/dist/migrations/logging.d.ts +2 -0
- package/dist/migrations/logging.js +14 -0
- package/dist/migrations/migrationHelper.d.ts +18 -0
- package/dist/migrations/migrationHelper.js +66 -0
- package/dist/migrations/queue.d.ts +13 -0
- package/dist/migrations/queue.js +79 -0
- package/dist/migrations/relationships.d.ts +90 -0
- package/dist/migrations/relationships.js +209 -0
- package/dist/migrations/schema.d.ts +3142 -0
- package/dist/migrations/schema.js +485 -0
- package/dist/migrations/schemaStrings.d.ts +12 -0
- package/dist/migrations/schemaStrings.js +261 -0
- package/dist/migrations/setupDatabase.d.ts +7 -0
- package/dist/migrations/setupDatabase.js +151 -0
- package/dist/migrations/storage.d.ts +8 -0
- package/dist/migrations/storage.js +241 -0
- package/dist/migrations/users.d.ts +11 -0
- package/dist/migrations/users.js +114 -0
- package/dist/migrations/validationRules.d.ts +43 -0
- package/dist/migrations/validationRules.js +42 -0
- package/dist/schemas/authUser.d.ts +62 -0
- package/dist/schemas/authUser.js +17 -0
- package/dist/setup.d.ts +2 -0
- package/dist/setup.js +5 -0
- package/dist/types.d.ts +9 -0
- package/dist/types.js +5 -0
- package/dist/utils/configSchema.json +742 -0
- package/dist/utils/helperFunctions.d.ts +34 -0
- package/dist/utils/helperFunctions.js +72 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.js +2 -0
- package/dist/utils/setupFiles.d.ts +2 -0
- package/dist/utils/setupFiles.js +276 -0
- package/dist/utilsController.d.ts +30 -0
- package/dist/utilsController.js +106 -0
- package/package.json +34 -0
- package/src/main.ts +77 -0
- package/src/migrations/afterImportActions.ts +300 -0
- package/src/migrations/attributes.ts +315 -0
- package/src/migrations/backup.ts +189 -0
- package/src/migrations/collections.ts +303 -0
- package/src/migrations/converters.ts +628 -0
- package/src/migrations/dbHelpers.ts +89 -0
- package/src/migrations/importController.ts +509 -0
- package/src/migrations/importDataActions.ts +313 -0
- package/src/migrations/indexes.ts +37 -0
- package/src/migrations/logging.ts +15 -0
- package/src/migrations/migrationHelper.ts +100 -0
- package/src/migrations/queue.ts +119 -0
- package/src/migrations/relationships.ts +336 -0
- package/src/migrations/schema.ts +590 -0
- package/src/migrations/schemaStrings.ts +310 -0
- package/src/migrations/setupDatabase.ts +219 -0
- package/src/migrations/storage.ts +351 -0
- package/src/migrations/users.ts +148 -0
- package/src/migrations/validationRules.ts +63 -0
- package/src/schemas/authUser.ts +23 -0
- package/src/setup.ts +8 -0
- package/src/types.ts +14 -0
- package/src/utils/configSchema.json +742 -0
- package/src/utils/helperFunctions.ts +111 -0
- package/src/utils/index.ts +2 -0
- package/src/utils/setupFiles.ts +295 -0
- package/src/utilsController.ts +173 -0
- package/tsconfig.json +37 -0
|
@@ -0,0 +1,336 @@
|
|
|
1
|
+
import { Databases, Query, type Models } from "node-appwrite";
|
|
2
|
+
import { fetchAllCollections } from "./collections.js";
|
|
3
|
+
import type {
|
|
4
|
+
AppwriteConfig,
|
|
5
|
+
Attribute,
|
|
6
|
+
RelationshipAttribute,
|
|
7
|
+
} from "./schema.js";
|
|
8
|
+
|
|
9
|
+
export const findCollectionsWithRelationships = (config: AppwriteConfig) => {
|
|
10
|
+
const toReturn = new Map<string, Attribute[]>();
|
|
11
|
+
// Map of collection name to array of attributes so we can update the relationships
|
|
12
|
+
for (const collection of config.collections) {
|
|
13
|
+
if (collection.attributes) {
|
|
14
|
+
for (const attribute of collection.attributes) {
|
|
15
|
+
if (attribute.type === "relationship") {
|
|
16
|
+
if (!toReturn.has(collection.name)) {
|
|
17
|
+
toReturn.set(collection.name, []);
|
|
18
|
+
}
|
|
19
|
+
toReturn.get(collection.name)?.push(attribute);
|
|
20
|
+
if (!toReturn.has(attribute.relatedCollection)) {
|
|
21
|
+
toReturn.set(attribute.relatedCollection, []);
|
|
22
|
+
}
|
|
23
|
+
toReturn.get(attribute.relatedCollection)?.push(attribute);
|
|
24
|
+
}
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
}
|
|
28
|
+
return toReturn;
|
|
29
|
+
};
|
|
30
|
+
|
|
31
|
+
async function fetchAllDocuments(
|
|
32
|
+
dbId: string,
|
|
33
|
+
database: Databases,
|
|
34
|
+
collectionId: string
|
|
35
|
+
): Promise<Models.Document[]> {
|
|
36
|
+
let allDocuments: Models.Document[] = [];
|
|
37
|
+
let after; // This will be used for pagination
|
|
38
|
+
|
|
39
|
+
while (true) {
|
|
40
|
+
const response: Models.DocumentList<Models.Document> =
|
|
41
|
+
await database.listDocuments(dbId, collectionId, [
|
|
42
|
+
Query.limit(100), // Adjust based on the maximum limit your database allows
|
|
43
|
+
...(after ? [Query.cursorAfter(after)] : []),
|
|
44
|
+
]);
|
|
45
|
+
|
|
46
|
+
allDocuments = allDocuments.concat(response.documents);
|
|
47
|
+
|
|
48
|
+
if (response.documents.length === 0 || response.total === 0) {
|
|
49
|
+
break; // Exit the loop if there are no more documents to fetch
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
after = response.documents[response.documents.length - 1].$id; // Prepare for the next page
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
return allDocuments;
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
export async function resolveAndUpdateRelationships(
|
|
59
|
+
dbId: string,
|
|
60
|
+
database: Databases,
|
|
61
|
+
config: AppwriteConfig
|
|
62
|
+
) {
|
|
63
|
+
const collections = await fetchAllCollections(dbId, database);
|
|
64
|
+
const collectionsWithRelationships = findCollectionsWithRelationships(config);
|
|
65
|
+
|
|
66
|
+
// Process each collection sequentially
|
|
67
|
+
for (const collection of collections) {
|
|
68
|
+
console.log(
|
|
69
|
+
`Processing collection: ${collection.name} (${collection.$id})`
|
|
70
|
+
);
|
|
71
|
+
const relAttributeMap = collectionsWithRelationships.get(
|
|
72
|
+
collection.name
|
|
73
|
+
) as RelationshipAttribute[]; // Get the relationship attributes for the collections
|
|
74
|
+
|
|
75
|
+
if (!relAttributeMap) {
|
|
76
|
+
console.log(
|
|
77
|
+
`No mapping found for collection: ${collection.name}, skipping...`
|
|
78
|
+
);
|
|
79
|
+
continue;
|
|
80
|
+
}
|
|
81
|
+
|
|
82
|
+
await processCollection(dbId, database, collection, relAttributeMap);
|
|
83
|
+
}
|
|
84
|
+
console.log(
|
|
85
|
+
`Completed relationship resolution and update for database ID: ${dbId}`
|
|
86
|
+
);
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
async function processCollection(
|
|
90
|
+
dbId: string,
|
|
91
|
+
database: Databases,
|
|
92
|
+
collection: Models.Collection,
|
|
93
|
+
relAttributeMap: RelationshipAttribute[]
|
|
94
|
+
) {
|
|
95
|
+
const allDocuments = await fetchAllDocuments(dbId, database, collection.$id);
|
|
96
|
+
console.log(
|
|
97
|
+
`Fetched ${allDocuments.length} documents from collection: ${collection.name}`
|
|
98
|
+
);
|
|
99
|
+
|
|
100
|
+
const batchSize = 10; // Process documents in batches of 10
|
|
101
|
+
|
|
102
|
+
for (let i = 0; i < allDocuments.length; i += batchSize) {
|
|
103
|
+
const batch = allDocuments.slice(i, i + batchSize);
|
|
104
|
+
|
|
105
|
+
const updates = await prepareDocumentUpdates(
|
|
106
|
+
database,
|
|
107
|
+
dbId,
|
|
108
|
+
collection.name,
|
|
109
|
+
batch,
|
|
110
|
+
relAttributeMap
|
|
111
|
+
);
|
|
112
|
+
|
|
113
|
+
// Execute updates for the current batch
|
|
114
|
+
await executeUpdatesInBatches(dbId, database, updates);
|
|
115
|
+
}
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
async function findDocumentsByOriginalId(
|
|
119
|
+
database: Databases,
|
|
120
|
+
dbId: string,
|
|
121
|
+
relatedCollectionId: string,
|
|
122
|
+
targetKey: string,
|
|
123
|
+
originalId: string | string[]
|
|
124
|
+
): Promise<Models.Document[] | undefined> {
|
|
125
|
+
const collection = await database.listCollections(dbId, [
|
|
126
|
+
Query.equal("$id", relatedCollectionId),
|
|
127
|
+
]);
|
|
128
|
+
if (collection.total === 0) {
|
|
129
|
+
console.log(`Collection ${relatedCollectionId} doesn't exist, skipping...`);
|
|
130
|
+
return undefined;
|
|
131
|
+
}
|
|
132
|
+
const targetAttr = collection.collections[0].attributes.find(
|
|
133
|
+
// @ts-ignore
|
|
134
|
+
(attr) => attr.key === targetKey
|
|
135
|
+
) as any;
|
|
136
|
+
if (!targetAttr) {
|
|
137
|
+
console.log(
|
|
138
|
+
`Attribute ${targetKey} not found in collection ${relatedCollectionId}, skipping...`
|
|
139
|
+
);
|
|
140
|
+
return undefined;
|
|
141
|
+
}
|
|
142
|
+
let queries: string[] = [];
|
|
143
|
+
if (targetAttr.array) {
|
|
144
|
+
// @ts-ignore
|
|
145
|
+
queries.push(Query.contains(targetKey, originalId));
|
|
146
|
+
} else {
|
|
147
|
+
queries.push(Query.equal(targetKey, originalId));
|
|
148
|
+
}
|
|
149
|
+
const response = await database.listDocuments(dbId, relatedCollectionId, [
|
|
150
|
+
...queries,
|
|
151
|
+
Query.limit(500), // Adjust the limit based on your needs or implement pagination
|
|
152
|
+
]);
|
|
153
|
+
if (response.total > 0) {
|
|
154
|
+
return undefined;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
if (response.documents.length > 0) {
|
|
158
|
+
return response.documents;
|
|
159
|
+
} else {
|
|
160
|
+
return undefined;
|
|
161
|
+
}
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
/**
 * Builds the relationship-update payloads for a batch of documents belonging
 * to `collectionName`.
 *
 * For each document and each relationship attribute that declares an
 * `importMapping`, this looks up documents in the related collection whose
 * target field matches the document's original-ID value, then records the
 * matched `$id`s: a single ID for oneToOne/manyToOne relationships, an array
 * of IDs (merged with any existing references, de-duplicated) otherwise.
 * Documents that need no changes produce no entry in the result.
 *
 * NOTE(review): the related collection is re-resolved via `listCollections`
 * for every document/relationship pair — presumably acceptable for import
 * volumes, but it is an N+1 lookup pattern.
 *
 * @param database - Appwrite Databases client used for all lookups.
 * @param dbId - ID of the database being processed.
 * @param collectionName - Name (not ID) of the collection the documents belong to.
 * @param documents - Documents to prepare relationship updates for.
 * @param relationships - Relationship attributes relevant to this collection.
 * @returns One `{collectionId, documentId, updatePayload}` entry per document
 *   that has at least one relationship field to update.
 */
async function prepareDocumentUpdates(
  database: Databases,
  dbId: string,
  collectionName: string,
  documents: Models.Document[],
  relationships: RelationshipAttribute[]
): Promise<{ collectionId: string; documentId: string; updatePayload: any }[]> {
  console.log(`Preparing updates for collection: ${collectionName}`);
  // Accumulator shared (closed over) by the batch worker below.
  const updates: {
    collectionId: string;
    documentId: string;
    updatePayload: any;
  }[] = [];

  // Resolve the collection's ID from its name; bail out if it doesn't exist.
  const thisCollection = (
    await database.listCollections(dbId, [Query.equal("name", collectionName)])
  ).collections[0];
  const thisCollectionId = thisCollection?.$id;

  if (!thisCollectionId) {
    console.log(`No collection found with name: ${collectionName}`);
    return [];
  }

  // Function to process a batch of documents
  const processDocumentBatch = async (docBatch: Models.Document[]) => {
    for (const doc of docBatch) {
      // Collects per-document relationship-field updates; only non-empty
      // payloads are pushed to `updates` at the end.
      let updatePayload: { [key: string]: any } = {};

      for (const rel of relationships) {
        // Check if the relationship has importMapping defined
        if (!rel.importMapping) {
          continue;
        }
        // Skip if not dealing with the parent side of a two-way relationship
        if (rel.twoWay && rel.side !== "parent") {
          console.log(
            "Not processing child side of two-way relationship, skipping..."
          );
          continue;
        }
        // Single-reference cardinalities store one ID, not an array.
        const isSingleReference =
          rel.relationType === "oneToOne" || rel.relationType === "manyToOne";
        const originalIdField = rel.importMapping.originalIdField;
        const targetField = rel.importMapping.targetField || originalIdField; // Use originalIdField if targetField is not specified
        const originalId = doc[originalIdField as keyof typeof doc];
        if (!originalId) {
          continue; // Skip if the document doesn't have the original ID field
        }
        // Resolve the related collection by name; skip if it's absent.
        const collection = await database.listCollections(dbId, [
          Query.equal("name", rel.relatedCollection),
        ]);
        if (collection.total === 0) {
          console.log(
            `Collection ${rel.relatedCollection} doesn't exist, skipping...`
          );
          continue; // Skip if the related collection doesn't exist
        }
        const relatedCollectionId = collection.collections[0].$id;

        // Find documents in the related collection that match the original ID
        const foundDocuments = await findDocumentsByOriginalId(
          database,
          dbId,
          relatedCollectionId,
          targetField,
          originalId
        );

        if (foundDocuments && foundDocuments.length > 0) {
          const relationshipKey = rel.key;
          // Existing references may be a single related document, an array of
          // them, or absent; normalize to a list of $ids.
          const existingRefs = doc[relationshipKey as keyof typeof doc] || [];
          let existingRefIds: string[] = [];
          if (Array.isArray(existingRefs)) {
            // @ts-ignore
            existingRefIds = existingRefs.map((ref) => ref.$id);
          } else if (existingRefs) {
            // @ts-ignore
            existingRefIds = [existingRefs.$id];
          }

          const newRefs = foundDocuments.map((fd) => fd.$id);
          const allRefs = [...new Set([...existingRefIds, ...newRefs])]; // Combine and remove duplicates

          // Update logic based on the relationship cardinality
          updatePayload[relationshipKey] = isSingleReference
            ? newRefs[0] || existingRefIds[0]
            : allRefs;
        }
      }

      if (Object.keys(updatePayload).length > 0) {
        updates.push({
          collectionId: thisCollectionId,
          documentId: doc.$id,
          updatePayload: updatePayload,
        });
      }
    }
  };

  // Process documents in batches
  await processInBatches(documents, 25, processDocumentBatch);

  return updates;
}
|
|
270
|
+
|
|
271
|
+
async function processInBatches<T>(
|
|
272
|
+
items: T[],
|
|
273
|
+
batchSize: number,
|
|
274
|
+
processFunction: (batch: T[]) => Promise<void>
|
|
275
|
+
) {
|
|
276
|
+
const maxParallelBatches = 25; // Adjust this value to control the number of parallel batches
|
|
277
|
+
let currentIndex = 0;
|
|
278
|
+
let activeBatchPromises: Promise<void>[] = [];
|
|
279
|
+
|
|
280
|
+
while (currentIndex < items.length) {
|
|
281
|
+
// While there's still data to process and we haven't reached our parallel limit
|
|
282
|
+
while (
|
|
283
|
+
currentIndex < items.length &&
|
|
284
|
+
activeBatchPromises.length < maxParallelBatches
|
|
285
|
+
) {
|
|
286
|
+
const batch = items.slice(currentIndex, currentIndex + batchSize);
|
|
287
|
+
currentIndex += batchSize;
|
|
288
|
+
// Add new batch processing promise to the array
|
|
289
|
+
activeBatchPromises.push(processFunction(batch));
|
|
290
|
+
}
|
|
291
|
+
|
|
292
|
+
// Wait for one of the batch processes to complete
|
|
293
|
+
await Promise.race(activeBatchPromises).then(() => {
|
|
294
|
+
// Remove the resolved promise from the activeBatchPromises array
|
|
295
|
+
activeBatchPromises = activeBatchPromises.filter(
|
|
296
|
+
(p) => p !== Promise.race(activeBatchPromises)
|
|
297
|
+
);
|
|
298
|
+
});
|
|
299
|
+
}
|
|
300
|
+
|
|
301
|
+
// After processing all batches, ensure all active promises are resolved
|
|
302
|
+
await Promise.all(activeBatchPromises);
|
|
303
|
+
}
|
|
304
|
+
|
|
305
|
+
async function executeUpdatesInBatches(
|
|
306
|
+
dbId: string,
|
|
307
|
+
database: Databases,
|
|
308
|
+
updates: { collectionId: string; documentId: string; updatePayload: any }[]
|
|
309
|
+
) {
|
|
310
|
+
const batchSize = 25; // Adjust based on your rate limit and performance testing
|
|
311
|
+
for (let i = 0; i < updates.length; i += batchSize) {
|
|
312
|
+
const batch = updates.slice(i, i + batchSize);
|
|
313
|
+
await Promise.all(
|
|
314
|
+
batch.map((update) =>
|
|
315
|
+
database
|
|
316
|
+
.updateDocument(
|
|
317
|
+
dbId,
|
|
318
|
+
update.collectionId,
|
|
319
|
+
update.documentId,
|
|
320
|
+
update.updatePayload
|
|
321
|
+
)
|
|
322
|
+
.catch((error) => {
|
|
323
|
+
console.error("Error updating document: ", error);
|
|
324
|
+
console.error(
|
|
325
|
+
"Document ID: ",
|
|
326
|
+
update.documentId,
|
|
327
|
+
"Collection ID: ",
|
|
328
|
+
update.collectionId,
|
|
329
|
+
"Document update payload: ",
|
|
330
|
+
JSON.stringify(update.updatePayload, undefined, 4)
|
|
331
|
+
);
|
|
332
|
+
})
|
|
333
|
+
)
|
|
334
|
+
);
|
|
335
|
+
}
|
|
336
|
+
}
|