appwrite-utils-cli 0.0.3 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/migrations/afterImportActions.js +3 -0
- package/dist/migrations/backup.d.ts +8 -8
- package/dist/migrations/backup.js +18 -2
- package/dist/migrations/importController.d.ts +2 -2
- package/dist/migrations/importController.js +78 -12
- package/dist/migrations/migrationHelper.d.ts +154 -2
- package/dist/migrations/migrationHelper.js +101 -7
- package/dist/migrations/relationships.d.ts +5 -72
- package/dist/migrations/relationships.js +88 -97
- package/dist/migrations/schema.d.ts +80 -10
- package/dist/migrations/schema.js +43 -42
- package/dist/schemas/authUser.d.ts +3 -3
- package/package.json +1 -1
- package/src/migrations/afterImportActions.ts +3 -0
- package/src/migrations/backup.ts +18 -2
- package/src/migrations/importController.ts +119 -28
- package/src/migrations/migrationHelper.ts +168 -7
- package/src/migrations/relationships.ts +122 -137
- package/src/migrations/schema.ts +60 -58
package/src/migrations/relationships.ts
CHANGED

@@ -5,22 +5,25 @@ import type {
   Attribute,
   RelationshipAttribute,
 } from "./schema.js";
+import { logger } from "./logging.js";

+/**
+ * Finds collections that have defined relationship attributes.
+ */
 export const findCollectionsWithRelationships = (config: AppwriteConfig) => {
-  const toReturn = new Map<string,
-  // Map of collection name to array of attributes so we can update the relationships
+  const toReturn = new Map<string, RelationshipAttribute[]>();
   for (const collection of config.collections) {
     if (collection.attributes) {
       for (const attribute of collection.attributes) {
-        if (
-
-
-
-
-
-
-
-
+        if (
+          attribute.type === "relationship" &&
+          attribute.twoWay &&
+          attribute.side === "parent"
+        ) {
+          toReturn.set(collection.name, toReturn.get(collection.name) || []);
+          toReturn
+            .get(collection.name)
+            ?.push(attribute as RelationshipAttribute);
         }
       }
     }
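The hunk above switches `findCollectionsWithRelationships` to a `Map<string, RelationshipAttribute[]>` keyed by collection name, populated with a get-or-create-then-push pattern and restricted to the parent side of two-way relationships. A standalone sketch of that pattern, using simplified stand-in types rather than the package's real `AppwriteConfig`/`RelationshipAttribute`:

```ts
// Illustrative sketch only; Rel and Config are simplified stand-ins for the package's types.
type Rel = { type: string; twoWay?: boolean; side?: "parent" | "child"; key: string };
type Config = { collections: { name: string; attributes?: Rel[] }[] };

const collectParentRelationships = (config: Config): Map<string, Rel[]> => {
  const toReturn = new Map<string, Rel[]>();
  for (const collection of config.collections) {
    for (const attribute of collection.attributes ?? []) {
      if (attribute.type === "relationship" && attribute.twoWay && attribute.side === "parent") {
        // Create the bucket for this collection on first use, then push into it.
        toReturn.set(collection.name, toReturn.get(collection.name) || []);
        toReturn.get(collection.name)?.push(attribute);
      }
    }
  }
  return toReturn;
};
```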
@@ -28,33 +31,6 @@ export const findCollectionsWithRelationships = (config: AppwriteConfig) => {
   return toReturn;
 };

-async function fetchAllDocuments(
-  dbId: string,
-  database: Databases,
-  collectionId: string
-): Promise<Models.Document[]> {
-  let allDocuments: Models.Document[] = [];
-  let after; // This will be used for pagination
-
-  while (true) {
-    const response: Models.DocumentList<Models.Document> =
-      await database.listDocuments(dbId, collectionId, [
-        Query.limit(100), // Adjust based on the maximum limit your database allows
-        ...(after ? [Query.cursorAfter(after)] : []),
-      ]);
-
-    allDocuments = allDocuments.concat(response.documents);
-
-    if (response.documents.length === 0 || response.total === 0) {
-      break; // Exit the loop if there are no more documents to fetch
-    }
-
-    after = response.documents[response.documents.length - 1].$id; // Prepare for the next page
-  }
-
-  return allDocuments;
-}
-
 export async function resolveAndUpdateRelationships(
   dbId: string,
   database: Databases,
@@ -92,36 +68,50 @@ async function processCollection(
   collection: Models.Collection,
   relAttributeMap: RelationshipAttribute[]
 ) {
-
-
-    `Fetched ${allDocuments.length} documents from collection: ${collection.name}`
-  );
-
-  const batchSize = 10; // Process documents in batches of 10
+  let after; // For pagination
+  let hasMore = true;

-
-  const
+  while (hasMore) {
+    const response: Models.DocumentList<Models.Document> =
+      await database.listDocuments(dbId, collection.$id, [
+        Query.limit(100), // Fetch documents in batches of 100
+        ...(after ? [Query.cursorAfter(after)] : []),
+      ]);

-    const
-
-
-      collection.name,
-      batch,
-      relAttributeMap
+    const documents = response.documents;
+    console.log(
+      `Fetched ${documents.length} documents from collection: ${collection.name}`
     );

-
-
+    if (documents.length > 0) {
+      const updates = await prepareDocumentUpdates(
+        database,
+        dbId,
+        collection.name,
+        documents,
+        relAttributeMap
+      );
+
+      // Execute updates for the current batch
+      await executeUpdatesInBatches(dbId, database, updates);
+    }
+
+    if (documents.length === 100) {
+      after = documents[documents.length - 1].$id; // Prepare for the next page
+    } else {
+      hasMore = false; // No more documents to fetch
+    }
   }
 }

 async function findDocumentsByOriginalId(
   database: Databases,
   dbId: string,
-
+  targetCollection: Models.Collection,
   targetKey: string,
   originalId: string | string[]
 ): Promise<Models.Document[] | undefined> {
+  const relatedCollectionId = targetCollection.$id;
   const collection = await database.listCollections(dbId, [
     Query.equal("$id", relatedCollectionId),
   ]);
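This hunk replaces the fetch-everything-then-batch approach (the removed `fetchAllDocuments` above) with cursor pagination inside `processCollection`: each page of up to 100 documents is prepared and written before the next page is requested, which bounds memory use. A condensed sketch of that loop; `forEachPage` and `handlePage` are illustrative names, not part of the package, and the per-page callback stands in for the real prepare/execute-updates calls:

```ts
import { Databases, Query, type Models } from "node-appwrite";

// Sketch of the cursor-pagination pattern adopted above (illustrative names only).
async function forEachPage(
  database: Databases,
  dbId: string,
  collectionId: string,
  handlePage: (docs: Models.Document[]) => Promise<void>
): Promise<void> {
  let after: string | undefined;
  let hasMore = true;
  while (hasMore) {
    const response = await database.listDocuments(dbId, collectionId, [
      Query.limit(100),
      ...(after ? [Query.cursorAfter(after)] : []),
    ]);
    if (response.documents.length > 0) {
      await handlePage(response.documents); // process this page before fetching the next
    }
    if (response.documents.length === 100) {
      // A full page may mean more documents remain; continue after the last document's $id.
      after = response.documents[response.documents.length - 1].$id;
    } else {
      hasMore = false;
    }
  }
}
```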
@@ -150,11 +140,10 @@ async function findDocumentsByOriginalId(
     ...queries,
     Query.limit(500), // Adjust the limit based on your needs or implement pagination
   ]);
-  if (response.total > 0) {
-    return undefined;
-  }

-  if (response.documents.length
+  if (response.documents.length < 0) {
+    return undefined;
+  } else if (response.documents.length > 0) {
     return response.documents;
   } else {
     return undefined;
@@ -185,85 +174,81 @@ async function prepareDocumentUpdates(
     return [];
   }

-
-
-  for (const doc of docBatch) {
-    let updatePayload: { [key: string]: any } = {};
+  for (const doc of documents) {
+    let updatePayload: { [key: string]: any } = {};

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    for (const rel of relationships) {
+      // Skip if not dealing with the parent side of a two-way relationship
+      if (rel.twoWay && rel.side !== "parent") {
+        console.log("Skipping non-parent side of two-way relationship...");
+        continue;
+      }
+
+      const isSingleReference =
+        rel.relationType === "oneToOne" || rel.relationType === "manyToOne";
+      const originalIdField = rel.importMapping?.originalIdField;
+      const targetField = rel.importMapping?.targetField || originalIdField; // Use originalIdField if targetField is not specified
+      if (!originalIdField) {
+        console.log("Missing originalIdField in importMapping, skipping...");
+        continue;
+      }
+      const originalId = doc[originalIdField as keyof typeof doc];
+      if (!originalId) {
+        continue;
+      }
+
+      const relatedCollection = (
+        await database.listCollections(dbId, [
           Query.equal("name", rel.relatedCollection),
-      ])
-
-
-
-
-
-      }
-      const relatedCollectionId = collection.collections[0].$id;
-
-      // Find documents in the related collection that match the original ID
-      const foundDocuments = await findDocumentsByOriginalId(
-        database,
-        dbId,
-        relatedCollectionId,
-        targetField,
-        originalId
+        ])
+      ).collections[0];
+
+      if (!relatedCollection) {
+        console.log(
+          `Related collection ${rel.relatedCollection} not found, skipping...`
         );
+        continue;
+      }

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-          : allRefs;
+      const foundDocuments = await findDocumentsByOriginalId(
+        database,
+        dbId,
+        relatedCollection,
+        targetField!,
+        originalId
+      );
+
+      if (foundDocuments && foundDocuments.length > 0) {
+        const relationshipKey = rel.key;
+        const existingRefs = doc[relationshipKey as keyof typeof doc] || [];
+        let existingRefIds: string[] = [];
+        if (Array.isArray(existingRefs)) {
+          // @ts-ignore
+          existingRefIds = existingRefs.map((ref) => ref.$id);
+        } else if (existingRefs) {
+          // @ts-ignore
+          existingRefIds = [existingRefs.$id];
         }
-      }

-
-
-
-
-
-
+        const newRefs = foundDocuments.map((fd) => fd.$id);
+        const allRefs = [...new Set([...existingRefIds, ...newRefs])]; // Combine and remove duplicates
+
+        // Update logic based on the relationship cardinality
+        updatePayload[relationshipKey] = isSingleReference
+          ? newRefs[0] || existingRefIds[0]
+          : allRefs;
+        console.log(`Updating ${relationshipKey} with ${allRefs.length} refs`);
       }
     }
-  };

-
-
+    if (Object.keys(updatePayload).length > 0) {
+      updates.push({
+        collectionId: thisCollectionId,
+        documentId: doc.$id,
+        updatePayload: updatePayload,
+      });
+    }
+  }

   return updates;
 }
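The core of the rewritten payload construction is how resolved references are merged: existing and newly found document IDs are combined, deduplicated with a `Set`, and collapsed to a single ID when the relationship is `oneToOne` or `manyToOne`. A standalone sketch of just that merge step (the helper name is illustrative, not part of the package):

```ts
// Illustrative sketch of the reference-merge step used above; not part of the package.
function mergeRelationshipRefs(
  existingRefIds: string[],
  newRefs: string[],
  isSingleReference: boolean
): string | string[] | undefined {
  // Combine and remove duplicates while preserving first-seen order.
  const allRefs = [...new Set([...existingRefIds, ...newRefs])];
  // Single-cardinality relationships (oneToOne, manyToOne) take one ID; others take the array.
  return isSingleReference ? newRefs[0] || existingRefIds[0] : allRefs;
}

// mergeRelationshipRefs(["a"], ["a", "b"], false) -> ["a", "b"]
// mergeRelationshipRefs(["a"], ["b"], true)       -> "b"
```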
@@ -320,14 +305,14 @@ async function executeUpdatesInBatches(
           update.updatePayload
         )
         .catch((error) => {
-
-
-
-          update.
-
-
-
-
+          logger.error(
+            `Error updating doc ${
+              update.documentId
+            } in ${dbId}, update payload: ${JSON.stringify(
+              update.updatePayload,
+              undefined,
+              4
+            )}, error: ${error}`
           );
         })
     )
package/src/migrations/schema.ts
CHANGED

@@ -378,6 +378,63 @@ export const indexSchema = z.object({

 export type Index = z.infer<typeof indexSchema>;

+export const AttributeMappingsSchema = z.array(
+  z.object({
+    oldKey: z
+      .string()
+      .optional()
+      .describe("The key of the attribute in the old document"),
+    oldKeys: z
+      .array(z.string())
+      .optional()
+      .describe(
+        "The keys of the attribute in the old document, if there are more than one"
+      ),
+    targetKey: z
+      .string()
+      .describe("The key of the attribute in the new document"),
+    fileData: z
+      .object({
+        name: z
+          .string()
+          .describe("The name of the file, can use template strings"),
+        path: z
+          .string()
+          .describe("The path of the file, relative to the appwrite folder"),
+      })
+      .optional()
+      .describe(
+        "The file data to use for the import, if defined it will upload and replace with ID"
+      ),
+    converters: z
+      .array(z.string())
+      .describe("The converters to use for the import")
+      .default([]),
+    validationActions: z
+      .array(
+        z.object({
+          action: z.string(),
+          params: z.array(z.string().startsWith("{").endsWith("}")),
+        })
+      )
+      .describe(
+        "The after import actions and parameter placeholders (they'll be replaced with the actual data) to use for the import"
+      )
+      .default([]),
+    postImportActions: z
+      .array(
+        z.object({
+          action: z.string(),
+          params: z.array(z.string().or(z.record(z.string(), z.any()))),
+        })
+      )
+      .describe(
+        "The after import actions and parameter placeholders (they'll be replaced with the actual data) to use for the import"
+      )
+      .default([]),
+  })
+);
+
 export const collectionSchema = z.object({
   $id: z
     .string()
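Extracting the mapping definition into the exported `AttributeMappingsSchema` lets the mappings be parsed and typed on their own instead of only inline inside `collectionSchema` (see the next hunk). A small usage sketch; the sample data and the `joinValues` converter name are made up for illustration:

```ts
import { z } from "zod";
import { AttributeMappingsSchema } from "./schema.js";

// Hypothetical sample mappings mirroring the fields defined above.
const raw = [
  { oldKey: "legacy_id", targetKey: "originalId" },
  { oldKeys: ["first", "last"], targetKey: "fullName", converters: ["joinValues"] },
];

// parse() validates the input and fills in the declared defaults
// (converters, validationActions, postImportActions all default to []).
const mappings = AttributeMappingsSchema.parse(raw);

// The element type can be inferred directly from the schema.
type AttributeMappings = z.infer<typeof AttributeMappingsSchema>;
type AttributeMapping = AttributeMappings[number];
```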
@@ -450,63 +507,8 @@ export const collectionSchema = z.object({
       .describe(
         "Configuration for mapping and resolving the update during data import"
       ),
-    attributeMappings:
-
-        oldKey: z
-          .string()
-          .optional()
-          .describe("The key of the attribute in the old document"),
-        oldKeys: z
-          .array(z.string())
-          .optional()
-          .describe(
-            "The keys of the attribute in the old document, if there are more than one"
-          ),
-        targetKey: z
-          .string()
-          .describe("The key of the attribute in the new document"),
-        fileData: z
-          .object({
-            name: z
-              .string()
-              .describe("The name of the file, can use template strings"),
-            path: z
-              .string()
-              .describe(
-                "The path of the file, relative to the appwrite folder"
-              ),
-          })
-          .optional()
-          .describe(
-            "The file data to use for the import, if defined it will upload and replace with ID"
-          ),
-        converters: z
-          .array(z.string())
-          .describe("The converters to use for the import")
-          .default([]),
-        validationActions: z
-          .array(
-            z.object({
-              action: z.string(),
-              params: z.array(z.string().startsWith("{").endsWith("}")),
-            })
-          )
-          .describe(
-            "The after import actions and parameter placeholders (they'll be replaced with the actual data) to use for the import"
-          )
-          .default([]),
-        postImportActions: z
-          .array(
-            z.object({
-              action: z.string(),
-              params: z.array(z.string().or(z.record(z.string(), z.any()))),
-            })
-          )
-          .describe(
-            "The after import actions and parameter placeholders (they'll be replaced with the actual data) to use for the import"
-          )
-          .default([]),
-      })
+    attributeMappings: AttributeMappingsSchema.describe(
+      "The attribute mappings to use for the import"
     ),
   })
 )
@@ -586,5 +588,5 @@ export type ConfigDatabases = AppwriteConfig["databases"];
 export type ConfigDatabase = ConfigDatabases[number];
 export type ImportDefs = ConfigCollections[number]["importDefs"];
 export type ImportDef = ImportDefs[number];
-export type AttributeMappings =
+export type AttributeMappings = z.infer<typeof AttributeMappingsSchema>;
 export type AttributeMapping = AttributeMappings[number];