appwrite-utils-cli 0.0.2 → 0.0.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/migrations/afterImportActions.js +3 -0
- package/dist/migrations/backup.d.ts +8 -8
- package/dist/migrations/backup.js +18 -2
- package/dist/migrations/importController.d.ts +2 -2
- package/dist/migrations/importController.js +78 -12
- package/dist/migrations/migrationHelper.d.ts +154 -2
- package/dist/migrations/migrationHelper.js +101 -7
- package/dist/migrations/relationships.d.ts +5 -72
- package/dist/migrations/relationships.js +88 -97
- package/dist/migrations/schema.d.ts +80 -10
- package/dist/migrations/schema.js +43 -42
- package/dist/migrations/schemaStrings.js +5 -2
- package/dist/schemas/authUser.d.ts +3 -3
- package/package.json +1 -1
- package/src/appwrite/.appwrite/appwriteUtilsConfigSchema.json +667 -0
- package/src/appwrite/customDefinitions.ts +11 -0
- package/src/appwrite/importData/dogs.json +76 -0
- package/src/appwrite/importData/members.json +16 -0
- package/src/appwrite/importData/profilePhotos/profilePhoto_123.jpg +0 -0
- package/src/appwrite/importData/profilePhotos/profilePhoto_456.png +0 -0
- package/src/appwrite/schemas/dogs.ts +27 -0
- package/src/appwrite/schemas/members.ts +24 -0
- package/src/migrations/afterImportActions.ts +3 -0
- package/src/migrations/backup.ts +18 -2
- package/src/migrations/importController.ts +119 -28
- package/src/migrations/migrationHelper.ts +168 -7
- package/src/migrations/relationships.ts +122 -137
- package/src/migrations/schema.ts +60 -58
- package/src/migrations/schemaStrings.ts +4 -2
package/src/appwrite/importData/dogs.json
ADDED
@@ -0,0 +1,76 @@
+{
+  "RECORDS": [
+    {
+      "id": "1",
+      "name": "Charlie",
+      "breed": "Golden Retriever",
+      "age": 2,
+      "ownerId": "123",
+      "vetRecords": {
+        "1": {
+          "id": 1,
+          "visitDate": "2022-01-01",
+          "vetName": "Dr. Smith"
+        },
+        "2": {
+          "id": 2,
+          "visitDate": "2022-02-01",
+          "vetName": "Dr. Johnson"
+        },
+        "3": {
+          "id": 3,
+          "visitDate": "2022-03-01",
+          "vetName": "Dr. Williams"
+        }
+      }
+    },
+    {
+      "id": "2",
+      "name": "Buddy",
+      "breed": "Labrador",
+      "age": 3,
+      "ownerId": "123",
+      "vetRecords": {
+        "4": {
+          "id": 4,
+          "visitDate": "2022-01-02",
+          "vetName": "Dr. Smith"
+        },
+        "5": {
+          "id": 5,
+          "visitDate": "2022-02-02",
+          "vetName": "Dr. Johnson"
+        },
+        "6": {
+          "id": 6,
+          "visitDate": "2022-03-02",
+          "vetName": "Dr. Williams"
+        }
+      }
+    },
+    {
+      "id": "3",
+      "name": "Max",
+      "breed": "Poodle",
+      "age": 1,
+      "ownerId": "456",
+      "vetRecords": {
+        "7": {
+          "id": 7,
+          "visitDate": "2022-01-03",
+          "vetName": "Dr. Smith"
+        },
+        "8": {
+          "id": 8,
+          "visitDate": "2022-02-03",
+          "vetName": "Dr. Johnson"
+        },
+        "9": {
+          "id": 9,
+          "visitDate": "2022-03-03",
+          "vetName": "Dr. Williams"
+        }
+      }
+    }
+  ]
+}
package/src/appwrite/importData/members.json
ADDED
@@ -0,0 +1,16 @@
+{
+  "RECORDS": [
+    {
+      "id": 123,
+      "name": "John Doe",
+      "email": "john@doe.com",
+      "photoUrl": "https://picsum.photos/200/300"
+    },
+    {
+      "id": 456,
+      "name": "Jane Doe",
+      "email": "jane@doe.com",
+      "photoUrl": "https://picsum.photos/200/300"
+    }
+  ]
+}
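Both sample data files wrap their rows in a top-level `RECORDS` array. A minimal sketch of pulling the rows out of one of them (the path comes from the file list above; the reading code itself is illustrative, not part of the CLI):

```ts
import { readFileSync } from "node:fs";

// Read one of the sample import files and unwrap its RECORDS array.
const raw = JSON.parse(
  readFileSync("src/appwrite/importData/dogs.json", "utf-8")
);
const records: any[] = raw.RECORDS ?? [];
console.log(`Loaded ${records.length} dog records`);
```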
package/src/appwrite/importData/profilePhotos/profilePhoto_123.jpg
ADDED
Binary file

package/src/appwrite/importData/profilePhotos/profilePhoto_456.png
ADDED
Binary file
package/src/appwrite/schemas/dogs.ts
ADDED
@@ -0,0 +1,27 @@
+import { z } from "zod";
+import { MembersSchema, type Members } from "./members";
+
+
+export const DogsSchemaBase = z.object({
+  $id: z.string().optional(),
+  $createdAt: z.date().or(z.string()).optional(),
+  $updatedAt: z.date().or(z.string()).optional(),
+  name: z.string().max(255, "Maximum length of 255 characters exceeded"),
+  breed: z.string().max(255, "Maximum length of 255 characters exceeded").nullish(),
+  age: z.number().int().min(0, "Minimum value of 0 not met").max(100, "Maximum value of 100 exceeded").nullish(),
+  idOrig: z.string().max(20, "Maximum length of 20 characters exceeded").nullish(),
+  ownerIdOrig: z.string().max(255, "Maximum length of 255 characters exceeded").nullish(),
+  vetRecords: z.string().max(255, "Maximum length of 255 characters exceeded").nullish(),
+  vetRecordIds: z.array(z.string().max(255, "Maximum length of 255 characters exceeded")).nullish(),
+});
+
+export type DogsBase = z.infer<typeof DogsSchemaBase> & {
+  owner?: Members | null;
+};
+
+export const DogsSchema: z.ZodType<DogsBase> = DogsSchemaBase.extend({
+  owner: z.lazy(() => MembersSchema.nullish()),
+});
+
+export type Dogs = z.infer<typeof DogsSchema>;
+
package/src/appwrite/schemas/members.ts
ADDED
@@ -0,0 +1,24 @@
+import { z } from "zod";
+import { DogsSchema, type Dogs } from "./dogs";
+
+
+export const MembersSchemaBase = z.object({
+  $id: z.string().optional(),
+  $createdAt: z.date().or(z.string()).optional(),
+  $updatedAt: z.date().or(z.string()).optional(),
+  idOrig: z.string().max(255, "Maximum length of 255 characters exceeded").nullish(),
+  dogIds: z.array(z.string().max(255, "Maximum length of 255 characters exceeded")).nullish(),
+  profilePhoto: z.string().max(255, "Maximum length of 255 characters exceeded").nullish(),
+  profilePhotoTest: z.string().max(255, "Maximum length of 255 characters exceeded").nullish(),
+});
+
+export type MembersBase = z.infer<typeof MembersSchemaBase> & {
+  dogs?: Dogs[];
+};
+
+export const MembersSchema: z.ZodType<MembersBase> = MembersSchemaBase.extend({
+  dogs: z.lazy(() => DogsSchema.array().default([])),
+});
+
+export type Members = z.infer<typeof MembersSchema>;
+
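The two generated schemas reference each other (`DogsSchema.owner` and `MembersSchema.dogs`), which Zod resolves through `z.lazy` at parse time. A minimal usage sketch with made-up values (not taken from the package's own tests):

```ts
import { MembersSchema } from "./members";

// z.lazy defers resolution of the mutually referencing schemas, so a member
// can embed full dog documents and each dog can embed its owner again.
const member = MembersSchema.parse({
  idOrig: "123",
  dogIds: ["1"],
  dogs: [
    {
      name: "Charlie",
      breed: "Golden Retriever",
      age: 2,
      owner: { idOrig: "123" }, // nested Members, resolved lazily; its dogs default to []
    },
  ],
});

console.log(member.dogs?.[0]?.name); // "Charlie"
```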
package/src/migrations/afterImportActions.ts
CHANGED
@@ -258,6 +258,9 @@ export const afterImportActions = {
         } else {
           updateData = file.$id; // Set the new file ID
         }
+        await db.updateDocument(dbId, collId, doc.$id, {
+          [fieldName]: updateData,
+        });
         // console.log(
         //   "Updating document with file: ",
         //   doc.$id,
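The added lines write the uploaded file's ID back onto the imported document. A hedged sketch of that single-field update with the node-appwrite `Databases` service; the endpoint, project, and key values are placeholders:

```ts
import { Client, Databases } from "node-appwrite";

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1") // placeholder endpoint
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");
const db = new Databases(client);

// Write a file ID into whichever attribute the import config names,
// using a computed property key just like the diff above.
async function attachFileId(
  dbId: string,
  collId: string,
  docId: string,
  fieldName: string,
  fileId: string
) {
  await db.updateDocument(dbId, collId, docId, { [fieldName]: fileId });
}
```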
package/src/migrations/backup.ts
CHANGED
@@ -64,7 +64,16 @@ export const OperationSchema = z.object({
   progress: z.number(),
   total: z.number(),
   error: z.string(),
-  status: z
+  status: z
+    .enum([
+      "pending",
+      "ready",
+      "in_progress",
+      "completed",
+      "error",
+      "cancelled",
+    ])
+    .default("pending"),
 });
 
 export type Operation = z.infer<typeof OperationSchema>;

@@ -133,7 +142,14 @@ export const getMigrationCollectionSchemas = () => {
     attributeSchema.parse({
       key: "status",
       type: "enum",
-      elements: [
+      elements: [
+        "pending",
+        "ready",
+        "in_progress",
+        "completed",
+        "error",
+        "cancelled",
+      ],
       error: "Invalid Status",
       array: false,
       xdefault: "pending",
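The `status` field now validates against an explicit enum and falls back to `"pending"` when omitted. The same Zod behavior in isolation (a standalone copy of the field for illustration, not an export of the package):

```ts
import { z } from "zod";

const status = z
  .enum(["pending", "ready", "in_progress", "completed", "error", "cancelled"])
  .default("pending");

console.log(status.parse(undefined)); // "pending" — default applied
console.log(status.parse("ready")); // "ready"
// status.parse("done") would throw a ZodError: invalid enum value
```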
package/src/migrations/importController.ts
CHANGED
@@ -24,6 +24,19 @@ import type { SetupOptions } from "../utilsController.js";
 import { resolveAndUpdateRelationships } from "./relationships.js";
 import { AuthUserCreateSchema } from "../types.js";
 import { UsersController } from "./users.js";
+import { logger } from "./logging.js";
+import {
+  ContextObject,
+  createOrFindAfterImportOperation,
+  getAfterImportOperations,
+  setAllPendingAfterImportActionsToReady,
+  updateOperation,
+} from "./migrationHelper.js";
+import {
+  BatchSchema,
+  OperationCreateSchema,
+  OperationSchema,
+} from "./backup.js";
 
 export class ImportController {
   private config: AppwriteConfig;
@@ -34,11 +47,11 @@ export class ImportController {
   private setupOptions: SetupOptions;
   private documentCache: Map<string, any>;
   private batchLimit: number = 25; // Define batch size limit
-  private postImportActionsQueue: {
-
-
-
-  }[] = [];
+  // private postImportActionsQueue: {
+  //   context: any;
+  //   finalItem: any;
+  //   attributeMappings: AttributeMappings;
+  // }[] = [];
 
   constructor(
     config: AppwriteConfig,
@@ -90,7 +103,7 @@ export class ImportController {
     console.log(`---------------------------------`);
     await this.importCollections(db);
     await resolveAndUpdateRelationships(db.$id, this.database!, this.config!);
-    await this.executePostImportActions();
+    await this.executePostImportActions(db.$id);
     console.log(`---------------------------------`);
     console.log(`Finished import data for database: ${db.name}`);
     console.log(`---------------------------------`);
@@ -188,6 +201,12 @@ export class ImportController {
       );
       await this.processBatch(db, collection, importDef, dataToImport);
     }
+
+    await setAllPendingAfterImportActionsToReady(
+      this.database,
+      db.$id,
+      collection.$id
+    );
   }
 
   async loadData(importDef: ImportDef): Promise<any[]> {
@@ -268,6 +287,7 @@ export class ImportController {
           userToCreate.data
         );
         createIdToUse = user.$id;
+        context.docId = createIdToUse;
         context = { ...context, ...user };
         console.log(
           "Created user, deleting keys in finalItem that exist in user..."
@@ -347,11 +367,26 @@ export class ImportController {
             finalItem
           );
           if (attributeMappingsWithActions.some((m) => m.postImportActions)) {
-
-            context
+            logger.info(
+              `Pushing to post-import actions queue for ${context.docId}`
+            );
+            const afterImportOperationContext = ContextObject.parse({
+              dbId: db.$id,
+              collectionId: collection.$id,
               finalItem: finalItem,
               attributeMappings: attributeMappingsWithActions,
+              context: afterImportActionContext,
             });
+            await createOrFindAfterImportOperation(
+              this.database,
+              context.collId,
+              afterImportOperationContext
+            );
+            // this.postImportActionsQueue.push({
+            //   context: afterImportActionContext,
+            //   finalItem: finalItem,
+            //   attributeMappings: attributeMappingsWithActions,
+            // });
           }
         })
       );
@@ -480,30 +515,86 @@ export class ImportController {
     });
   }
 
-  async executePostImportActions() {
-    const
-
-
-
-
-
-        finalItem,
-        attributeMappings,
-        context
-      );
-    })
-  );
-
+  async executePostImportActions(dbId: string) {
+    const collectionActionsPromises = [];
+    for (const collection of this.config.collections) {
+      collectionActionsPromises.push(
+        this.executeActionsInParallel(dbId, collection)
+      );
+    }
+    const results = await Promise.allSettled(collectionActionsPromises);
     results.forEach((result) => {
       if (result.status === "rejected") {
-        console.error(
-          "A post-import action promise was rejected:",
-          result.reason
-        );
+        console.error("A process batch promise was rejected:", result.reason);
       }
     });
+  }
 
-
+  async executeActionsInParallel(dbId: string, collection: ConfigCollection) {
+    const collectionExists = await checkForCollection(
+      this.database,
+      dbId,
+      collection
+    );
+    if (!collectionExists) {
+      logger.error(`No collection found for ${collection.name}`);
+      return; // Skip this iteration
+    }
+    const operations = await getAfterImportOperations(
+      this.database,
+      collectionExists.$id
+    );
+
+    for (const operation of operations) {
+      if (!operation.batches) {
+        continue;
+      }
+      const batches = operation.batches;
+      const promises = [];
+      for (const batch of batches) {
+        const batchId = batch;
+        promises.push(
+          this.database.getDocument("migrations", "batches", batchId)
+        );
+      }
+      const results = await Promise.allSettled(promises);
+      results.forEach((result) => {
+        if (result.status === "rejected") {
+          logger.error("A process batch promise was rejected:", result.reason);
+        }
+      });
+      const resultsData = results
+        .map((result) => (result.status === "fulfilled" ? result.value : null))
+        .filter((result: any) => result !== null && !result.processed)
+        .map((result) => BatchSchema.parse(result));
+      for (const batch of resultsData) {
+        const actionOperation = ContextObject.parse(JSON.parse(batch.data));
+        const { context, finalItem, attributeMappings } = actionOperation;
+        try {
+          await this.importDataActions.executeAfterImportActions(
+            finalItem,
+            attributeMappings,
+            context
+          );
+          // Mark batch as processed
+          await this.database.deleteDocument(
+            "migrations",
+            "batches",
+            batch.$id
+          );
+          await updateOperation(this.database, operation.$id, {
+            status: "completed",
+            batches: [],
+          });
+        } catch (error) {
+          logger.error(`Failed to execute batch ${batch.$id}:`, error);
+        }
+      }
+
+      // After processing all batches, update the operation status
+      await updateOperation(this.database, operation.$id, {
+        status: "completed", // Or determine based on batch success/failure
+      });
+    }
   }
 }
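`executeActionsInParallel` settles its `getDocument` calls with `Promise.allSettled`, logs any rejections, and keeps only the fulfilled, unprocessed batches. The same settle-and-filter pattern as a generic helper (illustrative, not exported by the package):

```ts
// Settle all promises, log rejections, and return only the fulfilled values.
async function settleAndKeep<T>(promises: Promise<T>[]): Promise<T[]> {
  const results = await Promise.allSettled(promises);
  for (const result of results) {
    if (result.status === "rejected") {
      console.error("A promise was rejected:", result.reason);
    }
  }
  return results
    .filter((r): r is PromiseFulfilledResult<T> => r.status === "fulfilled")
    .map((r) => r.value);
}
```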
package/src/migrations/migrationHelper.ts
CHANGED
@@ -1,21 +1,182 @@
 import { ID, Query, type Databases } from "node-appwrite";
-import { OperationSchema } from "./backup.js";
+import { BatchSchema, OperationSchema, type Operation } from "./backup.js";
+import { type AttributeMappings, AttributeMappingsSchema } from "./schema.js";
+import { z } from "zod";
+import { logger } from "./logging.js";
+
+/**
+ * Object that contains the context for an action that needs to be executed after import
+ * Used in the afterImportActionsDefinitions
+ * @type {ContextObject}
+ * @typedef {Object} ContextObject
+ * @property {string} collectionId - The ID of the collection
+ * @property {any} finalItem - The final item that was imported
+ * @property {string} action - The name of the action
+ * @property {string[]} params - The parameters for the action
+ * @property {Object} context - The context object for the action (all the data of this specific item)
+ */
+export const ContextObject = z.object({
+  dbId: z.string(),
+  collectionId: z.string(),
+  finalItem: z.any(),
+  attributeMappings: AttributeMappingsSchema,
+  context: z.any(),
+});
+
+export type ContextObject = z.infer<typeof ContextObject>;
+
+export const createOrFindAfterImportOperation = async (
+  database: Databases,
+  collectionId: string,
+  context: ContextObject
+) => {
+  let operation = await findOrCreateOperation(
+    database,
+    collectionId,
+    "afterImportAction"
+  );
+  if (!operation.batches) {
+    operation.batches = [];
+  }
+
+  // Directly create a new batch for the context without checking for an existing batch
+  const contextData = JSON.stringify(context);
+  // Create a new batch with the contextData
+  const newBatchId = await addBatch(database, operation, contextData);
+  // Update the operation with the new batch's $id
+  operation.batches.push(newBatchId);
+  await database.updateDocument(
+    "migrations",
+    "currentOperations",
+    operation.$id,
+    { batches: operation.batches }
+  );
+};
+
+export const addBatch = async (
+  database: Databases,
+  operation: Operation,
+  data: string
+) => {
+  const batch = await database.createDocument(
+    "migrations",
+    "batches",
+    ID.unique(),
+    {
+      data,
+      processed: false,
+    }
+  );
+  await database.updateDocument(
+    "migrations",
+    "currentOperations",
+    operation.$id,
+    {
+      batches: [...(operation.batches || []), batch.$id],
+    }
+  );
+  return batch.$id;
+};
+
+export const getAfterImportOperations = async (
+  database: Databases,
+  collectionId: string
+) => {
+  let lastDocumentId: string | undefined;
+  const allOperations = [];
+  let total = 0;
+
+  do {
+    const query = [
+      Query.equal("collectionId", collectionId),
+      Query.equal("operationType", "afterImportAction"),
+      Query.equal("status", "ready"),
+      Query.limit(100),
+    ];
+
+    if (lastDocumentId) {
+      query.push(Query.cursorAfter(lastDocumentId));
+    }
+
+    const operations = await database.listDocuments(
+      "migrations",
+      "currentOperations",
+      query
+    );
+    total = operations.total; // Update total with the latest fetch
+    allOperations.push(...operations.documents);
+
+    if (operations.documents.length > 0) {
+      lastDocumentId =
+        operations.documents[operations.documents.length - 1].$id;
+    }
+  } while (allOperations.length < total);
+
+  const allOps = allOperations.map((op) => OperationSchema.parse(op));
+  return allOps;
+};
+
+export const setAllPendingAfterImportActionsToReady = async (
+  database: Databases,
+  dbId: string,
+  collectionId: string
+) => {
+  let lastDocumentId: string | undefined;
+  do {
+    const query = [
+      Query.equal("collectionId", collectionId),
+      Query.equal("status", "pending"),
+      Query.limit(100),
+    ];
+
+    if (lastDocumentId) {
+      query.push(Query.cursorAfter(lastDocumentId));
+    }
+
+    const operations = await database.listDocuments(
+      "migrations",
+      "currentOperations",
+      query
+    );
+
+    // Update each pending operation to 'ready'
+    for (const operation of operations.documents) {
+      await database.updateDocument(
+        "migrations",
+        "currentOperations",
+        operation.$id,
+        { status: "ready" }
+      );
+    }
+
+    // Prepare for the next iteration in case there are more than 100 documents
+    if (operations.documents.length > 0) {
+      lastDocumentId =
+        operations.documents[operations.documents.length - 1].$id;
+    } else {
+      lastDocumentId = undefined; // No more documents to process
+    }
+  } while (lastDocumentId); // Continue if there's a last document indicating more documents might exist
+
+  logger.info(
+    `All pending operations for collection ${collectionId} are now set to ready.`
+  );
+};
 
 export const findOrCreateOperation = async (
   database: Databases,
   collectionId: string,
-  operationType: string
+  operationType: string,
+  additionalQueries?: string[]
 ) => {
-  // Here you would query your database for an existing operation
-  // If it doesn't exist, create a new one
-  // This is a simplified example
   const operations = await database.listDocuments(
     "migrations",
     "currentOperations",
     [
       Query.equal("collectionId", collectionId),
       Query.equal("operationType", operationType),
-      Query.equal("status", "
+      Query.equal("status", "pending"),
+      ...(additionalQueries || []),
     ]
   );
 
@@ -30,7 +191,7 @@ export const findOrCreateOperation = async (
     {
       operationType,
       collectionId,
-      status: "
+      status: "pending",
       batches: [],
       progress: 0,
       total: 0,
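`getAfterImportOperations` and `setAllPendingAfterImportActionsToReady` both page through the `migrations` database 100 documents at a time with `Query.cursorAfter`. A generic sketch of that pagination loop, assuming the same `migrations`/`currentOperations` IDs used above:

```ts
import { Query, type Databases } from "node-appwrite";

// Page through currentOperations until every matching document is collected.
async function listAllOperations(database: Databases, filters: string[]) {
  const all: any[] = [];
  let lastId: string | undefined;
  let total = 0;
  do {
    const queries = [...filters, Query.limit(100)];
    if (lastId) {
      queries.push(Query.cursorAfter(lastId));
    }
    const page = await database.listDocuments(
      "migrations",
      "currentOperations",
      queries
    );
    total = page.total;
    all.push(...page.documents);
    if (page.documents.length === 0) {
      break; // nothing more to fetch
    }
    lastId = page.documents[page.documents.length - 1].$id;
  } while (all.length < total);
  return all;
}

// Example: listAllOperations(database, [Query.equal("status", "ready")]);
```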