appwrite-utils-cli 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +5 -1
- package/dist/migrations/afterImportActions.d.ts +5 -0
- package/dist/migrations/afterImportActions.js +64 -0
- package/dist/migrations/backup.d.ts +8 -8
- package/dist/migrations/backup.js +18 -2
- package/dist/migrations/importController.d.ts +2 -2
- package/dist/migrations/importController.js +78 -12
- package/dist/migrations/migrationHelper.d.ts +154 -2
- package/dist/migrations/migrationHelper.js +101 -7
- package/dist/migrations/relationships.d.ts +5 -72
- package/dist/migrations/relationships.js +88 -97
- package/dist/migrations/schema.d.ts +80 -10
- package/dist/migrations/schema.js +43 -42
- package/dist/schemas/authUser.d.ts +3 -3
- package/package.json +1 -1
- package/src/migrations/afterImportActions.ts +98 -0
- package/src/migrations/backup.ts +18 -2
- package/src/migrations/importController.ts +119 -28
- package/src/migrations/migrationHelper.ts +168 -7
- package/src/migrations/relationships.ts +122 -137
- package/src/migrations/schema.ts +60 -58
package/README.md
CHANGED
```diff
@@ -77,4 +77,8 @@ This setup ensures that developers have robust tools at their fingertips to mana
 
 - Syncing configuration
 - Better file format for config (potentially)
-- Separation of collections and import configuration from main config
+- Separation of collections and import configuration from main config
+
+### Changelog
+
+- 0.0.5: Added `setFieldFromOtherCollectionDocuments` to set an array of ID's for instance from another collection as a `postImportAction`
```
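The changelog entry above is the user-facing surface of most of the changes in this diff. For orientation, here is a hedged sketch of how the new action might be referenced from an import definition's `postImportActions`. The `{ action, params }` shape comes from the schemas later in this diff; the collection and field names and the `"{dbId}"`/`"{docId}"` template placeholders are illustrative assumptions, not taken from the package's documentation.

```ts
// Hypothetical attributeMappings entry; every literal below is a placeholder.
const exampleMapping = {
  oldKey: "team_name",
  targetKey: "teamName",
  postImportActions: [
    {
      action: "setFieldFromOtherCollectionDocuments",
      // Assumed to line up with the new function's parameters after `config`:
      // dbId, collIdOrName, docId, fieldName, otherCollIdOrName,
      // matchingFieldName, matchingFieldValue.
      params: [
        "{dbId}",
        "Teams",        // collection holding the array field to fill
        "{docId}",      // the document that was just imported
        "memberIds",    // array attribute receiving the matching $ids
        "Members",      // collection to search
        "teamName",     // field to match in Members
        "{teamName}",   // value taken from the imported item (placeholder syntax assumed)
      ],
    },
  ],
};
```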
package/dist/migrations/afterImportActions.d.ts
CHANGED
```diff
@@ -7,6 +7,11 @@ export declare const afterImportActions: {
     updateCreatedDocument: (config: AppwriteConfig, dbId: string, collId: string, docId: string, data: any) => Promise<void>;
     checkAndUpdateFieldInDocument: (config: AppwriteConfig, dbId: string, collId: string, docId: string, fieldName: string, oldFieldValue: any, newFieldValue: any) => Promise<void>;
     setFieldFromOtherCollectionDocument: (config: AppwriteConfig, dbId: string, collIdOrName: string, docId: string, fieldName: string, otherCollIdOrName: string, otherDocId: string, otherFieldName: string) => Promise<void>;
+    /**
+     * Updates a field in a document by setting it with document IDs from another collection
+     * based on a matching field value.
+     */
+    setFieldFromOtherCollectionDocuments: (config: AppwriteConfig, dbId: string, collIdOrName: string, docId: string, fieldName: string, otherCollIdOrName: string, matchingFieldName: string, matchingFieldValue: any) => Promise<void>;
     createOrGetBucket: (config: AppwriteConfig, bucketName: string, bucketId?: string, permissions?: string[], fileSecurity?: boolean, enabled?: boolean, maxFileSize?: number, allowedExtensions?: string[], compression?: string, encryption?: boolean, antivirus?: boolean) => Promise<Models.Bucket | undefined>;
     createFileAndUpdateField: (config: AppwriteConfig, dbId: string, collId: string, docId: string, fieldName: string, bucketId: string, filePath: string, fileName: string) => Promise<void>;
 };
```
package/dist/migrations/afterImportActions.js
CHANGED
```diff
@@ -79,6 +79,67 @@ export const afterImportActions = {
            console.error("Error setting field from other collection document: ", error);
        }
    },
+    /**
+     * Updates a field in a document by setting it with document IDs from another collection
+     * based on a matching field value.
+     */
+    setFieldFromOtherCollectionDocuments: async (config, dbId, collIdOrName, docId, fieldName, otherCollIdOrName, matchingFieldName, matchingFieldValue) => {
+        const db = getDatabaseFromConfig(config);
+        // Helper function to find a collection ID by name or return the ID if given
+        const findCollectionId = async (collectionIdentifier) => {
+            const collections = await db.listCollections(dbId, [
+                Query.equal("name", collectionIdentifier),
+                Query.limit(1),
+            ]);
+            return collections.total > 0
+                ? collections.collections[0].$id
+                : collectionIdentifier;
+        };
+        // Function to check if the target field is an array
+        const isTargetFieldArray = async (collectionId, fieldName) => {
+            const collection = await db.getCollection(dbId, collectionId);
+            const attribute = collection.attributes.find((attr) => attr.key === fieldName);
+            // @ts-ignore
+            return attribute?.array === true;
+        };
+        try {
+            const targetCollectionId = await findCollectionId(collIdOrName);
+            const otherCollectionId = await findCollectionId(otherCollIdOrName);
+            const targetFieldIsArray = await isTargetFieldArray(targetCollectionId, fieldName);
+            // Function to recursively fetch all matching documents from the other collection
+            const fetchAllMatchingDocuments = async (cursor) => {
+                const docLimit = 100;
+                const queries = targetFieldIsArray
+                    ? // @ts-ignore
+                        [Query.contains(matchingFieldName, [matchingFieldValue])]
+                    : [Query.equal(matchingFieldName, matchingFieldValue)];
+                if (cursor) {
+                    queries.push(Query.cursorAfter(cursor));
+                }
+                queries.push(Query.limit(docLimit));
+                const response = await db.listDocuments(dbId, otherCollectionId, queries);
+                const documents = response.documents;
+                if (documents.length === 0 || documents.length < docLimit) {
+                    return documents;
+                }
+                const nextCursor = documents[documents.length - 1].$id;
+                const nextBatch = await fetchAllMatchingDocuments(nextCursor);
+                return documents.concat(nextBatch);
+            };
+            const matchingDocuments = await fetchAllMatchingDocuments();
+            const documentIds = matchingDocuments.map((doc) => doc.$id);
+            if (documentIds.length > 0) {
+                const updatePayload = targetFieldIsArray
+                    ? { [fieldName]: documentIds }
+                    : { [fieldName]: documentIds[0] };
+                await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+                console.log(`Field ${fieldName} updated successfully in document ${docId} with ${documentIds.length} document IDs.`);
+            }
+        }
+        catch (error) {
+            console.error("Error setting field from other collection documents: ", error);
+        }
+    },
    createOrGetBucket: async (config, bucketName, bucketId, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus) => {
        try {
            const storage = getStorageFromConfig(config);
@@ -157,6 +218,9 @@ export const afterImportActions = {
            else {
                updateData = file.$id; // Set the new file ID
            }
+            await db.updateDocument(dbId, collId, doc.$id, {
+                [fieldName]: updateData,
+            });
            // console.log(
            //     "Updating document with file: ",
            //     doc.$id,
```
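Called directly, the new helper resolves collection names to `$id`s via `listCollections`, pages through matching documents 100 at a time with `cursorAfter`, and writes either the full ID array or just the first ID depending on whether the target attribute is an array. A minimal sketch under assumptions: the deep `dist` import path and the `"main"`, `Teams`, `Members`, `memberIds`, and `teamName` identifiers are placeholders, not documented entry points.

```ts
// Sketch only: adjust the import to however the package exposes this module in your setup.
import { afterImportActions } from "appwrite-utils-cli/dist/migrations/afterImportActions.js";

declare const config: any;             // the AppwriteConfig object the CLI already builds
declare const importedTeamId: string;  // $id of a document created during import

// Fill Teams.memberIds on one team with the $id of every Members document whose
// teamName equals "Engineering". If memberIds were not an array attribute, only
// the first matching $id would be written.
await afterImportActions.setFieldFromOtherCollectionDocuments(
  config,
  "main",           // dbId
  "Teams",          // collIdOrName (a name is resolved to an $id via listCollections)
  importedTeamId,   // docId to update
  "memberIds",      // fieldName on the target document
  "Members",        // otherCollIdOrName to search
  "teamName",       // matchingFieldName
  "Engineering",    // matchingFieldValue
);
```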
package/dist/migrations/backup.d.ts
CHANGED
```diff
@@ -114,10 +114,10 @@ export declare const OperationSchema: z.ZodObject<{
     progress: z.ZodNumber;
     total: z.ZodNumber;
     error: z.ZodString;
-    status: z.ZodEnum<["pending", "in_progress", "completed", "error"]>;
+    status: z.ZodDefault<z.ZodEnum<["pending", "ready", "in_progress", "completed", "error", "cancelled"]>>;
 }, "strip", z.ZodTypeAny, {
     error: string;
-    status: "error" | "pending" | "in_progress" | "completed";
+    status: "error" | "pending" | "ready" | "in_progress" | "completed" | "cancelled";
     $id: string;
     $createdAt: string;
     $updatedAt: string;
@@ -129,7 +129,6 @@ export declare const OperationSchema: z.ZodObject<{
     batches?: string[] | undefined;
 }, {
     error: string;
-    status: "error" | "pending" | "in_progress" | "completed";
     $id: string;
     $createdAt: string;
     $updatedAt: string;
@@ -139,6 +138,7 @@ export declare const OperationSchema: z.ZodObject<{
     total: number;
     data?: any;
     batches?: string[] | undefined;
+    status?: "error" | "pending" | "ready" | "in_progress" | "completed" | "cancelled" | undefined;
 }>;
 export type Operation = z.infer<typeof OperationSchema>;
 export declare const OperationCreateSchema: z.ZodObject<Omit<{
@@ -152,10 +152,10 @@ export declare const OperationCreateSchema: z.ZodObject<Omit<{
     progress: z.ZodNumber;
     total: z.ZodNumber;
     error: z.ZodString;
-    status: z.ZodEnum<["pending", "in_progress", "completed", "error"]>;
+    status: z.ZodDefault<z.ZodEnum<["pending", "ready", "in_progress", "completed", "error", "cancelled"]>>;
 }, "$id" | "$createdAt" | "$updatedAt">, "strip", z.ZodTypeAny, {
     error: string;
-    status: "error" | "pending" | "in_progress" | "completed";
+    status: "error" | "pending" | "ready" | "in_progress" | "completed" | "cancelled";
     collectionId: string;
     operationType: string;
     progress: number;
@@ -164,11 +164,11 @@ export declare const OperationCreateSchema: z.ZodObject<Omit<{
     batches?: string[] | undefined;
 }, {
     error: string;
-    status: "error" | "pending" | "in_progress" | "completed";
     collectionId: string;
     operationType: string;
     progress: number;
     total: number;
+    status?: "error" | "pending" | "ready" | "in_progress" | "completed" | "cancelled" | undefined;
     data?: any;
     batches?: string[] | undefined;
 }>;
@@ -263,12 +263,12 @@ export declare const getMigrationCollectionSchemas: () => {
            targetField?: string | undefined;
        } | undefined;
    })[];
+    name: string;
    $id: string;
    $permissions: {
        permission: string;
        target: string;
    }[];
-    name: string;
    enabled: boolean;
    documentSecurity: boolean;
    indexes: {
@@ -485,12 +485,12 @@ export declare const getMigrationCollectionSchemas: () => {
            targetField?: string | undefined;
        } | undefined;
    })[];
+    name: string;
    $id: string;
    $permissions: {
        permission: string;
        target: string;
    }[];
-    name: string;
    enabled: boolean;
    documentSecurity: boolean;
    indexes: {
```
package/dist/migrations/backup.js
CHANGED
```diff
@@ -44,7 +44,16 @@ export const OperationSchema = z.object({
     progress: z.number(),
     total: z.number(),
     error: z.string(),
-    status: z.enum(["pending", "in_progress", "completed", "error"]),
+    status: z
+        .enum([
+        "pending",
+        "ready",
+        "in_progress",
+        "completed",
+        "error",
+        "cancelled",
+    ])
+        .default("pending"),
 });
 export const OperationCreateSchema = OperationSchema.omit({
     $id: true,
@@ -107,7 +116,14 @@ export const getMigrationCollectionSchemas = () => {
        attributeSchema.parse({
            key: "status",
            type: "enum",
-            elements: ["pending", "in_progress", "completed", "error"],
+            elements: [
+                "pending",
+                "ready",
+                "in_progress",
+                "completed",
+                "error",
+                "cancelled",
+            ],
            error: "Invalid Status",
            array: false,
            xdefault: "pending",
```
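The net effect of the backup.js change is that `status` now accepts six values and is optional on input, falling back to `"pending"`. A minimal standalone zod sketch of the same pattern (this is not the package's exported schema, just the idiom it uses):

```ts
import { z } from "zod";

// Mirrors the updated status field in OperationSchema.
const status = z
  .enum(["pending", "ready", "in_progress", "completed", "error", "cancelled"])
  .default("pending");

console.log(status.parse(undefined)); // "pending" — the default applies when the value is missing
console.log(status.parse("ready"));   // "ready"
// status.parse("done");              // would throw a ZodError: invalid enum value
```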
package/dist/migrations/importController.d.ts
CHANGED
```diff
@@ -11,7 +11,6 @@ export declare class ImportController {
     private setupOptions;
     private documentCache;
     private batchLimit;
-    private postImportActionsQueue;
     constructor(config: AppwriteConfig, database: Databases, storage: Storage, appwriteFolderPath: string, importDataActions: ImportDataActions, setupOptions: SetupOptions);
     run(): Promise<void>;
     importCollections(db: ConfigDatabase): Promise<void>;
@@ -40,5 +39,6 @@ export declare class ImportController {
            name: string;
        } | undefined;
    }[];
-    executePostImportActions(): Promise<void>;
+    executePostImportActions(dbId: string): Promise<void>;
+    executeActionsInParallel(dbId: string, collection: ConfigCollection): Promise<void>;
 }
```
package/dist/migrations/importController.js
CHANGED
```diff
@@ -9,6 +9,9 @@ import { areCollectionNamesSame } from "../utils/index.js";
 import { resolveAndUpdateRelationships } from "./relationships.js";
 import { AuthUserCreateSchema } from "../types.js";
 import { UsersController } from "./users.js";
+import { logger } from "./logging.js";
+import { ContextObject, createOrFindAfterImportOperation, getAfterImportOperations, setAllPendingAfterImportActionsToReady, updateOperation, } from "./migrationHelper.js";
+import { BatchSchema, OperationCreateSchema, OperationSchema, } from "./backup.js";
 export class ImportController {
     config;
     database;
@@ -18,7 +21,11 @@ export class ImportController {
     setupOptions;
     documentCache;
     batchLimit = 25; // Define batch size limit
-    postImportActionsQueue = [];
+    // private postImportActionsQueue: {
+    //   context: any;
+    //   finalItem: any;
+    //   attributeMappings: AttributeMappings;
+    // }[] = [];
     constructor(config, database, storage, appwriteFolderPath, importDataActions, setupOptions) {
         this.config = config;
         this.database = database;
@@ -55,7 +62,7 @@ export class ImportController {
            console.log(`---------------------------------`);
            await this.importCollections(db);
            await resolveAndUpdateRelationships(db.$id, this.database, this.config);
-            await this.executePostImportActions();
+            await this.executePostImportActions(db.$id);
            console.log(`---------------------------------`);
            console.log(`Finished import data for database: ${db.name}`);
            console.log(`---------------------------------`);
@@ -113,6 +120,7 @@ export class ImportController {
            console.log(`Processing update definitions for collection ID: ${collection.$id}`);
            await this.processBatch(db, collection, importDef, dataToImport);
        }
+        await setAllPendingAfterImportActionsToReady(this.database, db.$id, collection.$id);
     }
     async loadData(importDef) {
         const filePath = path.resolve(this.appwriteFolderPath, importDef.filePath);
@@ -161,6 +169,7 @@ export class ImportController {
                }
                const user = await usersController.createUserAndReturn(userToCreate.data);
                createIdToUse = user.$id;
+                context.docId = createIdToUse;
                context = { ...context, ...user };
                console.log("Created user, deleting keys in finalItem that exist in user...");
                const associatedDocFound = await this.database.listDocuments(db.$id, context.collId, [Query.equal("$id", createIdToUse)]);
@@ -205,11 +214,20 @@ export class ImportController {
            const afterImportActionContext = structuredClone(context);
            const attributeMappingsWithActions = this.getAttributeMappingsWithActions(importDef.attributeMappings, context, finalItem);
            if (attributeMappingsWithActions.some((m) => m.postImportActions)) {
-                this.postImportActionsQueue.push({
-                    context: afterImportActionContext,
+                logger.info(`Pushing to post-import actions queue for ${context.docId}`);
+                const afterImportOperationContext = ContextObject.parse({
+                    dbId: db.$id,
+                    collectionId: collection.$id,
                    finalItem: finalItem,
                    attributeMappings: attributeMappingsWithActions,
+                    context: afterImportActionContext,
                });
+                await createOrFindAfterImportOperation(this.database, context.collId, afterImportOperationContext);
+                // this.postImportActionsQueue.push({
+                //   context: afterImportActionContext,
+                //   finalItem: finalItem,
+                //   attributeMappings: attributeMappingsWithActions,
+                // });
            }
        }));
        results.forEach((result) => {
@@ -296,17 +314,65 @@ export class ImportController {
            return mapping;
        });
    }
-    async executePostImportActions() {
-        const
-
-
-
-
+    async executePostImportActions(dbId) {
+        const collectionActionsPromises = [];
+        for (const collection of this.config.collections) {
+            collectionActionsPromises.push(this.executeActionsInParallel(dbId, collection));
+        }
+        const results = await Promise.allSettled(collectionActionsPromises);
         results.forEach((result) => {
             if (result.status === "rejected") {
-                console.error("A
+                console.error("A process batch promise was rejected:", result.reason);
             }
         });
-
+    }
+    async executeActionsInParallel(dbId, collection) {
+        const collectionExists = await checkForCollection(this.database, dbId, collection);
+        if (!collectionExists) {
+            logger.error(`No collection found for ${collection.name}`);
+            return; // Skip this iteration
+        }
+        const operations = await getAfterImportOperations(this.database, collectionExists.$id);
+        for (const operation of operations) {
+            if (!operation.batches) {
+                continue;
+            }
+            const batches = operation.batches;
+            const promises = [];
+            for (const batch of batches) {
+                const batchId = batch;
+                promises.push(this.database.getDocument("migrations", "batches", batchId));
+            }
+            const results = await Promise.allSettled(promises);
+            results.forEach((result) => {
+                if (result.status === "rejected") {
+                    logger.error("A process batch promise was rejected:", result.reason);
+                }
+            });
+            const resultsData = results
+                .map((result) => (result.status === "fulfilled" ? result.value : null))
+                .filter((result) => result !== null && !result.processed)
+                .map((result) => BatchSchema.parse(result));
+            for (const batch of resultsData) {
+                const actionOperation = ContextObject.parse(JSON.parse(batch.data));
+                const { context, finalItem, attributeMappings } = actionOperation;
+                try {
+                    await this.importDataActions.executeAfterImportActions(finalItem, attributeMappings, context);
+                    // Mark batch as processed
+                    await this.database.deleteDocument("migrations", "batches", batch.$id);
+                    await updateOperation(this.database, operation.$id, {
+                        status: "completed",
+                        batches: [],
+                    });
+                }
+                catch (error) {
+                    logger.error(`Failed to execute batch ${batch.$id}:`, error);
+                }
+            }
+            // After processing all batches, update the operation status
+            await updateOperation(this.database, operation.$id, {
+                status: "completed", // Or determine based on batch success/failure
+            });
+        }
     }
 }
```
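Taken together, these importController.js changes replace the in-memory `postImportActionsQueue` with state persisted in the `migrations` database: each queued action is serialized into a `batches` document hanging off a `currentOperations` document, and `executeActionsInParallel` later reads the batch back, parses it, and runs the actions. A shape sketch of one such batch document follows; only the `data`/`processed` fields and the parse-and-execute calls are taken from the diff, while the `$id` and payload values are placeholders.

```ts
// What addBatch writes into migrations/batches, reconstructed from the diff above.
const exampleBatch = {
  $id: "batch_abc123",
  processed: false,
  // JSON-stringified ContextObject, later revived with
  //   ContextObject.parse(JSON.parse(batch.data))
  // before calling
  //   importDataActions.executeAfterImportActions(finalItem, attributeMappings, context).
  data: JSON.stringify({
    dbId: "main",
    collectionId: "teams",
    finalItem: { name: "Engineering" },
    attributeMappings: [],
    context: { docId: "someDocId", collId: "teams" },
  }),
};
```

Persisting the queue this way means a re-run import can pick up unprocessed batches (`processed: false`) instead of losing them, which the old in-memory queue could not do.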
package/dist/migrations/migrationHelper.d.ts
CHANGED
```diff
@@ -1,7 +1,159 @@
 import { type Databases } from "node-appwrite";
-
+import { type Operation } from "./backup.js";
+import { z } from "zod";
+/**
+ * Object that contains the context for an action that needs to be executed after import
+ * Used in the afterImportActionsDefinitions
+ * @type {ContextObject}
+ * @typedef {Object} ContextObject
+ * @property {string} collectionId - The ID of the collection
+ * @property {any} finalItem - The final item that was imported
+ * @property {string} action - The name of the action
+ * @property {string[]} params - The parameters for the action
+ * @property {Object} context - The context object for the action (all the data of this specific item)
+ */
+export declare const ContextObject: z.ZodObject<{
+    dbId: z.ZodString;
+    collectionId: z.ZodString;
+    finalItem: z.ZodAny;
+    attributeMappings: z.ZodArray<z.ZodObject<{
+        oldKey: z.ZodOptional<z.ZodString>;
+        oldKeys: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
+        targetKey: z.ZodString;
+        fileData: z.ZodOptional<z.ZodObject<{
+            name: z.ZodString;
+            path: z.ZodString;
+        }, "strip", z.ZodTypeAny, {
+            path: string;
+            name: string;
+        }, {
+            path: string;
+            name: string;
+        }>>;
+        converters: z.ZodDefault<z.ZodArray<z.ZodString, "many">>;
+        validationActions: z.ZodDefault<z.ZodArray<z.ZodObject<{
+            action: z.ZodString;
+            params: z.ZodArray<z.ZodString, "many">;
+        }, "strip", z.ZodTypeAny, {
+            params: string[];
+            action: string;
+        }, {
+            params: string[];
+            action: string;
+        }>, "many">>;
+        postImportActions: z.ZodDefault<z.ZodArray<z.ZodObject<{
+            action: z.ZodString;
+            params: z.ZodArray<z.ZodUnion<[z.ZodString, z.ZodRecord<z.ZodString, z.ZodAny>]>, "many">;
+        }, "strip", z.ZodTypeAny, {
+            params: (string | Record<string, any>)[];
+            action: string;
+        }, {
+            params: (string | Record<string, any>)[];
+            action: string;
+        }>, "many">>;
+    }, "strip", z.ZodTypeAny, {
+        targetKey: string;
+        converters: string[];
+        validationActions: {
+            params: string[];
+            action: string;
+        }[];
+        postImportActions: {
+            params: (string | Record<string, any>)[];
+            action: string;
+        }[];
+        oldKey?: string | undefined;
+        oldKeys?: string[] | undefined;
+        fileData?: {
+            path: string;
+            name: string;
+        } | undefined;
+    }, {
+        targetKey: string;
+        oldKey?: string | undefined;
+        oldKeys?: string[] | undefined;
+        fileData?: {
+            path: string;
+            name: string;
+        } | undefined;
+        converters?: string[] | undefined;
+        validationActions?: {
+            params: string[];
+            action: string;
+        }[] | undefined;
+        postImportActions?: {
+            params: (string | Record<string, any>)[];
+            action: string;
+        }[] | undefined;
+    }>, "many">;
+    context: z.ZodAny;
+}, "strip", z.ZodTypeAny, {
+    attributeMappings: {
+        targetKey: string;
+        converters: string[];
+        validationActions: {
+            params: string[];
+            action: string;
+        }[];
+        postImportActions: {
+            params: (string | Record<string, any>)[];
+            action: string;
+        }[];
+        oldKey?: string | undefined;
+        oldKeys?: string[] | undefined;
+        fileData?: {
+            path: string;
+            name: string;
+        } | undefined;
+    }[];
+    collectionId: string;
+    dbId: string;
+    finalItem?: any;
+    context?: any;
+}, {
+    attributeMappings: {
+        targetKey: string;
+        oldKey?: string | undefined;
+        oldKeys?: string[] | undefined;
+        fileData?: {
+            path: string;
+            name: string;
+        } | undefined;
+        converters?: string[] | undefined;
+        validationActions?: {
+            params: string[];
+            action: string;
+        }[] | undefined;
+        postImportActions?: {
+            params: (string | Record<string, any>)[];
+            action: string;
+        }[] | undefined;
+    }[];
+    collectionId: string;
+    dbId: string;
+    finalItem?: any;
+    context?: any;
+}>;
+export type ContextObject = z.infer<typeof ContextObject>;
+export declare const createOrFindAfterImportOperation: (database: Databases, collectionId: string, context: ContextObject) => Promise<void>;
+export declare const addBatch: (database: Databases, operation: Operation, data: string) => Promise<string>;
+export declare const getAfterImportOperations: (database: Databases, collectionId: string) => Promise<{
+    error: string;
+    status: "error" | "pending" | "ready" | "in_progress" | "completed" | "cancelled";
+    $id: string;
+    $createdAt: string;
+    $updatedAt: string;
+    collectionId: string;
+    operationType: string;
+    progress: number;
+    total: number;
+    data?: any;
+    batches?: string[] | undefined;
+}[]>;
+export declare const setAllPendingAfterImportActionsToReady: (database: Databases, dbId: string, collectionId: string) => Promise<void>;
+export declare const findOrCreateOperation: (database: Databases, collectionId: string, operationType: string, additionalQueries?: string[]) => Promise<{
     error: string;
-    status: "error" | "pending" | "in_progress" | "completed";
+    status: "error" | "pending" | "ready" | "in_progress" | "completed" | "cancelled";
     $id: string;
     $createdAt: string;
     $updatedAt: string;
```
package/dist/migrations/migrationHelper.js
CHANGED
```diff
@@ -1,13 +1,107 @@
 import { ID, Query } from "node-appwrite";
-import { OperationSchema } from "./backup.js";
-
-
-
-
+import { BatchSchema, OperationSchema } from "./backup.js";
+import { AttributeMappingsSchema } from "./schema.js";
+import { z } from "zod";
+import { logger } from "./logging.js";
+/**
+ * Object that contains the context for an action that needs to be executed after import
+ * Used in the afterImportActionsDefinitions
+ * @type {ContextObject}
+ * @typedef {Object} ContextObject
+ * @property {string} collectionId - The ID of the collection
+ * @property {any} finalItem - The final item that was imported
+ * @property {string} action - The name of the action
+ * @property {string[]} params - The parameters for the action
+ * @property {Object} context - The context object for the action (all the data of this specific item)
+ */
+export const ContextObject = z.object({
+    dbId: z.string(),
+    collectionId: z.string(),
+    finalItem: z.any(),
+    attributeMappings: AttributeMappingsSchema,
+    context: z.any(),
+});
+export const createOrFindAfterImportOperation = async (database, collectionId, context) => {
+    let operation = await findOrCreateOperation(database, collectionId, "afterImportAction");
+    if (!operation.batches) {
+        operation.batches = [];
+    }
+    // Directly create a new batch for the context without checking for an existing batch
+    const contextData = JSON.stringify(context);
+    // Create a new batch with the contextData
+    const newBatchId = await addBatch(database, operation, contextData);
+    // Update the operation with the new batch's $id
+    operation.batches.push(newBatchId);
+    await database.updateDocument("migrations", "currentOperations", operation.$id, { batches: operation.batches });
+};
+export const addBatch = async (database, operation, data) => {
+    const batch = await database.createDocument("migrations", "batches", ID.unique(), {
+        data,
+        processed: false,
+    });
+    await database.updateDocument("migrations", "currentOperations", operation.$id, {
+        batches: [...(operation.batches || []), batch.$id],
+    });
+    return batch.$id;
+};
+export const getAfterImportOperations = async (database, collectionId) => {
+    let lastDocumentId;
+    const allOperations = [];
+    let total = 0;
+    do {
+        const query = [
+            Query.equal("collectionId", collectionId),
+            Query.equal("operationType", "afterImportAction"),
+            Query.equal("status", "ready"),
+            Query.limit(100),
+        ];
+        if (lastDocumentId) {
+            query.push(Query.cursorAfter(lastDocumentId));
+        }
+        const operations = await database.listDocuments("migrations", "currentOperations", query);
+        total = operations.total; // Update total with the latest fetch
+        allOperations.push(...operations.documents);
+        if (operations.documents.length > 0) {
+            lastDocumentId =
+                operations.documents[operations.documents.length - 1].$id;
+        }
+    } while (allOperations.length < total);
+    const allOps = allOperations.map((op) => OperationSchema.parse(op));
+    return allOps;
+};
+export const setAllPendingAfterImportActionsToReady = async (database, dbId, collectionId) => {
+    let lastDocumentId;
+    do {
+        const query = [
+            Query.equal("collectionId", collectionId),
+            Query.equal("status", "pending"),
+            Query.limit(100),
+        ];
+        if (lastDocumentId) {
+            query.push(Query.cursorAfter(lastDocumentId));
+        }
+        const operations = await database.listDocuments("migrations", "currentOperations", query);
+        // Update each pending operation to 'ready'
+        for (const operation of operations.documents) {
+            await database.updateDocument("migrations", "currentOperations", operation.$id, { status: "ready" });
+        }
+        // Prepare for the next iteration in case there are more than 100 documents
+        if (operations.documents.length > 0) {
+            lastDocumentId =
+                operations.documents[operations.documents.length - 1].$id;
+        }
+        else {
+            lastDocumentId = undefined; // No more documents to process
+        }
+    } while (lastDocumentId); // Continue if there's a last document indicating more documents might exist
+    logger.info(`All pending operations for collection ${collectionId} are now set to ready.`);
+};
+export const findOrCreateOperation = async (database, collectionId, operationType, additionalQueries) => {
     const operations = await database.listDocuments("migrations", "currentOperations", [
         Query.equal("collectionId", collectionId),
         Query.equal("operationType", operationType),
-        Query.equal("status", "
+        Query.equal("status", "pending"),
+        ...(additionalQueries || []),
     ]);
     if (operations.documents.length > 0) {
         return OperationSchema.parse(operations.documents[0]); // Assuming the first document is the operation we want
@@ -17,7 +111,7 @@ export const findOrCreateOperation = async (database, collectionId, operationTyp
     const op = await database.createDocument("migrations", "currentOperations", ID.unique(), {
         operationType,
         collectionId,
-        status: "
+        status: "pending",
         batches: [],
         progress: 0,
         total: 0,
```
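End to end, the new migrationHelper.js gives after-import operations a small lifecycle: they are created as `pending` while batches accumulate during import, flipped to `ready` once a collection finishes, and only `ready` operations are picked up for execution. Below is a hedged usage sketch against the exported helpers, with signatures as declared in this diff; the Appwrite client setup, the database ID `"main"`, and the collection `$id` `"teams"` are placeholders, and the relative import path is how the functions are referenced inside the package.

```ts
import { Client, Databases } from "node-appwrite";
import {
  createOrFindAfterImportOperation,
  setAllPendingAfterImportActionsToReady,
  getAfterImportOperations,
} from "./migrationHelper.js"; // relative path as used within the package's dist output

// Placeholder client configuration.
const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1")
  .setProject("<projectId>")
  .setKey("<apiKey>");
const db = new Databases(client);

// 1. During import: queue an after-import action as a batch on a "pending" operation.
await createOrFindAfterImportOperation(db, "teams", {
  dbId: "main",
  collectionId: "teams",
  finalItem: { name: "Engineering" },
  attributeMappings: [],
  context: { docId: "someDocId" },
});

// 2. After the collection finishes importing: flip its pending operations to "ready".
await setAllPendingAfterImportActionsToReady(db, "main", "teams");

// 3. executePostImportActions then fetches only "ready" operations, 100 at a time.
const readyOps = await getAfterImportOperations(db, "teams");
console.log(`${readyOps.length} after-import operation(s) ready for collection "teams"`);
```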
|