appwrite-utils-cli 0.0.47 → 0.0.48
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +53 -6
- package/dist/main.js +23 -0
- package/dist/migrations/collections.d.ts +6 -0
- package/dist/migrations/collections.js +133 -2
- package/dist/migrations/dataLoader.d.ts +11 -1
- package/dist/migrations/dataLoader.js +40 -29
- package/dist/migrations/databases.d.ts +10 -0
- package/dist/migrations/databases.js +114 -2
- package/dist/migrations/importController.d.ts +1 -0
- package/dist/migrations/importController.js +23 -5
- package/dist/migrations/storage.d.ts +2 -0
- package/dist/migrations/storage.js +68 -1
- package/dist/migrations/users.d.ts +1 -0
- package/dist/migrations/users.js +46 -1
- package/dist/utils/helperFunctions.d.ts +2 -1
- package/dist/utils/helperFunctions.js +7 -1
- package/dist/utilsController.d.ts +11 -0
- package/dist/utilsController.js +54 -0
- package/package.json +2 -2
- package/src/main.ts +53 -0
- package/src/migrations/collections.ts +222 -2
- package/src/migrations/dataLoader.ts +69 -33
- package/src/migrations/databases.ts +221 -2
- package/src/migrations/importController.ts +40 -11
- package/src/migrations/storage.ts +124 -1
- package/src/migrations/users.ts +66 -1
- package/src/utils/helperFunctions.ts +17 -1
- package/src/utilsController.ts +140 -0
package/README.md
CHANGED
|
@@ -12,6 +12,7 @@
|
|
|
12
12
|
- **Data Import**: Facilitate the import of data into your Appwrite databases with comprehensive command-line support.
|
|
13
13
|
- **Backup Management**: Create backups of your Appwrite databases to ensure data integrity and safety.
|
|
14
14
|
- **Flexible Database Management**: Includes commands to wipe databases, documents, or user data, providing flexibility in managing your database state during development or testing.
|
|
15
|
+
- **Transfer Databases, Collections, Documents, Storage Buckets, and more**: Includes additional commands (new) to transfer your data from one place to another. I also optimized the import process using this.
|
|
15
16
|
|
|
16
17
|
## Installation
|
|
17
18
|
|
|
@@ -60,11 +61,56 @@ Replace `--args` with the appropriate options:
|
|
|
60
61
|
- `--import`: Import data into your databases.
|
|
61
62
|
- `--backup`: Perform a backup of your databases.
|
|
62
63
|
- `--wipe-users`: Wipe all user data.
|
|
63
|
-
- `--write-data`: Write converted imported data to file
|
|
64
|
-
- `--sync`: Synchronize your project's config and generate schema for your database
|
|
65
|
-
- `--endpoint
|
|
66
|
-
- `--project
|
|
67
|
-
- `--key
|
|
64
|
+
- `--write-data`: Write converted imported data to file.
|
|
65
|
+
- `--sync`: Synchronize your project's config and generate schema for your database.
|
|
66
|
+
- `--endpoint <endpoint>`: Set the Appwrite endpoint.
|
|
67
|
+
- `--project <project>`: Set the Appwrite project ID.
|
|
68
|
+
- `--key <key>`: Set the Appwrite API key.
|
|
69
|
+
- `--transfer`: Transfer documents between databases.
|
|
70
|
+
- `--transfer-users`: Transfer users between local and remote.
|
|
71
|
+
- `--transferendpoint <transferEndpoint>`: Set the transfer endpoint for remote transfers.
|
|
72
|
+
- `--transferproject <transferProject>`: Set the transfer project ID for remote transfers.
|
|
73
|
+
- `--transferkey <transferKey>`: Set the transfer key for remote transfers.
|
|
74
|
+
- `--fromdb <fromDbId>`: Set the source database ID.
|
|
75
|
+
- `--targetdb <targetDbId>`: Set the destination database ID.
|
|
76
|
+
- `--fromcoll <collectionId>`: Set the source collection ID for transfer, only used for transfer.
|
|
77
|
+
- `--targetcoll <collectionId>`: Set the collection ID to import data into.
|
|
78
|
+
- `--frombucket <bucketId>`: Set the source bucket ID.
|
|
79
|
+
- `--targetbucket <bucketId>`: Set the destination bucket ID.
|
|
80
|
+
|
|
81
|
+
## Examples
|
|
82
|
+
|
|
83
|
+
### Transfer Databases
|
|
84
|
+
|
|
85
|
+
Transfer databases within the same project or from a local to a remote project. If `--fromcoll` and `--targetcoll` are omitted, it will transfer the entire databases. During the database transfer, it will create any missing collections, attributes, and indices.
|
|
86
|
+
|
|
87
|
+
```bash
|
|
88
|
+
npx appwrite-utils-cli appwrite-migrate --transfer --fromdb fromDbId --targetdb toDbId --transferendpoint https://appwrite.otherserver.com --transferproject yourProjectId --transferkey yourApiKey
|
|
89
|
+
```
|
|
90
|
+
|
|
91
|
+
### Transfer Specific Collections
|
|
92
|
+
|
|
93
|
+
Transfer specific collections from one place to another, with all of their data.
|
|
94
|
+
|
|
95
|
+
```bash
|
|
96
|
+
npx appwrite-utils-cli appwrite-migrate --transfer --fromdb fromDbId --targetdb toDbId --fromcoll sourceCollectionId --targetcoll targetCollectionId --transferendpoint https://appwrite.otherserver.com --transferproject yourProjectId --transferkey yourApiKey
|
|
97
|
+
```
|
|
98
|
+
|
|
99
|
+
### Transfer Buckets
|
|
100
|
+
|
|
101
|
+
Transfer files between buckets.
|
|
102
|
+
|
|
103
|
+
```bash
|
|
104
|
+
npx appwrite-utils-cli appwrite-migrate --transfer --frombucket sourceBucketId --targetbucket targetBucketId --transferendpoint https://appwrite.otherserver.com --transferproject yourProjectId --transferkey yourApiKey
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
### Transfer Users
|
|
108
|
+
|
|
109
|
+
Transfer users between local and remote.
|
|
110
|
+
|
|
111
|
+
```bash
|
|
112
|
+
npx appwrite-utils-cli appwrite-migrate --transfer-users --transferendpoint https://appwrite.otherserver.com --transferproject yourProjectId --transferkey yourApiKey
|
|
113
|
+
```
|
|
68
114
|
|
|
69
115
|
## If you run out of RAM
|
|
70
116
|
|
|
@@ -86,7 +132,8 @@ This setup ensures that developers have robust tools at their fingertips to mana
|
|
|
86
132
|
|
|
87
133
|
### Changelog
|
|
88
134
|
|
|
89
|
-
- 0.0.48:
|
|
135
|
+
- 0.0.48: Added `--transfer`, `--fromdb <targetDatabaseId>`, `--targetdb <targetDatabaseId>`, `--transferendpoint <transferEndpoint>`, `--transferproject <transferProjectId>`, `--transferkey <transferApiKey>`. Additionally, I've added `--fromcoll <collectionId>` and `--targetcoll <collectionId>`. These allow you to do a few things. First, you can now transfer databases in the same project, and from local to a remote project. Second, you can now specify specific collections to transfer from one place to another, with all of their data. If `--fromcoll` and `--targetcoll` are omitted, it will transfer the databases. During the database transfer, it will create any missing collections, attributes, and indices.
|
|
136
|
+
- 0.0.47: Minor bugfixes in many releases, too small to take note of
|
|
90
137
|
- 0.0.38: Lots of optimizations done to the code, added `tryAwaitWithRetry` for `fetch failed` and others like it errors (looking at you `server error`) -- this should prevent things from going sideways.
|
|
91
138
|
- 0.0.37: Added `documentSecurity`, `enabled`, and `$id` to the `init` collection
|
|
92
139
|
- 0.0.36: Made it update collections by default, sometimes you gotta do what you gotta do
|
package/dist/main.js
CHANGED
|
@@ -8,6 +8,17 @@ program
|
|
|
8
8
|
.option("--endpoint <endpoint>", "Set the Appwrite endpoint", undefined)
|
|
9
9
|
.option("--project <project>", "Set the Appwrite project ID", undefined)
|
|
10
10
|
.option("--key <key>", "Set the Appwrite API key", undefined)
|
|
11
|
+
.option("--transfer", "Transfer documents between databases", false)
|
|
12
|
+
.option("--transfer-users", "Transfer users between local and remote", false)
|
|
13
|
+
.option("--transferendpoint <transferEndpoint>", "Set the transfer endpoint for remote transfers", undefined)
|
|
14
|
+
.option("--transferproject <transferProject>", "Set the transfer project ID for remote transfers", undefined)
|
|
15
|
+
.option("--transferkey <transferKey>", "Set the transfer key for remote transfers", undefined)
|
|
16
|
+
.option("--fromdb <fromDbId>", "Set the source database ID", undefined)
|
|
17
|
+
.option("--targetdb <targetDbId>", "Set the destination database ID", undefined)
|
|
18
|
+
.option("--fromcoll <collectionId>", "Set the source collection ID for transfer, only used for transfer", undefined)
|
|
19
|
+
.option("--targetcoll <collectionId>", "Set the collection ID to import data into", undefined)
|
|
20
|
+
.option("--frombucket <bucketId>", "Set the source bucket ID", undefined)
|
|
21
|
+
.option("--targetbucket <bucketId>", "Set the destination bucket ID", undefined)
|
|
11
22
|
.option("--backup", "Perform a backup before executing the command", false)
|
|
12
23
|
.option("--dev", "Run in development environment", false)
|
|
13
24
|
.option("--prod", "Run in production environment", false)
|
|
@@ -24,6 +35,7 @@ program.on("--help", () => {
|
|
|
24
35
|
console.log("");
|
|
25
36
|
console.log("Examples:");
|
|
26
37
|
console.log(" $ npx appwrite-utils-cli appwrite-migrate --sync --endpoint https://appwrite.example.com --project 123456 --key 7890");
|
|
38
|
+
console.log(" $ npx appwrite-utils-cli appwrite-migrate --transfer --fromdb fromDbId --targetdb toDbId --transferendpoint https://appwrite.otherserver.com --transferproject yourProjectId --transferkey yourApiKey");
|
|
27
39
|
console.log(" $ npx appwrite-utils-cli appwrite-migrate --sync --dev --backup");
|
|
28
40
|
console.log(" $ npx appwrite-utils-cli appwrite-migrate --wipe --wipe-docs --wipe-users --dev");
|
|
29
41
|
console.log(" $ npx appwrite-utils-cli appwrite-migrate --generate --import --write-data --dev");
|
|
@@ -55,6 +67,17 @@ program.action(async (options) => {
|
|
|
55
67
|
endpoint: options.endpoint,
|
|
56
68
|
project: options.project,
|
|
57
69
|
key: options.key,
|
|
70
|
+
transfer: options.transfer,
|
|
71
|
+
transferEndpoint: options.transferEndpoint,
|
|
72
|
+
transferProject: options.transferProject,
|
|
73
|
+
transferKey: options.transferKey,
|
|
74
|
+
fromDbId: options.fromdb,
|
|
75
|
+
targetDbId: options.targetdb,
|
|
76
|
+
fromCollection: options.fromcoll,
|
|
77
|
+
collection: options.targetcoll, // Add this line
|
|
78
|
+
transferUsers: options.transferUsers,
|
|
79
|
+
fromBucket: options.frombucket,
|
|
80
|
+
targetBucket: options.targetbucket,
|
|
58
81
|
};
|
|
59
82
|
console.log("Running operation...", setupOptions);
|
|
60
83
|
await controller.run(setupOptions);
|
|
@@ -14,3 +14,9 @@ export declare const createOrUpdateCollections: (database: Databases, databaseId
|
|
|
14
14
|
}[]) => Promise<void>;
|
|
15
15
|
export declare const generateMockData: (database: Databases, databaseId: string, configCollections: any[]) => Promise<void>;
|
|
16
16
|
export declare const fetchAllCollections: (dbId: string, database: Databases) => Promise<Models.Collection[]>;
|
|
17
|
+
/**
|
|
18
|
+
* Transfers all documents from one collection to another in a different database
|
|
19
|
+
* within the same Appwrite Project
|
|
20
|
+
*/
|
|
21
|
+
export declare const transferDocumentsBetweenDbsLocalToLocal: (db: Databases, fromDbId: string, toDbId: string, fromCollId: string, toCollId: string) => Promise<void>;
|
|
22
|
+
export declare const transferDocumentsBetweenDbsLocalToRemote: (localDb: Databases, endpoint: string, projectId: string, apiKey: string, fromDbId: string, toDbId: string, fromCollId: string, toCollId: string) => Promise<void>;
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { Databases, ID, Permission, Query } from "node-appwrite";
|
|
1
|
+
import { Client, Databases, ID, Permission, Query, } from "node-appwrite";
|
|
2
2
|
import { nameToIdMapping, processQueue } from "./queue.js";
|
|
3
3
|
import { createUpdateCollectionAttributes } from "./attributes.js";
|
|
4
4
|
import { createOrUpdateIndexes } from "./indexes.js";
|
|
@@ -96,7 +96,7 @@ export const wipeDatabase = async (database, databaseId) => {
|
|
|
96
96
|
collectionId: collectionId,
|
|
97
97
|
collectionName: name,
|
|
98
98
|
});
|
|
99
|
-
await database.deleteCollection(databaseId, collectionId);
|
|
99
|
+
tryAwaitWithRetry(async () => await database.deleteCollection(databaseId, collectionId)); // Try to delete the collection and ignore errors if it doesn't exist or if it's already being deleted
|
|
100
100
|
}
|
|
101
101
|
return collectionsDeleted;
|
|
102
102
|
};
|
|
@@ -214,3 +214,134 @@ export const fetchAllCollections = async (dbId, database) => {
|
|
|
214
214
|
console.log(`Fetched a total of ${collections.length} collections.`);
|
|
215
215
|
return collections;
|
|
216
216
|
};
|
|
217
|
+
/**
|
|
218
|
+
* Transfers all documents from one collection to another in a different database
|
|
219
|
+
* within the same Appwrite Project
|
|
220
|
+
*/
|
|
221
|
+
export const transferDocumentsBetweenDbsLocalToLocal = async (db, fromDbId, toDbId, fromCollId, toCollId) => {
|
|
222
|
+
let fromCollDocs = await tryAwaitWithRetry(async () => db.listDocuments(fromDbId, fromCollId, [Query.limit(50)]));
|
|
223
|
+
let totalDocumentsTransferred = 0;
|
|
224
|
+
if (fromCollDocs.documents.length === 0) {
|
|
225
|
+
console.log(`No documents found in collection ${fromCollId}`);
|
|
226
|
+
return;
|
|
227
|
+
}
|
|
228
|
+
else if (fromCollDocs.documents.length < 50) {
|
|
229
|
+
const batchedPromises = fromCollDocs.documents.map((doc) => {
|
|
230
|
+
const toCreateObject = {
|
|
231
|
+
...doc,
|
|
232
|
+
};
|
|
233
|
+
delete toCreateObject.$databaseId;
|
|
234
|
+
delete toCreateObject.$collectionId;
|
|
235
|
+
delete toCreateObject.$createdAt;
|
|
236
|
+
delete toCreateObject.$updatedAt;
|
|
237
|
+
delete toCreateObject.$id;
|
|
238
|
+
delete toCreateObject.$permissions;
|
|
239
|
+
return tryAwaitWithRetry(async () => await db.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
|
|
240
|
+
});
|
|
241
|
+
await Promise.all(batchedPromises);
|
|
242
|
+
totalDocumentsTransferred += fromCollDocs.documents.length;
|
|
243
|
+
}
|
|
244
|
+
else {
|
|
245
|
+
const batchedPromises = fromCollDocs.documents.map((doc) => {
|
|
246
|
+
const toCreateObject = {
|
|
247
|
+
...doc,
|
|
248
|
+
};
|
|
249
|
+
delete toCreateObject.$databaseId;
|
|
250
|
+
delete toCreateObject.$collectionId;
|
|
251
|
+
delete toCreateObject.$createdAt;
|
|
252
|
+
delete toCreateObject.$updatedAt;
|
|
253
|
+
delete toCreateObject.$id;
|
|
254
|
+
delete toCreateObject.$permissions;
|
|
255
|
+
return tryAwaitWithRetry(async () => db.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
|
|
256
|
+
});
|
|
257
|
+
await Promise.all(batchedPromises);
|
|
258
|
+
totalDocumentsTransferred += fromCollDocs.documents.length;
|
|
259
|
+
while (fromCollDocs.documents.length === 50) {
|
|
260
|
+
fromCollDocs = await db.listDocuments(fromDbId, fromCollId, [
|
|
261
|
+
Query.limit(50),
|
|
262
|
+
Query.cursorAfter(fromCollDocs.documents[fromCollDocs.documents.length - 1].$id),
|
|
263
|
+
]);
|
|
264
|
+
const batchedPromises = fromCollDocs.documents.map((doc) => {
|
|
265
|
+
const toCreateObject = {
|
|
266
|
+
...doc,
|
|
267
|
+
};
|
|
268
|
+
delete toCreateObject.$databaseId;
|
|
269
|
+
delete toCreateObject.$collectionId;
|
|
270
|
+
delete toCreateObject.$createdAt;
|
|
271
|
+
delete toCreateObject.$updatedAt;
|
|
272
|
+
delete toCreateObject.$id;
|
|
273
|
+
delete toCreateObject.$permissions;
|
|
274
|
+
return tryAwaitWithRetry(async () => await db.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
|
|
275
|
+
});
|
|
276
|
+
await Promise.all(batchedPromises);
|
|
277
|
+
totalDocumentsTransferred += fromCollDocs.documents.length;
|
|
278
|
+
}
|
|
279
|
+
}
|
|
280
|
+
console.log(`Transferred ${totalDocumentsTransferred} documents from database ${fromDbId} to database ${toDbId} -- collection ${fromCollId} to collection ${toCollId}`);
|
|
281
|
+
};
|
|
282
|
+
export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint, projectId, apiKey, fromDbId, toDbId, fromCollId, toCollId) => {
|
|
283
|
+
const client = new Client()
|
|
284
|
+
.setEndpoint(endpoint)
|
|
285
|
+
.setProject(projectId)
|
|
286
|
+
.setKey(apiKey);
|
|
287
|
+
let totalDocumentsTransferred = 0;
|
|
288
|
+
const remoteDb = new Databases(client);
|
|
289
|
+
let fromCollDocs = await tryAwaitWithRetry(async () => localDb.listDocuments(fromDbId, fromCollId, [Query.limit(50)]));
|
|
290
|
+
if (fromCollDocs.documents.length === 0) {
|
|
291
|
+
console.log(`No documents found in collection ${fromCollId}`);
|
|
292
|
+
return;
|
|
293
|
+
}
|
|
294
|
+
else if (fromCollDocs.documents.length < 50) {
|
|
295
|
+
const batchedPromises = fromCollDocs.documents.map((doc) => {
|
|
296
|
+
const toCreateObject = {
|
|
297
|
+
...doc,
|
|
298
|
+
};
|
|
299
|
+
delete toCreateObject.$databaseId;
|
|
300
|
+
delete toCreateObject.$collectionId;
|
|
301
|
+
delete toCreateObject.$createdAt;
|
|
302
|
+
delete toCreateObject.$updatedAt;
|
|
303
|
+
delete toCreateObject.$id;
|
|
304
|
+
delete toCreateObject.$permissions;
|
|
305
|
+
return tryAwaitWithRetry(async () => remoteDb.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
|
|
306
|
+
});
|
|
307
|
+
await Promise.all(batchedPromises);
|
|
308
|
+
totalDocumentsTransferred += fromCollDocs.documents.length;
|
|
309
|
+
}
|
|
310
|
+
else {
|
|
311
|
+
const batchedPromises = fromCollDocs.documents.map((doc) => {
|
|
312
|
+
const toCreateObject = {
|
|
313
|
+
...doc,
|
|
314
|
+
};
|
|
315
|
+
delete toCreateObject.$databaseId;
|
|
316
|
+
delete toCreateObject.$collectionId;
|
|
317
|
+
delete toCreateObject.$createdAt;
|
|
318
|
+
delete toCreateObject.$updatedAt;
|
|
319
|
+
delete toCreateObject.$id;
|
|
320
|
+
delete toCreateObject.$permissions;
|
|
321
|
+
return tryAwaitWithRetry(async () => remoteDb.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
|
|
322
|
+
});
|
|
323
|
+
await Promise.all(batchedPromises);
|
|
324
|
+
totalDocumentsTransferred += fromCollDocs.documents.length;
|
|
325
|
+
while (fromCollDocs.documents.length === 50) {
|
|
326
|
+
fromCollDocs = await tryAwaitWithRetry(async () => localDb.listDocuments(fromDbId, fromCollId, [
|
|
327
|
+
Query.limit(50),
|
|
328
|
+
Query.cursorAfter(fromCollDocs.documents[fromCollDocs.documents.length - 1].$id),
|
|
329
|
+
]));
|
|
330
|
+
const batchedPromises = fromCollDocs.documents.map((doc) => {
|
|
331
|
+
const toCreateObject = {
|
|
332
|
+
...doc,
|
|
333
|
+
};
|
|
334
|
+
delete toCreateObject.$databaseId;
|
|
335
|
+
delete toCreateObject.$collectionId;
|
|
336
|
+
delete toCreateObject.$createdAt;
|
|
337
|
+
delete toCreateObject.$updatedAt;
|
|
338
|
+
delete toCreateObject.$id;
|
|
339
|
+
delete toCreateObject.$permissions;
|
|
340
|
+
return tryAwaitWithRetry(async () => remoteDb.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
|
|
341
|
+
});
|
|
342
|
+
await Promise.all(batchedPromises);
|
|
343
|
+
totalDocumentsTransferred += fromCollDocs.documents.length;
|
|
344
|
+
}
|
|
345
|
+
}
|
|
346
|
+
console.log(`Total documents transferred from database ${fromDbId} to database ${toDbId} -- collection ${fromCollId} to collection ${toCollId}: ${totalDocumentsTransferred}`);
|
|
347
|
+
};
|
|
@@ -278,6 +278,16 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
278
278
|
originalIdField: z.ZodString;
|
|
279
279
|
targetField: z.ZodOptional<z.ZodString>;
|
|
280
280
|
}, "strip", z.ZodTypeAny, {
|
|
281
|
+
/**
|
|
282
|
+
* Prepares the data for updating documents within a collection.
|
|
283
|
+
* This method loads the raw data based on the import definition, transforms it according to the attribute mappings,
|
|
284
|
+
* finds the new ID for each item based on the primary key or update mapping, and then validates the transformed data.
|
|
285
|
+
* If the data is valid, it updates the import definition with any post-import actions and adds the item to the current collection data.
|
|
286
|
+
*
|
|
287
|
+
* @param db - The database configuration.
|
|
288
|
+
* @param collection - The collection configuration.
|
|
289
|
+
* @param importDef - The import definition containing the attribute mappings and other relevant info.
|
|
290
|
+
*/
|
|
281
291
|
originalIdField: string;
|
|
282
292
|
targetField?: string | undefined;
|
|
283
293
|
}, {
|
|
@@ -1767,7 +1777,7 @@ export declare class DataLoader {
|
|
|
1767
1777
|
* @param collection - The collection configuration.
|
|
1768
1778
|
* @param importDef - The import definition containing the attribute mappings and other relevant info.
|
|
1769
1779
|
*/
|
|
1770
|
-
prepareUpdateData(db: ConfigDatabase, collection: CollectionCreate, importDef: ImportDef): void
|
|
1780
|
+
prepareUpdateData(db: ConfigDatabase, collection: CollectionCreate, importDef: ImportDef): Promise<void>;
|
|
1771
1781
|
private updateReferencesBasedOnAttributeMappings;
|
|
1772
1782
|
private getMergedId;
|
|
1773
1783
|
/**
|
|
@@ -314,7 +314,7 @@ export class DataLoader {
|
|
|
314
314
|
}
|
|
315
315
|
}
|
|
316
316
|
console.log("Running update references");
|
|
317
|
-
|
|
317
|
+
this.dealWithMergedUsers();
|
|
318
318
|
this.updateOldReferencesForNew();
|
|
319
319
|
console.log("Done running update references");
|
|
320
320
|
}
|
|
@@ -858,50 +858,66 @@ export class DataLoader {
|
|
|
858
858
|
* @param collection - The collection configuration.
|
|
859
859
|
* @param importDef - The import definition containing the attribute mappings and other relevant info.
|
|
860
860
|
*/
|
|
861
|
-
prepareUpdateData(db, collection, importDef) {
|
|
862
|
-
// Retrieve the current collection data and old-to-new ID map from the import map
|
|
861
|
+
async prepareUpdateData(db, collection, importDef) {
|
|
863
862
|
const currentData = this.importMap.get(this.getCollectionKey(collection.name));
|
|
864
863
|
const oldIdToNewIdMap = this.oldIdToNewIdPerCollectionMap.get(this.getCollectionKey(collection.name));
|
|
865
|
-
// Log an error and return if no current data is found for the collection
|
|
866
864
|
if (!(currentData?.data && currentData?.data.length > 0) &&
|
|
867
865
|
!oldIdToNewIdMap) {
|
|
868
866
|
logger.error(`No data found for collection ${collection.name} for updateDef but it says it's supposed to have one...`);
|
|
869
867
|
return;
|
|
870
868
|
}
|
|
871
|
-
// Load the raw data based on the import definition
|
|
872
869
|
const rawData = this.loadData(importDef);
|
|
873
870
|
const operationId = this.collectionImportOperations.get(this.getCollectionKey(collection.name));
|
|
874
871
|
if (!operationId) {
|
|
875
872
|
throw new Error(`No import operation found for collection ${collection.name}`);
|
|
876
873
|
}
|
|
877
874
|
for (const item of rawData) {
|
|
878
|
-
// Transform the item data based on the attribute mappings
|
|
879
875
|
let transformedData = this.transformData(item, importDef.attributeMappings);
|
|
880
876
|
let newId;
|
|
881
877
|
let oldId;
|
|
882
|
-
|
|
883
|
-
|
|
884
|
-
if (
|
|
885
|
-
|
|
886
|
-
if (
|
|
887
|
-
|
|
888
|
-
|
|
889
|
-
|
|
890
|
-
|
|
891
|
-
|
|
892
|
-
|
|
878
|
+
let itemDataToUpdate;
|
|
879
|
+
// Try to find itemDataToUpdate using updateMapping
|
|
880
|
+
if (importDef.updateMapping) {
|
|
881
|
+
oldId = item[importDef.updateMapping.originalIdField];
|
|
882
|
+
if (oldId) {
|
|
883
|
+
itemDataToUpdate = currentData?.data.find(({ context, rawData, finalData }) => {
|
|
884
|
+
const targetField = importDef.updateMapping.targetField ??
|
|
885
|
+
importDef.updateMapping.originalIdField;
|
|
886
|
+
return (`${context[targetField]}` === `${oldId}` ||
|
|
887
|
+
`${rawData[targetField]}` === `${oldId}` ||
|
|
888
|
+
`${finalData[targetField]}` === `${oldId}`);
|
|
889
|
+
});
|
|
890
|
+
if (itemDataToUpdate) {
|
|
891
|
+
newId =
|
|
892
|
+
itemDataToUpdate.finalData.docId ||
|
|
893
|
+
itemDataToUpdate.context.docId;
|
|
894
|
+
}
|
|
895
|
+
}
|
|
896
|
+
}
|
|
897
|
+
// If updateMapping is not defined or did not find the item, use primaryKeyField
|
|
898
|
+
if (!itemDataToUpdate && importDef.primaryKeyField) {
|
|
899
|
+
oldId = item[importDef.primaryKeyField];
|
|
900
|
+
if (oldId) {
|
|
901
|
+
newId = oldIdToNewIdMap?.get(`${oldId}`);
|
|
902
|
+
if (!newId &&
|
|
903
|
+
this.getCollectionKey(this.config.usersCollectionName) ===
|
|
904
|
+
this.getCollectionKey(collection.name)) {
|
|
905
|
+
for (const [key, value] of this.mergedUserMap.entries()) {
|
|
906
|
+
if (value.includes(`${oldId}`)) {
|
|
907
|
+
newId = key;
|
|
908
|
+
break;
|
|
909
|
+
}
|
|
893
910
|
}
|
|
894
911
|
}
|
|
895
912
|
}
|
|
913
|
+
if (oldId && !itemDataToUpdate) {
|
|
914
|
+
itemDataToUpdate = currentData?.data.find((data) => `${data.context[importDef.primaryKeyField]}` === `${oldId}`);
|
|
915
|
+
}
|
|
896
916
|
}
|
|
897
|
-
|
|
917
|
+
if (!oldId) {
|
|
898
918
|
logger.error(`No old ID found (to update another document with) in prepareUpdateData for ${collection.name}, ${JSON.stringify(item, null, 2)}`);
|
|
899
919
|
continue;
|
|
900
920
|
}
|
|
901
|
-
const itemDataToUpdate = this.importMap
|
|
902
|
-
.get(this.getCollectionKey(collection.name))
|
|
903
|
-
?.data.find((data) => `${data.context[importDef.primaryKeyField]}` === `${oldId}`);
|
|
904
|
-
// Log an error and continue to the next item if no new ID is found
|
|
905
921
|
if (!newId && !itemDataToUpdate) {
|
|
906
922
|
logger.error(`No new id found for collection ${collection.name} for updateDef ${JSON.stringify(item, null, 2)} but it says it's supposed to have one...`);
|
|
907
923
|
continue;
|
|
@@ -920,10 +936,9 @@ export class DataLoader {
|
|
|
920
936
|
transformedData = this.mergeObjects(itemDataToUpdate.finalData, transformedData);
|
|
921
937
|
// Create a context object for the item, including the new ID and transformed data
|
|
922
938
|
let context = this.createContext(db, collection, item, newId);
|
|
923
|
-
context =
|
|
939
|
+
context = this.mergeObjects(context, transformedData);
|
|
924
940
|
// Validate the item before proceeding
|
|
925
|
-
const isValid = this.importDataActions.validateItem(item, importDef.attributeMappings, context);
|
|
926
|
-
// Log info and continue to the next item if it's invalid
|
|
941
|
+
const isValid = await this.importDataActions.validateItem(item, importDef.attributeMappings, context);
|
|
927
942
|
if (!isValid) {
|
|
928
943
|
logger.info(`Skipping item: ${JSON.stringify(item, null, 2)} because it's invalid`);
|
|
929
944
|
continue;
|
|
@@ -935,16 +950,12 @@ export class DataLoader {
|
|
|
935
950
|
...importDef,
|
|
936
951
|
attributeMappings: mappingsWithActions,
|
|
937
952
|
};
|
|
938
|
-
// Add the item with its context and final data to the current collection data
|
|
939
953
|
if (itemDataToUpdate) {
|
|
940
|
-
// Update the existing item's finalData and context in place
|
|
941
954
|
itemDataToUpdate.finalData = this.mergeObjects(itemDataToUpdate.finalData, transformedData);
|
|
942
955
|
itemDataToUpdate.context = context;
|
|
943
956
|
itemDataToUpdate.importDef = newImportDef;
|
|
944
|
-
currentData.data.push(itemDataToUpdate);
|
|
945
957
|
}
|
|
946
958
|
else {
|
|
947
|
-
// If no existing item matches, then add the new item
|
|
948
959
|
currentData.data.push({
|
|
949
960
|
rawData: item,
|
|
950
961
|
context: context,
|
|
@@ -1,2 +1,12 @@
|
|
|
1
1
|
import { Databases, type Models } from "node-appwrite";
|
|
2
2
|
export declare const fetchAllDatabases: (database: Databases) => Promise<Models.Database[]>;
|
|
3
|
+
/**
|
|
4
|
+
* Transfers all collections and documents from one local database to another local database.
|
|
5
|
+
*
|
|
6
|
+
* @param {Databases} localDb - The local database instance.
|
|
7
|
+
* @param {string} fromDbId - The ID of the source database.
|
|
8
|
+
* @param {string} targetDbId - The ID of the target database.
|
|
9
|
+
* @return {Promise<void>} A promise that resolves when the transfer is complete.
|
|
10
|
+
*/
|
|
11
|
+
export declare const transferDatabaseLocalToLocal: (localDb: Databases, fromDbId: string, targetDbId: string) => Promise<void>;
|
|
12
|
+
export declare const transferDatabaseLocalToRemote: (localDb: Databases, endpoint: string, projectId: string, apiKey: string, fromDbId: string, toDbId: string) => Promise<void>;
|
|
@@ -1,5 +1,8 @@
|
|
|
1
|
-
import { Databases, Query } from "node-appwrite";
|
|
2
|
-
import { tryAwaitWithRetry } from "../utils/helperFunctions.js";
|
|
1
|
+
import { Client, Databases, Query } from "node-appwrite";
|
|
2
|
+
import { getAppwriteClient, tryAwaitWithRetry, } from "../utils/helperFunctions.js";
|
|
3
|
+
import { transferDocumentsBetweenDbsLocalToLocal, transferDocumentsBetweenDbsLocalToRemote, } from "./collections.js";
|
|
4
|
+
import { createOrUpdateAttribute } from "./attributes.js";
|
|
5
|
+
import { parseAttribute } from "appwrite-utils";
|
|
3
6
|
export const fetchAllDatabases = async (database) => {
|
|
4
7
|
const databases = await tryAwaitWithRetry(async () => await database.list([Query.limit(25)]));
|
|
5
8
|
const allDatabases = databases.databases;
|
|
@@ -22,3 +25,112 @@ export const fetchAllDatabases = async (database) => {
|
|
|
22
25
|
}
|
|
23
26
|
return allDatabases;
|
|
24
27
|
};
|
|
28
|
+
/**
|
|
29
|
+
* Transfers all collections and documents from one local database to another local database.
|
|
30
|
+
*
|
|
31
|
+
* @param {Databases} localDb - The local database instance.
|
|
32
|
+
* @param {string} fromDbId - The ID of the source database.
|
|
33
|
+
* @param {string} targetDbId - The ID of the target database.
|
|
34
|
+
* @return {Promise<void>} A promise that resolves when the transfer is complete.
|
|
35
|
+
*/
|
|
36
|
+
/**
 * Mirrors every collection of one local database into another local database
 * on the same Appwrite instance.
 *
 * For each source collection: if a collection with the same $id already exists
 * in the target database, only the documents are transferred; otherwise the
 * collection is recreated (permissions, attributes, indexes) before the
 * documents are copied over.
 *
 * @param localDb    Appwrite Databases service bound to the local instance.
 * @param fromDbId   $id of the source database.
 * @param targetDbId $id of the destination database.
 */
export const transferDatabaseLocalToLocal = async (localDb, fromDbId, targetDbId) => {
    // Appwrite caps list results, so cursor through collections in pages of 50.
    // (Replaces three copies of the same hand-rolled pagination loop; also
    // wraps EVERY page in tryAwaitWithRetry — the original only retried the
    // first page of the source database.)
    const fetchAllCollections = async (dbId) => {
        const pageSize = 50;
        const firstPage = await tryAwaitWithRetry(async () => await localDb.listCollections(dbId, [Query.limit(pageSize)]));
        const all = firstPage.collections;
        let cursor = all.length < pageSize ? undefined : all[all.length - 1].$id;
        while (cursor) {
            const page = await tryAwaitWithRetry(async () => await localDb.listCollections(dbId, [
                Query.limit(pageSize),
                Query.cursorAfter(cursor),
            ]));
            all.push(...page.collections);
            cursor = page.collections.length < pageSize
                ? undefined
                : page.collections[page.collections.length - 1].$id;
        }
        return all;
    };
    const allFromCollections = await fetchAllCollections(fromDbId);
    const allToCollections = await fetchAllCollections(targetDbId);
    for (const collection of allFromCollections) {
        // Collections are matched by $id, not by name.
        const toCollection = allToCollections.find((c) => c.$id === collection.$id);
        if (toCollection) {
            // Destination already has this collection — just move the documents.
            await transferDocumentsBetweenDbsLocalToLocal(localDb, fromDbId, targetDbId, collection.$id, toCollection.$id);
        }
        else {
            console.log(`Collection ${collection.name} not found in destination database, creating...`);
            const newCollection = await tryAwaitWithRetry(async () => await localDb.createCollection(targetDbId, collection.$id, collection.name, collection.$permissions, collection.documentSecurity, collection.enabled));
            console.log(`Collection ${newCollection.name} created`);
            // Recreate schema before copying documents.
            for (const attribute of collection.attributes) {
                await tryAwaitWithRetry(async () => await createOrUpdateAttribute(localDb, targetDbId, newCollection, parseAttribute(attribute)));
            }
            for (const index of collection.indexes) {
                await tryAwaitWithRetry(async () => await localDb.createIndex(targetDbId, newCollection.$id, index.key, index.type, index.attributes, index.orders));
            }
            await transferDocumentsBetweenDbsLocalToLocal(localDb, fromDbId, targetDbId, collection.$id, newCollection.$id);
        }
    }
};
|
|
101
|
+
/**
 * Copies every collection of a local database to a database on a remote
 * Appwrite instance.
 *
 * Unlike the local-to-local variant, each source collection is always created
 * fresh on the remote (no lookup of pre-existing collections), then its
 * attributes, indexes, and documents are transferred.
 *
 * @param localDb   Appwrite Databases service bound to the local instance.
 * @param endpoint  Remote Appwrite endpoint URL.
 * @param projectId Remote project id.
 * @param apiKey    API key authorized on the remote project.
 * @param fromDbId  $id of the local source database.
 * @param toDbId    $id of the remote destination database.
 */
export const transferDatabaseLocalToRemote = async (localDb, endpoint, projectId, apiKey, fromDbId, toDbId) => {
    const client = getAppwriteClient(endpoint, projectId, apiKey);
    const remoteDb = new Databases(client);
    // Cursor through the source collections in pages of 50 (Appwrite list cap).
    // Decomposed from an inlined loop that carried a dead empty if-branch.
    const fetchAllSourceCollections = async () => {
        const pageSize = 50;
        const firstPage = await tryAwaitWithRetry(async () => await localDb.listCollections(fromDbId, [Query.limit(pageSize)]));
        const all = firstPage.collections;
        let cursor = all.length < pageSize ? undefined : all[all.length - 1].$id;
        while (cursor) {
            const page = await tryAwaitWithRetry(async () => await localDb.listCollections(fromDbId, [
                Query.limit(pageSize),
                Query.cursorAfter(cursor),
            ]));
            all.push(...page.collections);
            cursor = page.collections.length < pageSize
                ? undefined
                : page.collections[page.collections.length - 1].$id;
        }
        return all;
    };
    const allFromCollections = await fetchAllSourceCollections();
    for (const collection of allFromCollections) {
        // NOTE(review): createCollection will fail if the $id already exists on
        // the remote — presumably this targets a fresh database; confirm.
        const toCollection = await tryAwaitWithRetry(async () => await remoteDb.createCollection(toDbId, collection.$id, collection.name, collection.$permissions, collection.documentSecurity, collection.enabled));
        console.log(`Collection ${toCollection.name} created`);
        // Recreate schema before copying documents.
        for (const attribute of collection.attributes) {
            await tryAwaitWithRetry(async () => await createOrUpdateAttribute(remoteDb, toDbId, toCollection, parseAttribute(attribute)));
        }
        for (const index of collection.indexes) {
            await tryAwaitWithRetry(async () => await remoteDb.createIndex(toDbId, toCollection.$id, index.key, index.type, index.attributes, index.orders));
        }
        await transferDocumentsBetweenDbsLocalToRemote(localDb, endpoint, projectId, apiKey, fromDbId, toDbId, collection.$id, toCollection.$id);
    }
};
|
|
@@ -16,6 +16,7 @@ export declare class ImportController {
|
|
|
16
16
|
private postImportActionsQueue;
|
|
17
17
|
constructor(config: AppwriteConfig, database: Databases, storage: Storage, appwriteFolderPath: string, importDataActions: ImportDataActions, setupOptions: SetupOptions);
|
|
18
18
|
run(): Promise<void>;
|
|
19
|
+
updateOthersToFinalData(updatedDb: ConfigDatabase, targetDb: ConfigDatabase): Promise<void>;
|
|
19
20
|
importCollections(db: ConfigDatabase, dataLoader: DataLoader): Promise<void>;
|
|
20
21
|
executePostImportActions(dbId: string, dataLoader: DataLoader): Promise<void>;
|
|
21
22
|
}
|
|
@@ -7,6 +7,9 @@ import { logger } from "./logging.js";
|
|
|
7
7
|
import { updateOperation } from "./migrationHelper.js";
|
|
8
8
|
import { BatchSchema, OperationCreateSchema, OperationSchema, } from "./backup.js";
|
|
9
9
|
import { DataLoader } from "./dataLoader.js";
|
|
10
|
+
import { transferDocumentsBetweenDbsLocalToLocal } from "./collections.js";
|
|
11
|
+
import { transferDatabaseLocalToLocal } from "./databases.js";
|
|
12
|
+
import { transferStorageLocalToLocal } from "./storage.js";
|
|
10
13
|
export class ImportController {
|
|
11
14
|
config;
|
|
12
15
|
database;
|
|
@@ -37,6 +40,7 @@ export class ImportController {
|
|
|
37
40
|
this.setupOptions.runDev))
|
|
38
41
|
.map((db) => db.name);
|
|
39
42
|
let dataLoader;
|
|
43
|
+
let databaseRan;
|
|
40
44
|
for (let db of this.config.databases) {
|
|
41
45
|
if (db.name.toLowerCase().trim().replace(" ", "") === "migrations" ||
|
|
42
46
|
!databasesToRun.includes(db.name)) {
|
|
@@ -54,16 +58,30 @@ export class ImportController {
|
|
|
54
58
|
console.log(`Starting import data for database: ${db.name}`);
|
|
55
59
|
console.log(`---------------------------------`);
|
|
56
60
|
// await this.importCollections(db);
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
61
|
+
if (!databaseRan) {
|
|
62
|
+
databaseRan = db;
|
|
63
|
+
dataLoader = new DataLoader(this.appwriteFolderPath, this.importDataActions, this.database, this.config, this.setupOptions.shouldWriteFile);
|
|
64
|
+
await dataLoader.start(db.$id);
|
|
65
|
+
await this.importCollections(db, dataLoader);
|
|
66
|
+
await resolveAndUpdateRelationships(db.$id, this.database, this.config);
|
|
67
|
+
await this.executePostImportActions(db.$id, dataLoader);
|
|
68
|
+
}
|
|
69
|
+
else if (databaseRan.$id !== db.$id) {
|
|
70
|
+
await this.updateOthersToFinalData(databaseRan, db);
|
|
71
|
+
}
|
|
62
72
|
console.log(`---------------------------------`);
|
|
63
73
|
console.log(`Finished import data for database: ${db.name}`);
|
|
64
74
|
console.log(`---------------------------------`);
|
|
65
75
|
}
|
|
66
76
|
}
|
|
77
|
+
async updateOthersToFinalData(updatedDb, targetDb) {
|
|
78
|
+
await transferDatabaseLocalToLocal(this.database, updatedDb.$id, targetDb.$id);
|
|
79
|
+
await transferStorageLocalToLocal(this.storage, `${this.config.documentBucketId}_${updatedDb.name
|
|
80
|
+
.toLowerCase()
|
|
81
|
+
.replace(" ", "")}`, `${this.config.documentBucketId}_${targetDb.name
|
|
82
|
+
.toLowerCase()
|
|
83
|
+
.replace(" ", "")}`);
|
|
84
|
+
}
|
|
67
85
|
async importCollections(db, dataLoader) {
|
|
68
86
|
if (!this.config.collections) {
|
|
69
87
|
return;
|
|
@@ -6,3 +6,5 @@ export declare const initOrGetBackupStorage: (storage: Storage) => Promise<Model
|
|
|
6
6
|
export declare const initOrGetDocumentStorage: (storage: Storage, config: AppwriteConfig, dbName: string) => Promise<Models.Bucket | undefined>;
|
|
7
7
|
export declare const wipeDocumentStorage: (storage: Storage, config: AppwriteConfig, dbName: string) => Promise<void>;
|
|
8
8
|
export declare const backupDatabase: (database: Databases, databaseId: string, storage: Storage) => Promise<void>;
|
|
9
|
+
export declare const transferStorageLocalToLocal: (storage: Storage, fromBucketId: string, toBucketId: string) => Promise<void>;
|
|
10
|
+
export declare const transferStorageLocalToRemote: (localStorage: Storage, endpoint: string, projectId: string, apiKey: string, fromBucketId: string, toBucketId: string) => Promise<void>;
|