appwrite-utils-cli 0.0.53 → 0.0.55
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/migrations/backup.d.ts +2 -0
- package/dist/migrations/collections.js +1 -1
- package/dist/migrations/converters.d.ts +1 -0
- package/dist/migrations/converters.js +4 -1
- package/dist/migrations/dataLoader.d.ts +31 -1
- package/dist/migrations/dataLoader.js +48 -10
- package/dist/migrations/importController.js +20 -3
- package/dist/migrations/migrationHelper.d.ts +5 -0
- package/dist/migrations/setupDatabase.js +3 -0
- package/dist/utils/helperFunctions.js +1 -0
- package/package.json +2 -2
- package/src/migrations/collections.ts +1 -3
- package/src/migrations/converters.ts +3 -1
- package/src/migrations/dataLoader.ts +51 -14
- package/src/migrations/importController.ts +37 -3
- package/src/migrations/setupDatabase.ts +3 -0
- package/src/utils/helperFunctions.ts +1 -0
package/README.md
CHANGED
|
@@ -132,6 +132,8 @@ This setup ensures that developers have robust tools at their fingertips to mana
|
|
|
132
132
|
|
|
133
133
|
### Changelog
|
|
134
134
|
|
|
135
|
+
- 0.0.55: Added `documentExists` check to batch creation functionality to help prevent duplicates
|
|
136
|
+
- 0.0.54: Various fixes in here
|
|
135
137
|
- 0.0.50: Actually fixed the slight bug, it was really in the `mergeObjects`
|
|
136
138
|
- 0.0.49: Fixed a slight bug with `dataLoader` not mapping updates correctly with `updateMapping`
|
|
137
139
|
- 0.0.48: Added `--transfer`, `--fromdb <targetDatabaseId>`, `--targetdb <targetDatabaseId>`, `--transferendpoint <transferEndpoint>`, `--transferproject <transferProjectId>`, `--transferkey <transferApiKey>`. Additionally, I've added `--fromcoll <collectionId>` and `--targetcoll <collectionId>`. These allow you to do a few things. First, you can now transfer databases in the same project, and from local to a remote project. Second, you can now specify specific collections to transfer from one place to another, with all of their data. If `--fromcoll` and `--targetcoll` are omitted, it will transfer the databases. During the database transfer, it will create any missing collections, attributes, and indices.
|
|
@@ -295,6 +295,7 @@ export declare const getMigrationCollectionSchemas: () => {
|
|
|
295
295
|
targetKey: string;
|
|
296
296
|
oldKey?: string | undefined;
|
|
297
297
|
oldKeys?: string[] | undefined;
|
|
298
|
+
valueToSet?: any;
|
|
298
299
|
fileData?: {
|
|
299
300
|
path: string;
|
|
300
301
|
name: string;
|
|
@@ -548,6 +549,7 @@ export declare const getMigrationCollectionSchemas: () => {
|
|
|
548
549
|
targetKey: string;
|
|
549
550
|
oldKey?: string | undefined;
|
|
550
551
|
oldKeys?: string[] | undefined;
|
|
552
|
+
valueToSet?: any;
|
|
551
553
|
fileData?: {
|
|
552
554
|
path: string;
|
|
553
555
|
name: string;
|
|
@@ -88,7 +88,7 @@ export const fetchAndCacheCollectionByName = async (db, dbId, collectionName) =>
|
|
|
88
88
|
};
|
|
89
89
|
export const wipeDatabase = async (database, databaseId) => {
|
|
90
90
|
console.log(`Wiping database: ${databaseId}`);
|
|
91
|
-
const
|
|
91
|
+
const existingCollections = await fetchAllCollections(databaseId, database);
|
|
92
92
|
let collectionsDeleted = [];
|
|
93
93
|
for (const { $id: collectionId, name: name } of existingCollections) {
|
|
94
94
|
console.log(`Deleting collection: ${collectionId}`);
|
|
@@ -89,7 +89,10 @@ export const convertObjectByAttributeMappings = (obj, attributeMappings) => {
|
|
|
89
89
|
return current;
|
|
90
90
|
};
|
|
91
91
|
for (const mapping of attributeMappings) {
|
|
92
|
-
if (
|
|
92
|
+
if (mapping.valueToSet !== undefined) {
|
|
93
|
+
result[mapping.targetKey] = mapping.valueToSet;
|
|
94
|
+
}
|
|
95
|
+
else if (Array.isArray(mapping.oldKeys)) {
|
|
93
96
|
// Collect and flatten values from multiple oldKeys
|
|
94
97
|
const values = mapping.oldKeys
|
|
95
98
|
.map((oldKey) => resolveValue(obj, oldKey))
|
|
@@ -394,6 +394,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
394
394
|
oldKey: z.ZodOptional<z.ZodString>;
|
|
395
395
|
oldKeys: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
|
|
396
396
|
targetKey: z.ZodString;
|
|
397
|
+
valueToSet: z.ZodOptional<z.ZodAny>;
|
|
397
398
|
fileData: z.ZodOptional<z.ZodObject<{
|
|
398
399
|
name: z.ZodString;
|
|
399
400
|
path: z.ZodString;
|
|
@@ -429,6 +430,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
429
430
|
targetKey: string;
|
|
430
431
|
oldKey?: string | undefined;
|
|
431
432
|
oldKeys?: string[] | undefined;
|
|
433
|
+
valueToSet?: any;
|
|
432
434
|
fileData?: {
|
|
433
435
|
path: string;
|
|
434
436
|
name: string;
|
|
@@ -446,6 +448,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
446
448
|
targetKey: string;
|
|
447
449
|
oldKey?: string | undefined;
|
|
448
450
|
oldKeys?: string[] | undefined;
|
|
451
|
+
valueToSet?: any;
|
|
449
452
|
fileData?: {
|
|
450
453
|
path: string;
|
|
451
454
|
name: string;
|
|
@@ -467,6 +470,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
467
470
|
targetKey: string;
|
|
468
471
|
oldKey?: string | undefined;
|
|
469
472
|
oldKeys?: string[] | undefined;
|
|
473
|
+
valueToSet?: any;
|
|
470
474
|
fileData?: {
|
|
471
475
|
path: string;
|
|
472
476
|
name: string;
|
|
@@ -500,6 +504,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
500
504
|
targetKey: string;
|
|
501
505
|
oldKey?: string | undefined;
|
|
502
506
|
oldKeys?: string[] | undefined;
|
|
507
|
+
valueToSet?: any;
|
|
503
508
|
fileData?: {
|
|
504
509
|
path: string;
|
|
505
510
|
name: string;
|
|
@@ -650,6 +655,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
650
655
|
targetKey: string;
|
|
651
656
|
oldKey?: string | undefined;
|
|
652
657
|
oldKeys?: string[] | undefined;
|
|
658
|
+
valueToSet?: any;
|
|
653
659
|
fileData?: {
|
|
654
660
|
path: string;
|
|
655
661
|
name: string;
|
|
@@ -804,6 +810,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
804
810
|
targetKey: string;
|
|
805
811
|
oldKey?: string | undefined;
|
|
806
812
|
oldKeys?: string[] | undefined;
|
|
813
|
+
valueToSet?: any;
|
|
807
814
|
fileData?: {
|
|
808
815
|
path: string;
|
|
809
816
|
name: string;
|
|
@@ -877,6 +884,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
877
884
|
oldKey: z.ZodOptional<z.ZodString>;
|
|
878
885
|
oldKeys: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
|
|
879
886
|
targetKey: z.ZodString;
|
|
887
|
+
valueToSet: z.ZodOptional<z.ZodAny>;
|
|
880
888
|
fileData: z.ZodOptional<z.ZodObject<{
|
|
881
889
|
name: z.ZodString;
|
|
882
890
|
path: z.ZodString;
|
|
@@ -912,6 +920,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
912
920
|
targetKey: string;
|
|
913
921
|
oldKey?: string | undefined;
|
|
914
922
|
oldKeys?: string[] | undefined;
|
|
923
|
+
valueToSet?: any;
|
|
915
924
|
fileData?: {
|
|
916
925
|
path: string;
|
|
917
926
|
name: string;
|
|
@@ -929,6 +938,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
929
938
|
targetKey: string;
|
|
930
939
|
oldKey?: string | undefined;
|
|
931
940
|
oldKeys?: string[] | undefined;
|
|
941
|
+
valueToSet?: any;
|
|
932
942
|
fileData?: {
|
|
933
943
|
path: string;
|
|
934
944
|
name: string;
|
|
@@ -950,6 +960,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
950
960
|
targetKey: string;
|
|
951
961
|
oldKey?: string | undefined;
|
|
952
962
|
oldKeys?: string[] | undefined;
|
|
963
|
+
valueToSet?: any;
|
|
953
964
|
fileData?: {
|
|
954
965
|
path: string;
|
|
955
966
|
name: string;
|
|
@@ -983,6 +994,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
983
994
|
targetKey: string;
|
|
984
995
|
oldKey?: string | undefined;
|
|
985
996
|
oldKeys?: string[] | undefined;
|
|
997
|
+
valueToSet?: any;
|
|
986
998
|
fileData?: {
|
|
987
999
|
path: string;
|
|
988
1000
|
name: string;
|
|
@@ -1023,6 +1035,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
1023
1035
|
targetKey: string;
|
|
1024
1036
|
oldKey?: string | undefined;
|
|
1025
1037
|
oldKeys?: string[] | undefined;
|
|
1038
|
+
valueToSet?: any;
|
|
1026
1039
|
fileData?: {
|
|
1027
1040
|
path: string;
|
|
1028
1041
|
name: string;
|
|
@@ -1061,6 +1074,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
1061
1074
|
targetKey: string;
|
|
1062
1075
|
oldKey?: string | undefined;
|
|
1063
1076
|
oldKeys?: string[] | undefined;
|
|
1077
|
+
valueToSet?: any;
|
|
1064
1078
|
fileData?: {
|
|
1065
1079
|
path: string;
|
|
1066
1080
|
name: string;
|
|
@@ -1103,6 +1117,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
1103
1117
|
targetKey: string;
|
|
1104
1118
|
oldKey?: string | undefined;
|
|
1105
1119
|
oldKeys?: string[] | undefined;
|
|
1120
|
+
valueToSet?: any;
|
|
1106
1121
|
fileData?: {
|
|
1107
1122
|
path: string;
|
|
1108
1123
|
name: string;
|
|
@@ -1252,6 +1267,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
1252
1267
|
targetKey: string;
|
|
1253
1268
|
oldKey?: string | undefined;
|
|
1254
1269
|
oldKeys?: string[] | undefined;
|
|
1270
|
+
valueToSet?: any;
|
|
1255
1271
|
fileData?: {
|
|
1256
1272
|
path: string;
|
|
1257
1273
|
name: string;
|
|
@@ -1296,6 +1312,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
1296
1312
|
targetKey: string;
|
|
1297
1313
|
oldKey?: string | undefined;
|
|
1298
1314
|
oldKeys?: string[] | undefined;
|
|
1315
|
+
valueToSet?: any;
|
|
1299
1316
|
fileData?: {
|
|
1300
1317
|
path: string;
|
|
1301
1318
|
name: string;
|
|
@@ -1448,6 +1465,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
|
|
|
1448
1465
|
targetKey: string;
|
|
1449
1466
|
oldKey?: string | undefined;
|
|
1450
1467
|
oldKeys?: string[] | undefined;
|
|
1468
|
+
valueToSet?: any;
|
|
1451
1469
|
fileData?: {
|
|
1452
1470
|
path: string;
|
|
1453
1471
|
name: string;
|
|
@@ -1499,6 +1517,7 @@ export declare class DataLoader {
|
|
|
1499
1517
|
targetKey: string;
|
|
1500
1518
|
oldKey?: string | undefined;
|
|
1501
1519
|
oldKeys?: string[] | undefined;
|
|
1520
|
+
valueToSet?: any;
|
|
1502
1521
|
fileData?: {
|
|
1503
1522
|
path: string;
|
|
1504
1523
|
name: string;
|
|
@@ -1648,6 +1667,7 @@ export declare class DataLoader {
|
|
|
1648
1667
|
targetKey: string;
|
|
1649
1668
|
oldKey?: string | undefined;
|
|
1650
1669
|
oldKeys?: string[] | undefined;
|
|
1670
|
+
valueToSet?: any;
|
|
1651
1671
|
fileData?: {
|
|
1652
1672
|
path: string;
|
|
1653
1673
|
name: string;
|
|
@@ -1719,7 +1739,15 @@ export declare class DataLoader {
|
|
|
1719
1739
|
setupMaps(dbId: string): Promise<void>;
|
|
1720
1740
|
getAllUsers(): Promise<import("node-appwrite").Models.User<import("node-appwrite").Models.Preferences>[]>;
|
|
1721
1741
|
start(dbId: string): Promise<void>;
|
|
1722
|
-
|
|
1742
|
+
/**
|
|
1743
|
+
* Deals with merged users by iterating through all collections in the configuration.
|
|
1744
|
+
* We have merged users if there are duplicate emails or phones in the import data.
|
|
1745
|
+
* This function will iterate through all collections that are the same name as the
|
|
1746
|
+
* users collection and pull out their primaryKeyField's. It will then loop through
|
|
1747
|
+
* each collection and find any documents that have a
|
|
1748
|
+
*
|
|
1749
|
+
* @return {void} This function does not return anything.
|
|
1750
|
+
*/
|
|
1723
1751
|
updateOldReferencesForNew(): void;
|
|
1724
1752
|
private writeMapsToJsonFile;
|
|
1725
1753
|
/**
|
|
@@ -1784,6 +1812,7 @@ export declare class DataLoader {
|
|
|
1784
1812
|
targetKey: string;
|
|
1785
1813
|
oldKey?: string | undefined;
|
|
1786
1814
|
oldKeys?: string[] | undefined;
|
|
1815
|
+
valueToSet?: any;
|
|
1787
1816
|
fileData?: {
|
|
1788
1817
|
path: string;
|
|
1789
1818
|
name: string;
|
|
@@ -1805,6 +1834,7 @@ export declare class DataLoader {
|
|
|
1805
1834
|
targetKey: string;
|
|
1806
1835
|
oldKey?: string | undefined;
|
|
1807
1836
|
oldKeys?: string[] | undefined;
|
|
1837
|
+
valueToSet?: any;
|
|
1808
1838
|
fileData?: {
|
|
1809
1839
|
path: string;
|
|
1810
1840
|
name: string;
|
|
@@ -348,16 +348,54 @@ export class DataLoader {
|
|
|
348
348
|
this.writeMapsToJsonFile();
|
|
349
349
|
}
|
|
350
350
|
}
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
351
|
+
/**
|
|
352
|
+
* Deals with merged users by iterating through all collections in the configuration.
|
|
353
|
+
* We have merged users if there are duplicate emails or phones in the import data.
|
|
354
|
+
* This function will iterate through all collections that are the same name as the
|
|
355
|
+
* users collection and pull out their primaryKeyField's. It will then loop through
|
|
356
|
+
* each collection and find any documents that have a
|
|
357
|
+
*
|
|
358
|
+
* @return {void} This function does not return anything.
|
|
359
|
+
*/
|
|
360
|
+
// dealWithMergedUsers() {
|
|
361
|
+
// const usersCollectionKey = this.getCollectionKey(
|
|
362
|
+
// this.config.usersCollectionName
|
|
363
|
+
// );
|
|
364
|
+
// const usersCollectionData = this.importMap.get(usersCollectionKey);
|
|
365
|
+
// if (!this.config.collections) {
|
|
366
|
+
// console.log("No collections found in configuration.");
|
|
367
|
+
// return;
|
|
368
|
+
// }
|
|
369
|
+
// let needsUpdate = false;
|
|
370
|
+
// let numUpdates = 0;
|
|
371
|
+
// for (const collectionConfig of this.config.collections) {
|
|
372
|
+
// const collectionKey = this.getCollectionKey(collectionConfig.name);
|
|
373
|
+
// const collectionData = this.importMap.get(collectionKey);
|
|
374
|
+
// const collectionImportDefs = collectionConfig.importDefs;
|
|
375
|
+
// const collectionIdMappings = collectionImportDefs
|
|
376
|
+
// .map((importDef) => importDef.idMappings)
|
|
377
|
+
// .flat()
|
|
378
|
+
// .filter((idMapping) => idMapping !== undefined && idMapping !== null);
|
|
379
|
+
// if (!collectionData || !collectionData.data) continue;
|
|
380
|
+
// for (const dataItem of collectionData.data) {
|
|
381
|
+
// for (const idMapping of collectionIdMappings) {
|
|
382
|
+
// // We know it's the users collection here
|
|
383
|
+
// if (this.getCollectionKey(idMapping.targetCollection) === usersCollectionKey) {
|
|
384
|
+
// const targetFieldKey = idMapping.targetFieldToMatch || idMapping.targetField;
|
|
385
|
+
// if (targetFieldKey === )
|
|
386
|
+
// const targetValue = dataItem.finalData[targetFieldKey];
|
|
387
|
+
// const targetCollectionData = this.importMap.get(this.getCollectionKey(idMapping.targetCollection));
|
|
388
|
+
// if (!targetCollectionData || !targetCollectionData.data) continue;
|
|
389
|
+
// const foundData = targetCollectionData.data.filter(({ context }) => {
|
|
390
|
+
// const targetValue = context[targetFieldKey];
|
|
391
|
+
// const isMatch = `${targetValue}` === `${valueToMatch}`;
|
|
392
|
+
// return isMatch && targetValue !== undefined && targetValue !== null;
|
|
393
|
+
// });
|
|
394
|
+
// }
|
|
395
|
+
// }
|
|
396
|
+
// }
|
|
397
|
+
// }
|
|
398
|
+
// }
|
|
361
399
|
updateOldReferencesForNew() {
|
|
362
400
|
if (!this.config.collections) {
|
|
363
401
|
return;
|
|
@@ -7,7 +7,7 @@ import { logger } from "./logging.js";
|
|
|
7
7
|
import { updateOperation } from "./migrationHelper.js";
|
|
8
8
|
import { BatchSchema, OperationCreateSchema, OperationSchema, } from "./backup.js";
|
|
9
9
|
import { DataLoader } from "./dataLoader.js";
|
|
10
|
-
import { transferDocumentsBetweenDbsLocalToLocal } from "./collections.js";
|
|
10
|
+
import { documentExists, transferDocumentsBetweenDbsLocalToLocal, } from "./collections.js";
|
|
11
11
|
import { transferDatabaseLocalToLocal } from "./databases.js";
|
|
12
12
|
import { transferStorageLocalToLocal } from "./storage.js";
|
|
13
13
|
export class ImportController {
|
|
@@ -166,7 +166,24 @@ export class ImportController {
|
|
|
166
166
|
for (let i = 0; i < dataSplit.length; i++) {
|
|
167
167
|
const batches = dataSplit[i];
|
|
168
168
|
console.log(`Processing batch ${i + 1} of ${dataSplit.length}`);
|
|
169
|
-
const
|
|
169
|
+
const documentExistsPromises = batches.map(async (item) => {
|
|
170
|
+
try {
|
|
171
|
+
const id = item.finalData.docId ||
|
|
172
|
+
item.finalData.userId ||
|
|
173
|
+
item.context.docId ||
|
|
174
|
+
item.context.userId;
|
|
175
|
+
if (!item.finalData) {
|
|
176
|
+
return Promise.resolve(null);
|
|
177
|
+
}
|
|
178
|
+
return tryAwaitWithRetry(async () => await documentExists(this.database, db.$id, collection.$id, item.finalData));
|
|
179
|
+
}
|
|
180
|
+
catch (error) {
|
|
181
|
+
console.error(error);
|
|
182
|
+
return Promise.resolve(null);
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
const documentExistsResults = await Promise.all(documentExistsPromises);
|
|
186
|
+
const batchPromises = batches.map((item, index) => {
|
|
170
187
|
try {
|
|
171
188
|
const id = item.finalData.docId ||
|
|
172
189
|
item.finalData.userId ||
|
|
@@ -178,7 +195,7 @@ export class ImportController {
|
|
|
178
195
|
if (item.finalData.hasOwnProperty("docId")) {
|
|
179
196
|
delete item.finalData.docId;
|
|
180
197
|
}
|
|
181
|
-
if (!item.finalData) {
|
|
198
|
+
if (!item.finalData || documentExistsResults[index]) {
|
|
182
199
|
return Promise.resolve();
|
|
183
200
|
}
|
|
184
201
|
return tryAwaitWithRetry(async () => await this.database.createDocument(db.$id, collection.$id, id, item.finalData));
|
|
@@ -19,6 +19,7 @@ export declare const ContextObject: z.ZodObject<{
|
|
|
19
19
|
oldKey: z.ZodOptional<z.ZodString>;
|
|
20
20
|
oldKeys: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
|
|
21
21
|
targetKey: z.ZodString;
|
|
22
|
+
valueToSet: z.ZodOptional<z.ZodAny>;
|
|
22
23
|
fileData: z.ZodOptional<z.ZodObject<{
|
|
23
24
|
name: z.ZodString;
|
|
24
25
|
path: z.ZodString;
|
|
@@ -54,6 +55,7 @@ export declare const ContextObject: z.ZodObject<{
|
|
|
54
55
|
targetKey: string;
|
|
55
56
|
oldKey?: string | undefined;
|
|
56
57
|
oldKeys?: string[] | undefined;
|
|
58
|
+
valueToSet?: any;
|
|
57
59
|
fileData?: {
|
|
58
60
|
path: string;
|
|
59
61
|
name: string;
|
|
@@ -71,6 +73,7 @@ export declare const ContextObject: z.ZodObject<{
|
|
|
71
73
|
targetKey: string;
|
|
72
74
|
oldKey?: string | undefined;
|
|
73
75
|
oldKeys?: string[] | undefined;
|
|
76
|
+
valueToSet?: any;
|
|
74
77
|
fileData?: {
|
|
75
78
|
path: string;
|
|
76
79
|
name: string;
|
|
@@ -93,6 +96,7 @@ export declare const ContextObject: z.ZodObject<{
|
|
|
93
96
|
targetKey: string;
|
|
94
97
|
oldKey?: string | undefined;
|
|
95
98
|
oldKeys?: string[] | undefined;
|
|
99
|
+
valueToSet?: any;
|
|
96
100
|
fileData?: {
|
|
97
101
|
path: string;
|
|
98
102
|
name: string;
|
|
@@ -116,6 +120,7 @@ export declare const ContextObject: z.ZodObject<{
|
|
|
116
120
|
targetKey: string;
|
|
117
121
|
oldKey?: string | undefined;
|
|
118
122
|
oldKeys?: string[] | undefined;
|
|
123
|
+
valueToSet?: any;
|
|
119
124
|
fileData?: {
|
|
120
125
|
path: string;
|
|
121
126
|
name: string;
|
|
@@ -136,6 +136,9 @@ export const startSetup = async (database, storage, config, setupOptions, appwri
|
|
|
136
136
|
await backupDatabase(database, db.$id, storage);
|
|
137
137
|
}
|
|
138
138
|
deletedCollections = await wipeDatabase(database, db.$id);
|
|
139
|
+
// Add a delay to ensure the deletion process completes
|
|
140
|
+
await new Promise((resolve) => setTimeout(resolve, 5000));
|
|
141
|
+
console.log(`Waited a few seconds to let the database wipe complete...`);
|
|
139
142
|
}
|
|
140
143
|
if (processDatabase) {
|
|
141
144
|
await createOrUpdateCollections(database, db.$id, config, deletedCollections);
|
package/package.json
CHANGED
|
@@ -1,7 +1,7 @@
|
|
|
1
1
|
{
|
|
2
2
|
"name": "appwrite-utils-cli",
|
|
3
3
|
"description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
|
|
4
|
-
"version": "0.0.
|
|
4
|
+
"version": "0.0.55",
|
|
5
5
|
"main": "src/main.ts",
|
|
6
6
|
"type": "module",
|
|
7
7
|
"repository": {
|
|
@@ -33,7 +33,7 @@
|
|
|
33
33
|
},
|
|
34
34
|
"dependencies": {
|
|
35
35
|
"@types/inquirer": "^9.0.7",
|
|
36
|
-
"appwrite-utils": "^0.2.
|
|
36
|
+
"appwrite-utils": "^0.2.8",
|
|
37
37
|
"commander": "^12.0.0",
|
|
38
38
|
"inquirer": "^9.2.20",
|
|
39
39
|
"js-yaml": "^4.1.0",
|
|
@@ -129,9 +129,7 @@ export const wipeDatabase = async (
|
|
|
129
129
|
databaseId: string
|
|
130
130
|
): Promise<{ collectionId: string; collectionName: string }[]> => {
|
|
131
131
|
console.log(`Wiping database: ${databaseId}`);
|
|
132
|
-
const
|
|
133
|
-
async () => await database.listCollections(databaseId)
|
|
134
|
-
);
|
|
132
|
+
const existingCollections = await fetchAllCollections(databaseId, database);
|
|
135
133
|
let collectionsDeleted: { collectionId: string; collectionName: string }[] =
|
|
136
134
|
[];
|
|
137
135
|
for (const { $id: collectionId, name: name } of existingCollections) {
|
|
@@ -103,7 +103,9 @@ export const convertObjectByAttributeMappings = (
|
|
|
103
103
|
};
|
|
104
104
|
|
|
105
105
|
for (const mapping of attributeMappings) {
|
|
106
|
-
if (
|
|
106
|
+
if (mapping.valueToSet !== undefined) {
|
|
107
|
+
result[mapping.targetKey] = mapping.valueToSet;
|
|
108
|
+
} else if (Array.isArray(mapping.oldKeys)) {
|
|
107
109
|
// Collect and flatten values from multiple oldKeys
|
|
108
110
|
const values = mapping.oldKeys
|
|
109
111
|
.map((oldKey) => resolveValue(obj, oldKey))
|
|
@@ -428,20 +428,57 @@ export class DataLoader {
|
|
|
428
428
|
}
|
|
429
429
|
}
|
|
430
430
|
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
431
|
+
/**
|
|
432
|
+
* Deals with merged users by iterating through all collections in the configuration.
|
|
433
|
+
* We have merged users if there are duplicate emails or phones in the import data.
|
|
434
|
+
* This function will iterate through all collections that are the same name as the
|
|
435
|
+
* users collection and pull out their primaryKeyField's. It will then loop through
|
|
436
|
+
* each collection and find any documents that have a
|
|
437
|
+
*
|
|
438
|
+
* @return {void} This function does not return anything.
|
|
439
|
+
*/
|
|
440
|
+
// dealWithMergedUsers() {
|
|
441
|
+
// const usersCollectionKey = this.getCollectionKey(
|
|
442
|
+
// this.config.usersCollectionName
|
|
443
|
+
// );
|
|
444
|
+
// const usersCollectionData = this.importMap.get(usersCollectionKey);
|
|
445
|
+
|
|
446
|
+
// if (!this.config.collections) {
|
|
447
|
+
// console.log("No collections found in configuration.");
|
|
448
|
+
// return;
|
|
449
|
+
// }
|
|
450
|
+
|
|
451
|
+
// let needsUpdate = false;
|
|
452
|
+
// let numUpdates = 0;
|
|
453
|
+
|
|
454
|
+
// for (const collectionConfig of this.config.collections) {
|
|
455
|
+
// const collectionKey = this.getCollectionKey(collectionConfig.name);
|
|
456
|
+
// const collectionData = this.importMap.get(collectionKey);
|
|
457
|
+
// const collectionImportDefs = collectionConfig.importDefs;
|
|
458
|
+
// const collectionIdMappings = collectionImportDefs
|
|
459
|
+
// .map((importDef) => importDef.idMappings)
|
|
460
|
+
// .flat()
|
|
461
|
+
// .filter((idMapping) => idMapping !== undefined && idMapping !== null);
|
|
462
|
+
// if (!collectionData || !collectionData.data) continue;
|
|
463
|
+
// for (const dataItem of collectionData.data) {
|
|
464
|
+
// for (const idMapping of collectionIdMappings) {
|
|
465
|
+
// // We know it's the users collection here
|
|
466
|
+
// if (this.getCollectionKey(idMapping.targetCollection) === usersCollectionKey) {
|
|
467
|
+
// const targetFieldKey = idMapping.targetFieldToMatch || idMapping.targetField;
|
|
468
|
+
// if (targetFieldKey === )
|
|
469
|
+
// const targetValue = dataItem.finalData[targetFieldKey];
|
|
470
|
+
// const targetCollectionData = this.importMap.get(this.getCollectionKey(idMapping.targetCollection));
|
|
471
|
+
// if (!targetCollectionData || !targetCollectionData.data) continue;
|
|
472
|
+
// const foundData = targetCollectionData.data.filter(({ context }) => {
|
|
473
|
+
// const targetValue = context[targetFieldKey];
|
|
474
|
+
// const isMatch = `${targetValue}` === `${valueToMatch}`;
|
|
475
|
+
// return isMatch && targetValue !== undefined && targetValue !== null;
|
|
476
|
+
// });
|
|
477
|
+
// }
|
|
478
|
+
// }
|
|
479
|
+
// }
|
|
480
|
+
// }
|
|
481
|
+
// }
|
|
445
482
|
|
|
446
483
|
updateOldReferencesForNew() {
|
|
447
484
|
if (!this.config.collections) {
|
|
@@ -26,7 +26,10 @@ import {
|
|
|
26
26
|
OperationSchema,
|
|
27
27
|
} from "./backup.js";
|
|
28
28
|
import { DataLoader, type CollectionImportData } from "./dataLoader.js";
|
|
29
|
-
import {
|
|
29
|
+
import {
|
|
30
|
+
documentExists,
|
|
31
|
+
transferDocumentsBetweenDbsLocalToLocal,
|
|
32
|
+
} from "./collections.js";
|
|
30
33
|
import { transferDatabaseLocalToLocal } from "./databases.js";
|
|
31
34
|
import { transferStorageLocalToLocal } from "./storage.js";
|
|
32
35
|
|
|
@@ -244,20 +247,50 @@ export class ImportController {
|
|
|
244
247
|
for (let i = 0; i < dataSplit.length; i++) {
|
|
245
248
|
const batches = dataSplit[i];
|
|
246
249
|
console.log(`Processing batch ${i + 1} of ${dataSplit.length}`);
|
|
247
|
-
|
|
250
|
+
|
|
251
|
+
const documentExistsPromises = batches.map(async (item) => {
|
|
248
252
|
try {
|
|
249
253
|
const id =
|
|
250
254
|
item.finalData.docId ||
|
|
251
255
|
item.finalData.userId ||
|
|
252
256
|
item.context.docId ||
|
|
253
257
|
item.context.userId;
|
|
258
|
+
|
|
259
|
+
if (!item.finalData) {
|
|
260
|
+
return Promise.resolve(null);
|
|
261
|
+
}
|
|
262
|
+
return tryAwaitWithRetry(
|
|
263
|
+
async () =>
|
|
264
|
+
await documentExists(
|
|
265
|
+
this.database,
|
|
266
|
+
db.$id,
|
|
267
|
+
collection.$id,
|
|
268
|
+
item.finalData
|
|
269
|
+
)
|
|
270
|
+
);
|
|
271
|
+
} catch (error) {
|
|
272
|
+
console.error(error);
|
|
273
|
+
return Promise.resolve(null);
|
|
274
|
+
}
|
|
275
|
+
});
|
|
276
|
+
|
|
277
|
+
const documentExistsResults = await Promise.all(documentExistsPromises);
|
|
278
|
+
|
|
279
|
+
const batchPromises = batches.map((item, index) => {
|
|
280
|
+
try {
|
|
281
|
+
const id =
|
|
282
|
+
item.finalData.docId ||
|
|
283
|
+
item.finalData.userId ||
|
|
284
|
+
item.context.docId ||
|
|
285
|
+
item.context.userId;
|
|
286
|
+
|
|
254
287
|
if (item.finalData.hasOwnProperty("userId")) {
|
|
255
288
|
delete item.finalData.userId;
|
|
256
289
|
}
|
|
257
290
|
if (item.finalData.hasOwnProperty("docId")) {
|
|
258
291
|
delete item.finalData.docId;
|
|
259
292
|
}
|
|
260
|
-
if (!item.finalData) {
|
|
293
|
+
if (!item.finalData || documentExistsResults[index]) {
|
|
261
294
|
return Promise.resolve();
|
|
262
295
|
}
|
|
263
296
|
return tryAwaitWithRetry(
|
|
@@ -274,6 +307,7 @@ export class ImportController {
|
|
|
274
307
|
return Promise.resolve();
|
|
275
308
|
}
|
|
276
309
|
});
|
|
310
|
+
|
|
277
311
|
// Wait for all promises in the current batch to resolve
|
|
278
312
|
await Promise.all(batchPromises);
|
|
279
313
|
console.log(`Completed batch ${i + 1} of ${dataSplit.length}`);
|
|
@@ -216,6 +216,9 @@ export const startSetup = async (
|
|
|
216
216
|
await backupDatabase(database, db.$id, storage);
|
|
217
217
|
}
|
|
218
218
|
deletedCollections = await wipeDatabase(database, db.$id);
|
|
219
|
+
// Add a delay to ensure the deletion process completes
|
|
220
|
+
await new Promise((resolve) => setTimeout(resolve, 5000));
|
|
221
|
+
console.log(`Waited a few seconds to let the database wipe complete...`);
|
|
219
222
|
}
|
|
220
223
|
|
|
221
224
|
if (processDatabase) {
|