appwrite-utils-cli 0.0.48 → 0.0.51
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/migrations/collections.js +2 -2
- package/dist/migrations/dataLoader.d.ts +0 -10
- package/dist/migrations/dataLoader.js +31 -5
- package/dist/migrations/migrationHelper.js +4 -4
- package/package.json +1 -1
- package/src/migrations/collections.ts +9 -6
- package/src/migrations/dataLoader.ts +35 -4
- package/src/migrations/migrationHelper.ts +24 -22
package/README.md
CHANGED
```diff
@@ -132,6 +132,8 @@ This setup ensures that developers have robust tools at their fingertips to mana
 
 ### Changelog
 
+- 0.0.50: Actually fixed the slight bug, it was really in the `mergeObjects`
+- 0.0.49: Fixed a slight bug with `dataLoader` not mapping updates correctly with `updateMapping`
 - 0.0.48: Added `--transfer`, `--fromdb <targetDatabaseId>`, `--targetdb <targetDatabaseId>`, `--transferendpoint <transferEndpoint>`, `--transferproject <transferProjectId>`, `--transferkey <transferApiKey>`. Additionally, I've added `--fromcoll <collectionId>` and `--targetcoll <collectionId>`. These allow you to do a few things. First, you can now transfer databases in the same project, and from local to a remote project. Second, you can now specify specific collections to transfer from one place to another, with all of their data. If `--fromcoll` and `--targetcoll` are ommitted, it will transfer the databases. During the database transfer, it will create any missing collections, attributes, and indices.
 - 0.0.47: Minor bugfixes in many releases, too small to take note of
 - 0.0.38: Lots of optimizations done to the code, added `tryAwaitWithRetry` for `fetch failed` and others like it errors (looking at you `server error`) -- this should prevent things from going sideways.
```
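The `tryAwaitWithRetry` helper from 0.0.38 is called throughout the hunks below but its implementation is not part of this diff. A minimal sketch of what such a wrapper could look like, inferred from the `tryAwaitWithRetry(async () => await ...)` call shape; the retry count, delay, and matched error strings here are assumptions, not the package's actual values:

```ts
// Hypothetical sketch only -- the real helper in appwrite-utils-cli may
// differ in signature, defaults, and which errors it treats as transient.
export const tryAwaitWithRetry = async <T>(
  fn: () => Promise<T>,
  maxRetries = 3, // assumed default
  delayMs = 500 // assumed default
): Promise<T> => {
  let lastError: unknown;
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;
      const message = error instanceof Error ? error.message : `${error}`;
      // Retry only transient failures like "fetch failed" or "server error".
      if (!/fetch failed|server error/i.test(message)) throw error;
      await new Promise((resolve) => setTimeout(resolve, delayMs));
    }
  }
  throw lastError;
};
```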
package/dist/migrations/collections.js
CHANGED

```diff
@@ -257,10 +257,10 @@ export const transferDocumentsBetweenDbsLocalToLocal = async (db, fromDbId, toDb
     await Promise.all(batchedPromises);
     totalDocumentsTransferred += fromCollDocs.documents.length;
     while (fromCollDocs.documents.length === 50) {
-        fromCollDocs = await db.listDocuments(fromDbId, fromCollId, [
+        fromCollDocs = await tryAwaitWithRetry(async () => await db.listDocuments(fromDbId, fromCollId, [
            Query.limit(50),
            Query.cursorAfter(fromCollDocs.documents[fromCollDocs.documents.length - 1].$id),
-        ]);
+        ]));
        const batchedPromises = fromCollDocs.documents.map((doc) => {
            const toCreateObject = {
                ...doc,
```
package/dist/migrations/dataLoader.d.ts
CHANGED

```diff
@@ -278,16 +278,6 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
     originalIdField: z.ZodString;
     targetField: z.ZodOptional<z.ZodString>;
 }, "strip", z.ZodTypeAny, {
-    /**
-     * Prepares the data for updating documents within a collection.
-     * This method loads the raw data based on the import definition, transforms it according to the attribute mappings,
-     * finds the new ID for each item based on the primary key or update mapping, and then validates the transformed data.
-     * If the data is valid, it updates the import definition with any post-import actions and adds the item to the current collection data.
-     *
-     * @param db - The database configuration.
-     * @param collection - The collection configuration.
-     * @param importDef - The import definition containing the attribute mappings and other relevant info.
-     */
     originalIdField: string;
     targetField?: string | undefined;
 }, {
```
package/dist/migrations/dataLoader.js
CHANGED

```diff
@@ -119,6 +119,26 @@ export class DataLoader {
                 continue;
             }
         }
+        // Because the objects should technically always be validated FIRST, we can assume the update keys are also defined on the source object
+        for (const [key, value] of Object.entries(update)) {
+            if (value === undefined || value === null || value === "") {
+                continue;
+            }
+            else if (!Object.hasOwn(source, key)) {
+                result[key] = value;
+            }
+            else if (typeof source[key] === "object" && typeof value === "object") {
+                result[key] = this.mergeObjects(source[key], value);
+            }
+            else if (Array.isArray(source[key]) && Array.isArray(value)) {
+                result[key] = [...new Set([...source[key], ...value])].filter((item) => item !== null && item !== undefined && item !== "");
+            }
+            else if (source[key] === undefined ||
+                source[key] === null ||
+                source[key] === "") {
+                result[key] = value;
+            }
+        }
         return result;
     }
     // Method to load data from a file specified in the import definition
```
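The merge policy added here reads directly off the branches: empty update values are skipped, keys absent from the source are copied over, nested objects are merged recursively, arrays are unioned with empty entries dropped, and empty source values are overwritten. A standalone sketch of that policy for experimentation (illustrative, not the package's exact code; note it tests `Array.isArray` before `typeof ... === "object"`, since arrays are also objects in JavaScript):

```ts
type AnyRecord = Record<string, any>;

// Illustrative re-statement of the merge policy from the hunk above.
function mergeObjectsSketch(source: AnyRecord, update: AnyRecord): AnyRecord {
  const result: AnyRecord = { ...source };
  for (const [key, value] of Object.entries(update)) {
    if (value === undefined || value === null || value === "") {
      continue; // empty update values never clobber source data
    } else if (!Object.hasOwn(source, key)) {
      result[key] = value; // new key: copy it over
    } else if (Array.isArray(source[key]) && Array.isArray(value)) {
      // union the arrays, dropping empty entries
      result[key] = [...new Set([...source[key], ...value])].filter(
        (item) => item !== null && item !== undefined && item !== ""
      );
    } else if (typeof source[key] === "object" && typeof value === "object") {
      result[key] = mergeObjectsSketch(source[key], value); // deep merge
    } else if (
      source[key] === undefined ||
      source[key] === null ||
      source[key] === ""
    ) {
      result[key] = value; // fill in empty source values
    }
  }
  return result;
}

// Example: arrays union, nested objects merge, empty strings are ignored.
mergeObjectsSketch(
  { tags: ["a"], meta: { x: 1 }, name: "" },
  { tags: ["b", ""], meta: { y: 2 }, name: "Doc" }
);
// => { tags: ["a", "b"], meta: { x: 1, y: 2 }, name: "Doc" }
```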
```diff
@@ -878,7 +898,10 @@ export class DataLoader {
         let itemDataToUpdate;
         // Try to find itemDataToUpdate using updateMapping
         if (importDef.updateMapping) {
-
+            console.log(importDef.updateMapping);
+            oldId =
+                item[importDef.updateMapping.originalIdField] ||
+                    transformedData[importDef.updateMapping.originalIdField];
             if (oldId) {
                 itemDataToUpdate = currentData?.data.find(({ context, rawData, finalData }) => {
                     const targetField = importDef.updateMapping.targetField ??
```
```diff
@@ -896,7 +919,9 @@ export class DataLoader {
             }
             // If updateMapping is not defined or did not find the item, use primaryKeyField
             if (!itemDataToUpdate && importDef.primaryKeyField) {
-                oldId =
+                oldId =
+                    item[importDef.primaryKeyField] ||
+                        transformedData[importDef.primaryKeyField];
                 if (oldId) {
                     newId = oldIdToNewIdMap?.get(`${oldId}`);
                     if (!newId &&
```
```diff
@@ -919,13 +944,14 @@ export class DataLoader {
                 continue;
             }
             if (!newId && !itemDataToUpdate) {
-                logger.error(`No new id found for collection ${collection.name} for updateDef ${JSON.stringify(item, null, 2)} but it says it's supposed to have one...`);
+                logger.error(`No new id && no data found for collection ${collection.name} for updateDef ${JSON.stringify(item, null, 2)} but it says it's supposed to have one...`);
                 continue;
             }
             else if (itemDataToUpdate) {
-                newId =
+                newId =
+                    itemDataToUpdate.finalData.docId || itemDataToUpdate.context.docId;
                 if (!newId) {
-                    logger.error(`No new id found for collection ${collection.name} for updateDef ${JSON.stringify(item, null, 2)} but it says it's supposed to have one...`);
+                    logger.error(`No new id found for collection ${collection.name} for updateDef ${JSON.stringify(item, null, 2)} but has itemDataToUpdate ${JSON.stringify(itemDataToUpdate, null, 2)} but it says it's supposed to have one...`);
                     continue;
                 }
             }
```
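These three hunks are the fix described by the 0.0.49 changelog entry: when resolving which existing document an update refers to, the raw import `item` is now checked before the `transformedData`, for both `updateMapping.originalIdField` and `primaryKeyField`. A minimal sketch of that fallback, with hypothetical record shapes and field names:

```ts
// Illustration of the id-resolution fallback introduced in 0.0.49.
// The record shapes and "legacyId" field below are invented for the example.
const resolveOldId = (
  item: Record<string, any>,
  transformedData: Record<string, any>,
  idField: string
): string | undefined => {
  // Prefer the raw import item; fall back to the transformed document.
  const oldId = item[idField] || transformedData[idField];
  return oldId === undefined ? undefined : `${oldId}`;
};

// The raw item wins when both carry the field...
resolveOldId({ legacyId: "42" }, { legacyId: "99" }, "legacyId"); // "42"
// ...and the transformed data fills in when the raw item lacks it.
resolveOldId({}, { legacyId: "99" }, "legacyId"); // "99"
```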
package/dist/migrations/migrationHelper.js
CHANGED

```diff
@@ -67,18 +67,18 @@ export const getAfterImportOperations = async (database, collectionId) => {
     return allOps;
 };
 export const findOrCreateOperation = async (database, collectionId, operationType, additionalQueries) => {
-    const operations = await database.listDocuments("migrations", "currentOperations", [
+    const operations = await tryAwaitWithRetry(async () => await database.listDocuments("migrations", "currentOperations", [
         Query.equal("collectionId", collectionId),
         Query.equal("operationType", operationType),
         Query.equal("status", "pending"),
         ...(additionalQueries || []),
-    ]);
+    ]));
    if (operations.documents.length > 0) {
        return OperationSchema.parse(operations.documents[0]); // Assuming the first document is the operation we want
    }
    else {
        // Create a new operation document
-        const op = await database.createDocument("migrations", "currentOperations", ID.unique(), {
+        const op = await tryAwaitWithRetry(async () => await database.createDocument("migrations", "currentOperations", ID.unique(), {
            operationType,
            collectionId,
            status: "pending",
@@ -86,7 +86,7 @@ export const findOrCreateOperation = async (database, collectionId, operationTyp
            progress: 0,
            total: 0,
            error: "",
-        });
+        }));
        return OperationSchema.parse(op);
    }
 };
```
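`findOrCreateOperation` is an idempotent helper: it returns the pending operation document for a given collection and operation type if one exists, and otherwise creates one, with both calls now retried. A sketch of how it might be invoked; the client wiring and the `"import"` operation type are assumptions based on the node-appwrite SDK, not confirmed by this diff:

```ts
import { Client, Databases } from "node-appwrite";
// import { findOrCreateOperation } from "./migrations/migrationHelper.js"; // path assumed

// Hypothetical wiring; endpoint, project, and key come from your environment.
const client = new Client()
  .setEndpoint(process.env.APPWRITE_ENDPOINT!)
  .setProject(process.env.APPWRITE_PROJECT!)
  .setKey(process.env.APPWRITE_API_KEY!);
const database = new Databases(client);

// Reuses the pending operation for this collection if one exists,
// otherwise creates one with zeroed progress counters.
const operation = await findOrCreateOperation(database, "myCollectionId", "import");
console.log(operation.status); // "pending"
```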
package/package.json
CHANGED

```diff
@@ -1,7 +1,7 @@
 {
   "name": "appwrite-utils-cli",
   "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-  "version": "0.0.48",
+  "version": "0.0.51",
   "main": "src/main.ts",
   "type": "module",
   "repository": {
```
package/src/migrations/collections.ts
CHANGED

```diff
@@ -399,12 +399,15 @@ export const transferDocumentsBetweenDbsLocalToLocal = async (
     await Promise.all(batchedPromises);
     totalDocumentsTransferred += fromCollDocs.documents.length;
     while (fromCollDocs.documents.length === 50) {
-      fromCollDocs = await db.listDocuments(fromDbId, fromCollId, [
-        Query.limit(50),
-        Query.cursorAfter(
-          fromCollDocs.documents[fromCollDocs.documents.length - 1].$id
-        ),
-      ]);
+      fromCollDocs = await tryAwaitWithRetry(
+        async () =>
+          await db.listDocuments(fromDbId, fromCollId, [
+            Query.limit(50),
+            Query.cursorAfter(
+              fromCollDocs.documents[fromCollDocs.documents.length - 1].$id
+            ),
+          ])
+      );
       const batchedPromises = fromCollDocs.documents.map((doc) => {
         const toCreateObject: Partial<typeof doc> = {
           ...doc,
```
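The loop being wrapped here is Appwrite's standard cursor pagination: fetch pages of 50 and follow `Query.cursorAfter` from the last document until a short page arrives, now retrying each page fetch. A condensed sketch of the pattern, assuming the `tryAwaitWithRetry` sketch above is in scope and using placeholder IDs:

```ts
import { Databases, Query, type Models } from "node-appwrite";

// Drain a collection page by page via cursor pagination, retrying each fetch.
async function listAllDocuments(db: Databases, dbId: string, collId: string) {
  const all: Models.Document[] = [];
  let page = await tryAwaitWithRetry(async () =>
    db.listDocuments(dbId, collId, [Query.limit(50)])
  );
  all.push(...page.documents);
  while (page.documents.length === 50) {
    page = await tryAwaitWithRetry(async () =>
      db.listDocuments(dbId, collId, [
        Query.limit(50),
        // Resume after the last document of the previous page.
        Query.cursorAfter(page.documents[page.documents.length - 1].$id),
      ])
    );
    all.push(...page.documents);
  }
  return all;
}
```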
package/src/migrations/dataLoader.ts
CHANGED

```diff
@@ -150,6 +150,26 @@ export class DataLoader {
         continue;
       }
     }
+    // Because the objects should technically always be validated FIRST, we can assume the update keys are also defined on the source object
+    for (const [key, value] of Object.entries(update)) {
+      if (value === undefined || value === null || value === "") {
+        continue;
+      } else if (!Object.hasOwn(source, key)) {
+        result[key] = value;
+      } else if (typeof source[key] === "object" && typeof value === "object") {
+        result[key] = this.mergeObjects(source[key], value);
+      } else if (Array.isArray(source[key]) && Array.isArray(value)) {
+        result[key] = [...new Set([...source[key], ...value])].filter(
+          (item) => item !== null && item !== undefined && item !== ""
+        );
+      } else if (
+        source[key] === undefined ||
+        source[key] === null ||
+        source[key] === ""
+      ) {
+        result[key] = value;
+      }
+    }
 
     return result;
   }
```
```diff
@@ -1207,13 +1227,17 @@ export class DataLoader {
 
       // Try to find itemDataToUpdate using updateMapping
       if (importDef.updateMapping) {
-
+        console.log(importDef.updateMapping);
+        oldId =
+          item[importDef.updateMapping.originalIdField] ||
+          transformedData[importDef.updateMapping.originalIdField];
         if (oldId) {
           itemDataToUpdate = currentData?.data.find(
             ({ context, rawData, finalData }) => {
               const targetField =
                 importDef.updateMapping!.targetField ??
                 importDef.updateMapping!.originalIdField;
+
               return (
                 `${context[targetField]}` === `${oldId}` ||
                 `${rawData[targetField]}` === `${oldId}` ||
```
```diff
@@ -1232,7 +1256,9 @@ export class DataLoader {
 
       // If updateMapping is not defined or did not find the item, use primaryKeyField
       if (!itemDataToUpdate && importDef.primaryKeyField) {
-        oldId =
+        oldId =
+          item[importDef.primaryKeyField] ||
+          transformedData[importDef.primaryKeyField];
         if (oldId) {
           newId = oldIdToNewIdMap?.get(`${oldId}`);
           if (
```
```diff
@@ -1268,7 +1294,7 @@ export class DataLoader {
 
       if (!newId && !itemDataToUpdate) {
         logger.error(
-          `No new id found for collection ${
+          `No new id && no data found for collection ${
             collection.name
           } for updateDef ${JSON.stringify(
             item,
```
```diff
@@ -1278,7 +1304,8 @@ export class DataLoader {
         );
         continue;
       } else if (itemDataToUpdate) {
-        newId =
+        newId =
+          itemDataToUpdate.finalData.docId || itemDataToUpdate.context.docId;
         if (!newId) {
           logger.error(
             `No new id found for collection ${
```
```diff
@@ -1287,6 +1314,10 @@ export class DataLoader {
             item,
             null,
             2
+          )} but has itemDataToUpdate ${JSON.stringify(
+            itemDataToUpdate,
+            null,
+            2
           )} but it says it's supposed to have one...`
         );
         continue;
```
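Read together, these hunks define the update lookup order: `updateMapping` is consulted first, matching `originalIdField` (or `targetField`, when set) against the already-loaded `context`, `rawData`, and `finalData`, and `primaryKeyField` is the fallback; in both paths the raw `item` now takes precedence over `transformedData` when resolving the old ID. A hypothetical import-definition fragment showing those two knobs (the field names are invented for illustration):

```ts
// Hypothetical importDef fragment; "legacyId" and "oldId" are invented names.
const importDef = {
  primaryKeyField: "legacyId", // fallback key when updateMapping finds nothing
  updateMapping: {
    originalIdField: "legacyId", // read from the incoming update item
    targetField: "oldId", // optional: field to match on already-imported items
  },
};
```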
package/src/migrations/migrationHelper.ts
CHANGED

```diff
@@ -110,34 +110,36 @@ export const findOrCreateOperation = async (
   operationType: string,
   additionalQueries?: string[]
 ) => {
-  const operations = await database.listDocuments(
-    "migrations",
-    "currentOperations",
-    [
-      Query.equal("collectionId", collectionId),
-      Query.equal("operationType", operationType),
-      Query.equal("status", "pending"),
-      ...(additionalQueries || []),
-    ]
+  const operations = await tryAwaitWithRetry(
+    async () =>
+      await database.listDocuments("migrations", "currentOperations", [
+        Query.equal("collectionId", collectionId),
+        Query.equal("operationType", operationType),
+        Query.equal("status", "pending"),
+        ...(additionalQueries || []),
+      ])
   );
 
   if (operations.documents.length > 0) {
     return OperationSchema.parse(operations.documents[0]); // Assuming the first document is the operation we want
   } else {
     // Create a new operation document
-    const op = await database.createDocument(
-      "migrations",
-      "currentOperations",
-      ID.unique(),
-      {
-        operationType,
-        collectionId,
-        status: "pending",
-        batches: [],
-        progress: 0,
-        total: 0,
-        error: "",
-      }
+    const op = await tryAwaitWithRetry(
+      async () =>
+        await database.createDocument(
+          "migrations",
+          "currentOperations",
+          ID.unique(),
+          {
+            operationType,
+            collectionId,
+            status: "pending",
+            batches: [],
+            progress: 0,
+            total: 0,
+            error: "",
+          }
+        )
     );
 
     return OperationSchema.parse(op);
```