appwrite-utils-cli 0.0.49 → 0.0.52

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -132,6 +132,7 @@ This setup ensures that developers have robust tools at their fingertips to mana
132
132
 
133
133
  ### Changelog
134
134
 
135
+ - 0.0.50: Actually fixed the slight bug — it was really in `mergeObjects`
135
136
  - 0.0.49: Fixed a slight bug with `dataLoader` not mapping updates correctly with `updateMapping`
136
137
  - 0.0.48: Added `--transfer`, `--fromdb <targetDatabaseId>`, `--targetdb <targetDatabaseId>`, `--transferendpoint <transferEndpoint>`, `--transferproject <transferProjectId>`, `--transferkey <transferApiKey>`. Additionally, I've added `--fromcoll <collectionId>` and `--targetcoll <collectionId>`. These allow you to do a few things. First, you can now transfer databases in the same project, and from local to a remote project. Second, you can now specify specific collections to transfer from one place to another, with all of their data. If `--fromcoll` and `--targetcoll` are omitted, it will transfer the databases. During the database transfer, it will create any missing collections, attributes, and indices.
137
138
  - 0.0.47: Minor bugfixes in many releases, too small to take note of
@@ -257,10 +257,10 @@ export const transferDocumentsBetweenDbsLocalToLocal = async (db, fromDbId, toDb
257
257
  await Promise.all(batchedPromises);
258
258
  totalDocumentsTransferred += fromCollDocs.documents.length;
259
259
  while (fromCollDocs.documents.length === 50) {
260
- fromCollDocs = await db.listDocuments(fromDbId, fromCollId, [
260
+ fromCollDocs = await tryAwaitWithRetry(async () => await db.listDocuments(fromDbId, fromCollId, [
261
261
  Query.limit(50),
262
262
  Query.cursorAfter(fromCollDocs.documents[fromCollDocs.documents.length - 1].$id),
263
- ]);
263
+ ]));
264
264
  const batchedPromises = fromCollDocs.documents.map((doc) => {
265
265
  const toCreateObject = {
266
266
  ...doc,
@@ -192,7 +192,14 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
192
192
  type: z.ZodLiteral<"ip">;
193
193
  error: z.ZodOptional<z.ZodDefault<z.ZodString>>;
194
194
  required: z.ZodOptional<z.ZodDefault<z.ZodBoolean>>;
195
- array: z.ZodOptional<z.ZodDefault<z.ZodBoolean>>;
195
+ array: z.ZodOptional<z.ZodDefault<z.ZodBoolean>>; /**
196
+ * Prepares the data for creating documents in a collection.
197
+ * This involves loading the data, transforming it, and handling ID mappings.
198
+ *
199
+ * @param db - The database configuration.
200
+ * @param collection - The collection configuration.
201
+ * @param importDef - The import definition containing the attribute mappings and other relevant info.
202
+ */
196
203
  xdefault: z.ZodOptional<z.ZodNullable<z.ZodString>>;
197
204
  description: z.ZodOptional<z.ZodNullable<z.ZodUnion<[z.ZodString, z.ZodRecord<z.ZodString, z.ZodString>]>>>;
198
205
  }, "strip", z.ZodTypeAny, {
@@ -278,16 +285,6 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
278
285
  originalIdField: z.ZodString;
279
286
  targetField: z.ZodOptional<z.ZodString>;
280
287
  }, "strip", z.ZodTypeAny, {
281
- /**
282
- * Prepares the data for updating documents within a collection.
283
- * This method loads the raw data based on the import definition, transforms it according to the attribute mappings,
284
- * finds the new ID for each item based on the primary key or update mapping, and then validates the transformed data.
285
- * If the data is valid, it updates the import definition with any post-import actions and adds the item to the current collection data.
286
- *
287
- * @param db - The database configuration.
288
- * @param collection - The collection configuration.
289
- * @param importDef - The import definition containing the attribute mappings and other relevant info.
290
- */
291
288
  originalIdField: string;
292
289
  targetField?: string | undefined;
293
290
  }, {
@@ -119,6 +119,26 @@ export class DataLoader {
119
119
  continue;
120
120
  }
121
121
  }
122
+ // Because the objects should technically always be validated FIRST, we can assume the update keys are also defined on the source object
123
+ for (const [key, value] of Object.entries(update)) {
124
+ if (value === undefined || value === null || value === "") {
125
+ continue;
126
+ }
127
+ else if (!Object.hasOwn(source, key)) {
128
+ result[key] = value;
129
+ }
130
+ else if (typeof source[key] === "object" && typeof value === "object") {
131
+ result[key] = this.mergeObjects(source[key], value);
132
+ }
133
+ else if (Array.isArray(source[key]) && Array.isArray(value)) {
134
+ result[key] = [...new Set([...source[key], ...value])].filter((item) => item !== null && item !== undefined && item !== "");
135
+ }
136
+ else if (source[key] === undefined ||
137
+ source[key] === null ||
138
+ source[key] === "") {
139
+ result[key] = value;
140
+ }
141
+ }
122
142
  return result;
123
143
  }
124
144
  // Method to load data from a file specified in the import definition
@@ -314,7 +334,7 @@ export class DataLoader {
314
334
  }
315
335
  }
316
336
  console.log("Running update references");
317
- this.dealWithMergedUsers();
337
+ // this.dealWithMergedUsers();
318
338
  this.updateOldReferencesForNew();
319
339
  console.log("Done running update references");
320
340
  }
@@ -67,18 +67,18 @@ export const getAfterImportOperations = async (database, collectionId) => {
67
67
  return allOps;
68
68
  };
69
69
  export const findOrCreateOperation = async (database, collectionId, operationType, additionalQueries) => {
70
- const operations = await database.listDocuments("migrations", "currentOperations", [
70
+ const operations = await tryAwaitWithRetry(async () => await database.listDocuments("migrations", "currentOperations", [
71
71
  Query.equal("collectionId", collectionId),
72
72
  Query.equal("operationType", operationType),
73
73
  Query.equal("status", "pending"),
74
74
  ...(additionalQueries || []),
75
- ]);
75
+ ]));
76
76
  if (operations.documents.length > 0) {
77
77
  return OperationSchema.parse(operations.documents[0]); // Assuming the first document is the operation we want
78
78
  }
79
79
  else {
80
80
  // Create a new operation document
81
- const op = await database.createDocument("migrations", "currentOperations", ID.unique(), {
81
+ const op = await tryAwaitWithRetry(async () => await database.createDocument("migrations", "currentOperations", ID.unique(), {
82
82
  operationType,
83
83
  collectionId,
84
84
  status: "pending",
@@ -86,7 +86,7 @@ export const findOrCreateOperation = async (database, collectionId, operationTyp
86
86
  progress: 0,
87
87
  total: 0,
88
88
  error: "",
89
- });
89
+ }));
90
90
  return OperationSchema.parse(op);
91
91
  }
92
92
  };
package/package.json CHANGED
@@ -1,7 +1,7 @@
1
1
  {
2
2
  "name": "appwrite-utils-cli",
3
3
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
4
- "version": "0.0.49",
4
+ "version": "0.0.52",
5
5
  "main": "src/main.ts",
6
6
  "type": "module",
7
7
  "repository": {
@@ -399,12 +399,15 @@ export const transferDocumentsBetweenDbsLocalToLocal = async (
399
399
  await Promise.all(batchedPromises);
400
400
  totalDocumentsTransferred += fromCollDocs.documents.length;
401
401
  while (fromCollDocs.documents.length === 50) {
402
- fromCollDocs = await db.listDocuments(fromDbId, fromCollId, [
403
- Query.limit(50),
404
- Query.cursorAfter(
405
- fromCollDocs.documents[fromCollDocs.documents.length - 1].$id
406
- ),
407
- ]);
402
+ fromCollDocs = await tryAwaitWithRetry(
403
+ async () =>
404
+ await db.listDocuments(fromDbId, fromCollId, [
405
+ Query.limit(50),
406
+ Query.cursorAfter(
407
+ fromCollDocs.documents[fromCollDocs.documents.length - 1].$id
408
+ ),
409
+ ])
410
+ );
408
411
  const batchedPromises = fromCollDocs.documents.map((doc) => {
409
412
  const toCreateObject: Partial<typeof doc> = {
410
413
  ...doc,
@@ -150,6 +150,26 @@ export class DataLoader {
150
150
  continue;
151
151
  }
152
152
  }
153
+ // Because the objects should technically always be validated FIRST, we can assume the update keys are also defined on the source object
154
+ for (const [key, value] of Object.entries(update)) {
155
+ if (value === undefined || value === null || value === "") {
156
+ continue;
157
+ } else if (!Object.hasOwn(source, key)) {
158
+ result[key] = value;
159
+ } else if (typeof source[key] === "object" && typeof value === "object") {
160
+ result[key] = this.mergeObjects(source[key], value);
161
+ } else if (Array.isArray(source[key]) && Array.isArray(value)) {
162
+ result[key] = [...new Set([...source[key], ...value])].filter(
163
+ (item) => item !== null && item !== undefined && item !== ""
164
+ );
165
+ } else if (
166
+ source[key] === undefined ||
167
+ source[key] === null ||
168
+ source[key] === ""
169
+ ) {
170
+ result[key] = value;
171
+ }
172
+ }
153
173
 
154
174
  return result;
155
175
  }
@@ -393,7 +413,7 @@ export class DataLoader {
393
413
  }
394
414
  }
395
415
  console.log("Running update references");
396
- this.dealWithMergedUsers();
416
+ // this.dealWithMergedUsers();
397
417
  this.updateOldReferencesForNew();
398
418
  console.log("Done running update references");
399
419
  }
@@ -110,34 +110,36 @@ export const findOrCreateOperation = async (
110
110
  operationType: string,
111
111
  additionalQueries?: string[]
112
112
  ) => {
113
- const operations = await database.listDocuments(
114
- "migrations",
115
- "currentOperations",
116
- [
117
- Query.equal("collectionId", collectionId),
118
- Query.equal("operationType", operationType),
119
- Query.equal("status", "pending"),
120
- ...(additionalQueries || []),
121
- ]
113
+ const operations = await tryAwaitWithRetry(
114
+ async () =>
115
+ await database.listDocuments("migrations", "currentOperations", [
116
+ Query.equal("collectionId", collectionId),
117
+ Query.equal("operationType", operationType),
118
+ Query.equal("status", "pending"),
119
+ ...(additionalQueries || []),
120
+ ])
122
121
  );
123
122
 
124
123
  if (operations.documents.length > 0) {
125
124
  return OperationSchema.parse(operations.documents[0]); // Assuming the first document is the operation we want
126
125
  } else {
127
126
  // Create a new operation document
128
- const op = await database.createDocument(
129
- "migrations",
130
- "currentOperations",
131
- ID.unique(),
132
- {
133
- operationType,
134
- collectionId,
135
- status: "pending",
136
- batches: [],
137
- progress: 0,
138
- total: 0,
139
- error: "",
140
- }
127
+ const op = await tryAwaitWithRetry(
128
+ async () =>
129
+ await database.createDocument(
130
+ "migrations",
131
+ "currentOperations",
132
+ ID.unique(),
133
+ {
134
+ operationType,
135
+ collectionId,
136
+ status: "pending",
137
+ batches: [],
138
+ progress: 0,
139
+ total: 0,
140
+ error: "",
141
+ }
142
+ )
141
143
  );
142
144
 
143
145
  return OperationSchema.parse(op);