appwrite-utils-cli 0.0.55 → 0.0.57

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -132,6 +132,7 @@ This setup ensures that developers have robust tools at their fingertips to mana
 
 ### Changelog
 
+ - 0.0.57: Fixed `dataLoader`'s `idMapping`'s giving me issues
 - 0.0.55: Added `documentExists` check to batch creation functionality to try to prevent duplicates
 - 0.0.54: Various fixes in here
 - 0.0.50: Actually fixed the slight bug, it was really in the `mergeObjects`
@@ -3,7 +3,7 @@ import path from "path";
 import fs from "fs";
 import os from "os";
 import { logger } from "./logging.js";
- import {} from "appwrite-utils";
+ import { tryAwaitWithRetry, } from "appwrite-utils";
 export const getDatabaseFromConfig = (config) => {
 if (!config.appwriteClient) {
 config.appwriteClient = new Client()
@@ -242,7 +242,7 @@ export const afterImportActions = {
 // Define a helper function to check if a value is a URL
 const isUrl = (value) => typeof value === "string" &&
 (value.startsWith("http://") || value.startsWith("https://"));
- const doc = await db.getDocument(dbId, collId, docId);
+ const doc = await tryAwaitWithRetry(async () => await db.getDocument(dbId, collId, docId));
 const existingFieldValue = doc[fieldName];
 // Handle the case where the field is an array
 let updateData = isArray ? [] : "";
@@ -265,7 +265,7 @@ export const afterImportActions = {
 // Create InputFile from the downloaded file
 const inputFile = InputFile.fromPath(tempFilePath, fileName);
 // Use the full file name (with extension) for creating the file
- const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+ const file = await tryAwaitWithRetry(async () => await storage.createFile(bucketId, ID.unique(), inputFile));
 console.log("Created file from URL: ", file.$id);
 // After uploading, adjust the updateData based on whether the field is an array or not
 if (isArray) {
@@ -274,15 +274,9 @@ export const afterImportActions = {
 else {
 updateData = file.$id; // Set the new file ID
 }
- await db.updateDocument(dbId, collId, doc.$id, {
+ await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, doc.$id, {
 [fieldName]: updateData,
- });
- // console.log(
- // "Updating document with file: ",
- // doc.$id,
- // `${fieldName}: `,
- // updateData
- // );
+ }));
 // If the file was downloaded, delete it after uploading
 fs.unlinkSync(tempFilePath);
 }
@@ -295,16 +289,16 @@ export const afterImportActions = {
 }
 const pathToFile = path.join(filePath, fileFullName);
 const inputFile = InputFile.fromPath(pathToFile, fileName);
- const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+ const file = await tryAwaitWithRetry(async () => await storage.createFile(bucketId, ID.unique(), inputFile));
 if (isArray) {
 updateData = [...updateData, file.$id]; // Append the new file ID
 }
 else {
 updateData = file.$id; // Set the new file ID
 }
- await db.updateDocument(dbId, collId, doc.$id, {
+ tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, doc.$id, {
 [fieldName]: updateData,
- });
+ }));
 console.log("Created file from path: ", file.$id);
 }
 }
@@ -1748,6 +1748,14 @@ export declare class DataLoader {
 *
 * @return {void} This function does not return anything.
 */
+ /**
+ * Gets the value to match for a given key in the final data or context.
+ * @param finalData - The final data object.
+ * @param context - The context object.
+ * @param key - The key to get the value for.
+ * @returns The value to match for from finalData or Context
+ */
+ getValueFromData(finalData: any, context: any, key: string): any;
 updateOldReferencesForNew(): void;
 private writeMapsToJsonFile;
 /**
@@ -108,9 +108,9 @@ export class DataLoader {
 result[key] = [...value, updateValue].filter((item) => item !== null && item !== undefined && item !== "");
 }
 }
- else if (typeof value === "object") {
+ else if (typeof value === "object" && !Array.isArray(value)) {
 // If the value is an object, we need to merge it
- if (typeof update[key] === "object") {
+ if (typeof update[key] === "object" && !Array.isArray(update[key])) {
 result[key] = this.mergeObjects(value, update[key]);
 }
 }
@@ -127,7 +127,10 @@ export class DataLoader {
 else if (!Object.hasOwn(source, key)) {
 result[key] = value;
 }
- else if (typeof source[key] === "object" && typeof value === "object") {
+ else if (typeof source[key] === "object" &&
+ typeof value === "object" &&
+ !Array.isArray(source[key]) &&
+ !Array.isArray(value)) {
 result[key] = this.mergeObjects(source[key], value);
 }
 else if (Array.isArray(source[key]) && Array.isArray(value)) {
@@ -207,9 +210,6 @@ export class DataLoader {
 transformData(item, attributeMappings) {
 // Convert the item using the attribute mappings provided
 const convertedItem = convertObjectByAttributeMappings(item, attributeMappings);
- if (item["region"]) {
- logger.info(`Converted item: ${JSON.stringify(convertedItem, null, 2)}`);
- }
 // Run additional converter functions on the converted item, if any
 return this.importDataActions.runConverterFunctions(convertedItem, attributeMappings);
 }
@@ -315,12 +315,10 @@ export class DataLoader {
 const updateDefs = collection.importDefs.filter((def) => def.type === "update");
 for (const createDef of createDefs) {
 if (!isUsersCollection) {
- console.log(`${collection.name} is not users collection`);
 await this.prepareCreateData(db, collection, createDef);
 }
 else {
 // Special handling for users collection if needed
- console.log(`${collection.name} is users collection`);
 await this.prepareUserCollectionCreateData(db, collection, createDef);
 }
 }
@@ -396,6 +394,21 @@ export class DataLoader {
 // }
 // }
 // }
+ /**
+ * Gets the value to match for a given key in the final data or context.
+ * @param finalData - The final data object.
+ * @param context - The context object.
+ * @param key - The key to get the value for.
+ * @returns The value to match for from finalData or Context
+ */
+ getValueFromData(finalData, context, key) {
+ if (context[key] !== undefined &&
+ context[key] !== null &&
+ context[key] !== "") {
+ return context[key];
+ }
+ return finalData[key];
+ }
 updateOldReferencesForNew() {
 if (!this.config.collections) {
 return;
@@ -416,7 +429,7 @@ export class DataLoader {
 const targetCollectionKey = this.getCollectionKey(idMapping.targetCollection);
 const fieldToSetKey = idMapping.fieldToSet || idMapping.sourceField;
 const targetFieldKey = idMapping.targetFieldToMatch || idMapping.targetField;
- const valueToMatch = collectionData.data[i].context[idMapping.sourceField];
+ const valueToMatch = this.getValueFromData(collectionData.data[i].finalData, collectionData.data[i].context, idMapping.sourceField);
 // Skip if value to match is missing or empty
 if (!valueToMatch ||
 _.isEmpty(valueToMatch) ||
@@ -427,8 +440,8 @@ export class DataLoader {
 if (!targetCollectionData || !targetCollectionData.data)
 continue;
 // Find matching data in the target collection
- const foundData = targetCollectionData.data.filter(({ context }) => {
- const targetValue = context[targetFieldKey];
+ const foundData = targetCollectionData.data.filter(({ context, finalData }) => {
+ const targetValue = this.getValueFromData(finalData, context, targetFieldKey);
 const isMatch = `${targetValue}` === `${valueToMatch}`;
 // Ensure the targetValue is defined and not null
 return (isMatch &&
@@ -438,7 +451,7 @@ export class DataLoader {
 // Log and skip if no matching data found
 if (!foundData.length) {
 console.log(`No data found for collection ${collectionConfig.name}:\nTarget collection: ${targetCollectionKey}\nValue to match: ${valueToMatch}\nField to set: ${fieldToSetKey}\nTarget field to match: ${targetFieldKey}\nTarget field value: ${idMapping.targetField}`);
- logger.error(`No data found for collection: ${targetCollectionKey} with value: ${valueToMatch} for field: ${fieldToSetKey} -- idMapping: ${JSON.stringify(idMapping, null, 2)}`);
+ logger.info(`No data found for collection: ${targetCollectionKey} with value: ${valueToMatch} for field: ${fieldToSetKey} -- idMapping: ${JSON.stringify(idMapping, null, 2)}`);
 continue;
 }
 needsUpdate = true;
@@ -451,7 +464,10 @@ export class DataLoader {
 // Get the current data to be updated
 const currentDataFiltered = getCurrentDataFiltered(collectionData.data[i]);
 // Extract the new data to set
- const newData = foundData.map((data) => data.context[idMapping.targetField]);
+ const newData = foundData.map((data) => {
+ const valueFound = this.getValueFromData(data.finalData, data.context, idMapping.targetField);
+ return valueFound;
+ });
 // Handle cases where current data is an array
 if (isFieldToSetArray) {
 if (!currentDataFiltered) {
@@ -460,15 +476,23 @@ export class DataLoader {
 Array.isArray(newData) ? newData : [newData];
 }
 else {
- // Merge arrays if new data is non-empty array and filter for uniqueness
- collectionData.data[i].finalData[fieldToSetKey] = [
- ...new Set([
- ...(Array.isArray(currentDataFiltered)
- ? currentDataFiltered
- : [currentDataFiltered]),
- ...newData,
- ].filter((value) => `${value}` !== `${valueToMatch}`)),
- ];
+ if (Array.isArray(currentDataFiltered)) {
+ // Convert current data to array and merge if new data is non-empty array
+ collectionData.data[i].finalData[fieldToSetKey] = [
+ ...new Set([...currentDataFiltered, ...newData].filter((value) => `${value}` !== `${valueToMatch}` && value)),
+ ];
+ }
+ else {
+ // Merge arrays if new data is non-empty array and filter for uniqueness
+ collectionData.data[i].finalData[fieldToSetKey] = [
+ ...new Set([
+ ...(Array.isArray(currentDataFiltered)
+ ? currentDataFiltered
+ : [currentDataFiltered]),
+ ...newData,
+ ].filter((value) => `${value}` !== `${valueToMatch}` && value)),
+ ];
+ }
 }
 }
 else {
@@ -481,7 +505,7 @@ export class DataLoader {
 // Convert current data to array and merge if new data is non-empty array, then filter for uniqueness
 // and take the first value, because it's an array and the attribute is not an array
 collectionData.data[i].finalData[fieldToSetKey] = [
- ...new Set([currentDataFiltered, ...newData].filter((value) => `${value}` !== `${valueToMatch}`)),
+ ...new Set([currentDataFiltered, ...newData].filter((value) => `${value}` !== `${valueToMatch}` && value)),
 ].slice(0, 1)[0];
 }
 else if (!Array.isArray(newData) && newData !== undefined) {
@@ -501,39 +525,45 @@ export class DataLoader {
 }
 }
 writeMapsToJsonFile() {
- const outputDir = path.resolve(process.cwd());
- const outputFile = path.join(outputDir, "dataLoaderOutput.json");
- const dataToWrite = {
- // Convert Maps to arrays of entries for serialization
- oldIdToNewIdPerCollectionMap: Array.from(this.oldIdToNewIdPerCollectionMap.entries()).map(([key, value]) => {
- return {
- collection: key,
- data: Array.from(value.entries()),
- };
- }),
- mergedUserMap: Array.from(this.mergedUserMap.entries()),
- dataFromCollections: Array.from(this.importMap.entries()).map(([key, value]) => {
- return {
- collection: key,
- data: value.data.map((item) => item.finalData),
- };
- }),
- // emailToUserIdMap: Array.from(this.emailToUserIdMap.entries()),
- // phoneToUserIdMap: Array.from(this.phoneToUserIdMap.entries()),
- };
- // Use JSON.stringify with a replacer function to handle Maps
- const replacer = (key, value) => {
- if (value instanceof Map) {
- return Array.from(value.entries());
- }
- return value;
+ const outputDir = path.resolve(process.cwd(), "zlogs");
+ // Ensure the logs directory exists
+ if (!fs.existsSync(outputDir)) {
+ fs.mkdirSync(outputDir);
+ }
+ // Helper function to write data to a file
+ const writeToFile = (fileName, data) => {
+ const outputFile = path.join(outputDir, fileName);
+ fs.writeFile(outputFile, JSON.stringify(data, null, 2), "utf8", (err) => {
+ if (err) {
+ console.error(`Error writing data to ${fileName}:`, err);
+ return;
+ }
+ console.log(`Data successfully written to ${fileName}`);
+ });
 };
- fs.writeFile(outputFile, JSON.stringify(dataToWrite, replacer, 2), "utf8", (err) => {
- if (err) {
- console.error("Error writing data to JSON file:", err);
- return;
- }
- console.log(`Data successfully written to ${outputFile}`);
+ // Convert Maps to arrays of entries for serialization
+ const oldIdToNewIdPerCollectionMap = Array.from(this.oldIdToNewIdPerCollectionMap.entries()).map(([key, value]) => {
+ return {
+ collection: key,
+ data: Array.from(value.entries()),
+ };
+ });
+ const mergedUserMap = Array.from(this.mergedUserMap.entries());
+ // Write each part to a separate file
+ writeToFile("oldIdToNewIdPerCollectionMap.json", oldIdToNewIdPerCollectionMap);
+ writeToFile("mergedUserMap.json", mergedUserMap);
+ // Write each collection's data to a separate file
+ this.importMap.forEach((value, key) => {
+ const data = {
+ collection: key,
+ data: value.data.map((item) => {
+ return {
+ finalData: item.finalData,
+ context: item.context,
+ };
+ }),
+ };
+ writeToFile(`${key}.json`, data);
 });
 }
 /**
@@ -557,9 +587,15 @@ export class DataLoader {
 // Check for duplicate email and phone
 if (email && this.emailToUserIdMap.has(email)) {
 existingId = this.emailToUserIdMap.get(email);
+ if (phone && !this.phoneToUserIdMap.has(phone)) {
+ this.phoneToUserIdMap.set(phone, newId);
+ }
 }
 else if (phone && this.phoneToUserIdMap.has(phone)) {
 existingId = this.phoneToUserIdMap.get(phone);
+ if (email && !this.emailToUserIdMap.has(email)) {
+ this.emailToUserIdMap.set(email, newId);
+ }
 }
 else {
 if (email)
@@ -654,7 +690,6 @@ export class DataLoader {
 this.oldIdToNewIdPerCollectionMap
 .set(this.getCollectionKey(collection.name), oldIdToNewIdMap)
 .get(this.getCollectionKey(collection.name));
- console.log(`${collection.name} -- collectionOldIdToNewIdMap: ${collectionOldIdToNewIdMap}`);
 if (!operationId) {
 throw new Error(`No import operation found for collection ${collection.name}`);
 }
@@ -740,7 +775,10 @@ export class DataLoader {
 if (currentData.data[i].finalData.docId === existingId ||
 currentData.data[i].finalData.userId === existingId) {
 currentData.data[i].finalData = this.mergeObjects(currentData.data[i].finalData, transformedItem);
- currentData.data[i].context = context;
+ currentData.data[i].context = {
+ ...currentData.data[i].context,
+ ...context,
+ };
 currentData.data[i].importDef = newImportDef;
 this.importMap.set(this.getCollectionKey(collection.name), currentData);
 this.oldIdToNewIdPerCollectionMap.set(this.getCollectionKey(collection.name), collectionOldIdToNewIdMap);
@@ -786,7 +824,6 @@ export class DataLoader {
 this.oldIdToNewIdPerCollectionMap
 .set(this.getCollectionKey(collection.name), oldIdToNewIdMapNew)
 .get(this.getCollectionKey(collection.name));
- console.log(`${collection.name} -- collectionOldIdToNewIdMap: ${collectionOldIdToNewIdMap}`);
 const isRegions = collection.name.toLowerCase() === "regions";
 // Iterate through each item in the raw data
 for (const item of rawData) {
@@ -801,6 +838,9 @@ export class DataLoader {
 let context = this.createContext(db, collection, item, itemIdNew);
 // Transform the item data based on the attribute mappings
 const transformedData = this.transformData(item, importDef.attributeMappings);
+ if (collection.name.toLowerCase() === "councils") {
+ console.log("Transformed Council: ", transformedData);
+ }
 if (isRegions) {
 logger.info(`Transformed region: ${JSON.stringify(transformedData, null, 2)}`);
 }
@@ -874,7 +914,6 @@ export class DataLoader {
 let itemDataToUpdate;
 // Try to find itemDataToUpdate using updateMapping
 if (importDef.updateMapping) {
- console.log(importDef.updateMapping);
 oldId =
 item[importDef.updateMapping.originalIdField] ||
 transformedData[importDef.updateMapping.originalIdField];
@@ -898,7 +937,7 @@ export class DataLoader {
 oldId =
 item[importDef.primaryKeyField] ||
 transformedData[importDef.primaryKeyField];
- if (oldId) {
+ if (oldId && oldId.length > 0) {
 newId = oldIdToNewIdMap?.get(`${oldId}`);
 if (!newId &&
 this.getCollectionKey(this.config.usersCollectionName) ===
@@ -166,23 +166,31 @@ export class ImportController {
 for (let i = 0; i < dataSplit.length; i++) {
 const batches = dataSplit[i];
 console.log(`Processing batch ${i + 1} of ${dataSplit.length}`);
- const documentExistsPromises = batches.map(async (item) => {
- try {
- const id = item.finalData.docId ||
- item.finalData.userId ||
- item.context.docId ||
- item.context.userId;
- if (!item.finalData) {
- return Promise.resolve(null);
- }
- return tryAwaitWithRetry(async () => await documentExists(this.database, db.$id, collection.$id, item.finalData));
- }
- catch (error) {
- console.error(error);
- return Promise.resolve(null);
- }
- });
- const documentExistsResults = await Promise.all(documentExistsPromises);
+ // const documentExistsPromises = batches.map(async (item) => {
+ // try {
+ // const id =
+ // item.finalData.docId ||
+ // item.finalData.userId ||
+ // item.context.docId ||
+ // item.context.userId;
+ // if (!item.finalData) {
+ // return Promise.resolve(null);
+ // }
+ // return tryAwaitWithRetry(
+ // async () =>
+ // await documentExists(
+ // this.database,
+ // db.$id,
+ // collection.$id,
+ // item.finalData
+ // )
+ // );
+ // } catch (error) {
+ // console.error(error);
+ // return Promise.resolve(null);
+ // }
+ // });
+ // const documentExistsResults = await Promise.all(documentExistsPromises);
 const batchPromises = batches.map((item, index) => {
 try {
 const id = item.finalData.docId ||
@@ -195,7 +203,7 @@ export class ImportController {
 if (item.finalData.hasOwnProperty("docId")) {
 delete item.finalData.docId;
 }
- if (!item.finalData || documentExistsResults[index]) {
+ if (!item.finalData) {
 return Promise.resolve();
 }
 return tryAwaitWithRetry(async () => await this.database.createDocument(db.$id, collection.$id, id, item.finalData));
@@ -1,15 +1,27 @@
 import winston from "winston";
+ import fs from "fs";
+ import path from "path";
+ // Ensure the logs directory exists
+ const logDir = path.join(process.cwd(), "zlogs");
+ if (!fs.existsSync(logDir)) {
+ fs.mkdirSync(logDir);
+ }
 export const logger = winston.createLogger({
- level: "info",
+ level: "debug",
 format: winston.format.json({ space: 2 }),
 defaultMeta: { service: "appwrite-utils-cli" },
 transports: [
- //
- // - Write all logs with importance level of `error` or less to `error.log`
- // - Write all logs with importance level of `info` or less to `combined.log`
- //
- new winston.transports.File({ filename: "error.log", level: "error" }),
- new winston.transports.File({ filename: "warn.log", level: "warn" }),
- new winston.transports.File({ filename: "combined.log" }),
+ new winston.transports.File({
+ filename: path.join(logDir, "error.log"),
+ level: "error",
+ }),
+ new winston.transports.File({
+ filename: path.join(logDir, "warn.log"),
+ level: "warn",
+ }),
+ new winston.transports.File({
+ filename: path.join(logDir, "info.log"),
+ level: "info",
+ }),
 ],
 });
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "appwrite-utils-cli",
 "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "0.0.55",
+ "version": "0.0.57",
 "main": "src/main.ts",
 "type": "module",
 "repository": {
@@ -11,7 +11,11 @@ import path from "path";
 import fs from "fs";
 import os from "os";
 import { logger } from "./logging.js";
- import { type AfterImportActions, type AppwriteConfig } from "appwrite-utils";
+ import {
+ tryAwaitWithRetry,
+ type AfterImportActions,
+ type AppwriteConfig,
+ } from "appwrite-utils";
 
 export const getDatabaseFromConfig = (config: AppwriteConfig) => {
 if (!config.appwriteClient) {
@@ -401,7 +405,9 @@ export const afterImportActions = {
 typeof value === "string" &&
 (value.startsWith("http://") || value.startsWith("https://"));
 
- const doc = await db.getDocument(dbId, collId, docId);
+ const doc = await tryAwaitWithRetry(
+ async () => await db.getDocument(dbId, collId, docId)
+ );
 const existingFieldValue = doc[fieldName as keyof typeof doc];
 
 // Handle the case where the field is an array
@@ -432,7 +438,9 @@ export const afterImportActions = {
 const inputFile = InputFile.fromPath(tempFilePath, fileName);
 
 // Use the full file name (with extension) for creating the file
- const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+ const file = await tryAwaitWithRetry(
+ async () => await storage.createFile(bucketId, ID.unique(), inputFile)
+ );
 
 console.log("Created file from URL: ", file.$id);
 
@@ -442,15 +450,12 @@ export const afterImportActions = {
 } else {
 updateData = file.$id; // Set the new file ID
 }
- await db.updateDocument(dbId, collId, doc.$id, {
- [fieldName]: updateData,
- });
- // console.log(
- // "Updating document with file: ",
- // doc.$id,
- // `${fieldName}: `,
- // updateData
- // );
+ await tryAwaitWithRetry(
+ async () =>
+ await db.updateDocument(dbId, collId, doc.$id, {
+ [fieldName]: updateData,
+ })
+ );
 
 // If the file was downloaded, delete it after uploading
 fs.unlinkSync(tempFilePath);
@@ -465,16 +470,21 @@ export const afterImportActions = {
 }
 const pathToFile = path.join(filePath, fileFullName);
 const inputFile = InputFile.fromPath(pathToFile, fileName);
- const file = await storage.createFile(bucketId, ID.unique(), inputFile);
+ const file = await tryAwaitWithRetry(
+ async () => await storage.createFile(bucketId, ID.unique(), inputFile)
+ );
 
 if (isArray) {
 updateData = [...updateData, file.$id]; // Append the new file ID
 } else {
 updateData = file.$id; // Set the new file ID
 }
- await db.updateDocument(dbId, collId, doc.$id, {
- [fieldName]: updateData,
- });
+ tryAwaitWithRetry(
+ async () =>
+ await db.updateDocument(dbId, collId, doc.$id, {
+ [fieldName]: updateData,
+ })
+ );
 console.log("Created file from path: ", file.$id);
 }
 } catch (error) {
@@ -140,9 +140,9 @@ export class DataLoader {
 (item) => item !== null && item !== undefined && item !== ""
 );
 }
- } else if (typeof value === "object") {
+ } else if (typeof value === "object" && !Array.isArray(value)) {
 // If the value is an object, we need to merge it
- if (typeof update[key] === "object") {
+ if (typeof update[key] === "object" && !Array.isArray(update[key])) {
 result[key] = this.mergeObjects(value, update[key]);
 }
 } else {
@@ -156,7 +156,12 @@ export class DataLoader {
 continue;
 } else if (!Object.hasOwn(source, key)) {
 result[key] = value;
- } else if (typeof source[key] === "object" && typeof value === "object") {
+ } else if (
+ typeof source[key] === "object" &&
+ typeof value === "object" &&
+ !Array.isArray(source[key]) &&
+ !Array.isArray(value)
+ ) {
 result[key] = this.mergeObjects(source[key], value);
 } else if (Array.isArray(source[key]) && Array.isArray(value)) {
 result[key] = [...new Set([...source[key], ...value])].filter(
@@ -260,9 +265,6 @@ export class DataLoader {
 item,
 attributeMappings
 );
- if (item["region"]) {
- logger.info(`Converted item: ${JSON.stringify(convertedItem, null, 2)}`);
- }
 // Run additional converter functions on the converted item, if any
 return this.importDataActions.runConverterFunctions(
 convertedItem,
@@ -389,11 +391,9 @@ export class DataLoader {
 );
 for (const createDef of createDefs) {
 if (!isUsersCollection) {
- console.log(`${collection.name} is not users collection`);
 await this.prepareCreateData(db, collection, createDef);
 } else {
 // Special handling for users collection if needed
- console.log(`${collection.name} is users collection`);
 await this.prepareUserCollectionCreateData(
 db,
 collection,
@@ -480,6 +480,24 @@ export class DataLoader {
 // }
 // }
 
+ /**
+ * Gets the value to match for a given key in the final data or context.
+ * @param finalData - The final data object.
+ * @param context - The context object.
+ * @param key - The key to get the value for.
+ * @returns The value to match for from finalData or Context
+ */
+ getValueFromData(finalData: any, context: any, key: string) {
+ if (
+ context[key] !== undefined &&
+ context[key] !== null &&
+ context[key] !== ""
+ ) {
+ return context[key];
+ }
+ return finalData[key];
+ }
+
 updateOldReferencesForNew() {
 if (!this.config.collections) {
 return;
@@ -510,8 +528,11 @@ export class DataLoader {
 idMapping.fieldToSet || idMapping.sourceField;
 const targetFieldKey =
 idMapping.targetFieldToMatch || idMapping.targetField;
- const valueToMatch =
- collectionData.data[i].context[idMapping.sourceField];
+ const valueToMatch = this.getValueFromData(
+ collectionData.data[i].finalData,
+ collectionData.data[i].context,
+ idMapping.sourceField
+ );
 
 // Skip if value to match is missing or empty
 if (
@@ -532,8 +553,12 @@ export class DataLoader {
 
 // Find matching data in the target collection
 const foundData = targetCollectionData.data.filter(
- ({ context }) => {
- const targetValue = context[targetFieldKey];
+ ({ context, finalData }) => {
+ const targetValue = this.getValueFromData(
+ finalData,
+ context,
+ targetFieldKey
+ );
 const isMatch = `${targetValue}` === `${valueToMatch}`;
 // Ensure the targetValue is defined and not null
 return (
@@ -543,13 +568,12 @@ export class DataLoader {
 );
 }
 );
-
 // Log and skip if no matching data found
 if (!foundData.length) {
 console.log(
 `No data found for collection ${collectionConfig.name}:\nTarget collection: ${targetCollectionKey}\nValue to match: ${valueToMatch}\nField to set: ${fieldToSetKey}\nTarget field to match: ${targetFieldKey}\nTarget field value: ${idMapping.targetField}`
 );
- logger.error(
+ logger.info(
 `No data found for collection: ${targetCollectionKey} with value: ${valueToMatch} for field: ${fieldToSetKey} -- idMapping: ${JSON.stringify(
 idMapping,
 null,
@@ -576,9 +600,14 @@ export class DataLoader {
 );
 
 // Extract the new data to set
- const newData = foundData.map(
- (data) => data.context[idMapping.targetField]
- );
+ const newData = foundData.map((data) => {
+ const valueFound = this.getValueFromData(
+ data.finalData,
+ data.context,
+ idMapping.targetField
+ );
+ return valueFound;
+ });
 
 // Handle cases where current data is an array
 if (isFieldToSetArray) {
@@ -587,19 +616,32 @@ export class DataLoader {
 collectionData.data[i].finalData[fieldToSetKey] =
 Array.isArray(newData) ? newData : [newData];
 } else {
- // Merge arrays if new data is non-empty array and filter for uniqueness
- collectionData.data[i].finalData[fieldToSetKey] = [
- ...new Set(
- [
- ...(Array.isArray(currentDataFiltered)
- ? currentDataFiltered
- : [currentDataFiltered]),
- ...newData,
- ].filter(
- (value: any) => `${value}` !== `${valueToMatch}`
- )
- ),
- ];
+ if (Array.isArray(currentDataFiltered)) {
+ // Convert current data to array and merge if new data is non-empty array
+ collectionData.data[i].finalData[fieldToSetKey] = [
+ ...new Set(
+ [...currentDataFiltered, ...newData].filter(
+ (value: any) =>
+ `${value}` !== `${valueToMatch}` && value
+ )
+ ),
+ ];
+ } else {
+ // Merge arrays if new data is non-empty array and filter for uniqueness
+ collectionData.data[i].finalData[fieldToSetKey] = [
+ ...new Set(
+ [
+ ...(Array.isArray(currentDataFiltered)
+ ? currentDataFiltered
+ : [currentDataFiltered]),
+ ...newData,
+ ].filter(
+ (value: any) =>
+ `${value}` !== `${valueToMatch}` && value
+ )
+ ),
+ ];
+ }
 }
 } else {
 if (!currentDataFiltered) {
@@ -612,7 +654,8 @@ export class DataLoader {
 collectionData.data[i].finalData[fieldToSetKey] = [
 ...new Set(
 [currentDataFiltered, ...newData].filter(
- (value: any) => `${value}` !== `${valueToMatch}`
+ (value: any) =>
+ `${value}` !== `${valueToMatch}` && value
 )
 ),
 ].slice(0, 1)[0];
@@ -635,52 +678,57 @@ export class DataLoader {
 }
 
 private writeMapsToJsonFile() {
- const outputDir = path.resolve(process.cwd());
- const outputFile = path.join(outputDir, "dataLoaderOutput.json");
-
- const dataToWrite = {
- // Convert Maps to arrays of entries for serialization
- oldIdToNewIdPerCollectionMap: Array.from(
- this.oldIdToNewIdPerCollectionMap.entries()
- ).map(([key, value]) => {
- return {
- collection: key,
- data: Array.from(value.entries()),
- };
- }),
- mergedUserMap: Array.from(this.mergedUserMap.entries()),
- dataFromCollections: Array.from(this.importMap.entries()).map(
- ([key, value]) => {
- return {
- collection: key,
- data: value.data.map((item: any) => item.finalData),
- };
- }
- ),
- // emailToUserIdMap: Array.from(this.emailToUserIdMap.entries()),
- // phoneToUserIdMap: Array.from(this.phoneToUserIdMap.entries()),
- };
+ const outputDir = path.resolve(process.cwd(), "zlogs");
 
- // Use JSON.stringify with a replacer function to handle Maps
- const replacer = (key: any, value: any) => {
- if (value instanceof Map) {
- return Array.from(value.entries());
- }
- return value;
- };
+ // Ensure the logs directory exists
+ if (!fs.existsSync(outputDir)) {
+ fs.mkdirSync(outputDir);
+ }
 
- fs.writeFile(
- outputFile,
- JSON.stringify(dataToWrite, replacer, 2),
- "utf8",
- (err) => {
+ // Helper function to write data to a file
+ const writeToFile = (fileName: string, data: any) => {
+ const outputFile = path.join(outputDir, fileName);
+ fs.writeFile(outputFile, JSON.stringify(data, null, 2), "utf8", (err) => {
 if (err) {
- console.error("Error writing data to JSON file:", err);
+ console.error(`Error writing data to ${fileName}:`, err);
 return;
 }
- console.log(`Data successfully written to ${outputFile}`);
- }
+ console.log(`Data successfully written to ${fileName}`);
+ });
+ };
+
+ // Convert Maps to arrays of entries for serialization
+ const oldIdToNewIdPerCollectionMap = Array.from(
+ this.oldIdToNewIdPerCollectionMap.entries()
+ ).map(([key, value]) => {
+ return {
+ collection: key,
+ data: Array.from(value.entries()),
+ };
+ });
+
+ const mergedUserMap = Array.from(this.mergedUserMap.entries());
+
+ // Write each part to a separate file
+ writeToFile(
+ "oldIdToNewIdPerCollectionMap.json",
+ oldIdToNewIdPerCollectionMap
 );
+ writeToFile("mergedUserMap.json", mergedUserMap);
+
+ // Write each collection's data to a separate file
+ this.importMap.forEach((value, key) => {
+ const data = {
+ collection: key,
+ data: value.data.map((item: any) => {
+ return {
+ finalData: item.finalData,
+ context: item.context,
+ };
+ }),
+ };
+ writeToFile(`${key}.json`, data);
+ });
 }
 
 /**
@@ -723,8 +771,14 @@ export class DataLoader {
 // Check for duplicate email and phone
 if (email && this.emailToUserIdMap.has(email)) {
 existingId = this.emailToUserIdMap.get(email);
+ if (phone && !this.phoneToUserIdMap.has(phone)) {
+ this.phoneToUserIdMap.set(phone, newId);
+ }
 } else if (phone && this.phoneToUserIdMap.has(phone)) {
 existingId = this.phoneToUserIdMap.get(phone);
+ if (email && !this.emailToUserIdMap.has(email)) {
+ this.emailToUserIdMap.set(email, newId);
+ }
 } else {
 if (email) this.emailToUserIdMap.set(email, newId);
 if (phone) this.phoneToUserIdMap.set(phone, newId);
@@ -827,9 +881,6 @@ export class DataLoader {
 this.oldIdToNewIdPerCollectionMap
 .set(this.getCollectionKey(collection.name), oldIdToNewIdMap)
 .get(this.getCollectionKey(collection.name));
- console.log(
- `${collection.name} -- collectionOldIdToNewIdMap: ${collectionOldIdToNewIdMap}`
- );
 if (!operationId) {
 throw new Error(
 `No import operation found for collection ${collection.name}`
@@ -963,7 +1014,10 @@ export class DataLoader {
 currentData.data[i].finalData,
 transformedItem
 );
- currentData.data[i].context = context;
+ currentData.data[i].context = {
+ ...currentData.data[i].context,
+ ...context,
+ };
 currentData.data[i].importDef = newImportDef;
 this.importMap.set(
 this.getCollectionKey(collection.name),
@@ -1030,9 +1084,6 @@ export class DataLoader {
 this.oldIdToNewIdPerCollectionMap
 .set(this.getCollectionKey(collection.name), oldIdToNewIdMapNew)
 .get(this.getCollectionKey(collection.name));
- console.log(
- `${collection.name} -- collectionOldIdToNewIdMap: ${collectionOldIdToNewIdMap}`
- );
 const isRegions = collection.name.toLowerCase() === "regions";
 // Iterate through each item in the raw data
 for (const item of rawData) {
@@ -1054,6 +1105,9 @@ export class DataLoader {
 item,
 importDef.attributeMappings
 );
+ if (collection.name.toLowerCase() === "councils") {
+ console.log("Transformed Council: ", transformedData);
+ }
 if (isRegions) {
 logger.info(
 `Transformed region: ${JSON.stringify(transformedData, null, 2)}`
@@ -1167,7 +1221,6 @@ export class DataLoader {
 
 // Try to find itemDataToUpdate using updateMapping
 if (importDef.updateMapping) {
- console.log(importDef.updateMapping);
 oldId =
 item[importDef.updateMapping.originalIdField] ||
 transformedData[importDef.updateMapping.originalIdField];
@@ -1199,7 +1252,7 @@ export class DataLoader {
 oldId =
 item[importDef.primaryKeyField] ||
 transformedData[importDef.primaryKeyField];
- if (oldId) {
+ if (oldId && oldId.length > 0) {
 newId = oldIdToNewIdMap?.get(`${oldId}`);
 if (
 !newId &&
@@ -248,33 +248,33 @@ export class ImportController {
 const batches = dataSplit[i];
 console.log(`Processing batch ${i + 1} of ${dataSplit.length}`);
 
- const documentExistsPromises = batches.map(async (item) => {
- try {
- const id =
- item.finalData.docId ||
- item.finalData.userId ||
- item.context.docId ||
- item.context.userId;
+ // const documentExistsPromises = batches.map(async (item) => {
+ // try {
+ // const id =
+ // item.finalData.docId ||
+ // item.finalData.userId ||
+ // item.context.docId ||
+ // item.context.userId;
 
- if (!item.finalData) {
- return Promise.resolve(null);
- }
- return tryAwaitWithRetry(
- async () =>
- await documentExists(
- this.database,
- db.$id,
- collection.$id,
- item.finalData
- )
- );
- } catch (error) {
- console.error(error);
- return Promise.resolve(null);
- }
- });
+ // if (!item.finalData) {
+ // return Promise.resolve(null);
+ // }
+ // return tryAwaitWithRetry(
+ // async () =>
+ // await documentExists(
+ // this.database,
+ // db.$id,
+ // collection.$id,
+ // item.finalData
+ // )
+ // );
+ // } catch (error) {
+ // console.error(error);
+ // return Promise.resolve(null);
+ // }
+ // });
 
- const documentExistsResults = await Promise.all(documentExistsPromises);
+ // const documentExistsResults = await Promise.all(documentExistsPromises);
 
 const batchPromises = batches.map((item, index) => {
 try {
@@ -290,7 +290,7 @@ export class ImportController {
 if (item.finalData.hasOwnProperty("docId")) {
 delete item.finalData.docId;
 }
- if (!item.finalData || documentExistsResults[index]) {
+ if (!item.finalData) {
 return Promise.resolve();
 }
 return tryAwaitWithRetry(
@@ -1,16 +1,29 @@
 import winston from "winston";
+ import fs from "fs";
+ import path from "path";
+
+ // Ensure the logs directory exists
+ const logDir = path.join(process.cwd(), "zlogs");
+ if (!fs.existsSync(logDir)) {
+ fs.mkdirSync(logDir);
+ }
 
 export const logger = winston.createLogger({
- level: "info",
+ level: "debug",
 format: winston.format.json({ space: 2 }),
 defaultMeta: { service: "appwrite-utils-cli" },
 transports: [
- //
- // - Write all logs with importance level of `error` or less to `error.log`
- // - Write all logs with importance level of `info` or less to `combined.log`
- //
- new winston.transports.File({ filename: "error.log", level: "error" }),
- new winston.transports.File({ filename: "warn.log", level: "warn" }),
- new winston.transports.File({ filename: "combined.log" }),
+ new winston.transports.File({
+ filename: path.join(logDir, "error.log"),
+ level: "error",
+ }),
+ new winston.transports.File({
+ filename: path.join(logDir, "warn.log"),
+ level: "warn",
+ }),
+ new winston.transports.File({
+ filename: path.join(logDir, "info.log"),
+ level: "info",
+ }),
 ],
 });