appwrite-utils-cli 0.0.68 → 0.0.70

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -132,6 +132,8 @@ This setup ensures that developers have robust tools at their fingertips to mana
 
  ### Changelog
 
+ - 0.0.70: Bump to `node-appwrite` version
+ - 0.0.69: Fixed single ID not getting replaced due to the below change =D also, `nice`
  - 0.0.68: Fixed the occasional case where, when mapping ID's from old data to new, there would be an array of ID's to match against. `idMappings` now supports arrays.
  - 0.0.67: Fixed `updates` in `importDef`'s update mappings overwriting postImportActions from the original
  - 0.0.57: Fixed `dataLoader`'s `idMapping`'s giving me issues
@@ -1,4 +1,5 @@
- import { Databases, Storage, InputFile, Query, ID, Client, } from "node-appwrite";
+ import { Databases, Storage, Query, ID, Client, Compression, } from "node-appwrite";
+ import { InputFile } from "node-appwrite/file";
  import path from "path";
  import fs from "fs";
  import os from "os";
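This hunk (and the matching TypeScript hunk later in the diff) tracks node-appwrite 13's layout: `InputFile` now lives in the `node-appwrite/file` subpath rather than the package root. A minimal sketch of the new import style, with placeholder credentials and a hypothetical file path:

```ts
import { Client, Storage } from "node-appwrite";
import { InputFile } from "node-appwrite/file"; // moved out of the package root in 13.x

// Placeholder credentials; substitute a real endpoint, project ID, and API key.
const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1")
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");

const storage = new Storage(client);
const file = InputFile.fromPath("./data/logo.png", "logo.png"); // hypothetical local file
```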
@@ -208,11 +209,11 @@ export const afterImportActions = {
  return await tryAwaitWithRetry(async () => await storage.getBucket(bucketId));
  }
  catch (error) {
- return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus));
+ return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression ? Compression.Gzip : undefined, encryption, antivirus));
  }
  }
  else {
- return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus));
+ return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression ? Compression.Gzip : undefined, encryption, antivirus));
  }
  }
  catch (error) {
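node-appwrite 13 types `createBucket`'s compression parameter as the `Compression` enum, so both call sites above translate the package's boolean `compression` flag through a ternary. The same mapping in isolation, as a hypothetical helper (the function name is ours, not the package's):

```ts
import { Compression } from "node-appwrite";

// Hypothetical helper mirroring the ternary used at both createBucket call
// sites: a truthy flag selects gzip, otherwise the argument is left undefined
// so the server default applies.
function toCompression(flag: boolean | undefined): Compression | undefined {
  return flag ? Compression.Gzip : undefined;
}
```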
@@ -229,6 +230,10 @@ export const afterImportActions = {
  // console.log(
  // `Processing field ${fieldName} in collection ${collId} for document ${docId} in database ${dbId} in bucket ${bucketId} with path ${filePath} and name ${fileName}...`
  // );
+ if (filePath.length === 0 || fileName.length === 0) {
+ console.error(`File path or name is empty for field ${fieldName} in collection ${collId}, skipping...`);
+ return;
+ }
  let isArray = false;
  if (!attribute) {
  console.log(`Field ${fieldName} not found in collection ${collId}, weird, skipping...`);
@@ -74,6 +74,15 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
  error?: string | undefined;
  xdefault?: number | null | undefined;
  min?: number | undefined;
+ /**
+ * Prepares the data for creating user collection documents.
+ * This involves loading the data, transforming it according to the import definition,
+ * and handling the creation of new unique IDs for each item.
+ *
+ * @param db - The database configuration.
+ * @param collection - The collection configuration.
+ * @param importDef - The import definition containing the attribute mappings and other relevant info.
+ */
  max?: number | undefined;
  }, {
  key: string;
@@ -666,16 +675,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
  converters?: string[] | undefined;
  validationActions?: {
  params: string[];
- action: string; /**
- * Generates attribute mappings with post-import actions based on the provided attribute mappings.
- * This method checks each mapping for a fileData attribute and adds a post-import action to create a file
- * and update the field with the file's ID if necessary.
- *
- * @param attributeMappings - The attribute mappings from the import definition.
- * @param context - The context object containing information about the database, collection, and document.
- * @param item - The item being imported, used for resolving template paths in fileData mappings.
- * @returns The attribute mappings updated with any necessary post-import actions.
- */
+ action: string;
  }[] | undefined;
  postImportActions?: {
  params: (string | Record<string, any>)[];
@@ -1295,16 +1295,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
  converters?: string[] | undefined;
  validationActions?: {
  params: string[];
- action: string; /**
- * Generates attribute mappings with post-import actions based on the provided attribute mappings.
- * This method checks each mapping for a fileData attribute and adds a post-import action to create a file
- * and update the field with the file's ID if necessary.
- *
- * @param attributeMappings - The attribute mappings from the import definition.
- * @param context - The context object containing information about the database, collection, and document.
- * @param item - The item being imported, used for resolving template paths in fileData mappings.
- * @returns The attribute mappings updated with any necessary post-import actions.
- */
+ action: string;
  }[] | undefined;
  postImportActions?: {
  params: (string | Record<string, any>)[];
@@ -1708,16 +1699,7 @@ export declare class DataLoader {
  converters?: string[] | undefined;
  validationActions?: {
  params: string[];
- action: string; /**
- * Generates attribute mappings with post-import actions based on the provided attribute mappings.
- * This method checks each mapping for a fileData attribute and adds a post-import action to create a file
- * and update the field with the file's ID if necessary.
- *
- * @param attributeMappings - The attribute mappings from the import definition.
- * @param context - The context object containing information about the database, collection, and document.
- * @param item - The item being imported, used for resolving template paths in fileData mappings.
- * @returns The attribute mappings updated with any necessary post-import actions.
- */
+ action: string;
  }[] | undefined;
  postImportActions?: {
  params: (string | Record<string, any>)[];
@@ -453,9 +453,9 @@ export class DataLoader {
  continue;
  // Handle cases where sourceValue is an array
  const sourceValues = Array.isArray(sourceValue)
- ? sourceValue
- : [sourceValue];
- const newData = [];
+ ? sourceValue.map((sourceValue) => `${sourceValue}`)
+ : [`${sourceValue}`];
+ let newData = [];
  for (const valueToMatch of sourceValues) {
  // Find matching data in the target collection
  const foundData = targetCollectionData.data.filter(({ context, finalData }) => {
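This coercion is the substance of the 0.0.69 fix: a single numeric source ID compared strictly against string IDs in the target data never matched. Normalizing every candidate to a string first makes the lookup type-insensitive, as this standalone illustration (not package code) shows:

```ts
const sourceValue: unknown = 42; // a single numeric ID from the old data

// Before 0.0.69 the raw value was compared strictly and never matched "42".
console.log([sourceValue].includes("42" as any)); // false

// After the fix every candidate is stringified before matching.
const sourceValues = Array.isArray(sourceValue)
  ? sourceValue.map((v) => `${v}`)
  : [`${sourceValue}`];
console.log(sourceValues.includes("42")); // true
```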
@@ -468,12 +468,14 @@ export class DataLoader {
  });
  if (foundData.length) {
  newData.push(...foundData.map((data) => {
- return this.getValueFromData(data.finalData, data.context, idMapping.targetField);
+ const newValue = this.getValueFromData(data.finalData, data.context, idMapping.targetField);
+ return newValue;
  }));
  }
  else {
  logger.info(`No data found for collection: ${targetCollectionKey} with value: ${valueToMatch} for field: ${fieldToSetKey} -- idMapping: ${JSON.stringify(idMapping, null, 2)}`);
  }
+ continue;
  }
  const getCurrentDataFiltered = (currentData) => {
  if (Array.isArray(currentData.finalData[fieldToSetKey])) {
@@ -511,7 +513,8 @@ export class DataLoader {
  ...newData,
  ].filter((value) => value !== null &&
  value !== undefined &&
- value !== "")),
+ value !== "" &&
+ !sourceValues.includes(`${value}`))),
  ];
  }
  }
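The added `!sourceValues.includes(...)` condition keeps the old source IDs out of the merged result, so a stale ID can no longer survive alongside its replacement. A hypothetical before/after:

```ts
const sourceValues = ["old_1"];         // ID being mapped away from
const newData = ["new_abc"];            // freshly resolved target ID
const currentData = ["old_1", "other"]; // existing field contents

const merged = [
  ...new Set(
    [...currentData, ...newData].filter(
      (value) =>
        value !== null &&
        value !== undefined &&
        value !== "" &&
        !sourceValues.includes(`${value}`) // drops the stale "old_1"
    )
  ),
];
console.log(merged); // ["other", "new_abc"]
```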
@@ -528,7 +531,8 @@ export class DataLoader {
  collectionData.data[i].finalData[fieldToSetKey] = [
  ...new Set([currentDataFiltered, ...newData].filter((value) => value !== null &&
  value !== undefined &&
- value !== "")),
+ value !== "" &&
+ !sourceValues.includes(`${value}`))),
  ].slice(0, 1)[0];
  }
  else if (!Array.isArray(newData) &&
@@ -1,4 +1,4 @@
- import { Client, Databases, Query } from "node-appwrite";
+ import { Client, Databases, IndexType, Query, } from "node-appwrite";
  import { getAppwriteClient, tryAwaitWithRetry, } from "../utils/helperFunctions.js";
  import { transferDocumentsBetweenDbsLocalToLocal, transferDocumentsBetweenDbsLocalToRemote, } from "./collections.js";
  import { createOrUpdateAttribute } from "./attributes.js";
@@ -1,4 +1,4 @@
- import { ID, InputFile, Query, } from "node-appwrite";
+ import {} from "node-appwrite";
  import { validationRules, } from "appwrite-utils";
  import { converterFunctions } from "appwrite-utils";
  import { convertObjectBySchema } from "./converters.js";
@@ -1,5 +1,5 @@
  import { indexSchema } from "appwrite-utils";
- import { Databases, Query } from "node-appwrite";
+ import { Databases, IndexType, Query } from "node-appwrite";
  import { tryAwaitWithRetry } from "../utils/helperFunctions.js";
  // import {}
  export const createOrUpdateIndex = async (dbId, db, collectionId, index) => {
@@ -1,4 +1,5 @@
- import { Storage, Databases, Query, InputFile, ID, Permission, } from "node-appwrite";
+ import { Storage, Databases, Query, ID, Permission, } from "node-appwrite";
+ import { InputFile } from "node-appwrite/file";
  import {} from "./backup.js";
  import { splitIntoBatches } from "./migrationHelper.js";
  import { getAppwriteClient, tryAwaitWithRetry, } from "../utils/helperFunctions.js";
@@ -307,7 +308,9 @@ export const transferStorageLocalToRemote = async (localStorage, endpoint, proje
  }
  }
  for (const file of allFromFiles) {
- await tryAwaitWithRetry(async () => await remoteStorage.createFile(toBucketId, file.$id, file, file.$permissions));
+ const fileData = await tryAwaitWithRetry(async () => await localStorage.getFileDownload(file.bucketId, file.$id));
+ const fileToCreate = InputFile.fromBuffer(Buffer.from(fileData), file.name);
+ await tryAwaitWithRetry(async () => await remoteStorage.createFile(toBucketId, file.$id, fileToCreate, file.$permissions));
  numberOfFiles++;
  }
  console.log(`Transferred ${numberOfFiles} files from ${fromBucketId} to ${toBucketId}`);
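The rewritten loop stops handing `createFile` the file model returned by the listing and instead downloads each file's bytes and rewraps them as an `InputFile`, presumably because node-appwrite 13 expects an `InputFile` payload. The same pattern as a standalone helper (the function name and signature are ours):

```ts
import { Storage } from "node-appwrite";
import { InputFile } from "node-appwrite/file";

// Copies a single file between two Storage instances. Assumes, as the
// Buffer.from(...) call in the diff implies, that getFileDownload resolves
// to the file's raw bytes (an ArrayBuffer).
async function copyFile(
  from: Storage,
  to: Storage,
  fromBucketId: string,
  toBucketId: string,
  fileId: string,
  fileName: string,
  permissions: string[]
): Promise<void> {
  const bytes = await from.getFileDownload(fromBucketId, fileId);
  const input = InputFile.fromBuffer(Buffer.from(bytes), fileName);
  await to.createFile(toBucketId, fileId, input, permissions);
}
```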
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "appwrite-utils-cli",
  "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
- "version": "0.0.68",
+ "version": "0.0.70",
  "main": "src/main.ts",
  "type": "module",
  "repository": {
@@ -33,14 +33,14 @@
  },
  "dependencies": {
  "@types/inquirer": "^9.0.7",
- "appwrite-utils": "^0.3.5",
+ "appwrite-utils": "^0.3.6",
  "commander": "^12.0.0",
  "inquirer": "^9.2.20",
  "js-yaml": "^4.1.0",
  "lodash": "^4.17.21",
  "luxon": "^3.4.4",
  "nanostores": "^0.10.3",
- "node-appwrite": "^12.0.1",
+ "node-appwrite": "^13.0.0",
  "tsx": "^4.9.3",
  "ulid": "^2.3.0",
  "winston": "^3.13.0",
@@ -1,12 +1,13 @@
  import {
  Databases,
  Storage,
- InputFile,
  Query,
  ID,
  type Models,
  Client,
+ Compression,
  } from "node-appwrite";
+ import { InputFile } from "node-appwrite/file";
  import path from "path";
  import fs from "fs";
  import os from "os";
@@ -385,7 +386,7 @@ export const afterImportActions = {
  enabled,
  maxFileSize,
  allowedExtensions,
- compression,
+ compression ? Compression.Gzip : undefined,
  encryption,
  antivirus
  )
@@ -402,7 +403,7 @@ export const afterImportActions = {
  enabled,
  maxFileSize,
  allowedExtensions,
- compression,
+ compression ? Compression.Gzip : undefined,
  encryption,
  antivirus
  )
@@ -433,6 +434,13 @@ export const afterImportActions = {
  // console.log(
  // `Processing field ${fieldName} in collection ${collId} for document ${docId} in database ${dbId} in bucket ${bucketId} with path ${filePath} and name ${fileName}...`
  // );
+ if (filePath.length === 0 || fileName.length === 0) {
+ console.error(
+ `File path or name is empty for field ${fieldName} in collection ${collId}, skipping...`
+ );
+ return;
+ }
+
  let isArray = false;
  if (!attribute) {
  console.log(
@@ -567,9 +567,9 @@ export class DataLoader {
 
  // Handle cases where sourceValue is an array
  const sourceValues = Array.isArray(sourceValue)
- ? sourceValue
- : [sourceValue];
- const newData = [];
+ ? sourceValue.map((sourceValue) => `${sourceValue}`)
+ : [`${sourceValue}`];
+ let newData = [];
 
  for (const valueToMatch of sourceValues) {
  // Find matching data in the target collection
@@ -593,11 +593,12 @@ export class DataLoader {
  if (foundData.length) {
  newData.push(
  ...foundData.map((data) => {
- return this.getValueFromData(
+ const newValue = this.getValueFromData(
  data.finalData,
  data.context,
  idMapping.targetField
  );
+ return newValue;
  })
  );
  } else {
@@ -609,6 +610,7 @@
  )}`
  );
  }
+ continue;
  }
 
  const getCurrentDataFiltered = (currentData: any) => {
@@ -660,7 +662,8 @@ export class DataLoader {
  (value: any) =>
  value !== null &&
  value !== undefined &&
- value !== ""
+ value !== "" &&
+ !sourceValues.includes(`${value}`)
  )
  ),
  ];
@@ -680,7 +683,8 @@ export class DataLoader {
  (value: any) =>
  value !== null &&
  value !== undefined &&
- value !== ""
+ value !== "" &&
+ !sourceValues.includes(`${value}`)
  )
  ),
  ].slice(0, 1)[0];
@@ -1,4 +1,10 @@
- import { Client, Databases, Query, type Models } from "node-appwrite";
+ import {
+ Client,
+ Databases,
+ IndexType,
+ Query,
+ type Models,
+ } from "node-appwrite";
  import {
  getAppwriteClient,
  tryAwaitWithRetry,
@@ -139,7 +145,7 @@ export const transferDatabaseLocalToLocal = async (
  targetDbId,
  newCollection.$id,
  index.key,
- index.type,
+ index.type as IndexType,
  index.attributes,
  index.orders
  )
@@ -226,7 +232,7 @@ export const transferDatabaseLocalToRemote = async (
  toDbId,
  toCollection.$id,
  index.key,
- index.type,
+ index.type as IndexType,
  index.attributes,
  index.orders
  )
@@ -1,10 +1,4 @@
- import {
- ID,
- InputFile,
- Query,
- type Databases,
- type Storage,
- } from "node-appwrite";
+ import { type Databases, type Storage } from "node-appwrite";
  import type { AppwriteConfig } from "appwrite-utils";
  import {
  validationRules,
@@ -1,5 +1,5 @@
  import { indexSchema, type Index } from "appwrite-utils";
- import { Databases, Query, type Models } from "node-appwrite";
+ import { Databases, IndexType, Query, type Models } from "node-appwrite";
  import { tryAwaitWithRetry } from "../utils/helperFunctions.js";
  // import {}
 
@@ -19,7 +19,7 @@ export const createOrUpdateIndex = async (
  dbId,
  collectionId,
  index.key,
- index.type,
+ index.type as IndexType,
  index.attributes,
  index.orders
  );
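node-appwrite 13 narrows the index `type` parameter to the `IndexType` enum, while the package's index schema carries it as a plain string, hence the `as IndexType` casts here and in the database-transfer hunks above. A stricter alternative to a bare cast would be to validate the string first (our suggestion, not what the package does):

```ts
import { IndexType } from "node-appwrite";

// Validates a config-supplied string against the enum's values before
// treating it as an IndexType, instead of casting blindly.
function toIndexType(type: string): IndexType {
  if ((Object.values(IndexType) as string[]).includes(type)) {
    return type as IndexType;
  }
  throw new Error(`Unsupported index type: ${type}`);
}
```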
@@ -2,11 +2,11 @@ import {
  Storage,
  Databases,
  Query,
- InputFile,
  type Models,
  ID,
  Permission,
  } from "node-appwrite";
+ import { InputFile } from "node-appwrite/file";
  import { type OperationCreate, type BackupCreate } from "./backup.js";
  import { splitIntoBatches } from "./migrationHelper.js";
  import type { AppwriteConfig } from "appwrite-utils";
@@ -490,12 +490,16 @@ export const transferStorageLocalToRemote = async (
  }
 
  for (const file of allFromFiles) {
+ const fileData = await tryAwaitWithRetry(
+ async () => await localStorage.getFileDownload(file.bucketId, file.$id)
+ );
+ const fileToCreate = InputFile.fromBuffer(Buffer.from(fileData), file.name);
  await tryAwaitWithRetry(
  async () =>
  await remoteStorage.createFile(
  toBucketId,
  file.$id,
- file,
+ fileToCreate,
  file.$permissions
  )
  );