appwrite-utils-cli 0.0.70 → 0.0.72
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +2 -0
- package/dist/migrations/converters.d.ts +2 -19
- package/dist/migrations/dataLoader.d.ts +36 -12
- package/dist/migrations/storage.js +24 -2
- package/package.json +9 -9
- package/src/migrations/storage.ts +24 -2
package/README.md CHANGED

@@ -132,6 +132,8 @@ This setup ensures that developers have robust tools at their fingertips to mana
 
 ### Changelog
 
+- 0.0.72: Remove `ulid` for `ulidx`, fixing compatibility issues
+- 0.0.71: Slight change to file download logic after errors
 - 0.0.70: Bump to `node-appwrite` version
 - 0.0.69: Fixed single ID not getting replaced due to the below change =D also, `nice`
 - 0.0.68: Fixed the occasional case where, when mapping ID's from old data to new, there would be an array of ID's to match against. `idMappings` now supports arrays.
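The 0.0.72 entry swaps the `ulid` package for `ulidx` (the matching dependency change appears in package.json further down, and the file download change appears in the storage.js and storage.ts hunks). As a minimal, hedged sketch of what the swap looks like in calling code, assuming the usual `ulidx` API, which is intended as a drop-in replacement for `ulid` (the variable name is illustrative, not from this package):

```ts
// Sketch only: ulidx exposes the same ulid() generator as the original ulid package.
import { ulid } from "ulidx";

// Generate a lexicographically sortable, unique ID (26-character ULID string).
const newDocumentId: string = ulid();
console.log(newDocumentId);
```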
package/dist/migrations/converters.d.ts CHANGED

@@ -1,3 +1,4 @@
+import { type AttributeMappings } from "appwrite-utils";
 /**
  * Deeply converts all properties of an object (or array) to strings.
  * @param data The input data to convert.
@@ -28,25 +29,7 @@ export declare const convertObjectBySchema: (obj: Record<string, any>, schema: R
  * @param attributeMappings The attributeMappings defining how keys in the object should be converted.
  * @returns The converted object with keys renamed according to attributeMappings.
  */
-export declare const convertObjectByAttributeMappings: (obj: Record<string, any>, attributeMappings: {
-    targetKey: string;
-    oldKey?: string | undefined;
-    oldKeys?: string[] | undefined;
-    valueToSet?: any;
-    fileData?: {
-        path: string;
-        name: string;
-    } | undefined;
-    converters?: string[] | undefined;
-    validationActions?: {
-        params: string[];
-        action: string;
-    }[] | undefined;
-    postImportActions?: {
-        params: (string | Record<string, any>)[];
-        action: string;
-    }[] | undefined;
-}[]) => Record<string, any>;
+export declare const convertObjectByAttributeMappings: (obj: Record<string, any>, attributeMappings: AttributeMappings) => Record<string, any>;
 /**
  * Ensures data conversion without mutating the original input.
  * @param data The data to convert.
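The change above drops the inlined mapping type and types the parameter with the shared `AttributeMappings` type from `appwrite-utils` instead. A hedged usage sketch, where the mapping shape follows the fields visible in the removed inline type (only `targetKey` is required) and the import specifier and sample data are assumptions for illustration:

```ts
// Sketch only: the import specifier is an assumption about how your build
// exposes the compiled converters module; adjust it to your setup.
import { convertObjectByAttributeMappings } from "appwrite-utils-cli/dist/migrations/converters.js";
import type { AttributeMappings } from "appwrite-utils";

// Rename keys of a source record according to the mappings.
const mappings: AttributeMappings = [
  { targetKey: "fullName", oldKey: "name" },
  { targetKey: "contactEmail", oldKey: "email" },
];

const converted = convertObjectByAttributeMappings(
  { name: "Ada Lovelace", email: "ada@example.com" },
  mappings
);
// Keys are renamed per the mappings, e.g. { fullName: ..., contactEmail: ... }
console.log(converted);
```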
package/dist/migrations/dataLoader.d.ts CHANGED

@@ -74,15 +74,6 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
     error?: string | undefined;
     xdefault?: number | null | undefined;
     min?: number | undefined;
-    /**
-     * Prepares the data for creating user collection documents.
-     * This involves loading the data, transforming it according to the import definition,
-     * and handling the creation of new unique IDs for each item.
-     *
-     * @param db - The database configuration.
-     * @param collection - The collection configuration.
-     * @param importDef - The import definition containing the attribute mappings and other relevant info.
-     */
     max?: number | undefined;
 }, {
     key: string;
@@ -683,7 +674,18 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
     }[] | undefined;
     }[];
     type?: "create" | "update" | undefined;
-    basePath?: string | undefined;
+    basePath
+    /**
+     * Generates attribute mappings with post-import actions based on the provided attribute mappings.
+     * This method checks each mapping for a fileData attribute and adds a post-import action to create a file
+     * and update the field with the file's ID if necessary.
+     *
+     * @param attributeMappings - The attribute mappings from the import definition.
+     * @param context - The context object containing information about the database, collection, and document.
+     * @param item - The item being imported, used for resolving template paths in fileData mappings.
+     * @returns The attribute mappings updated with any necessary post-import actions.
+     */
+    ?: string | undefined;
     idMappings?: {
         sourceField: string;
         targetField: string;
@@ -1303,7 +1305,18 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
     }[] | undefined;
     }[];
     type?: "create" | "update" | undefined;
-    basePath?: string | undefined;
+    basePath
+    /**
+     * Generates attribute mappings with post-import actions based on the provided attribute mappings.
+     * This method checks each mapping for a fileData attribute and adds a post-import action to create a file
+     * and update the field with the file's ID if necessary.
+     *
+     * @param attributeMappings - The attribute mappings from the import definition.
+     * @param context - The context object containing information about the database, collection, and document.
+     * @param item - The item being imported, used for resolving template paths in fileData mappings.
+     * @returns The attribute mappings updated with any necessary post-import actions.
+     */
+    ?: string | undefined;
     idMappings?: {
         sourceField: string;
         targetField: string;
@@ -1707,7 +1720,18 @@ export declare class DataLoader {
     }[] | undefined;
     }[];
     type?: "create" | "update" | undefined;
-    basePath?: string | undefined;
+    basePath
+    /**
+     * Generates attribute mappings with post-import actions based on the provided attribute mappings.
+     * This method checks each mapping for a fileData attribute and adds a post-import action to create a file
+     * and update the field with the file's ID if necessary.
+     *
+     * @param attributeMappings - The attribute mappings from the import definition.
+     * @param context - The context object containing information about the database, collection, and document.
+     * @param item - The item being imported, used for resolving template paths in fileData mappings.
+     * @returns The attribute mappings updated with any necessary post-import actions.
+     */
+    ?: string | undefined;
     idMappings?: {
         sourceField: string;
         targetField: string;
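The relocated JSDoc in these declaration hunks documents a loader helper that scans attribute mappings for a `fileData` entry and appends the post-import action that uploads the file and writes its ID back to the target field. A hedged sketch of what such a mapping might look like in an import definition; the keys and path are illustrative, while the `fileData: { path, name }` shape comes from the inline type shown in the converters diff above:

```ts
import type { AttributeMappings } from "appwrite-utils";

// Illustrative only: a mapping whose fileData points at a local file.
// Per the JSDoc above, the loader detects fileData and adds the matching
// post-import action itself; you do not list that action by hand here.
const mappings: AttributeMappings = [
  {
    targetKey: "avatar",
    oldKey: "avatar_path",
    fileData: {
      path: "./import-data/avatars/ada.png", // assumed local path
      name: "ada.png",
    },
  },
];
```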
package/dist/migrations/storage.js CHANGED

@@ -252,9 +252,27 @@ export const transferStorageLocalToLocal = async (storage, fromBucketId, toBucke
     let fromFiles = await tryAwaitWithRetry(async () => await storage.listFiles(fromBucketId, [Query.limit(100)]));
     const allFromFiles = fromFiles.files;
     let numberOfFiles = 0;
+    const downloadFileWithRetry = async (bucketId, fileId) => {
+        let attempts = 3;
+        while (attempts > 0) {
+            try {
+                return await storage.getFileDownload(bucketId, fileId);
+            }
+            catch (error) {
+                console.error(`Error downloading file ${fileId}: ${error}`);
+                attempts--;
+                if (attempts === 0)
+                    throw error;
+            }
+        }
+    };
     if (fromFiles.files.length < 100) {
         for (const file of allFromFiles) {
-            const fileData = await tryAwaitWithRetry(async () => await storage.getFileDownload(file.bucketId, file.$id));
+            const fileData = await tryAwaitWithRetry(async () => await downloadFileWithRetry(file.bucketId, file.$id));
+            if (!fileData) {
+                console.error(`Error downloading file ${file.$id}`);
+                continue;
+            }
             const fileToCreate = InputFile.fromBuffer(Buffer.from(fileData), file.name);
             console.log(`Creating file: ${file.name}`);
             tryAwaitWithRetry(async () => await storage.createFile(toBucketId, file.$id, fileToCreate, file.$permissions));
@@ -277,7 +295,11 @@ export const transferStorageLocalToLocal = async (storage, fromBucketId, toBucke
         }
     }
     for (const file of allFromFiles) {
-        const fileData = await tryAwaitWithRetry(async () => await storage.getFileDownload(file.bucketId, file.$id));
+        const fileData = await tryAwaitWithRetry(async () => await downloadFileWithRetry(file.bucketId, file.$id));
+        if (!fileData) {
+            console.error(`Error downloading file ${file.$id}`);
+            continue;
+        }
         const fileToCreate = InputFile.fromBuffer(Buffer.from(fileData), file.name);
         await tryAwaitWithRetry(async () => await storage.createFile(toBucketId, file.$id, fileToCreate, file.$permissions));
         numberOfFiles++;
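This is the "slight change to file download logic after errors" from the 0.0.71 changelog entry: downloads now go through a local `downloadFileWithRetry` helper (up to three attempts), still wrapped in `tryAwaitWithRetry`, and a file whose download ultimately fails is skipped instead of aborting the whole transfer. A hedged sketch of the same control flow in isolation; the function and parameter names here are illustrative, not the package's API:

```ts
// Retry a promise-returning function up to `attempts` times, rethrowing the last error.
async function withRetry<T>(fn: () => Promise<T>, attempts = 3): Promise<T> {
  let lastError: unknown;
  for (let i = 0; i < attempts; i++) {
    try {
      return await fn();
    } catch (error) {
      lastError = error;
      console.error(`Attempt ${i + 1} failed: ${error}`);
    }
  }
  throw lastError;
}

// The caller skips the current item rather than aborting the whole loop,
// mirroring the `if (!fileData) continue;` guard in the diff above.
async function copyAll(ids: string[], download: (id: string) => Promise<Buffer>) {
  for (const id of ids) {
    const data = await withRetry(() => download(id)).catch(() => undefined);
    if (!data) {
      console.error(`Skipping ${id}: download failed after retries`);
      continue;
    }
    console.log(`Copying ${id} (${data.length} bytes)`);
    // ...upload `data` to the destination here...
  }
}
```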
package/package.json CHANGED

@@ -1,7 +1,7 @@
 {
   "name": "appwrite-utils-cli",
   "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-  "version": "0.0.70",
+  "version": "0.0.72",
   "main": "src/main.ts",
   "type": "module",
   "repository": {
@@ -33,23 +33,23 @@
   },
   "dependencies": {
     "@types/inquirer": "^9.0.7",
-    "appwrite-utils": "
-    "commander": "^12.
-    "inquirer": "^9.
+    "appwrite-utils": "workspace:*",
+    "commander": "^12.1.0",
+    "inquirer": "^9.3.1",
     "js-yaml": "^4.1.0",
     "lodash": "^4.17.21",
     "luxon": "^3.4.4",
     "nanostores": "^0.10.3",
     "node-appwrite": "^13.0.0",
-    "tsx": "^4.
-    "
+    "tsx": "^4.15.8",
+    "ulidx": "^2.3.0",
     "winston": "^3.13.0",
-    "zod": "^3.
+    "zod": "^3.23.8"
   },
   "devDependencies": {
     "@types/js-yaml": "^4.0.9",
-    "@types/lodash": "^4.17.
+    "@types/lodash": "^4.17.6",
     "@types/luxon": "^3.4.2",
-    "typescript": "^5.
+    "typescript": "^5.5.2"
   }
 }
package/src/migrations/storage.ts CHANGED

@@ -388,11 +388,29 @@ export const transferStorageLocalToLocal = async (
   );
   const allFromFiles = fromFiles.files;
   let numberOfFiles = 0;
+
+  const downloadFileWithRetry = async (bucketId: string, fileId: string) => {
+    let attempts = 3;
+    while (attempts > 0) {
+      try {
+        return await storage.getFileDownload(bucketId, fileId);
+      } catch (error) {
+        console.error(`Error downloading file ${fileId}: ${error}`);
+        attempts--;
+        if (attempts === 0) throw error;
+      }
+    }
+  };
+
   if (fromFiles.files.length < 100) {
     for (const file of allFromFiles) {
       const fileData = await tryAwaitWithRetry(
-        async () => await storage.getFileDownload(file.bucketId, file.$id)
+        async () => await downloadFileWithRetry(file.bucketId, file.$id)
       );
+      if (!fileData) {
+        console.error(`Error downloading file ${file.$id}`);
+        continue;
+      }
       const fileToCreate = InputFile.fromBuffer(
         Buffer.from(fileData),
         file.name
@@ -428,8 +446,12 @@ export const transferStorageLocalToLocal = async (
   }
   for (const file of allFromFiles) {
     const fileData = await tryAwaitWithRetry(
-      async () => await storage.getFileDownload(file.bucketId, file.$id)
+      async () => await downloadFileWithRetry(file.bucketId, file.$id)
    );
+    if (!fileData) {
+      console.error(`Error downloading file ${file.$id}`);
+      continue;
+    }
     const fileToCreate = InputFile.fromBuffer(
       Buffer.from(fileData),
       file.name
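For orientation, a hedged sketch of invoking the patched transfer from a script. The truncated hunk headers above suggest the function takes a node-appwrite Storage client plus source and target bucket IDs; if the actual signature carries more parameters, adjust accordingly. Endpoint, project, key, bucket IDs, and the import path are placeholders.

```ts
// Sketch only: assumes transferStorageLocalToLocal(storage, fromBucketId, toBucketId),
// as suggested by the (truncated) signatures in the hunks above.
import { Client, Storage } from "node-appwrite";
import { transferStorageLocalToLocal } from "./src/migrations/storage.js"; // assumed path

const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1") // placeholder endpoint
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");

const storage = new Storage(client);

// Copies every file from the source bucket into the target bucket,
// retrying failed downloads and skipping files that still fail.
await transferStorageLocalToLocal(storage, "<FROM_BUCKET_ID>", "<TO_BUCKET_ID>");
```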