appwrite-utils-cli 0.0.262 → 0.0.263
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -5
- package/dist/main.js +6 -0
- package/dist/migrations/afterImportActions.d.ts +3 -1
- package/dist/migrations/afterImportActions.js +2 -2
- package/dist/migrations/appwriteToX.d.ts +109 -0
- package/dist/migrations/appwriteToX.js +88 -0
- package/dist/migrations/backup.d.ts +4 -4
- package/dist/migrations/dataLoader.d.ts +17 -24
- package/dist/migrations/dataLoader.js +26 -32
- package/dist/migrations/databases.d.ts +2 -0
- package/dist/migrations/databases.js +23 -0
- package/dist/migrations/schema.d.ts +345 -32
- package/dist/migrations/schema.js +34 -14
- package/dist/migrations/schemaStrings.d.ts +1 -0
- package/dist/migrations/schemaStrings.js +10 -0
- package/dist/utilsController.d.ts +1 -0
- package/dist/utilsController.js +7 -0
- package/package.json +1 -1
- package/src/main.ts +6 -0
- package/src/migrations/afterImportActions.ts +2 -2
- package/src/migrations/appwriteToX.ts +122 -0
- package/src/migrations/dataLoader.ts +35 -43
- package/src/migrations/databases.ts +25 -0
- package/src/migrations/schema.ts +40 -14
- package/src/migrations/schemaStrings.ts +11 -0
- package/src/utilsController.ts +9 -0
package/README.md
CHANGED
````diff
@@ -15,12 +15,20 @@
 
 ## Installation
 
-To use `appwrite-utils-cli`, you
+To use `appwrite-utils-cli`, you can install it globally via npm to make it accessible from anywhere in your command line:
 
 ```bash
 npm install -g appwrite-utils-cli
 ```
 
+However, given how quickly this project is evolving, I recommend the following command instead:
+
+```bash
+npx --package=appwrite-utils-cli@latest appwrite-migrate -- --arg1 --arg2 --arg3
+```
+
+**DO NOT INSTALL THIS LOCALLY INTO YOUR PROJECT, IT IS MEANT TO BE USED AS A COMMAND LINE TOOL ONLY**
+
 ## Usage
 
 After installation, you can access the tool directly from your command line using the provided commands. Here's how you can use the different functionalities:
@@ -30,7 +38,7 @@ After installation, you can access the tool directly from your command line usin
 Set up your Appwrite project with necessary configurations:
 
 ```bash
-appwrite-setup
+npx --package=appwrite-utils-cli@latest appwrite-setup
 ```
 
 To generate an example configuration file:
@@ -39,12 +47,20 @@ To generate an example configuration file:
 appwrite-setup --example
 ```
 
+To synchronize your `appwriteConfig.yaml` with your Appwrite database, first run the setup command and fill in your Appwrite instance's details under `projectId`, `endpoint`, and `apiKey`, then run the following:
+
+```bash
+npx --package=appwrite-utils-cli@latest appwrite-migrate -- --sync
+```
+
+This will initialize your config and generate Zod schemas for your database in `src/appwrite/schemas`.
+
 ### Running Migrations and Tasks
 
 Run migration and management tasks with specific flags according to your needs:
 
 ```bash
-appwrite-migrate --args
+npx --package=appwrite-utils-cli@latest appwrite-migrate --args
 ```
 
 Replace `--args` with the appropriate options:
@@ -58,17 +74,19 @@ Replace `--args` with the appropriate options:
 - `--import`: Import data into your databases.
 - `--backup`: Perform a backup of your databases.
 - `--wipe-users` or `--wipeUsers`: Wipe all user data.
+- `--write-data` or `--writeData`: Write the converted import data to a file.
+- `--sync`: Synchronize your project's config and generate schemas for your database.
 
 For example, to run migrations in a development environment and import data:
 
 ```bash
-appwrite-migrate --dev --import
+npx --package=appwrite-utils-cli@latest appwrite-migrate --dev --import
 ```
 
 To initialize your project and generate schemas without importing data:
 
 ```bash
-appwrite-migrate --init
+npx --package=appwrite-utils-cli@latest appwrite-migrate --init
 ```
 
 This setup ensures that developers have robust tools at their fingertips to manage complex Appwrite projects effectively from the command line. Logging for information and errors is also enabled automatically, since the console output can be hard to keep up with.
@@ -81,6 +99,7 @@ This setup ensures that developers have robust tools at their fingertips to mana
 
 ### Changelog
 
+- 0.0.254: Added `--sync` to synchronize your Appwrite instance with your local `appwriteConfig.yaml` and generate schemas
 - 0.0.253: Added `--writeData` (or `--write-data`) to write the output of the import data to a file called dataLoaderOutput in your root dir
 - 0.0.23: Added batching to user deletion
 - 0.0.22: Converted all import processes except `postImportActions` and Relationship Resolution to the local data import, so it should be much faster.
````
package/dist/main.js
CHANGED
````diff
@@ -4,6 +4,7 @@ const args = process.argv.slice(2);
 async function main() {
     const controller = new UtilsController();
     await controller.init();
+    let sync = false;
     let runProd = false;
     let runStaging = false;
     let runDev = false;
@@ -14,6 +15,9 @@ async function main() {
     let importData = false;
     let wipeDocuments = false;
    let shouldWriteFile = false;
+    if (args.includes("--sync")) {
+        sync = true;
+    }
     if (args.includes("--prod")) {
         runProd = true;
     }
@@ -46,6 +50,7 @@ async function main() {
     }
     if (args.includes("--init")) {
         await controller.run({
+            sync: sync,
             runProd: runProd,
             runStaging: runStaging,
             runDev: runDev,
@@ -62,6 +67,7 @@ async function main() {
     }
     else {
         await controller.run({
+            sync: sync,
             runProd: runProd,
             runStaging: runStaging,
             runDev: runDev,
````
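The change simply threads one more boolean from `process.argv` into `controller.run`. For readers skimming the diff, here is a condensed sketch of the flag-scanning pattern `main.ts` uses; the `has` helper is an illustration, not package code:

```typescript
// Condensed sketch of the argv-scanning pattern above.
// Only flags documented in the README are shown; `has` is hypothetical.
const args = process.argv.slice(2);
const has = (...names: string[]) => names.some((name) => args.includes(name));

const options = {
  sync: has("--sync"), // new in this release
  runProd: has("--prod"),
  runDev: has("--dev"),
  doImport: has("--import"),
  shouldWriteFile: has("--write-data", "--writeData"),
};
```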
package/dist/migrations/afterImportActions.d.ts
CHANGED
````diff
@@ -1,5 +1,7 @@
-import { type Models } from "node-appwrite";
+import { Databases, Storage, type Models } from "node-appwrite";
 import type { AppwriteConfig } from "./schema.js";
+export declare const getDatabaseFromConfig: (config: AppwriteConfig) => Databases;
+export declare const getStorageFromConfig: (config: AppwriteConfig) => Storage;
 export interface AfterImportActions {
     [key: string]: (config: AppwriteConfig, ...args: any[]) => Promise<any>;
 }
````
package/dist/migrations/afterImportActions.js
CHANGED
````diff
@@ -3,7 +3,7 @@ import path from "path";
 import fs from "fs";
 import os from "os";
 import { logger } from "./logging.js";
-const getDatabaseFromConfig = (config) => {
+export const getDatabaseFromConfig = (config) => {
     if (!config.appwriteClient) {
         config.appwriteClient = new Client()
             .setEndpoint(config.appwriteEndpoint)
@@ -12,7 +12,7 @@ const getDatabaseFromConfig = (config) => {
     }
     return new Databases(config.appwriteClient);
 };
-const getStorageFromConfig = (config) => {
+export const getStorageFromConfig = (config) => {
     if (!config.appwriteClient) {
         config.appwriteClient = new Client()
             .setEndpoint(config.appwriteEndpoint)
````
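Promoting `getDatabaseFromConfig` and `getStorageFromConfig` to exports lets other modules reuse the lazily created Appwrite client rather than constructing their own; the new `appwriteToX.js` below does exactly that. A minimal consumption sketch, assuming a valid `AppwriteConfig` is already in hand (the `loadConfig` helper is hypothetical):

```typescript
import { getDatabaseFromConfig, getStorageFromConfig } from "./afterImportActions.js";
import type { AppwriteConfig } from "./schema.js";

// Hypothetical stand-in for however the config is obtained
// (in this package it comes from appwriteConfig.yaml).
declare function loadConfig(): AppwriteConfig;

const config = loadConfig();
// Both helpers reuse config.appwriteClient if it is already initialized,
// so repeated calls share one client.
const db = getDatabaseFromConfig(config);
const storage = getStorageFromConfig(config);

const { databases, total } = await db.list();
console.log(`Project has ${total} databases; first page: ${databases.length}`);
```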
package/dist/migrations/appwriteToX.d.ts
ADDED
````diff
@@ -0,0 +1,109 @@
+import { type Models } from "node-appwrite";
+import { type AppwriteConfig } from "./schema.js";
+export declare class AppwriteToX {
+    config: AppwriteConfig;
+    updatedConfig: AppwriteConfig;
+    collToAttributeMap: Map<string, ({
+        key: string;
+        type: "string";
+        error: string;
+        required: boolean;
+        array: boolean;
+        size: number;
+        xdefault?: string | null | undefined;
+        encrypted?: boolean | undefined;
+    } | {
+        key: string;
+        type: "integer";
+        error: string;
+        required: boolean;
+        array: boolean;
+        min?: number | undefined;
+        max?: number | undefined;
+        xdefault?: number | null | undefined;
+    } | {
+        key: string;
+        type: "float";
+        error: string;
+        required: boolean;
+        array: boolean;
+        min?: number | undefined;
+        max?: number | undefined;
+        xdefault?: number | null | undefined;
+    } | {
+        key: string;
+        type: "boolean";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: boolean | null | undefined;
+    } | {
+        key: string;
+        type: "datetime";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "email";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "ip";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "url";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "enum";
+        error: string;
+        required: boolean;
+        array: boolean;
+        elements: string[];
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "relationship";
+        error: string;
+        required: boolean;
+        relatedCollection: string;
+        relationType: "oneToMany" | "manyToOne" | "oneToOne" | "manyToMany";
+        twoWay: boolean;
+        twoWayKey: string;
+        onDelete: "setNull" | "cascade" | "restrict";
+        side: "parent" | "child";
+        array?: boolean | undefined;
+        importMapping?: {
+            originalIdField: string;
+            targetField?: string | undefined;
+        } | undefined;
+    })[]>;
+    appwriteFolderPath: string;
+    constructor(config: AppwriteConfig, appwriteFolderPath: string);
+    parsePermissionString: (permissionString: string) => {
+        permission: string;
+        target: string;
+    };
+    parsePermissionsArray: (permissions: string[]) => ({
+        permission: string;
+        target: string;
+    } | {
+        permission: string;
+        target: string;
+    })[];
+    updateCollectionConfigAttributes: (collection: Models.Collection) => void;
+    appwriteSync(config: AppwriteConfig): Promise<void>;
+    toSchemas(): Promise<void>;
+}
````
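The public surface above maps directly onto the new `--sync` flow: construct the class with a config and an output folder, then call `toSchemas()`. A minimal sketch, assuming a valid `AppwriteConfig` is in hand (the folder path is illustrative):

```typescript
import { AppwriteToX } from "./appwriteToX.js";
import type { AppwriteConfig } from "./schema.js";

// In the real flow the config comes from appwriteConfig.yaml.
declare const config: AppwriteConfig;

const sync = new AppwriteToX(config, "./src/appwrite"); // folder path is an assumption
// Pulls databases and collections from Appwrite, merges them into the
// config, then writes the updated YAML and generated Zod schemas.
await sync.toSchemas();
```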
package/dist/migrations/appwriteToX.js
ADDED
````diff
@@ -0,0 +1,88 @@
+import { SchemaGenerator } from "./schemaStrings.js";
+import { Databases, Query } from "node-appwrite";
+import { fetchAllCollections } from "./collections.js";
+import { fetchAllDatabases } from "./databases.js";
+import { collectionSchema, attributeSchema, AppwriteConfigSchema, permissionsSchema, attributesSchema, indexesSchema, } from "./schema.js";
+import { getDatabaseFromConfig } from "./afterImportActions.js";
+export class AppwriteToX {
+    config;
+    updatedConfig;
+    collToAttributeMap = new Map();
+    appwriteFolderPath;
+    constructor(config, appwriteFolderPath) {
+        this.config = config;
+        this.updatedConfig = config;
+        this.appwriteFolderPath = appwriteFolderPath;
+    }
+    // Function to parse a single permission string
+    parsePermissionString = (permissionString) => {
+        const match = permissionString.match(/^(\w+)\('([^']+)'\)$/);
+        if (!match) {
+            throw new Error(`Invalid permission format: ${permissionString}`);
+        }
+        return {
+            permission: match[1],
+            target: match[2],
+        };
+    };
+    // Function to parse an array of permission strings
+    parsePermissionsArray = (permissions) => {
+        const parsedPermissions = permissionsSchema.parse(permissions);
+        // Validate the parsed permissions using Zod
+        return parsedPermissions ?? [];
+    };
+    updateCollectionConfigAttributes = (collection) => {
+        for (const attribute of collection.attributes) {
+            const attributeMap = this.collToAttributeMap.get(collection.name);
+            const attributeParsed = attributeSchema.parse(attribute);
+            this.collToAttributeMap
+                .get(collection.name)
+                ?.push(attributeParsed);
+        }
+    };
+    async appwriteSync(config) {
+        const db = getDatabaseFromConfig(config);
+        const databases = await fetchAllDatabases(db);
+        let updatedConfig = { ...config };
+        // Loop through each database
+        for (const database of databases) {
+            const collections = await fetchAllCollections(database.$id, db);
+            // Loop through each collection in the current database
+            for (const collection of collections) {
+                const existingCollectionIndex = updatedConfig.collections.findIndex((c) => c.name === collection.name);
+                // Parse the collection permissions and attributes
+                const collPermissions = this.parsePermissionsArray(collection.$permissions);
+                const collAttributes = attributesSchema.parse(collection.attributes);
+                this.collToAttributeMap.set(collection.name, collAttributes);
+                const collIndexes = indexesSchema.parse(collection.indexes);
+                // Prepare the collection object to be added or updated
+                const collToPush = collectionSchema.parse({
+                    name: collection.name,
+                    enabled: collection.enabled,
+                    documentSecurity: collection.documentSecurity,
+                    $createdAt: collection.$createdAt,
+                    $updatedAt: collection.$updatedAt,
+                    $permissions: collPermissions.length > 0 ? collPermissions : undefined,
+                    indexes: collIndexes.length > 0 ? collIndexes : undefined,
+                    attributes: collAttributes.length > 0 ? collAttributes : undefined,
+                });
+                if (existingCollectionIndex !== -1) {
+                    // Update existing collection
+                    updatedConfig.collections[existingCollectionIndex] = collToPush;
+                }
+                else {
+                    // Add new collection
+                    updatedConfig.collections.push(collToPush);
+                }
+            }
+            console.log(`Processed ${collections.length} collections in ${database.name}`);
+        }
+        this.updatedConfig = updatedConfig;
+    }
+    async toSchemas() {
+        await this.appwriteSync(this.config);
+        const generator = new SchemaGenerator(this.updatedConfig, this.appwriteFolderPath);
+        generator.updateYamlSchemas();
+        generator.generateSchemas();
+    }
+}
````
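For reference, the shapes `parsePermissionString` accepts follow directly from its regex `/^(\w+)\('([^']+)'\)$/`: a word, then a parenthesized single-quoted target, nothing else. A few worked examples (the config value is a stand-in):

```typescript
import { AppwriteToX } from "./appwriteToX.js";
import type { AppwriteConfig } from "./schema.js";

declare const config: AppwriteConfig; // any valid config works here

const { parsePermissionString } = new AppwriteToX(config, "./src/appwrite");

parsePermissionString("read('any')");           // => { permission: "read", target: "any" }
parsePermissionString("update('team:staff')");  // => { permission: "update", target: "team:staff" }
parsePermissionString("read(any)");             // throws: Invalid permission format: read(any)
```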
package/dist/migrations/backup.d.ts
CHANGED
````diff
@@ -265,12 +265,12 @@ export declare const getMigrationCollectionSchemas: () => {
     })[];
     name: string;
     $id: string;
+    enabled: boolean;
+    documentSecurity: boolean;
     $permissions: {
         permission: string;
         target: string;
     }[];
-    enabled: boolean;
-    documentSecurity: boolean;
     indexes: {
         key: string;
         type: "key" | "unique" | "fulltext";
@@ -494,12 +494,12 @@ export declare const getMigrationCollectionSchemas: () => {
     })[];
     name: string;
     $id: string;
+    enabled: boolean;
+    documentSecurity: boolean;
     $permissions: {
         permission: string;
         target: string;
     }[];
-    enabled: boolean;
-    documentSecurity: boolean;
     indexes: {
         key: string;
         type: "key" | "unique" | "fulltext";
````
package/dist/migrations/dataLoader.d.ts
CHANGED
````diff
@@ -4,7 +4,10 @@ import { z } from "zod";
 import { type Databases } from "node-appwrite";
 export declare const CollectionImportDataSchema: z.ZodObject<{
     collection: z.ZodOptional<z.ZodObject<Omit<{
+        name: z.ZodString;
         $id: z.ZodDefault<z.ZodOptional<z.ZodString>>;
+        enabled: z.ZodDefault<z.ZodBoolean>;
+        documentSecurity: z.ZodDefault<z.ZodBoolean>;
         $createdAt: z.ZodString;
         $updatedAt: z.ZodString;
         $permissions: z.ZodDefault<z.ZodArray<z.ZodObject<{
@@ -17,10 +20,6 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
             permission: string;
             target: string;
         }>, "many">>;
-        databaseId: z.ZodOptional<z.ZodString>;
-        name: z.ZodString;
-        enabled: z.ZodDefault<z.ZodBoolean>;
-        documentSecurity: z.ZodDefault<z.ZodBoolean>;
         attributes: z.ZodDefault<z.ZodArray<z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodObject<{
             key: z.ZodString;
             type: z.ZodDefault<z.ZodLiteral<"string">>;
@@ -232,14 +231,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
             elements?: string[] | undefined;
             xdefault?: string | null | undefined;
         }>]>, z.ZodObject<{
-            key: z.ZodString;
-            * Transforms the given item based on the provided attribute mappings.
-            * This method applies conversion rules to the item's attributes as defined in the attribute mappings.
-            *
-            * @param item - The item to be transformed.
-            * @param attributeMappings - The mappings that define how each attribute should be transformed.
-            * @returns The transformed item.
-            */
+            key: z.ZodString;
             type: z.ZodDefault<z.ZodLiteral<"relationship">>;
             error: z.ZodDefault<z.ZodString>;
             required: z.ZodDefault<z.ZodBoolean>;
@@ -483,6 +475,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
                 targetField: string;
             } | undefined;
         }>, "many">>;
+        databaseId: z.ZodOptional<z.ZodString>;
     }, "$createdAt" | "$updatedAt">, "strip", z.ZodTypeAny, {
         attributes: ({
             key: string;
@@ -573,12 +566,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
         })[];
         name: string;
         $id: string;
+        enabled: boolean;
+        documentSecurity: boolean;
         $permissions: {
             permission: string;
             target: string;
         }[];
-        enabled: boolean;
-        documentSecurity: boolean;
         indexes: {
             key: string;
             type: "key" | "unique" | "fulltext";
@@ -712,13 +705,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
         } | undefined;
         })[] | undefined;
         $id?: string | undefined;
+        enabled?: boolean | undefined;
+        documentSecurity?: boolean | undefined;
         $permissions?: {
             permission: string;
             target: string;
         }[] | undefined;
-        databaseId?: string | undefined;
-        enabled?: boolean | undefined;
-        documentSecurity?: boolean | undefined;
         indexes?: {
             key: string;
             attributes: string[];
@@ -761,6 +753,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
                 targetField: string;
             } | undefined;
         }[] | undefined;
+        databaseId?: string | undefined;
     }>>;
     data: z.ZodArray<z.ZodObject<{
         rawData: z.ZodAny;
@@ -1141,12 +1134,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
         })[];
         name: string;
         $id: string;
+        enabled: boolean;
+        documentSecurity: boolean;
         $permissions: {
             permission: string;
             target: string;
         }[];
-        enabled: boolean;
-        documentSecurity: boolean;
         indexes: {
             key: string;
             type: "key" | "unique" | "fulltext";
@@ -1321,13 +1314,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
         } | undefined;
         })[] | undefined;
         $id?: string | undefined;
+        enabled?: boolean | undefined;
+        documentSecurity?: boolean | undefined;
         $permissions?: {
             permission: string;
             target: string;
         }[] | undefined;
-        databaseId?: string | undefined;
-        enabled?: boolean | undefined;
-        documentSecurity?: boolean | undefined;
         indexes?: {
             key: string;
             attributes: string[];
@@ -1370,6 +1362,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
             targetField: string;
         } | undefined;
         }[] | undefined;
+        databaseId?: string | undefined;
     } | undefined;
 }>;
 export type CollectionImportData = z.infer<typeof CollectionImportDataSchema>;
@@ -1509,12 +1502,12 @@ export declare class DataLoader {
         })[];
         name: string;
         $id: string;
+        enabled: boolean;
+        documentSecurity: boolean;
         $permissions: {
             permission: string;
             target: string;
         }[];
-        enabled: boolean;
-        documentSecurity: boolean;
         indexes: {
             key: string;
             type: "key" | "unique" | "fulltext";
````
package/dist/migrations/dataLoader.js
CHANGED
````diff
@@ -285,28 +285,29 @@ export class DataLoader {
                 const oldIds = Array.isArray(item.context[idMapping.sourceField])
                     ? item.context[idMapping.sourceField]
                     : [item.context[idMapping.sourceField]];
+                const resolvedNewIds = [];
                 oldIds.forEach((oldId) => {
-
-
-
-                    if (newIdForOldId
-
-
-                    // Properly update the target field based on whether it should be an array
-                    if (isArray) {
-                        if (!Array.isArray(item.finalData[targetField])) {
-                            item.finalData[targetField] = [newIdForOldId];
-                        }
-                        else if (!item.finalData[targetField].includes(newIdForOldId)) {
-                            item.finalData[targetField].push(newIdForOldId);
-                        }
-                    }
-                    else {
-                        item.finalData[targetField] = newIdForOldId;
-                    }
-                    needsUpdate = true;
+                    // Attempt to find a new ID for the old ID
+                    let newIdForOldId = this.findNewIdForOldId(oldId, idMapping);
+                    // Check if a new ID was found and it's not already included
+                    if (newIdForOldId &&
+                        !resolvedNewIds.includes(newIdForOldId)) {
+                        resolvedNewIds.push(newIdForOldId);
                     }
                 });
+                if (resolvedNewIds.length) {
+                    const targetField = idMapping.fieldToSet || idMapping.targetField;
+                    const isArray = collectionConfig.attributes.some((attribute) => attribute.key === targetField && attribute.array);
+                    // Set the target field based on whether it's an array or single value
+                    if (isArray) {
+                        item.finalData[targetField] = resolvedNewIds;
+                    }
+                    else {
+                        // In case of a single value, use the first resolved ID
+                        item.finalData[targetField] = resolvedNewIds[0];
+                    }
+                    needsUpdate = true;
+                }
             }
         }
     }
@@ -320,23 +321,16 @@ export class DataLoader {
         }
     }
     findNewIdForOldId(oldId, idMapping) {
-        //
-        let newIdForOldId;
+        // First, check if the old ID has been merged into a new one
         for (const [newUserId, oldIds] of this.mergedUserMap.entries()) {
             if (oldIds.includes(oldId)) {
-                newIdForOldId = newUserId;
-                break;
-            }
-        }
-        // If no new ID found in merged users, check the old-to-new ID map for the target collection
-        if (!newIdForOldId) {
-            const targetCollectionKey = this.getCollectionKey(idMapping.targetCollection);
-            const targetOldIdToNewIdMap = this.oldIdToNewIdPerCollectionMap.get(targetCollectionKey);
-            if (targetOldIdToNewIdMap && targetOldIdToNewIdMap.has(oldId)) {
-                newIdForOldId = targetOldIdToNewIdMap.get(oldId);
+                return newUserId;
             }
         }
-        return newIdForOldId;
+        // If not merged, look for a direct mapping from old to new ID
+        const targetCollectionKey = this.getCollectionKey(idMapping.targetCollection);
+        const targetOldIdToNewIdMap = this.oldIdToNewIdPerCollectionMap.get(targetCollectionKey);
+        return targetOldIdToNewIdMap?.get(oldId);
     }
     writeMapsToJsonFile() {
         const outputDir = path.resolve(process.cwd());
````
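The refactor turns `findNewIdForOldId` into a pure two-stage lookup: an old ID that belongs to a merged user resolves to the surviving user's ID, otherwise the target collection's old-to-new map decides. A toy illustration of those semantics, with invented map contents and the collection key passed directly instead of an `idMapping` object:

```typescript
// Invented example data mirroring the two maps the method consults.
const mergedUserMap = new Map<string, string[]>([
  ["user_new_1", ["user_old_7", "user_old_9"]], // two old users merged into one
]);
const oldIdToNewIdPerCollectionMap = new Map<string, Map<string, string>>([
  ["orders", new Map([["order_old_3", "order_new_3"]])],
]);

function findNewIdForOldId(oldId: string, targetCollectionKey: string) {
  // Stage 1: an old user ID that was merged resolves to the surviving user.
  for (const [newUserId, oldIds] of mergedUserMap.entries()) {
    if (oldIds.includes(oldId)) return newUserId;
  }
  // Stage 2: otherwise fall back to the per-collection old-to-new mapping.
  return oldIdToNewIdPerCollectionMap.get(targetCollectionKey)?.get(oldId);
}

findNewIdForOldId("user_old_9", "orders");  // => "user_new_1"
findNewIdForOldId("order_old_3", "orders"); // => "order_new_3"
findNewIdForOldId("missing", "orders");     // => undefined
```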
package/dist/migrations/databases.js
ADDED
````diff
@@ -0,0 +1,23 @@
+import { Databases, Query } from "node-appwrite";
+export const fetchAllDatabases = async (database) => {
+    const databases = await database.list([Query.limit(25)]);
+    const allDatabases = databases.databases;
+    let lastDatabaseId = allDatabases[allDatabases.length - 1].$id;
+    if (databases.databases.length < 25) {
+        return allDatabases;
+    }
+    else {
+        while (lastDatabaseId) {
+            const databases = await database.list([
+                Query.limit(25),
+                Query.cursorAfter(lastDatabaseId),
+            ]);
+            allDatabases.push(...databases.databases);
+            if (databases.databases.length < 25) {
+                break;
+            }
+            lastDatabaseId = databases.databases[databases.databases.length - 1].$id;
+        }
+    }
+    return allDatabases;
+};
````