appwrite-utils-cli 0.0.262 → 0.0.264
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +24 -5
- package/dist/main.js +6 -0
- package/dist/migrations/afterImportActions.d.ts +3 -1
- package/dist/migrations/afterImportActions.js +2 -2
- package/dist/migrations/appwriteToX.d.ts +109 -0
- package/dist/migrations/appwriteToX.js +110 -0
- package/dist/migrations/backup.d.ts +4 -4
- package/dist/migrations/dataLoader.d.ts +20 -27
- package/dist/migrations/dataLoader.js +49 -43
- package/dist/migrations/databases.d.ts +2 -0
- package/dist/migrations/databases.js +23 -0
- package/dist/migrations/indexes.js +7 -0
- package/dist/migrations/schema.d.ts +345 -32
- package/dist/migrations/schema.js +34 -14
- package/dist/migrations/schemaStrings.d.ts +1 -0
- package/dist/migrations/schemaStrings.js +35 -5
- package/dist/setup.js +0 -0
- package/dist/utilsController.d.ts +1 -0
- package/dist/utilsController.js +7 -0
- package/package.json +1 -1
- package/src/main.ts +6 -0
- package/src/migrations/afterImportActions.ts +2 -2
- package/src/migrations/appwriteToX.ts +156 -0
- package/src/migrations/dataLoader.ts +88 -89
- package/src/migrations/databases.ts +25 -0
- package/src/migrations/indexes.ts +8 -0
- package/src/migrations/schema.ts +40 -14
- package/src/migrations/schemaStrings.ts +48 -12
- package/src/utilsController.ts +9 -0
package/README.md
CHANGED
@@ -15,12 +15,20 @@
 
 ## Installation
 
-To use `appwrite-utils-cli`, you
+To use `appwrite-utils-cli`, you can install it globally via npm to make it accessible from anywhere in your command line:
 
 ```bash
 npm install -g appwrite-utils-cli
 ```
 
+However, due to the nature of the speed at which I am developing this project, I would recommend the following command:
+
+```bash
+npx --package=appwrite-utils-cli@latest appwrite-migrate -- --arg1 --arg2 --arg3
+```
+
+**DO NOT INSTALL THIS LOCALLY INTO YOUR PROJECT, IT IS MEANT TO BE USED AS A COMMAND LINE TOOL ONLY**
+
 ## Usage
 
 After installation, you can access the tool directly from your command line using the provided commands. Here's how you can use the different functionalities:
@@ -30,7 +38,7 @@ After installation, you can access the tool directly from your command line usin
 Set up your Appwrite project with necessary configurations:
 
 ```bash
-appwrite-setup
+npx --package=appwrite-utils-cli@latest appwrite-setup
 ```
 
 To generate an example configuration file:
@@ -39,12 +47,20 @@ To generate an example configuration file:
 appwrite-setup --example
 ```
 
+To synchronize your `appwriteConfig.yaml` with your Appwrite Database, first you must run the setup command and enter your Appwrite instances details in the `projectId`, `endpoint`, and `apiKey`, then run the following
+
+```bash
+npx --package=appwrite-utils-cli@latest appwrite-migrate -- --sync
+```
+
+This will initialize your config and generate schemas for your database using ZOD to `src/appwrite/schemas`
+
 ### Running Migrations and Tasks
 
 Run migration and management tasks with specific flags according to your needs:
 
 ```bash
-appwrite-migrate --args
+npx --package=appwrite-utils-cli@latest appwrite-migrate --args
 ```
 
 Replace `--args` with the appropriate options:
@@ -58,17 +74,19 @@ Replace `--args` with the appropriate options:
 - `--import`: Import data into your databases.
 - `--backup`: Perform a backup of your databases.
 - `--wipe-users` or `--wipeUsers`: Wipe all user data.
+- `--write-data` or `--writeData`: Write converted imported data to file
+- `--sync`: Synchronize your project's config and generate schema for your database
 
 For example, to run migrations in a development environment and import data:
 
 ```bash
-appwrite-migrate --dev --import
+npx --package=appwrite-utils-cli@latest appwrite-migrate --dev --import
 ```
 
 To initialize your project, generate schemas, but not import data:
 
 ```bash
-appwrite-migrate --init
+npx --package=appwrite-utils-cli@latest appwrite-migrate --init
 ```
 
 This setup ensures that developers have robust tools at their fingertips to manage complex Appwrite projects effectively from the command line. I also have added logging automatically for information and errors as the console can be hard to keep up with.
@@ -81,6 +99,7 @@ This setup ensures that developers have robust tools at their fingertips to mana
 
 ### Changelog
 
+- 0.0.254: Added `--sync` to synchronize your Appwrite instance with your local `appwriteConfig.yaml` and generate schemas
 - 0.0.253: Added `--writeData` (or `--write-data`) to command to write the output of the import data to a file called dataLoaderOutput in your root dir
 - 0.0.23: Added batching to user deletion
 - 0.0.22: Converted all import processes except `postImportActions` and Relationship Resolution to the local data import, so it should be much faster.
package/dist/main.js
CHANGED
@@ -4,6 +4,7 @@ const args = process.argv.slice(2);
 async function main() {
     const controller = new UtilsController();
     await controller.init();
+    let sync = false;
     let runProd = false;
     let runStaging = false;
     let runDev = false;
@@ -14,6 +15,9 @@ async function main() {
     let importData = false;
     let wipeDocuments = false;
     let shouldWriteFile = false;
+    if (args.includes("--sync")) {
+        sync = true;
+    }
     if (args.includes("--prod")) {
         runProd = true;
     }
@@ -46,6 +50,7 @@ async function main() {
     }
     if (args.includes("--init")) {
         await controller.run({
+            sync: sync,
             runProd: runProd,
             runStaging: runStaging,
             runDev: runDev,
@@ -62,6 +67,7 @@ async function main() {
     }
     else {
         await controller.run({
+            sync: sync,
             runProd: runProd,
             runStaging: runStaging,
             runDev: runDev,
package/dist/migrations/afterImportActions.d.ts
CHANGED
@@ -1,5 +1,7 @@
-import { type Models } from "node-appwrite";
+import { Databases, Storage, type Models } from "node-appwrite";
 import type { AppwriteConfig } from "./schema.js";
+export declare const getDatabaseFromConfig: (config: AppwriteConfig) => Databases;
+export declare const getStorageFromConfig: (config: AppwriteConfig) => Storage;
 export interface AfterImportActions {
     [key: string]: (config: AppwriteConfig, ...args: any[]) => Promise<any>;
 }
package/dist/migrations/afterImportActions.js
CHANGED
@@ -3,7 +3,7 @@ import path from "path";
 import fs from "fs";
 import os from "os";
 import { logger } from "./logging.js";
-const getDatabaseFromConfig = (config) => {
+export const getDatabaseFromConfig = (config) => {
     if (!config.appwriteClient) {
         config.appwriteClient = new Client()
             .setEndpoint(config.appwriteEndpoint)
@@ -12,7 +12,7 @@ const getDatabaseFromConfig = (config) => {
     }
     return new Databases(config.appwriteClient);
 };
-const getStorageFromConfig = (config) => {
+export const getStorageFromConfig = (config) => {
     if (!config.appwriteClient) {
         config.appwriteClient = new Client()
             .setEndpoint(config.appwriteEndpoint)
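Because `getDatabaseFromConfig` and `getStorageFromConfig` are now exported, other modules in the package (such as the new `appwriteToX.js` below) can reuse the lazily created Appwrite client. A minimal sketch of such a consumer, assuming an already-loaded `AppwriteConfig`; the function name `listCollectionsFor` and the `databaseId` parameter are illustrative, not part of the package:

```ts
// Sketch only: reusing the newly exported helpers from another module.
import { getDatabaseFromConfig, getStorageFromConfig } from "./afterImportActions.js";
import type { AppwriteConfig } from "./schema.js";

const listCollectionsFor = async (config: AppwriteConfig, databaseId: string) => {
  const db = getDatabaseFromConfig(config);      // creates config.appwriteClient if it is missing
  const storage = getStorageFromConfig(config);  // reuses the same cached client
  const { collections } = await db.listCollections(databaseId);
  return { collections, storage };
};
```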
package/dist/migrations/appwriteToX.d.ts
ADDED
@@ -0,0 +1,109 @@
+import { type Models } from "node-appwrite";
+import { type AppwriteConfig } from "./schema.js";
+export declare class AppwriteToX {
+    config: AppwriteConfig;
+    updatedConfig: AppwriteConfig;
+    collToAttributeMap: Map<string, ({
+        key: string;
+        type: "string";
+        error: string;
+        required: boolean;
+        array: boolean;
+        size: number;
+        xdefault?: string | null | undefined;
+        encrypted?: boolean | undefined;
+    } | {
+        key: string;
+        type: "integer";
+        error: string;
+        required: boolean;
+        array: boolean;
+        min?: number | undefined;
+        max?: number | undefined;
+        xdefault?: number | null | undefined;
+    } | {
+        key: string;
+        type: "float";
+        error: string;
+        required: boolean;
+        array: boolean;
+        min?: number | undefined;
+        max?: number | undefined;
+        xdefault?: number | null | undefined;
+    } | {
+        key: string;
+        type: "boolean";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: boolean | null | undefined;
+    } | {
+        key: string;
+        type: "datetime";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "email";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "ip";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "url";
+        error: string;
+        required: boolean;
+        array: boolean;
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "enum";
+        error: string;
+        required: boolean;
+        array: boolean;
+        elements: string[];
+        xdefault?: string | null | undefined;
+    } | {
+        key: string;
+        type: "relationship";
+        error: string;
+        required: boolean;
+        relatedCollection: string;
+        relationType: "oneToMany" | "manyToOne" | "oneToOne" | "manyToMany";
+        twoWay: boolean;
+        twoWayKey: string;
+        onDelete: "setNull" | "cascade" | "restrict";
+        side: "parent" | "child";
+        array?: boolean | undefined;
+        importMapping?: {
+            originalIdField: string;
+            targetField?: string | undefined;
+        } | undefined;
+    })[]>;
+    appwriteFolderPath: string;
+    constructor(config: AppwriteConfig, appwriteFolderPath: string);
+    parsePermissionString: (permissionString: string) => {
+        permission: string;
+        target: string;
+    };
+    parsePermissionsArray: (permissions: string[]) => ({
+        permission: string;
+        target: string;
+    } | {
+        permission: string;
+        target: string;
+    })[];
+    updateCollectionConfigAttributes: (collection: Models.Collection) => void;
+    appwriteSync(config: AppwriteConfig): Promise<void>;
+    toSchemas(): Promise<void>;
+}
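The `collToAttributeMap` entries declared above are arrays of per-type attribute shapes. As an illustration only (the collection name, attribute keys, and values below are hypothetical and not taken from the package), one entry could look like this:

```ts
// Hypothetical values matching the "string" and "relationship" branches of the union above.
const booksAttributes = [
  {
    key: "title",
    type: "string" as const,
    error: "Invalid title",
    required: true,
    array: false,
    size: 255,
    xdefault: null,
  },
  {
    key: "author",
    type: "relationship" as const,
    error: "Invalid author",
    required: false,
    relatedCollection: "Authors",
    relationType: "manyToOne" as const,
    twoWay: false,
    twoWayKey: "books",
    onDelete: "setNull" as const,
    side: "parent" as const,
  },
];
// e.g. someAppwriteToXInstance.collToAttributeMap.set("Books", booksAttributes);
```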
package/dist/migrations/appwriteToX.js
ADDED
@@ -0,0 +1,110 @@
+import { SchemaGenerator } from "./schemaStrings.js";
+import { Databases, Query } from "node-appwrite";
+import { fetchAllCollections } from "./collections.js";
+import { fetchAllDatabases } from "./databases.js";
+import { collectionSchema, attributeSchema, AppwriteConfigSchema, permissionsSchema, attributesSchema, indexesSchema, } from "./schema.js";
+import { getDatabaseFromConfig } from "./afterImportActions.js";
+export class AppwriteToX {
+    config;
+    updatedConfig;
+    collToAttributeMap = new Map();
+    appwriteFolderPath;
+    constructor(config, appwriteFolderPath) {
+        this.config = config;
+        this.updatedConfig = config;
+        this.appwriteFolderPath = appwriteFolderPath;
+    }
+    // Function to parse a single permission string
+    parsePermissionString = (permissionString) => {
+        const match = permissionString.match(/^(\w+)\('([^']+)'\)$/);
+        if (!match) {
+            throw new Error(`Invalid permission format: ${permissionString}`);
+        }
+        return {
+            permission: match[1],
+            target: match[2],
+        };
+    };
+    // Function to parse an array of permission strings
+    parsePermissionsArray = (permissions) => {
+        const parsedPermissions = permissionsSchema.parse(permissions);
+        // Validate the parsed permissions using Zod
+        return parsedPermissions ?? [];
+    };
+    updateCollectionConfigAttributes = (collection) => {
+        for (const attribute of collection.attributes) {
+            const attributeMap = this.collToAttributeMap.get(collection.name);
+            const attributeParsed = attributeSchema.parse(attribute);
+            this.collToAttributeMap
+                .get(collection.name)
+                ?.push(attributeParsed);
+        }
+    };
+    async appwriteSync(config) {
+        const db = getDatabaseFromConfig(config);
+        const databases = await fetchAllDatabases(db);
+        let updatedConfig = { ...config };
+        // Loop through each database
+        for (const database of databases) {
+            if (database.name.toLowerCase() === "migrations") {
+                continue;
+            }
+            const collections = await fetchAllCollections(database.$id, db);
+            // Loop through each collection in the current database
+            for (const collection of collections) {
+                const existingCollectionIndex = updatedConfig.collections.findIndex((c) => c.name === collection.name);
+                // Parse the collection permissions and attributes
+                const collPermissions = this.parsePermissionsArray(collection.$permissions);
+                const collAttributes = attributesSchema
+                    .parse(collection.attributes)
+                    .filter((attribute) => attribute.type === "relationship"
+                    ? attribute.side !== "child"
+                    : true);
+                for (const attribute of collAttributes) {
+                    if (attribute.type === "relationship" &&
+                        attribute.relatedCollection) {
+                        console.log(`Fetching related collection for ID: ${attribute.relatedCollection}`);
+                        try {
+                            const relatedCollectionPulled = await db.getCollection(database.$id, attribute.relatedCollection);
+                            console.log(`Fetched Collection Name: ${relatedCollectionPulled.name}`);
+                            attribute.relatedCollection = relatedCollectionPulled.name;
+                            console.log(`Updated attribute.relatedCollection to: ${attribute.relatedCollection}`);
+                        }
+                        catch (error) {
+                            console.log("Error fetching related collection:", error);
+                        }
+                    }
+                }
+                this.collToAttributeMap.set(collection.name, collAttributes);
+                const collIndexes = indexesSchema.parse(collection.indexes);
+                // Prepare the collection object to be added or updated
+                const collToPush = collectionSchema.parse({
+                    name: collection.name,
+                    enabled: collection.enabled,
+                    documentSecurity: collection.documentSecurity,
+                    $createdAt: collection.$createdAt,
+                    $updatedAt: collection.$updatedAt,
+                    $permissions: collPermissions.length > 0 ? collPermissions : undefined,
+                    indexes: collIndexes.length > 0 ? collIndexes : undefined,
+                    attributes: collAttributes.length > 0 ? collAttributes : undefined,
+                });
+                if (existingCollectionIndex !== -1) {
+                    // Update existing collection
+                    updatedConfig.collections[existingCollectionIndex] = collToPush;
+                }
+                else {
+                    // Add new collection
+                    updatedConfig.collections.push(collToPush);
+                }
+            }
+            console.log(`Processed ${collections.length} collections in ${database.name}`);
+        }
+        this.updatedConfig = updatedConfig;
+    }
+    async toSchemas() {
+        await this.appwriteSync(this.config);
+        const generator = new SchemaGenerator(this.updatedConfig, this.appwriteFolderPath);
+        generator.updateYamlSchemas();
+        generator.generateSchemas();
+    }
+}
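For reference, the regex in `parsePermissionString` above only matches strings of the form `name('target')` with single quotes. A small standalone illustration of that behavior; the inputs are made up and the snippet is not part of the package:

```ts
// Same pattern as parsePermissionString in the new appwriteToX.js.
const pattern = /^(\w+)\('([^']+)'\)$/;

console.log("read('any')".match(pattern)?.slice(1));            // [ "read", "any" ]
console.log("update('team:editors')".match(pattern)?.slice(1)); // [ "update", "team:editors" ]
console.log('read("any")'.match(pattern));                      // null -> parsePermissionString would throw
```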
package/dist/migrations/backup.d.ts
CHANGED
@@ -265,12 +265,12 @@ export declare const getMigrationCollectionSchemas: () => {
     })[];
     name: string;
     $id: string;
+    enabled: boolean;
+    documentSecurity: boolean;
     $permissions: {
         permission: string;
         target: string;
     }[];
-    enabled: boolean;
-    documentSecurity: boolean;
     indexes: {
         key: string;
         type: "key" | "unique" | "fulltext";
@@ -494,12 +494,12 @@ export declare const getMigrationCollectionSchemas: () => {
     })[];
     name: string;
     $id: string;
+    enabled: boolean;
+    documentSecurity: boolean;
     $permissions: {
         permission: string;
         target: string;
     }[];
-    enabled: boolean;
-    documentSecurity: boolean;
     indexes: {
         key: string;
         type: "key" | "unique" | "fulltext";
package/dist/migrations/dataLoader.d.ts
CHANGED
@@ -4,7 +4,10 @@ import { z } from "zod";
 import { type Databases } from "node-appwrite";
 export declare const CollectionImportDataSchema: z.ZodObject<{
     collection: z.ZodOptional<z.ZodObject<Omit<{
+        name: z.ZodString;
         $id: z.ZodDefault<z.ZodOptional<z.ZodString>>;
+        enabled: z.ZodDefault<z.ZodBoolean>;
+        documentSecurity: z.ZodDefault<z.ZodBoolean>;
         $createdAt: z.ZodString;
         $updatedAt: z.ZodString;
         $permissions: z.ZodDefault<z.ZodArray<z.ZodObject<{
@@ -17,10 +20,6 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
             permission: string;
             target: string;
         }>, "many">>;
-        databaseId: z.ZodOptional<z.ZodString>;
-        name: z.ZodString;
-        enabled: z.ZodDefault<z.ZodBoolean>;
-        documentSecurity: z.ZodDefault<z.ZodBoolean>;
         attributes: z.ZodDefault<z.ZodArray<z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodUnion<[z.ZodObject<{
             key: z.ZodString;
             type: z.ZodDefault<z.ZodLiteral<"string">>;
@@ -232,14 +231,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
             elements?: string[] | undefined;
             xdefault?: string | null | undefined;
         }>]>, z.ZodObject<{
-            key: z.ZodString;
-             * Transforms the given item based on the provided attribute mappings.
-             * This method applies conversion rules to the item's attributes as defined in the attribute mappings.
-             *
-             * @param item - The item to be transformed.
-             * @param attributeMappings - The mappings that define how each attribute should be transformed.
-             * @returns The transformed item.
-             */
+            key: z.ZodString;
             type: z.ZodDefault<z.ZodLiteral<"relationship">>;
             error: z.ZodDefault<z.ZodString>;
             required: z.ZodDefault<z.ZodBoolean>;
@@ -483,6 +475,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
             targetField: string;
         } | undefined;
     }>, "many">>;
+    databaseId: z.ZodOptional<z.ZodString>;
 }, "$createdAt" | "$updatedAt">, "strip", z.ZodTypeAny, {
     attributes: ({
         key: string;
@@ -573,12 +566,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
     })[];
     name: string;
     $id: string;
+    enabled: boolean;
+    documentSecurity: boolean;
     $permissions: {
         permission: string;
         target: string;
     }[];
-    enabled: boolean;
-    documentSecurity: boolean;
     indexes: {
         key: string;
         type: "key" | "unique" | "fulltext";
@@ -712,13 +705,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
         } | undefined;
     })[] | undefined;
     $id?: string | undefined;
+    enabled?: boolean | undefined;
+    documentSecurity?: boolean | undefined;
     $permissions?: {
         permission: string;
         target: string;
     }[] | undefined;
-    databaseId?: string | undefined;
-    enabled?: boolean | undefined;
-    documentSecurity?: boolean | undefined;
     indexes?: {
         key: string;
         attributes: string[];
@@ -761,6 +753,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
         targetField: string;
     } | undefined;
     }[] | undefined;
+    databaseId?: string | undefined;
 }>>;
     data: z.ZodArray<z.ZodObject<{
         rawData: z.ZodAny;
@@ -1141,12 +1134,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
     })[];
     name: string;
     $id: string;
+    enabled: boolean;
+    documentSecurity: boolean;
     $permissions: {
         permission: string;
         target: string;
     }[];
-    enabled: boolean;
-    documentSecurity: boolean;
     indexes: {
         key: string;
         type: "key" | "unique" | "fulltext";
@@ -1321,13 +1314,12 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
     } | undefined;
     })[] | undefined;
     $id?: string | undefined;
+    enabled?: boolean | undefined;
+    documentSecurity?: boolean | undefined;
     $permissions?: {
         permission: string;
         target: string;
     }[] | undefined;
-    databaseId?: string | undefined;
-    enabled?: boolean | undefined;
-    documentSecurity?: boolean | undefined;
     indexes?: {
         key: string;
         attributes: string[];
@@ -1370,6 +1362,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
         targetField: string;
     } | undefined;
     }[] | undefined;
+    databaseId?: string | undefined;
 } | undefined;
 }>;
 export type CollectionImportData = z.infer<typeof CollectionImportDataSchema>;
@@ -1509,12 +1502,12 @@ export declare class DataLoader {
     })[];
     name: string;
     $id: string;
+    enabled: boolean;
+    documentSecurity: boolean;
     $permissions: {
         permission: string;
         target: string;
     }[];
-    enabled: boolean;
-    documentSecurity: boolean;
     indexes: {
         key: string;
         type: "key" | "unique" | "fulltext";
@@ -1580,7 +1573,7 @@ export declare class DataLoader {
      * @returns The updated source object.
      */
     mergeObjects(source: any, update: any): any;
-    loadData(importDef: ImportDef):
+    loadData(importDef: ImportDef): any[];
     checkMapValuesForId(newId: string, collectionName: string): string | false;
     getTrueUniqueId(collectionName: string): string;
     createContext(db: ConfigDatabase, collection: ConfigCollection, item: any, docId: string): any;
@@ -1592,12 +1585,12 @@ export declare class DataLoader {
      * @param attributeMappings - The mappings that define how each attribute should be transformed.
      * @returns The transformed item.
      */
-    transformData(item: any, attributeMappings: AttributeMappings):
+    transformData(item: any, attributeMappings: AttributeMappings): any;
     setupMaps(dbId: string): Promise<void>;
     getAllUsers(): Promise<import("node-appwrite").Models.User<import("node-appwrite").Models.Preferences>[]>;
     start(dbId: string): Promise<void>;
     updateReferencesInRelatedCollections(): Promise<void>;
-    findNewIdForOldId(oldId: string, idMapping: IdMapping):
+    findNewIdForOldId(oldId: string, idMapping: IdMapping, importDef: ImportDef): any;
     private writeMapsToJsonFile;
     /**
      * Prepares user data by checking for duplicates based on email or phone, adding to a duplicate map if found,