appwrite-utils-cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +80 -0
- package/dist/main.d.ts +2 -0
- package/dist/main.js +74 -0
- package/dist/migrations/afterImportActions.d.ts +12 -0
- package/dist/migrations/afterImportActions.js +196 -0
- package/dist/migrations/attributes.d.ts +4 -0
- package/dist/migrations/attributes.js +158 -0
- package/dist/migrations/backup.d.ts +621 -0
- package/dist/migrations/backup.js +159 -0
- package/dist/migrations/collections.d.ts +16 -0
- package/dist/migrations/collections.js +207 -0
- package/dist/migrations/converters.d.ts +179 -0
- package/dist/migrations/converters.js +575 -0
- package/dist/migrations/dbHelpers.d.ts +5 -0
- package/dist/migrations/dbHelpers.js +54 -0
- package/dist/migrations/importController.d.ts +44 -0
- package/dist/migrations/importController.js +312 -0
- package/dist/migrations/importDataActions.d.ts +44 -0
- package/dist/migrations/importDataActions.js +219 -0
- package/dist/migrations/indexes.d.ts +4 -0
- package/dist/migrations/indexes.js +18 -0
- package/dist/migrations/logging.d.ts +2 -0
- package/dist/migrations/logging.js +14 -0
- package/dist/migrations/migrationHelper.d.ts +18 -0
- package/dist/migrations/migrationHelper.js +66 -0
- package/dist/migrations/queue.d.ts +13 -0
- package/dist/migrations/queue.js +79 -0
- package/dist/migrations/relationships.d.ts +90 -0
- package/dist/migrations/relationships.js +209 -0
- package/dist/migrations/schema.d.ts +3142 -0
- package/dist/migrations/schema.js +485 -0
- package/dist/migrations/schemaStrings.d.ts +12 -0
- package/dist/migrations/schemaStrings.js +261 -0
- package/dist/migrations/setupDatabase.d.ts +7 -0
- package/dist/migrations/setupDatabase.js +151 -0
- package/dist/migrations/storage.d.ts +8 -0
- package/dist/migrations/storage.js +241 -0
- package/dist/migrations/users.d.ts +11 -0
- package/dist/migrations/users.js +114 -0
- package/dist/migrations/validationRules.d.ts +43 -0
- package/dist/migrations/validationRules.js +42 -0
- package/dist/schemas/authUser.d.ts +62 -0
- package/dist/schemas/authUser.js +17 -0
- package/dist/setup.d.ts +2 -0
- package/dist/setup.js +5 -0
- package/dist/types.d.ts +9 -0
- package/dist/types.js +5 -0
- package/dist/utils/configSchema.json +742 -0
- package/dist/utils/helperFunctions.d.ts +34 -0
- package/dist/utils/helperFunctions.js +72 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.js +2 -0
- package/dist/utils/setupFiles.d.ts +2 -0
- package/dist/utils/setupFiles.js +276 -0
- package/dist/utilsController.d.ts +30 -0
- package/dist/utilsController.js +106 -0
- package/package.json +34 -0
- package/src/main.ts +77 -0
- package/src/migrations/afterImportActions.ts +300 -0
- package/src/migrations/attributes.ts +315 -0
- package/src/migrations/backup.ts +189 -0
- package/src/migrations/collections.ts +303 -0
- package/src/migrations/converters.ts +628 -0
- package/src/migrations/dbHelpers.ts +89 -0
- package/src/migrations/importController.ts +509 -0
- package/src/migrations/importDataActions.ts +313 -0
- package/src/migrations/indexes.ts +37 -0
- package/src/migrations/logging.ts +15 -0
- package/src/migrations/migrationHelper.ts +100 -0
- package/src/migrations/queue.ts +119 -0
- package/src/migrations/relationships.ts +336 -0
- package/src/migrations/schema.ts +590 -0
- package/src/migrations/schemaStrings.ts +310 -0
- package/src/migrations/setupDatabase.ts +219 -0
- package/src/migrations/storage.ts +351 -0
- package/src/migrations/users.ts +148 -0
- package/src/migrations/validationRules.ts +63 -0
- package/src/schemas/authUser.ts +23 -0
- package/src/setup.ts +8 -0
- package/src/types.ts +14 -0
- package/src/utils/configSchema.json +742 -0
- package/src/utils/helperFunctions.ts +111 -0
- package/src/utils/index.ts +2 -0
- package/src/utils/setupFiles.ts +295 -0
- package/src/utilsController.ts +173 -0
- package/tsconfig.json +37 -0

@@ -0,0 +1,189 @@
+import { z } from "zod";
+import {
+  attributeSchema,
+  type Attribute,
+  parseAttribute,
+  CollectionCreateSchema,
+} from "./schema.js";
+
+export const BackupSchema = z.object({
+  $id: z.string(),
+  $createdAt: z.string(),
+  $updatedAt: z.string(),
+  database: z.string(),
+  collections: z.array(z.string()),
+  documents: z
+    .array(
+      z.object({
+        collectionId: z.string(),
+        data: z.string(),
+      })
+    )
+    .default([]),
+});
+
+export type Backup = z.infer<typeof BackupSchema>;
+
+export const BackupCreateSchema = BackupSchema.omit({
+  $id: true,
+  $createdAt: true,
+  $updatedAt: true,
+});
+
+export type BackupCreate = z.infer<typeof BackupCreateSchema>;
+
+export const BatchSchema = z.object({
+  $id: z.string(),
+  $createdAt: z.string(),
+  $updatedAt: z.string(),
+  data: z.string().describe("The serialized data for this batch"),
+  processed: z
+    .boolean()
+    .default(false)
+    .describe("Whether the batch has been processed"),
+});
+
+export type Batch = z.infer<typeof BatchSchema>;
+
+export const BatchCreateSchema = BatchSchema.omit({
+  $id: true,
+  $createdAt: true,
+  $updatedAt: true,
+});
+
+export type BatchCreate = z.infer<typeof BatchCreateSchema>;
+
+export const OperationSchema = z.object({
+  $id: z.string(),
+  $createdAt: z.string(),
+  $updatedAt: z.string(),
+  operationType: z.string(),
+  collectionId: z.string(),
+  data: z.any(),
+  batches: z.array(z.string()).default([]).optional(),
+  progress: z.number(),
+  total: z.number(),
+  error: z.string(),
+  status: z.enum(["pending", "in_progress", "completed", "error"]),
+});
+
+export type Operation = z.infer<typeof OperationSchema>;
+
+export const OperationCreateSchema = OperationSchema.omit({
+  $id: true,
+  $createdAt: true,
+  $updatedAt: true,
+});
+
+export type OperationCreate = z.infer<typeof OperationCreateSchema>;
+
+export const getMigrationCollectionSchemas = () => {
+  const currentOperationsAttributes: Attribute[] = [
+    parseAttribute({
+      key: "operationType",
+      type: "string",
+      error: "Invalid Operation Type",
+      size: 50,
+      required: true,
+      array: false,
+      xdefault: null,
+    }),
+    attributeSchema.parse({
+      key: "collectionId",
+      type: "string",
+      error: "Invalid Collection Id",
+      size: 50,
+      array: false,
+      xdefault: null,
+    }),
+    attributeSchema.parse({
+      key: "batches",
+      type: "string",
+      error: "Invalid Batches",
+      size: 1073741824,
+      array: true,
+    }),
+    attributeSchema.parse({
+      key: "data",
+      type: "string",
+      error: "Invalid Data",
+      size: 1073741824,
+    }),
+    attributeSchema.parse({
+      key: "progress",
+      type: "integer",
+      error: "Invalid Progress",
+      required: true,
+      array: false,
+    }),
+    attributeSchema.parse({
+      key: "total",
+      type: "integer",
+      error: "Invalid Total",
+      required: true,
+      array: false,
+    }),
+    attributeSchema.parse({
+      key: "error",
+      type: "string",
+      error: "Operation Error",
+      required: false,
+      array: false,
+    }),
+    attributeSchema.parse({
+      key: "status",
+      type: "enum",
+      elements: ["pending", "in_progress", "completed", "error"],
+      error: "Invalid Status",
+      array: false,
+      xdefault: "pending",
+    }),
+  ];
+
+  const currentOperationsConfig = CollectionCreateSchema.parse({
+    name: "CurrentOperations",
+    enabled: true,
+    documentSecurity: false,
+    attributes: [],
+    indexes: [],
+  });
+
+  const batchesAttributes: Attribute[] = [
+    attributeSchema.parse({
+      key: "data",
+      type: "string",
+      size: 1073741824,
+      error: "Invalid Data",
+      required: true,
+      array: false,
+    }),
+    attributeSchema.parse({
+      key: "processed",
+      type: "boolean",
+      error: "Invalid Processed",
+      required: true,
+      array: false,
+      xdefault: false,
+    }),
+  ];
+
+  const batchesConfig = CollectionCreateSchema.parse({
+    name: "Batches",
+    enabled: true,
+    documentSecurity: false,
+    attributes: [],
+    indexes: [],
+  });
+
+  const toReturn = {
+    CurrentOperations: {
+      collection: currentOperationsConfig,
+      attributes: currentOperationsAttributes,
+    },
+    Batches: {
+      collection: batchesConfig,
+      attributes: batchesAttributes,
+    },
+  };
+  return toReturn;
+};
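By line count and content, the hunk above appears to correspond to package/src/migrations/backup.ts from the file list: it defines the zod schemas for Backup, Batch, and Operation documents plus a getMigrationCollectionSchemas() factory describing the CurrentOperations and Batches migration collections. A minimal sketch of consuming that factory's return value follows; the relative import path and the logging loop are illustrative assumptions, not part of the package.

// Sketch only: assumes this module is importable as ./backup.js (ESM build output).
import { getMigrationCollectionSchemas } from "./backup.js";

// Walk the returned { CurrentOperations, Batches } map and list each
// migration collection's attribute keys.
const schemas = getMigrationCollectionSchemas();
for (const [name, { attributes }] of Object.entries(schemas)) {
  console.log(`${name}: ${attributes.map((a) => a.key).join(", ")}`);
}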

@@ -0,0 +1,303 @@
+import { Databases, ID, Permission, Query, type Models } from "node-appwrite";
+import type { AppwriteConfig, CollectionCreate } from "./schema.js";
+import { nameToIdMapping, processQueue } from "./queue.js";
+import { createUpdateCollectionAttributes } from "./attributes.js";
+import { createOrUpdateIndexes } from "./indexes.js";
+import {
+  ensureDirectoryExistence,
+  toCamelCase,
+  toPascalCase,
+  writeFileSync,
+} from "../utils/index.js";
+import _ from "lodash";
+import { SchemaGenerator } from "./schemaStrings.js";
+import path from "path";
+
+const { join } = _;
+
+export const documentExists = async (
+  db: Databases,
+  dbId: string,
+  targetCollectionId: string,
+  toCreateObject: any
+): Promise<Models.Document | null> => {
+  // Had to do this because kept running into issues with type checking arrays so, sorry 40ms
+  const collection = await db.getCollection(dbId, targetCollectionId);
+  const attributes = collection.attributes as any[];
+  let arrayTypeAttributes = attributes
+    .filter((attribute: any) => attribute.array === true)
+    .map((attribute: any) => attribute.key);
+  // Function to check if a string is JSON
+  const isJsonString = (str: string) => {
+    try {
+      const json = JSON.parse(str);
+      return typeof json === "object" && json !== null; // Check if parsed JSON is an object or array
+    } catch (e) {
+      return false;
+    }
+  };
+
+  // Validate and prepare query parameters
+  const validQueryParams = _.chain(toCreateObject)
+    .pickBy(
+      (value, key) =>
+        !arrayTypeAttributes.includes(key) &&
+        !key.startsWith("$") &&
+        !_.isNull(value) &&
+        !_.isUndefined(value) &&
+        !_.isEmpty(value) &&
+        !_.isObject(value) && // Keeps excluding objects
+        !_.isArray(value) && // Explicitly exclude arrays
+        !(_.isString(value) && isJsonString(value)) && // Exclude JSON strings
+        (_.isString(value) ? value.length < 4096 && value.length > 0 : true) // String length check
+    )
+    .mapValues((value, key) =>
+      _.isString(value) || _.isNumber(value) || _.isBoolean(value)
+        ? value
+        : null
+    )
+    .omitBy(_.isNull) // Remove any null values that might have been added in mapValues
+    .toPairs()
+    .slice(0, 25) // Limit to 25 to adhere to query limit
+    .map(([key, value]) => Query.equal(key, value as any))
+    .value();
+
+  // Execute the query with the validated and prepared parameters
+  const result = await db.listDocuments(
+    dbId,
+    targetCollectionId,
+    validQueryParams
+  );
+  return result.documents[0] || null;
+};
+
+export const checkForCollection = async (
+  db: Databases,
+  dbId: string,
+  collection: Partial<CollectionCreate>
+): Promise<Models.Collection | null> => {
+  try {
+    console.log(`Checking for collection with name: ${collection.name}`);
+    const response = await db.listCollections(dbId, [
+      Query.equal("name", collection.name!),
+    ]);
+    if (response.collections.length > 0) {
+      console.log(`Collection found: ${response.collections[0].$id}`);
+      return { ...collection, ...response.collections[0] };
+    } else {
+      console.log(`No collection found with name: ${collection.name}`);
+      return null;
+    }
+  } catch (error) {
+    console.error(`Error checking for collection: ${error}`);
+    return null;
+  }
+};
+
+// Helper function to fetch and cache collection by name
+export const fetchAndCacheCollectionByName = async (
+  db: Databases,
+  dbId: string,
+  collectionName: string
+): Promise<Models.Collection | undefined> => {
+  if (nameToIdMapping.has(collectionName)) {
+    const collectionId = nameToIdMapping.get(collectionName);
+    console.log(`\tCollection found in cache: ${collectionId}`);
+    return await db.getCollection(dbId, collectionId!);
+  } else {
+    console.log(`\tFetching collection by name: ${collectionName}`);
+    const collectionsPulled = await db.listCollections(dbId, [
+      Query.equal("name", collectionName),
+    ]);
+    if (collectionsPulled.total > 0) {
+      const collection = collectionsPulled.collections[0];
+      console.log(`\tCollection found: ${collection.$id}`);
+      nameToIdMapping.set(collectionName, collection.$id);
+      return collection;
+    } else {
+      console.log(`\tCollection not found by name: ${collectionName}`);
+      return undefined;
+    }
+  }
+};
+
+export const wipeDatabase = async (
+  database: Databases,
+  databaseId: string
+): Promise<{ collectionId: string; collectionName: string }[]> => {
+  console.log(`Wiping database: ${databaseId}`);
+  const { collections: existingCollections } = await database.listCollections(
+    databaseId
+  );
+  let collectionsDeleted: { collectionId: string; collectionName: string }[] =
+    [];
+  for (const { $id: collectionId, name: name } of existingCollections) {
+    console.log(`Deleting collection: ${collectionId}`);
+    collectionsDeleted.push({
+      collectionId: collectionId,
+      collectionName: name,
+    });
+    await database.deleteCollection(databaseId, collectionId);
+  }
+  return collectionsDeleted;
+};
+
+export const generateSchemas = async (
+  config: AppwriteConfig,
+  appwriteFolderPath: string
+): Promise<void> => {
+  const schemaGenerator = new SchemaGenerator(config, appwriteFolderPath);
+  schemaGenerator.generateSchemas();
+};
+
+export const createOrUpdateCollections = async (
+  database: Databases,
+  databaseId: string,
+  config: AppwriteConfig,
+  deletedCollections?: { collectionId: string; collectionName: string }[]
+): Promise<void> => {
+  const configCollections = config.collections;
+  for (const { attributes, indexes, ...collection } of configCollections) {
+    let collectionsFound = await database.listCollections(databaseId, [
+      Query.equal("name", collection.name),
+    ]);
+
+    const permissions = [];
+    if (collection.$permissions.length > 0) {
+      for (const permission of collection.$permissions) {
+        switch (permission.permission) {
+          case "read":
+            permissions.push(Permission.read(permission.target));
+            break;
+          case "create":
+            permissions.push(Permission.create(permission.target));
+            break;
+          case "update":
+            permissions.push(Permission.update(permission.target));
+            break;
+          case "delete":
+            permissions.push(Permission.delete(permission.target));
+            break;
+          case "write":
+            permissions.push(Permission.write(permission.target));
+            break;
+          default:
+            console.log(`Unknown permission: ${permission.permission}`);
+            break;
+        }
+      }
+    }
+    let collectionToUse =
+      collectionsFound.total > 0 ? collectionsFound.collections[0] : null;
+    if (!collectionToUse) {
+      console.log(`Creating collection: ${collection.name}`);
+      if (deletedCollections && deletedCollections.length > 0) {
+        const foundColl = deletedCollections.find(
+          (coll) =>
+            coll.collectionName.toLowerCase().trim().replace(" ", "") ===
+            collection.name.toLowerCase().trim().replace(" ", "")
+        );
+        if (foundColl) {
+          const collectionId = foundColl.collectionId || ID.unique();
+          console.log(
+            `Processing collection: ${collection.name} with ID: ${collectionId}`
+          );
+          collectionToUse = await database.createCollection(
+            databaseId,
+            collectionId,
+            collection.name,
+            permissions,
+            collection.documentSecurity,
+            collection.enabled
+          );
+          nameToIdMapping.set(collection.name, collectionToUse.$id);
+        } else {
+          collectionToUse = await database.createCollection(
+            databaseId,
+            ID.unique(),
+            collection.name,
+            permissions,
+            collection.documentSecurity,
+            collection.enabled
+          );
+          nameToIdMapping.set(collection.name, collectionToUse.$id);
+        }
+      } else {
+        collectionToUse = await database.createCollection(
+          databaseId,
+          ID.unique(),
+          collection.name,
+          permissions,
+          collection.documentSecurity,
+          collection.enabled
+        );
+        nameToIdMapping.set(collection.name, collectionToUse.$id);
+      }
+    } else {
+      console.log(`Collection ${collection.name} already exists.`);
+    }
+    console.log("Creating Attributes");
+    await createUpdateCollectionAttributes(
+      database,
+      databaseId,
+      collectionToUse,
+      attributes
+    );
+    console.log("Creating Indexes");
+    await createOrUpdateIndexes(
+      databaseId,
+      database,
+      collectionToUse.$id,
+      indexes
+    );
+  }
+  await processQueue(database, databaseId);
+};
+
+export const generateMockData = async (
+  database: Databases,
+  databaseId: string,
+  configCollections: any[]
+): Promise<void> => {
+  for (const { collection, mockFunction } of configCollections) {
+    if (mockFunction) {
+      console.log(`Generating mock data for collection: ${collection.name}`);
+      const mockData = mockFunction();
+      for (const data of mockData) {
+        await database.createDocument(
+          databaseId,
+          collection.$id,
+          ID.unique(),
+          data
+        );
+      }
+    }
+  }
+};
+
+export const fetchAllCollections = async (
+  dbId: string,
+  database: Databases
+): Promise<Models.Collection[]> => {
+  console.log(`Fetching all collections for database ID: ${dbId}`);
+  let collections: Models.Collection[] = [];
+  let moreCollections = true;
+  let lastCollectionId: string | undefined;
+
+  while (moreCollections) {
+    const queries = [Query.limit(500)];
+    if (lastCollectionId) {
+      queries.push(Query.cursorAfter(lastCollectionId));
+    }
+    const response = await database.listCollections(dbId, queries);
+    collections = collections.concat(response.collections);
+    moreCollections = response.collections.length === 500;
+    if (moreCollections) {
+      lastCollectionId =
+        response.collections[response.collections.length - 1].$id;
+    }
+  }
+
+  console.log(`Fetched a total of ${collections.length} collections.`);
+  return collections;
+};
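The second hunk appears to correspond to package/src/migrations/collections.ts (+303 in the file list) and exports the collection helpers used by the migration tooling: existence checks, a name-to-ID cache, createOrUpdateCollections, paginated fetchAllCollections, and the destructive wipeDatabase. A minimal sketch of wiring two of those helpers to a node-appwrite client follows; the endpoint, project, key, and database ID are placeholders, and the relative import path is an assumption.

import { Client, Databases } from "node-appwrite";
// Sketch only: assumes the compiled module is importable as ./collections.js.
import { fetchAllCollections, wipeDatabase } from "./collections.js";

// Placeholder credentials; substitute values from your own Appwrite project.
const client = new Client()
  .setEndpoint("https://cloud.appwrite.io/v1")
  .setProject("<PROJECT_ID>")
  .setKey("<API_KEY>");
const database = new Databases(client);

// Page through every collection (500 per request via cursor pagination).
const collections = await fetchAllCollections("<DATABASE_ID>", database);
console.log(collections.map((c) => c.name));

// Destructive: deletes every collection in the database. Uncomment deliberately.
// const deleted = await wipeDatabase(database, "<DATABASE_ID>");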