appwrite-utils-cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +80 -0
- package/dist/main.d.ts +2 -0
- package/dist/main.js +74 -0
- package/dist/migrations/afterImportActions.d.ts +12 -0
- package/dist/migrations/afterImportActions.js +196 -0
- package/dist/migrations/attributes.d.ts +4 -0
- package/dist/migrations/attributes.js +158 -0
- package/dist/migrations/backup.d.ts +621 -0
- package/dist/migrations/backup.js +159 -0
- package/dist/migrations/collections.d.ts +16 -0
- package/dist/migrations/collections.js +207 -0
- package/dist/migrations/converters.d.ts +179 -0
- package/dist/migrations/converters.js +575 -0
- package/dist/migrations/dbHelpers.d.ts +5 -0
- package/dist/migrations/dbHelpers.js +54 -0
- package/dist/migrations/importController.d.ts +44 -0
- package/dist/migrations/importController.js +312 -0
- package/dist/migrations/importDataActions.d.ts +44 -0
- package/dist/migrations/importDataActions.js +219 -0
- package/dist/migrations/indexes.d.ts +4 -0
- package/dist/migrations/indexes.js +18 -0
- package/dist/migrations/logging.d.ts +2 -0
- package/dist/migrations/logging.js +14 -0
- package/dist/migrations/migrationHelper.d.ts +18 -0
- package/dist/migrations/migrationHelper.js +66 -0
- package/dist/migrations/queue.d.ts +13 -0
- package/dist/migrations/queue.js +79 -0
- package/dist/migrations/relationships.d.ts +90 -0
- package/dist/migrations/relationships.js +209 -0
- package/dist/migrations/schema.d.ts +3142 -0
- package/dist/migrations/schema.js +485 -0
- package/dist/migrations/schemaStrings.d.ts +12 -0
- package/dist/migrations/schemaStrings.js +261 -0
- package/dist/migrations/setupDatabase.d.ts +7 -0
- package/dist/migrations/setupDatabase.js +151 -0
- package/dist/migrations/storage.d.ts +8 -0
- package/dist/migrations/storage.js +241 -0
- package/dist/migrations/users.d.ts +11 -0
- package/dist/migrations/users.js +114 -0
- package/dist/migrations/validationRules.d.ts +43 -0
- package/dist/migrations/validationRules.js +42 -0
- package/dist/schemas/authUser.d.ts +62 -0
- package/dist/schemas/authUser.js +17 -0
- package/dist/setup.d.ts +2 -0
- package/dist/setup.js +5 -0
- package/dist/types.d.ts +9 -0
- package/dist/types.js +5 -0
- package/dist/utils/configSchema.json +742 -0
- package/dist/utils/helperFunctions.d.ts +34 -0
- package/dist/utils/helperFunctions.js +72 -0
- package/dist/utils/index.d.ts +2 -0
- package/dist/utils/index.js +2 -0
- package/dist/utils/setupFiles.d.ts +2 -0
- package/dist/utils/setupFiles.js +276 -0
- package/dist/utilsController.d.ts +30 -0
- package/dist/utilsController.js +106 -0
- package/package.json +34 -0
- package/src/main.ts +77 -0
- package/src/migrations/afterImportActions.ts +300 -0
- package/src/migrations/attributes.ts +315 -0
- package/src/migrations/backup.ts +189 -0
- package/src/migrations/collections.ts +303 -0
- package/src/migrations/converters.ts +628 -0
- package/src/migrations/dbHelpers.ts +89 -0
- package/src/migrations/importController.ts +509 -0
- package/src/migrations/importDataActions.ts +313 -0
- package/src/migrations/indexes.ts +37 -0
- package/src/migrations/logging.ts +15 -0
- package/src/migrations/migrationHelper.ts +100 -0
- package/src/migrations/queue.ts +119 -0
- package/src/migrations/relationships.ts +336 -0
- package/src/migrations/schema.ts +590 -0
- package/src/migrations/schemaStrings.ts +310 -0
- package/src/migrations/setupDatabase.ts +219 -0
- package/src/migrations/storage.ts +351 -0
- package/src/migrations/users.ts +148 -0
- package/src/migrations/validationRules.ts +63 -0
- package/src/schemas/authUser.ts +23 -0
- package/src/setup.ts +8 -0
- package/src/types.ts +14 -0
- package/src/utils/configSchema.json +742 -0
- package/src/utils/helperFunctions.ts +111 -0
- package/src/utils/index.ts +2 -0
- package/src/utils/setupFiles.ts +295 -0
- package/src/utilsController.ts +173 -0
- package/tsconfig.json +37 -0
|
@@ -0,0 +1,300 @@
|
|
|
1
|
+
import {
|
|
2
|
+
Databases,
|
|
3
|
+
Storage,
|
|
4
|
+
InputFile,
|
|
5
|
+
Query,
|
|
6
|
+
ID,
|
|
7
|
+
type Models,
|
|
8
|
+
Client,
|
|
9
|
+
} from "node-appwrite";
|
|
10
|
+
import type { AppwriteConfig } from "./schema.js";
|
|
11
|
+
import path from "path";
|
|
12
|
+
import fs from "fs";
|
|
13
|
+
import os from "os";
|
|
14
|
+
|
|
15
|
+
const getDatabaseFromConfig = (config: AppwriteConfig) => {
|
|
16
|
+
if (!config.appwriteClient) {
|
|
17
|
+
config.appwriteClient = new Client()
|
|
18
|
+
.setEndpoint(config.appwriteEndpoint)
|
|
19
|
+
.setProject(config.appwriteProject)
|
|
20
|
+
.setKey(config.appwriteKey);
|
|
21
|
+
}
|
|
22
|
+
return new Databases(config.appwriteClient!);
|
|
23
|
+
};
|
|
24
|
+
|
|
25
|
+
const getStorageFromConfig = (config: AppwriteConfig) => {
|
|
26
|
+
if (!config.appwriteClient) {
|
|
27
|
+
config.appwriteClient = new Client()
|
|
28
|
+
.setEndpoint(config.appwriteEndpoint)
|
|
29
|
+
.setProject(config.appwriteProject)
|
|
30
|
+
.setKey(config.appwriteKey);
|
|
31
|
+
}
|
|
32
|
+
return new Storage(config.appwriteClient!);
|
|
33
|
+
};
|
|
34
|
+
|
|
35
|
+
// Registry shape for post-import actions: each entry is an async function
// that receives the project config plus action-specific positional args.
// NOTE(review): `any[]`/`Promise<any>` keeps the registry open-ended; the
// concrete argument shapes are defined by each action implementation below.
export interface AfterImportActions {
  [key: string]: (config: AppwriteConfig, ...args: any[]) => Promise<any>;
}
|
|
38
|
+
|
|
39
|
+
export const afterImportActions = {
|
|
40
|
+
updateCreatedDocument: async (
|
|
41
|
+
config: AppwriteConfig,
|
|
42
|
+
dbId: string,
|
|
43
|
+
collId: string,
|
|
44
|
+
docId: string,
|
|
45
|
+
data: any
|
|
46
|
+
) => {
|
|
47
|
+
try {
|
|
48
|
+
const db = getDatabaseFromConfig(config);
|
|
49
|
+
await db.updateDocument(dbId, collId, docId, data);
|
|
50
|
+
} catch (error) {
|
|
51
|
+
console.error("Error updating document: ", error);
|
|
52
|
+
}
|
|
53
|
+
},
|
|
54
|
+
checkAndUpdateFieldInDocument: async (
|
|
55
|
+
config: AppwriteConfig,
|
|
56
|
+
dbId: string,
|
|
57
|
+
collId: string,
|
|
58
|
+
docId: string,
|
|
59
|
+
fieldName: string,
|
|
60
|
+
oldFieldValue: any,
|
|
61
|
+
newFieldValue: any
|
|
62
|
+
) => {
|
|
63
|
+
try {
|
|
64
|
+
const db = getDatabaseFromConfig(config);
|
|
65
|
+
const doc = await db.getDocument(dbId, collId, docId);
|
|
66
|
+
if (doc[fieldName as keyof typeof doc] == oldFieldValue) {
|
|
67
|
+
await db.updateDocument(dbId, collId, docId, {
|
|
68
|
+
[fieldName]: newFieldValue,
|
|
69
|
+
});
|
|
70
|
+
}
|
|
71
|
+
} catch (error) {
|
|
72
|
+
console.error("Error updating document: ", error);
|
|
73
|
+
}
|
|
74
|
+
},
|
|
75
|
+
setFieldFromOtherCollectionDocument: async (
|
|
76
|
+
config: AppwriteConfig,
|
|
77
|
+
dbId: string,
|
|
78
|
+
collIdOrName: string,
|
|
79
|
+
docId: string,
|
|
80
|
+
fieldName: string,
|
|
81
|
+
otherCollIdOrName: string,
|
|
82
|
+
otherDocId: string,
|
|
83
|
+
otherFieldName: string
|
|
84
|
+
) => {
|
|
85
|
+
const db = getDatabaseFromConfig(config);
|
|
86
|
+
|
|
87
|
+
// Helper function to find a collection ID by name or return the ID if given
|
|
88
|
+
const findCollectionId = async (collectionIdentifier: string) => {
|
|
89
|
+
const collectionsPulled = await db.listCollections(dbId, [
|
|
90
|
+
Query.limit(25),
|
|
91
|
+
Query.equal("name", collectionIdentifier),
|
|
92
|
+
]);
|
|
93
|
+
if (collectionsPulled.total > 0) {
|
|
94
|
+
return collectionsPulled.collections[0].$id;
|
|
95
|
+
} else {
|
|
96
|
+
// Assuming the passed identifier might directly be an ID if not found by name
|
|
97
|
+
return collectionIdentifier;
|
|
98
|
+
}
|
|
99
|
+
};
|
|
100
|
+
|
|
101
|
+
try {
|
|
102
|
+
// Resolve the IDs for both the target and other collections
|
|
103
|
+
const targetCollectionId = await findCollectionId(collIdOrName);
|
|
104
|
+
const otherCollectionId = await findCollectionId(otherCollIdOrName);
|
|
105
|
+
|
|
106
|
+
// Retrieve the "other" document
|
|
107
|
+
const otherDoc = await db.getDocument(
|
|
108
|
+
dbId,
|
|
109
|
+
otherCollectionId,
|
|
110
|
+
otherDocId
|
|
111
|
+
);
|
|
112
|
+
const valueToSet = otherDoc[otherFieldName as keyof typeof otherDoc];
|
|
113
|
+
|
|
114
|
+
if (valueToSet) {
|
|
115
|
+
// Update the target document
|
|
116
|
+
await db.updateDocument(dbId, targetCollectionId, docId, {
|
|
117
|
+
[fieldName]: valueToSet,
|
|
118
|
+
});
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
console.log(
|
|
122
|
+
`Field ${fieldName} updated successfully in document ${docId}.`
|
|
123
|
+
);
|
|
124
|
+
} catch (error) {
|
|
125
|
+
console.error(
|
|
126
|
+
"Error setting field from other collection document: ",
|
|
127
|
+
error
|
|
128
|
+
);
|
|
129
|
+
}
|
|
130
|
+
},
|
|
131
|
+
createOrGetBucket: async (
|
|
132
|
+
config: AppwriteConfig,
|
|
133
|
+
bucketName: string,
|
|
134
|
+
bucketId?: string,
|
|
135
|
+
permissions?: string[],
|
|
136
|
+
fileSecurity?: boolean,
|
|
137
|
+
enabled?: boolean,
|
|
138
|
+
maxFileSize?: number,
|
|
139
|
+
allowedExtensions?: string[],
|
|
140
|
+
compression?: string,
|
|
141
|
+
encryption?: boolean,
|
|
142
|
+
antivirus?: boolean
|
|
143
|
+
) => {
|
|
144
|
+
try {
|
|
145
|
+
const storage = getStorageFromConfig(config);
|
|
146
|
+
const bucket = await storage.listBuckets([
|
|
147
|
+
Query.equal("name", bucketName),
|
|
148
|
+
]);
|
|
149
|
+
if (bucket.buckets.length > 0) {
|
|
150
|
+
return bucket.buckets[0];
|
|
151
|
+
} else if (bucketId) {
|
|
152
|
+
try {
|
|
153
|
+
return await storage.getBucket(bucketId);
|
|
154
|
+
} catch (error) {
|
|
155
|
+
return await storage.createBucket(
|
|
156
|
+
bucketId,
|
|
157
|
+
bucketName,
|
|
158
|
+
permissions,
|
|
159
|
+
fileSecurity,
|
|
160
|
+
enabled,
|
|
161
|
+
maxFileSize,
|
|
162
|
+
allowedExtensions,
|
|
163
|
+
compression,
|
|
164
|
+
encryption,
|
|
165
|
+
antivirus
|
|
166
|
+
);
|
|
167
|
+
}
|
|
168
|
+
} else {
|
|
169
|
+
return await storage.createBucket(
|
|
170
|
+
bucketId || ID.unique(),
|
|
171
|
+
bucketName,
|
|
172
|
+
permissions,
|
|
173
|
+
fileSecurity,
|
|
174
|
+
enabled,
|
|
175
|
+
maxFileSize,
|
|
176
|
+
allowedExtensions,
|
|
177
|
+
compression,
|
|
178
|
+
encryption,
|
|
179
|
+
antivirus
|
|
180
|
+
);
|
|
181
|
+
}
|
|
182
|
+
} catch (error) {
|
|
183
|
+
console.error("Error creating or getting bucket: ", error);
|
|
184
|
+
}
|
|
185
|
+
},
|
|
186
|
+
createFileAndUpdateField: async (
|
|
187
|
+
config: AppwriteConfig,
|
|
188
|
+
dbId: string,
|
|
189
|
+
collId: string,
|
|
190
|
+
docId: string,
|
|
191
|
+
fieldName: string,
|
|
192
|
+
bucketId: string,
|
|
193
|
+
filePath: string,
|
|
194
|
+
fileName: string
|
|
195
|
+
) => {
|
|
196
|
+
try {
|
|
197
|
+
const db = getDatabaseFromConfig(config);
|
|
198
|
+
const storage = getStorageFromConfig(config);
|
|
199
|
+
const collection = await db.getCollection(dbId, collId);
|
|
200
|
+
const attributes = collection.attributes as any[];
|
|
201
|
+
const attribute = attributes.find((a) => a.key === fieldName);
|
|
202
|
+
// console.log(
|
|
203
|
+
// `Processing field ${fieldName} in collection ${collId} for document ${docId} in database ${dbId} in bucket ${bucketId} with path ${filePath} and name ${fileName}...`
|
|
204
|
+
// );
|
|
205
|
+
let isArray = false;
|
|
206
|
+
if (!attribute) {
|
|
207
|
+
console.log(
|
|
208
|
+
`Field ${fieldName} not found in collection ${collId}, weird, skipping...`
|
|
209
|
+
);
|
|
210
|
+
return;
|
|
211
|
+
} else if (attribute.array === true) {
|
|
212
|
+
isArray = true;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
// Define a helper function to check if a value is a URL
|
|
216
|
+
const isUrl = (value: any) =>
|
|
217
|
+
typeof value === "string" &&
|
|
218
|
+
(value.startsWith("http://") || value.startsWith("https://"));
|
|
219
|
+
|
|
220
|
+
const doc = await db.getDocument(dbId, collId, docId);
|
|
221
|
+
const existingFieldValue = doc[fieldName as keyof typeof doc];
|
|
222
|
+
|
|
223
|
+
// Handle the case where the field is an array
|
|
224
|
+
let updateData: string | string[] = isArray ? [] : "";
|
|
225
|
+
if (isArray && Array.isArray(existingFieldValue)) {
|
|
226
|
+
updateData = existingFieldValue.filter((val) => !isUrl(val)); // Remove URLs from the array
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
// Process file upload and update logic
|
|
230
|
+
if (isUrl(filePath)) {
|
|
231
|
+
// Create a temporary directory
|
|
232
|
+
const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "appwrite_tmp"));
|
|
233
|
+
const tempFilePath = path.join(tempDir, fileName);
|
|
234
|
+
|
|
235
|
+
// Download the file using fetch
|
|
236
|
+
const response = await fetch(filePath);
|
|
237
|
+
if (!response.ok)
|
|
238
|
+
console.error(
|
|
239
|
+
`Failed to fetch ${filePath}: ${response.statusText} for document ${docId} with field ${fieldName}`
|
|
240
|
+
);
|
|
241
|
+
|
|
242
|
+
// Use arrayBuffer if buffer is not available
|
|
243
|
+
const arrayBuffer = await response.arrayBuffer();
|
|
244
|
+
const buffer = Buffer.from(arrayBuffer);
|
|
245
|
+
fs.writeFileSync(tempFilePath, buffer);
|
|
246
|
+
|
|
247
|
+
// Create InputFile from the downloaded file
|
|
248
|
+
const inputFile = InputFile.fromPath(tempFilePath, fileName);
|
|
249
|
+
|
|
250
|
+
// Use the full file name (with extension) for creating the file
|
|
251
|
+
const file = await storage.createFile(bucketId, ID.unique(), inputFile);
|
|
252
|
+
|
|
253
|
+
console.log("Created file from URL: ", file.$id);
|
|
254
|
+
|
|
255
|
+
// After uploading, adjust the updateData based on whether the field is an array or not
|
|
256
|
+
if (isArray) {
|
|
257
|
+
updateData = [...updateData, file.$id]; // Append the new file ID
|
|
258
|
+
} else {
|
|
259
|
+
updateData = file.$id; // Set the new file ID
|
|
260
|
+
}
|
|
261
|
+
// console.log(
|
|
262
|
+
// "Updating document with file: ",
|
|
263
|
+
// doc.$id,
|
|
264
|
+
// `${fieldName}: `,
|
|
265
|
+
// updateData
|
|
266
|
+
// );
|
|
267
|
+
|
|
268
|
+
// If the file was downloaded, delete it after uploading
|
|
269
|
+
fs.unlinkSync(tempFilePath);
|
|
270
|
+
} else {
|
|
271
|
+
const files = fs.readdirSync(filePath);
|
|
272
|
+
const fileFullName = files.find((file) => file.includes(fileName));
|
|
273
|
+
if (!fileFullName) {
|
|
274
|
+
console.error(
|
|
275
|
+
`File starting with '${fileName}' not found in '${filePath}'`
|
|
276
|
+
);
|
|
277
|
+
return;
|
|
278
|
+
}
|
|
279
|
+
const pathToFile = path.join(filePath, fileFullName);
|
|
280
|
+
const inputFile = InputFile.fromPath(pathToFile, fileName);
|
|
281
|
+
const file = await storage.createFile(bucketId, ID.unique(), inputFile);
|
|
282
|
+
|
|
283
|
+
if (isArray) {
|
|
284
|
+
updateData = [...updateData, file.$id]; // Append the new file ID
|
|
285
|
+
} else {
|
|
286
|
+
updateData = file.$id; // Set the new file ID
|
|
287
|
+
}
|
|
288
|
+
await db.updateDocument(dbId, collId, doc.$id, {
|
|
289
|
+
[fieldName]: updateData,
|
|
290
|
+
});
|
|
291
|
+
console.log("Created file from path: ", file.$id);
|
|
292
|
+
}
|
|
293
|
+
} catch (error) {
|
|
294
|
+
console.error("Error creating file and updating field: ", error);
|
|
295
|
+
console.log(
|
|
296
|
+
`Params were: dbId: ${dbId}, collId: ${collId}, docId: ${docId}, fieldName: ${fieldName}, filePath: ${filePath}, fileName: ${fileName}`
|
|
297
|
+
);
|
|
298
|
+
}
|
|
299
|
+
},
|
|
300
|
+
};
|
|
@@ -0,0 +1,315 @@
|
|
|
1
|
+
import { Query, type Databases, type Models } from "node-appwrite";
|
|
2
|
+
import type { Attribute } from "./schema.js";
|
|
3
|
+
import { nameToIdMapping, enqueueOperation } from "./queue.js";
|
|
4
|
+
|
|
5
|
+
export const createOrUpdateAttribute = async (
|
|
6
|
+
db: Databases,
|
|
7
|
+
dbId: string,
|
|
8
|
+
collection: Models.Collection,
|
|
9
|
+
attribute: Attribute
|
|
10
|
+
): Promise<void> => {
|
|
11
|
+
let action = "create";
|
|
12
|
+
let foundAttribute;
|
|
13
|
+
try {
|
|
14
|
+
foundAttribute = await db.getAttribute(dbId, collection.$id, attribute.key);
|
|
15
|
+
} catch (error) {
|
|
16
|
+
foundAttribute = undefined;
|
|
17
|
+
}
|
|
18
|
+
let numSameAttributes = 0;
|
|
19
|
+
if (foundAttribute && foundAttribute.key === attribute.key) {
|
|
20
|
+
numSameAttributes++;
|
|
21
|
+
return;
|
|
22
|
+
} else if (foundAttribute) {
|
|
23
|
+
action = "update";
|
|
24
|
+
}
|
|
25
|
+
|
|
26
|
+
// Relationship attribute logic with adjustments
|
|
27
|
+
let collectionFoundViaRelatedCollection: Models.Collection | undefined;
|
|
28
|
+
let relatedCollectionId: string | undefined;
|
|
29
|
+
if (attribute.type === "relationship") {
|
|
30
|
+
if (nameToIdMapping.has(attribute.relatedCollection)) {
|
|
31
|
+
relatedCollectionId = nameToIdMapping.get(attribute.relatedCollection);
|
|
32
|
+
try {
|
|
33
|
+
collectionFoundViaRelatedCollection = await db.getCollection(
|
|
34
|
+
dbId,
|
|
35
|
+
relatedCollectionId!
|
|
36
|
+
);
|
|
37
|
+
} catch (e) {
|
|
38
|
+
console.log(
|
|
39
|
+
`Collection not found: ${attribute.relatedCollection} when nameToIdMapping was set`
|
|
40
|
+
);
|
|
41
|
+
collectionFoundViaRelatedCollection = undefined;
|
|
42
|
+
}
|
|
43
|
+
} else {
|
|
44
|
+
const collectionsPulled = await db.listCollections(dbId, [
|
|
45
|
+
Query.equal("name", attribute.relatedCollection),
|
|
46
|
+
]);
|
|
47
|
+
if (collectionsPulled.total > 0) {
|
|
48
|
+
collectionFoundViaRelatedCollection = collectionsPulled.collections[0];
|
|
49
|
+
relatedCollectionId = collectionFoundViaRelatedCollection.$id;
|
|
50
|
+
nameToIdMapping.set(attribute.relatedCollection, relatedCollectionId);
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
if (!(relatedCollectionId && collectionFoundViaRelatedCollection)) {
|
|
54
|
+
console.log(`Enqueueing operation for attribute: ${attribute.key}`);
|
|
55
|
+
enqueueOperation({
|
|
56
|
+
type: "attribute",
|
|
57
|
+
collectionId: collection.$id,
|
|
58
|
+
collection: collection,
|
|
59
|
+
attribute,
|
|
60
|
+
dependencies: [attribute.relatedCollection],
|
|
61
|
+
});
|
|
62
|
+
return;
|
|
63
|
+
}
|
|
64
|
+
}
|
|
65
|
+
|
|
66
|
+
switch (attribute.type) {
|
|
67
|
+
case "string":
|
|
68
|
+
if (action === "create") {
|
|
69
|
+
await db.createStringAttribute(
|
|
70
|
+
dbId,
|
|
71
|
+
collection.$id,
|
|
72
|
+
attribute.key,
|
|
73
|
+
attribute.size,
|
|
74
|
+
attribute.required,
|
|
75
|
+
attribute.xdefault || undefined,
|
|
76
|
+
attribute.array,
|
|
77
|
+
attribute.encrypted
|
|
78
|
+
);
|
|
79
|
+
} else {
|
|
80
|
+
await db.updateStringAttribute(
|
|
81
|
+
dbId,
|
|
82
|
+
collection.$id,
|
|
83
|
+
attribute.key,
|
|
84
|
+
attribute.required,
|
|
85
|
+
attribute.xdefault || undefined
|
|
86
|
+
);
|
|
87
|
+
}
|
|
88
|
+
break;
|
|
89
|
+
case "integer":
|
|
90
|
+
if (action === "create") {
|
|
91
|
+
await db.createIntegerAttribute(
|
|
92
|
+
dbId,
|
|
93
|
+
collection.$id,
|
|
94
|
+
attribute.key,
|
|
95
|
+
attribute.required,
|
|
96
|
+
attribute.min,
|
|
97
|
+
attribute.max,
|
|
98
|
+
attribute.xdefault || undefined,
|
|
99
|
+
attribute.array
|
|
100
|
+
);
|
|
101
|
+
} else {
|
|
102
|
+
await db.updateIntegerAttribute(
|
|
103
|
+
dbId,
|
|
104
|
+
collection.$id,
|
|
105
|
+
attribute.key,
|
|
106
|
+
attribute.required,
|
|
107
|
+
attribute.min || 0,
|
|
108
|
+
attribute.max || 2147483647,
|
|
109
|
+
attribute.xdefault || undefined
|
|
110
|
+
);
|
|
111
|
+
}
|
|
112
|
+
break;
|
|
113
|
+
case "float":
|
|
114
|
+
if (action === "create") {
|
|
115
|
+
await db.createFloatAttribute(
|
|
116
|
+
dbId,
|
|
117
|
+
collection.$id,
|
|
118
|
+
attribute.key,
|
|
119
|
+
attribute.required,
|
|
120
|
+
attribute.min,
|
|
121
|
+
attribute.max,
|
|
122
|
+
attribute.xdefault || undefined,
|
|
123
|
+
attribute.array
|
|
124
|
+
);
|
|
125
|
+
} else {
|
|
126
|
+
await db.updateFloatAttribute(
|
|
127
|
+
dbId,
|
|
128
|
+
collection.$id,
|
|
129
|
+
attribute.key,
|
|
130
|
+
attribute.required,
|
|
131
|
+
attribute.min || 0,
|
|
132
|
+
attribute.max || 2147483647,
|
|
133
|
+
attribute.xdefault || undefined
|
|
134
|
+
);
|
|
135
|
+
}
|
|
136
|
+
break;
|
|
137
|
+
case "boolean":
|
|
138
|
+
if (action === "create") {
|
|
139
|
+
await db.createBooleanAttribute(
|
|
140
|
+
dbId,
|
|
141
|
+
collection.$id,
|
|
142
|
+
attribute.key,
|
|
143
|
+
attribute.required,
|
|
144
|
+
attribute.xdefault || undefined,
|
|
145
|
+
attribute.array
|
|
146
|
+
);
|
|
147
|
+
} else {
|
|
148
|
+
await db.updateBooleanAttribute(
|
|
149
|
+
dbId,
|
|
150
|
+
collection.$id,
|
|
151
|
+
attribute.key,
|
|
152
|
+
attribute.required,
|
|
153
|
+
attribute.xdefault || undefined
|
|
154
|
+
);
|
|
155
|
+
}
|
|
156
|
+
break;
|
|
157
|
+
case "datetime":
|
|
158
|
+
if (action === "create") {
|
|
159
|
+
await db.createDatetimeAttribute(
|
|
160
|
+
dbId,
|
|
161
|
+
collection.$id,
|
|
162
|
+
attribute.key,
|
|
163
|
+
attribute.required,
|
|
164
|
+
attribute.xdefault || undefined,
|
|
165
|
+
attribute.array
|
|
166
|
+
);
|
|
167
|
+
} else {
|
|
168
|
+
await db.updateDatetimeAttribute(
|
|
169
|
+
dbId,
|
|
170
|
+
collection.$id,
|
|
171
|
+
attribute.key,
|
|
172
|
+
attribute.required,
|
|
173
|
+
attribute.xdefault || undefined
|
|
174
|
+
);
|
|
175
|
+
}
|
|
176
|
+
break;
|
|
177
|
+
case "email":
|
|
178
|
+
if (action === "create") {
|
|
179
|
+
await db.createEmailAttribute(
|
|
180
|
+
dbId,
|
|
181
|
+
collection.$id,
|
|
182
|
+
attribute.key,
|
|
183
|
+
attribute.required,
|
|
184
|
+
attribute.xdefault || undefined,
|
|
185
|
+
attribute.array
|
|
186
|
+
);
|
|
187
|
+
} else {
|
|
188
|
+
await db.updateEmailAttribute(
|
|
189
|
+
dbId,
|
|
190
|
+
collection.$id,
|
|
191
|
+
attribute.key,
|
|
192
|
+
attribute.required,
|
|
193
|
+
attribute.xdefault || undefined
|
|
194
|
+
);
|
|
195
|
+
}
|
|
196
|
+
break;
|
|
197
|
+
case "ip":
|
|
198
|
+
if (action === "create") {
|
|
199
|
+
await db.createIpAttribute(
|
|
200
|
+
dbId,
|
|
201
|
+
collection.$id,
|
|
202
|
+
attribute.key,
|
|
203
|
+
attribute.required,
|
|
204
|
+
attribute.xdefault || undefined,
|
|
205
|
+
attribute.array
|
|
206
|
+
);
|
|
207
|
+
} else {
|
|
208
|
+
await db.updateIpAttribute(
|
|
209
|
+
dbId,
|
|
210
|
+
collection.$id,
|
|
211
|
+
attribute.key,
|
|
212
|
+
attribute.required,
|
|
213
|
+
attribute.xdefault || undefined
|
|
214
|
+
);
|
|
215
|
+
}
|
|
216
|
+
break;
|
|
217
|
+
case "url":
|
|
218
|
+
if (action === "create") {
|
|
219
|
+
await db.createUrlAttribute(
|
|
220
|
+
dbId,
|
|
221
|
+
collection.$id,
|
|
222
|
+
attribute.key,
|
|
223
|
+
attribute.required,
|
|
224
|
+
attribute.xdefault || undefined,
|
|
225
|
+
attribute.array
|
|
226
|
+
);
|
|
227
|
+
} else {
|
|
228
|
+
await db.updateUrlAttribute(
|
|
229
|
+
dbId,
|
|
230
|
+
collection.$id,
|
|
231
|
+
attribute.key,
|
|
232
|
+
attribute.required,
|
|
233
|
+
attribute.xdefault || undefined
|
|
234
|
+
);
|
|
235
|
+
}
|
|
236
|
+
break;
|
|
237
|
+
case "enum":
|
|
238
|
+
if (action === "create") {
|
|
239
|
+
await db.createEnumAttribute(
|
|
240
|
+
dbId,
|
|
241
|
+
collection.$id,
|
|
242
|
+
attribute.key,
|
|
243
|
+
attribute.elements,
|
|
244
|
+
attribute.required,
|
|
245
|
+
attribute.xdefault || undefined,
|
|
246
|
+
attribute.array
|
|
247
|
+
);
|
|
248
|
+
} else {
|
|
249
|
+
await db.updateEnumAttribute(
|
|
250
|
+
dbId,
|
|
251
|
+
collection.$id,
|
|
252
|
+
attribute.key,
|
|
253
|
+
attribute.elements,
|
|
254
|
+
attribute.required,
|
|
255
|
+
attribute.xdefault || undefined
|
|
256
|
+
);
|
|
257
|
+
}
|
|
258
|
+
break;
|
|
259
|
+
case "relationship":
|
|
260
|
+
if (action === "create") {
|
|
261
|
+
await db.createRelationshipAttribute(
|
|
262
|
+
dbId,
|
|
263
|
+
collection.$id,
|
|
264
|
+
relatedCollectionId!,
|
|
265
|
+
attribute.relationType,
|
|
266
|
+
attribute.twoWay,
|
|
267
|
+
attribute.key,
|
|
268
|
+
attribute.twoWayKey,
|
|
269
|
+
attribute.onDelete
|
|
270
|
+
);
|
|
271
|
+
} else {
|
|
272
|
+
await db.updateRelationshipAttribute(
|
|
273
|
+
dbId,
|
|
274
|
+
collection.$id,
|
|
275
|
+
attribute.key,
|
|
276
|
+
attribute.onDelete
|
|
277
|
+
);
|
|
278
|
+
}
|
|
279
|
+
break;
|
|
280
|
+
default:
|
|
281
|
+
console.error("Invalid attribute type");
|
|
282
|
+
break;
|
|
283
|
+
}
|
|
284
|
+
};
|
|
285
|
+
|
|
286
|
+
export const createUpdateCollectionAttributes = async (
|
|
287
|
+
db: Databases,
|
|
288
|
+
dbId: string,
|
|
289
|
+
collection: Models.Collection,
|
|
290
|
+
attributes: Attribute[]
|
|
291
|
+
): Promise<void> => {
|
|
292
|
+
console.log(
|
|
293
|
+
`Creating/Updating attributes for collection: ${collection.name}`
|
|
294
|
+
);
|
|
295
|
+
|
|
296
|
+
const batchSize = 3; // Size of each batch
|
|
297
|
+
for (let i = 0; i < attributes.length; i += batchSize) {
|
|
298
|
+
// Slice the attributes array to get a batch of at most batchSize elements
|
|
299
|
+
const batch = attributes.slice(i, i + batchSize);
|
|
300
|
+
const attributePromises = batch.map((attribute) =>
|
|
301
|
+
createOrUpdateAttribute(db, dbId, collection, attribute)
|
|
302
|
+
);
|
|
303
|
+
|
|
304
|
+
// Await the completion of all promises in the current batch
|
|
305
|
+
const results = await Promise.allSettled(attributePromises);
|
|
306
|
+
results.forEach((result) => {
|
|
307
|
+
if (result.status === "rejected") {
|
|
308
|
+
console.error("An attribute promise was rejected:", result.reason);
|
|
309
|
+
}
|
|
310
|
+
});
|
|
311
|
+
}
|
|
312
|
+
console.log(
|
|
313
|
+
`Finished creating/updating attributes for collection: ${collection.name}`
|
|
314
|
+
);
|
|
315
|
+
};
|