appwrite-utils-cli 0.0.286 → 0.9.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. package/README.md +122 -96
  2. package/dist/collections/attributes.d.ts +4 -0
  3. package/dist/collections/attributes.js +224 -0
  4. package/dist/collections/indexes.d.ts +4 -0
  5. package/dist/collections/indexes.js +27 -0
  6. package/dist/collections/methods.d.ts +16 -0
  7. package/dist/collections/methods.js +216 -0
  8. package/dist/databases/methods.d.ts +6 -0
  9. package/dist/databases/methods.js +33 -0
  10. package/dist/interactiveCLI.d.ts +19 -0
  11. package/dist/interactiveCLI.js +555 -0
  12. package/dist/main.js +227 -62
  13. package/dist/migrations/afterImportActions.js +37 -40
  14. package/dist/migrations/appwriteToX.d.ts +26 -25
  15. package/dist/migrations/appwriteToX.js +42 -6
  16. package/dist/migrations/attributes.js +21 -20
  17. package/dist/migrations/backup.d.ts +93 -87
  18. package/dist/migrations/collections.d.ts +6 -0
  19. package/dist/migrations/collections.js +149 -20
  20. package/dist/migrations/converters.d.ts +2 -18
  21. package/dist/migrations/converters.js +13 -2
  22. package/dist/migrations/dataLoader.d.ts +276 -161
  23. package/dist/migrations/dataLoader.js +535 -292
  24. package/dist/migrations/databases.js +8 -2
  25. package/dist/migrations/helper.d.ts +3 -0
  26. package/dist/migrations/helper.js +21 -0
  27. package/dist/migrations/importController.d.ts +5 -2
  28. package/dist/migrations/importController.js +125 -88
  29. package/dist/migrations/importDataActions.d.ts +9 -1
  30. package/dist/migrations/importDataActions.js +15 -3
  31. package/dist/migrations/indexes.js +3 -2
  32. package/dist/migrations/logging.js +20 -8
  33. package/dist/migrations/migrationHelper.d.ts +9 -4
  34. package/dist/migrations/migrationHelper.js +6 -5
  35. package/dist/migrations/openapi.d.ts +1 -1
  36. package/dist/migrations/openapi.js +33 -18
  37. package/dist/migrations/queue.js +3 -2
  38. package/dist/migrations/relationships.d.ts +2 -2
  39. package/dist/migrations/schemaStrings.js +53 -41
  40. package/dist/migrations/setupDatabase.d.ts +2 -4
  41. package/dist/migrations/setupDatabase.js +24 -105
  42. package/dist/migrations/storage.d.ts +3 -1
  43. package/dist/migrations/storage.js +110 -16
  44. package/dist/migrations/transfer.d.ts +30 -0
  45. package/dist/migrations/transfer.js +337 -0
  46. package/dist/migrations/users.d.ts +2 -1
  47. package/dist/migrations/users.js +78 -43
  48. package/dist/schemas/authUser.d.ts +2 -2
  49. package/dist/storage/methods.d.ts +15 -0
  50. package/dist/storage/methods.js +207 -0
  51. package/dist/storage/schemas.d.ts +687 -0
  52. package/dist/storage/schemas.js +175 -0
  53. package/dist/utils/getClientFromConfig.d.ts +4 -0
  54. package/dist/utils/getClientFromConfig.js +16 -0
  55. package/dist/utils/helperFunctions.d.ts +11 -1
  56. package/dist/utils/helperFunctions.js +38 -0
  57. package/dist/utils/retryFailedPromises.d.ts +2 -0
  58. package/dist/utils/retryFailedPromises.js +21 -0
  59. package/dist/utils/schemaStrings.d.ts +13 -0
  60. package/dist/utils/schemaStrings.js +403 -0
  61. package/dist/utils/setupFiles.js +110 -61
  62. package/dist/utilsController.d.ts +40 -22
  63. package/dist/utilsController.js +164 -84
  64. package/package.json +13 -15
  65. package/src/collections/attributes.ts +483 -0
  66. package/src/collections/indexes.ts +53 -0
  67. package/src/collections/methods.ts +331 -0
  68. package/src/databases/methods.ts +47 -0
  69. package/src/init.ts +64 -64
  70. package/src/interactiveCLI.ts +767 -0
  71. package/src/main.ts +292 -83
  72. package/src/migrations/afterImportActions.ts +553 -490
  73. package/src/migrations/appwriteToX.ts +237 -174
  74. package/src/migrations/attributes.ts +483 -422
  75. package/src/migrations/backup.ts +205 -205
  76. package/src/migrations/collections.ts +545 -300
  77. package/src/migrations/converters.ts +161 -150
  78. package/src/migrations/dataLoader.ts +1615 -1304
  79. package/src/migrations/databases.ts +44 -25
  80. package/src/migrations/dbHelpers.ts +92 -92
  81. package/src/migrations/helper.ts +40 -0
  82. package/src/migrations/importController.ts +448 -384
  83. package/src/migrations/importDataActions.ts +315 -307
  84. package/src/migrations/indexes.ts +40 -37
  85. package/src/migrations/logging.ts +29 -16
  86. package/src/migrations/migrationHelper.ts +207 -201
  87. package/src/migrations/openapi.ts +83 -70
  88. package/src/migrations/queue.ts +118 -119
  89. package/src/migrations/relationships.ts +324 -324
  90. package/src/migrations/schemaStrings.ts +472 -460
  91. package/src/migrations/setupDatabase.ts +118 -219
  92. package/src/migrations/storage.ts +538 -358
  93. package/src/migrations/transfer.ts +608 -0
  94. package/src/migrations/users.ts +362 -285
  95. package/src/migrations/validationRules.ts +63 -63
  96. package/src/schemas/authUser.ts +23 -23
  97. package/src/setup.ts +8 -8
  98. package/src/storage/methods.ts +371 -0
  99. package/src/storage/schemas.ts +205 -0
  100. package/src/types.ts +9 -9
  101. package/src/utils/getClientFromConfig.ts +17 -0
  102. package/src/utils/helperFunctions.ts +181 -127
  103. package/src/utils/index.ts +2 -2
  104. package/src/utils/loadConfigs.ts +59 -59
  105. package/src/utils/retryFailedPromises.ts +27 -0
  106. package/src/utils/schemaStrings.ts +473 -0
  107. package/src/utils/setupFiles.ts +228 -182
  108. package/src/utilsController.ts +325 -194
  109. package/tsconfig.json +37 -37
@@ -1,324 +1,324 @@
1
- import { Databases, Query, type Models } from "node-appwrite";
2
- import { fetchAllCollections } from "./collections.js";
3
- import type {
4
- AppwriteConfig,
5
- Attribute,
6
- RelationshipAttribute,
7
- } from "appwrite-utils";
8
- import { logger } from "./logging.js";
9
-
10
- /**
11
- * Finds collections that have defined relationship attributes.
12
- */
13
- export const findCollectionsWithRelationships = (config: AppwriteConfig) => {
14
- const toReturn = new Map<string, RelationshipAttribute[]>();
15
- if (!config.collections) {
16
- return toReturn;
17
- }
18
- for (const collection of config.collections) {
19
- if (collection.attributes) {
20
- for (const attribute of collection.attributes) {
21
- if (
22
- attribute.type === "relationship" &&
23
- attribute.twoWay &&
24
- attribute.side === "parent"
25
- ) {
26
- toReturn.set(collection.name, toReturn.get(collection.name) || []);
27
- toReturn
28
- .get(collection.name)
29
- ?.push(attribute as RelationshipAttribute);
30
- }
31
- }
32
- }
33
- }
34
- return toReturn;
35
- };
36
-
37
- export async function resolveAndUpdateRelationships(
38
- dbId: string,
39
- database: Databases,
40
- config: AppwriteConfig
41
- ) {
42
- const collections = await fetchAllCollections(dbId, database);
43
- const collectionsWithRelationships = findCollectionsWithRelationships(config);
44
-
45
- // Process each collection sequentially
46
- for (const collection of collections) {
47
- console.log(
48
- `Processing collection: ${collection.name} (${collection.$id})`
49
- );
50
- const relAttributeMap = collectionsWithRelationships.get(
51
- collection.name
52
- ) as RelationshipAttribute[]; // Get the relationship attributes for the collections
53
-
54
- if (!relAttributeMap) {
55
- console.log(
56
- `No mapping found for collection: ${collection.name}, skipping...`
57
- );
58
- continue;
59
- }
60
-
61
- await processCollection(dbId, database, collection, relAttributeMap);
62
- }
63
- console.log(
64
- `Completed relationship resolution and update for database ID: ${dbId}`
65
- );
66
- }
67
-
68
- async function processCollection(
69
- dbId: string,
70
- database: Databases,
71
- collection: Models.Collection,
72
- relAttributeMap: RelationshipAttribute[]
73
- ) {
74
- let after; // For pagination
75
- let hasMore = true;
76
-
77
- while (hasMore) {
78
- const response: Models.DocumentList<Models.Document> =
79
- await database.listDocuments(dbId, collection.$id, [
80
- Query.limit(100), // Fetch documents in batches of 100
81
- ...(after ? [Query.cursorAfter(after)] : []),
82
- ]);
83
-
84
- const documents = response.documents;
85
- console.log(
86
- `Fetched ${documents.length} documents from collection: ${collection.name}`
87
- );
88
-
89
- if (documents.length > 0) {
90
- const updates = await prepareDocumentUpdates(
91
- database,
92
- dbId,
93
- collection.name,
94
- documents,
95
- relAttributeMap
96
- );
97
-
98
- // Execute updates for the current batch
99
- await executeUpdatesInBatches(dbId, database, updates);
100
- }
101
-
102
- if (documents.length === 100) {
103
- after = documents[documents.length - 1].$id; // Prepare for the next page
104
- } else {
105
- hasMore = false; // No more documents to fetch
106
- }
107
- }
108
- }
109
-
110
- async function findDocumentsByOriginalId(
111
- database: Databases,
112
- dbId: string,
113
- targetCollection: Models.Collection,
114
- targetKey: string,
115
- originalId: string | string[]
116
- ): Promise<Models.Document[] | undefined> {
117
- const relatedCollectionId = targetCollection.$id;
118
- const collection = await database.listCollections(dbId, [
119
- Query.equal("$id", relatedCollectionId),
120
- ]);
121
- if (collection.total === 0) {
122
- console.log(`Collection ${relatedCollectionId} doesn't exist, skipping...`);
123
- return undefined;
124
- }
125
- const targetAttr = collection.collections[0].attributes.find(
126
- // @ts-ignore
127
- (attr) => attr.key === targetKey
128
- ) as any;
129
- if (!targetAttr) {
130
- console.log(
131
- `Attribute ${targetKey} not found in collection ${relatedCollectionId}, skipping...`
132
- );
133
- return undefined;
134
- }
135
- let queries: string[] = [];
136
- if (targetAttr.array) {
137
- // @ts-ignore
138
- queries.push(Query.contains(targetKey, originalId));
139
- } else {
140
- queries.push(Query.equal(targetKey, originalId));
141
- }
142
- const response = await database.listDocuments(dbId, relatedCollectionId, [
143
- ...queries,
144
- Query.limit(500), // Adjust the limit based on your needs or implement pagination
145
- ]);
146
-
147
- if (response.documents.length < 0) {
148
- return undefined;
149
- } else if (response.documents.length > 0) {
150
- return response.documents;
151
- } else {
152
- return undefined;
153
- }
154
- }
155
-
156
- async function prepareDocumentUpdates(
157
- database: Databases,
158
- dbId: string,
159
- collectionName: string,
160
- documents: Models.Document[],
161
- relationships: RelationshipAttribute[]
162
- ): Promise<{ collectionId: string; documentId: string; updatePayload: any }[]> {
163
- console.log(`Preparing updates for collection: ${collectionName}`);
164
- const updates: {
165
- collectionId: string;
166
- documentId: string;
167
- updatePayload: any;
168
- }[] = [];
169
-
170
- const thisCollection = (
171
- await database.listCollections(dbId, [Query.equal("name", collectionName)])
172
- ).collections[0];
173
- const thisCollectionId = thisCollection?.$id;
174
-
175
- if (!thisCollectionId) {
176
- console.log(`No collection found with name: ${collectionName}`);
177
- return [];
178
- }
179
-
180
- for (const doc of documents) {
181
- let updatePayload: { [key: string]: any } = {};
182
-
183
- for (const rel of relationships) {
184
- // Skip if not dealing with the parent side of a two-way relationship
185
- if (rel.twoWay && rel.side !== "parent") {
186
- console.log("Skipping non-parent side of two-way relationship...");
187
- continue;
188
- }
189
-
190
- const isSingleReference =
191
- rel.relationType === "oneToOne" || rel.relationType === "manyToOne";
192
- const originalIdField = rel.importMapping?.originalIdField;
193
- const targetField = rel.importMapping?.targetField || originalIdField; // Use originalIdField if targetField is not specified
194
- if (!originalIdField) {
195
- console.log("Missing originalIdField in importMapping, skipping...");
196
- continue;
197
- }
198
- const originalId = doc[originalIdField as keyof typeof doc];
199
- if (!originalId) {
200
- continue;
201
- }
202
-
203
- const relatedCollection = (
204
- await database.listCollections(dbId, [
205
- Query.equal("name", rel.relatedCollection),
206
- ])
207
- ).collections[0];
208
-
209
- if (!relatedCollection) {
210
- console.log(
211
- `Related collection ${rel.relatedCollection} not found, skipping...`
212
- );
213
- continue;
214
- }
215
-
216
- const foundDocuments = await findDocumentsByOriginalId(
217
- database,
218
- dbId,
219
- relatedCollection,
220
- targetField!,
221
- originalId
222
- );
223
-
224
- if (foundDocuments && foundDocuments.length > 0) {
225
- const relationshipKey = rel.key;
226
- const existingRefs = doc[relationshipKey as keyof typeof doc] || [];
227
- let existingRefIds: string[] = [];
228
- if (Array.isArray(existingRefs)) {
229
- // @ts-ignore
230
- existingRefIds = existingRefs.map((ref) => ref.$id);
231
- } else if (existingRefs) {
232
- // @ts-ignore
233
- existingRefIds = [existingRefs.$id];
234
- }
235
-
236
- const newRefs = foundDocuments.map((fd) => fd.$id);
237
- const allRefs = [...new Set([...existingRefIds, ...newRefs])]; // Combine and remove duplicates
238
-
239
- // Update logic based on the relationship cardinality
240
- updatePayload[relationshipKey] = isSingleReference
241
- ? newRefs[0] || existingRefIds[0]
242
- : allRefs;
243
- console.log(`Updating ${relationshipKey} with ${allRefs.length} refs`);
244
- }
245
- }
246
-
247
- if (Object.keys(updatePayload).length > 0) {
248
- updates.push({
249
- collectionId: thisCollectionId,
250
- documentId: doc.$id,
251
- updatePayload: updatePayload,
252
- });
253
- }
254
- }
255
-
256
- return updates;
257
- }
258
-
259
- async function processInBatches<T>(
260
- items: T[],
261
- batchSize: number,
262
- processFunction: (batch: T[]) => Promise<void>
263
- ) {
264
- const maxParallelBatches = 25; // Adjust this value to control the number of parallel batches
265
- let currentIndex = 0;
266
- let activeBatchPromises: Promise<void>[] = [];
267
-
268
- while (currentIndex < items.length) {
269
- // While there's still data to process and we haven't reached our parallel limit
270
- while (
271
- currentIndex < items.length &&
272
- activeBatchPromises.length < maxParallelBatches
273
- ) {
274
- const batch = items.slice(currentIndex, currentIndex + batchSize);
275
- currentIndex += batchSize;
276
- // Add new batch processing promise to the array
277
- activeBatchPromises.push(processFunction(batch));
278
- }
279
-
280
- // Wait for one of the batch processes to complete
281
- await Promise.race(activeBatchPromises).then(() => {
282
- // Remove the resolved promise from the activeBatchPromises array
283
- activeBatchPromises = activeBatchPromises.filter(
284
- (p) => p !== Promise.race(activeBatchPromises)
285
- );
286
- });
287
- }
288
-
289
- // After processing all batches, ensure all active promises are resolved
290
- await Promise.all(activeBatchPromises);
291
- }
292
-
293
- async function executeUpdatesInBatches(
294
- dbId: string,
295
- database: Databases,
296
- updates: { collectionId: string; documentId: string; updatePayload: any }[]
297
- ) {
298
- const batchSize = 25; // Adjust based on your rate limit and performance testing
299
- for (let i = 0; i < updates.length; i += batchSize) {
300
- const batch = updates.slice(i, i + batchSize);
301
- await Promise.all(
302
- batch.map((update) =>
303
- database
304
- .updateDocument(
305
- dbId,
306
- update.collectionId,
307
- update.documentId,
308
- update.updatePayload
309
- )
310
- .catch((error) => {
311
- logger.error(
312
- `Error updating doc ${
313
- update.documentId
314
- } in ${dbId}, update payload: ${JSON.stringify(
315
- update.updatePayload,
316
- undefined,
317
- 4
318
- )}, error: ${error}`
319
- );
320
- })
321
- )
322
- );
323
- }
324
- }
1
+ import { Databases, Query, type Models } from "node-appwrite";
2
+ import { fetchAllCollections } from "./collections.js";
3
+ import type {
4
+ AppwriteConfig,
5
+ Attribute,
6
+ RelationshipAttribute,
7
+ } from "appwrite-utils";
8
+ import { logger } from "./logging.js";
9
+
10
+ /**
11
+ * Finds collections that have defined relationship attributes.
12
+ */
13
+ export const findCollectionsWithRelationships = (config: AppwriteConfig) => {
14
+ const toReturn = new Map<string, RelationshipAttribute[]>();
15
+ if (!config.collections) {
16
+ return toReturn;
17
+ }
18
+ for (const collection of config.collections) {
19
+ if (collection.attributes) {
20
+ for (const attribute of collection.attributes) {
21
+ if (
22
+ attribute.type === "relationship" &&
23
+ attribute.twoWay &&
24
+ attribute.side === "parent"
25
+ ) {
26
+ toReturn.set(collection.name, toReturn.get(collection.name) || []);
27
+ toReturn
28
+ .get(collection.name)
29
+ ?.push(attribute as RelationshipAttribute);
30
+ }
31
+ }
32
+ }
33
+ }
34
+ return toReturn;
35
+ };
36
+
37
+ export async function resolveAndUpdateRelationships(
38
+ dbId: string,
39
+ database: Databases,
40
+ config: AppwriteConfig
41
+ ) {
42
+ const collections = await fetchAllCollections(dbId, database);
43
+ const collectionsWithRelationships = findCollectionsWithRelationships(config);
44
+
45
+ // Process each collection sequentially
46
+ for (const collection of collections) {
47
+ console.log(
48
+ `Processing collection: ${collection.name} (${collection.$id})`
49
+ );
50
+ const relAttributeMap = collectionsWithRelationships.get(
51
+ collection.name
52
+ ) as RelationshipAttribute[]; // Get the relationship attributes for the collections
53
+
54
+ if (!relAttributeMap) {
55
+ console.log(
56
+ `No mapping found for collection: ${collection.name}, skipping...`
57
+ );
58
+ continue;
59
+ }
60
+
61
+ await processCollection(dbId, database, collection, relAttributeMap);
62
+ }
63
+ console.log(
64
+ `Completed relationship resolution and update for database ID: ${dbId}`
65
+ );
66
+ }
67
+
68
+ async function processCollection(
69
+ dbId: string,
70
+ database: Databases,
71
+ collection: Models.Collection,
72
+ relAttributeMap: RelationshipAttribute[]
73
+ ) {
74
+ let after; // For pagination
75
+ let hasMore = true;
76
+
77
+ while (hasMore) {
78
+ const response: Models.DocumentList<Models.Document> =
79
+ await database.listDocuments(dbId, collection.$id, [
80
+ Query.limit(100), // Fetch documents in batches of 100
81
+ ...(after ? [Query.cursorAfter(after)] : []),
82
+ ]);
83
+
84
+ const documents = response.documents;
85
+ console.log(
86
+ `Fetched ${documents.length} documents from collection: ${collection.name}`
87
+ );
88
+
89
+ if (documents.length > 0) {
90
+ const updates = await prepareDocumentUpdates(
91
+ database,
92
+ dbId,
93
+ collection.name,
94
+ documents,
95
+ relAttributeMap
96
+ );
97
+
98
+ // Execute updates for the current batch
99
+ await executeUpdatesInBatches(dbId, database, updates);
100
+ }
101
+
102
+ if (documents.length === 100) {
103
+ after = documents[documents.length - 1].$id; // Prepare for the next page
104
+ } else {
105
+ hasMore = false; // No more documents to fetch
106
+ }
107
+ }
108
+ }
109
+
110
+ async function findDocumentsByOriginalId(
111
+ database: Databases,
112
+ dbId: string,
113
+ targetCollection: Models.Collection,
114
+ targetKey: string,
115
+ originalId: string | string[]
116
+ ): Promise<Models.Document[] | undefined> {
117
+ const relatedCollectionId = targetCollection.$id;
118
+ const collection = await database.listCollections(dbId, [
119
+ Query.equal("$id", relatedCollectionId),
120
+ ]);
121
+ if (collection.total === 0) {
122
+ console.log(`Collection ${relatedCollectionId} doesn't exist, skipping...`);
123
+ return undefined;
124
+ }
125
+ const targetAttr = collection.collections[0].attributes.find(
126
+ // @ts-ignore
127
+ (attr) => attr.key === targetKey
128
+ ) as any;
129
+ if (!targetAttr) {
130
+ console.log(
131
+ `Attribute ${targetKey} not found in collection ${relatedCollectionId}, skipping...`
132
+ );
133
+ return undefined;
134
+ }
135
+ let queries: string[] = [];
136
+ if (targetAttr.array) {
137
+ // @ts-ignore
138
+ queries.push(Query.contains(targetKey, originalId));
139
+ } else {
140
+ queries.push(Query.equal(targetKey, originalId));
141
+ }
142
+ const response = await database.listDocuments(dbId, relatedCollectionId, [
143
+ ...queries,
144
+ Query.limit(500), // Adjust the limit based on your needs or implement pagination
145
+ ]);
146
+
147
+ if (response.documents.length < 0) {
148
+ return undefined;
149
+ } else if (response.documents.length > 0) {
150
+ return response.documents;
151
+ } else {
152
+ return undefined;
153
+ }
154
+ }
155
+
156
+ async function prepareDocumentUpdates(
157
+ database: Databases,
158
+ dbId: string,
159
+ collectionName: string,
160
+ documents: Models.Document[],
161
+ relationships: RelationshipAttribute[]
162
+ ): Promise<{ collectionId: string; documentId: string; updatePayload: any }[]> {
163
+ console.log(`Preparing updates for collection: ${collectionName}`);
164
+ const updates: {
165
+ collectionId: string;
166
+ documentId: string;
167
+ updatePayload: any;
168
+ }[] = [];
169
+
170
+ const thisCollection = (
171
+ await database.listCollections(dbId, [Query.equal("name", collectionName)])
172
+ ).collections[0];
173
+ const thisCollectionId = thisCollection?.$id;
174
+
175
+ if (!thisCollectionId) {
176
+ console.log(`No collection found with name: ${collectionName}`);
177
+ return [];
178
+ }
179
+
180
+ for (const doc of documents) {
181
+ let updatePayload: { [key: string]: any } = {};
182
+
183
+ for (const rel of relationships) {
184
+ // Skip if not dealing with the parent side of a two-way relationship
185
+ if (rel.twoWay && rel.side !== "parent") {
186
+ console.log("Skipping non-parent side of two-way relationship...");
187
+ continue;
188
+ }
189
+
190
+ const isSingleReference =
191
+ rel.relationType === "oneToOne" || rel.relationType === "manyToOne";
192
+ const originalIdField = rel.importMapping?.originalIdField;
193
+ const targetField = rel.importMapping?.targetField || originalIdField; // Use originalIdField if targetField is not specified
194
+ if (!originalIdField) {
195
+ console.log("Missing originalIdField in importMapping, skipping...");
196
+ continue;
197
+ }
198
+ const originalId = doc[originalIdField as keyof typeof doc];
199
+ if (!originalId) {
200
+ continue;
201
+ }
202
+
203
+ const relatedCollection = (
204
+ await database.listCollections(dbId, [
205
+ Query.equal("name", rel.relatedCollection),
206
+ ])
207
+ ).collections[0];
208
+
209
+ if (!relatedCollection) {
210
+ console.log(
211
+ `Related collection ${rel.relatedCollection} not found, skipping...`
212
+ );
213
+ continue;
214
+ }
215
+
216
+ const foundDocuments = await findDocumentsByOriginalId(
217
+ database,
218
+ dbId,
219
+ relatedCollection,
220
+ targetField!,
221
+ originalId
222
+ );
223
+
224
+ if (foundDocuments && foundDocuments.length > 0) {
225
+ const relationshipKey = rel.key;
226
+ const existingRefs = doc[relationshipKey as keyof typeof doc] || [];
227
+ let existingRefIds: string[] = [];
228
+ if (Array.isArray(existingRefs)) {
229
+ // @ts-ignore
230
+ existingRefIds = existingRefs.map((ref) => ref.$id);
231
+ } else if (existingRefs) {
232
+ // @ts-ignore
233
+ existingRefIds = [existingRefs.$id];
234
+ }
235
+
236
+ const newRefs = foundDocuments.map((fd) => fd.$id);
237
+ const allRefs = [...new Set([...existingRefIds, ...newRefs])]; // Combine and remove duplicates
238
+
239
+ // Update logic based on the relationship cardinality
240
+ updatePayload[relationshipKey] = isSingleReference
241
+ ? newRefs[0] || existingRefIds[0]
242
+ : allRefs;
243
+ console.log(`Updating ${relationshipKey} with ${allRefs.length} refs`);
244
+ }
245
+ }
246
+
247
+ if (Object.keys(updatePayload).length > 0) {
248
+ updates.push({
249
+ collectionId: thisCollectionId,
250
+ documentId: doc.$id,
251
+ updatePayload: updatePayload,
252
+ });
253
+ }
254
+ }
255
+
256
+ return updates;
257
+ }
258
+
259
+ async function processInBatches<T>(
260
+ items: T[],
261
+ batchSize: number,
262
+ processFunction: (batch: T[]) => Promise<void>
263
+ ) {
264
+ const maxParallelBatches = 25; // Adjust this value to control the number of parallel batches
265
+ let currentIndex = 0;
266
+ let activeBatchPromises: Promise<void>[] = [];
267
+
268
+ while (currentIndex < items.length) {
269
+ // While there's still data to process and we haven't reached our parallel limit
270
+ while (
271
+ currentIndex < items.length &&
272
+ activeBatchPromises.length < maxParallelBatches
273
+ ) {
274
+ const batch = items.slice(currentIndex, currentIndex + batchSize);
275
+ currentIndex += batchSize;
276
+ // Add new batch processing promise to the array
277
+ activeBatchPromises.push(processFunction(batch));
278
+ }
279
+
280
+ // Wait for one of the batch processes to complete
281
+ await Promise.race(activeBatchPromises).then(() => {
282
+ // Remove the resolved promise from the activeBatchPromises array
283
+ activeBatchPromises = activeBatchPromises.filter(
284
+ (p) => p !== Promise.race(activeBatchPromises)
285
+ );
286
+ });
287
+ }
288
+
289
+ // After processing all batches, ensure all active promises are resolved
290
+ await Promise.all(activeBatchPromises);
291
+ }
292
+
293
+ async function executeUpdatesInBatches(
294
+ dbId: string,
295
+ database: Databases,
296
+ updates: { collectionId: string; documentId: string; updatePayload: any }[]
297
+ ) {
298
+ const batchSize = 25; // Adjust based on your rate limit and performance testing
299
+ for (let i = 0; i < updates.length; i += batchSize) {
300
+ const batch = updates.slice(i, i + batchSize);
301
+ await Promise.all(
302
+ batch.map((update) =>
303
+ database
304
+ .updateDocument(
305
+ dbId,
306
+ update.collectionId,
307
+ update.documentId,
308
+ update.updatePayload
309
+ )
310
+ .catch((error) => {
311
+ logger.error(
312
+ `Error updating doc ${
313
+ update.documentId
314
+ } in ${dbId}, update payload: ${JSON.stringify(
315
+ update.updatePayload,
316
+ undefined,
317
+ 4
318
+ )}, error: ${error}`
319
+ );
320
+ })
321
+ )
322
+ );
323
+ }
324
+ }