appwrite-utils-cli 0.0.60 → 0.0.62
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
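Every change in the hunks below has the same shape: a direct Appwrite SDK call (db.updateDocument, db.getDocument, db.listCollections, db.listDocuments, db.getCollection, storage.listBuckets, storage.getBucket, storage.createBucket, or fetch) is wrapped in tryAwaitWithRetry. The implementation of tryAwaitWithRetry is not part of this diff, so the TypeScript sketch below is only a hypothetical illustration of such a wrapper; the attempt count, the linear backoff, and the error handling are assumptions, not the published helper.

// Hypothetical sketch of a retry wrapper matching the call shape used below.
// The real tryAwaitWithRetry is not shown in this diff; attempt count,
// backoff, and error handling here are assumptions for illustration only.
export const tryAwaitWithRetry = async <T>(
  fn: () => Promise<T>,
  maxAttempts = 5,
  delayMs = 1000
): Promise<T> => {
  let lastError: unknown;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      // Run the wrapped async operation; return its result on success
      return await fn();
    } catch (error) {
      lastError = error;
      if (attempt < maxAttempts) {
        // Wait a bit longer after each failed attempt before retrying
        await new Promise((resolve) => setTimeout(resolve, delayMs * attempt));
      }
    }
  }
  // All attempts failed; surface the last error to the caller
  throw lastError;
};

// Example of the wrapped call shape used throughout the diff:
// await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, docId, data));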
@@ -26,7 +26,7 @@ export const afterImportActions = {
     updateCreatedDocument: async (config, dbId, collId, docId, data) => {
         try {
             const db = getDatabaseFromConfig(config);
-            await db.updateDocument(dbId, collId, docId, data);
+            await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, docId, data));
         }
         catch (error) {
             console.error("Error updating document: ", error);
@@ -35,11 +35,11 @@ export const afterImportActions = {
     checkAndUpdateFieldInDocument: async (config, dbId, collId, docId, fieldName, oldFieldValue, newFieldValue) => {
         try {
             const db = getDatabaseFromConfig(config);
-            const doc = await db.getDocument(dbId, collId, docId);
+            const doc = await tryAwaitWithRetry(async () => await db.getDocument(dbId, collId, docId));
             if (doc[fieldName] == oldFieldValue) {
-                await db.updateDocument(dbId, collId, docId, {
+                await tryAwaitWithRetry(async () => await db.updateDocument(dbId, collId, docId, {
                     [fieldName]: newFieldValue,
-                });
+                }));
             }
         }
         catch (error) {
@@ -50,10 +50,10 @@ export const afterImportActions = {
         const db = getDatabaseFromConfig(config);
         // Helper function to find a collection ID by name or return the ID if given
         const findCollectionId = async (collectionIdentifier) => {
-            const collectionsPulled = await db.listCollections(dbId, [
+            const collectionsPulled = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
                 Query.limit(25),
                 Query.equal("name", collectionIdentifier),
-            ]);
+            ]));
             if (collectionsPulled.total > 0) {
                 return collectionsPulled.collections[0].$id;
             }
@@ -71,9 +71,9 @@ export const afterImportActions = {
             const valueToSet = otherDoc[otherFieldName];
             if (valueToSet) {
                 // Update the target document
-                await db.updateDocument(dbId, targetCollectionId, docId, {
+                await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, {
                     [fieldName]: valueToSet,
-                });
+                }));
             }
             console.log(`Field ${fieldName} updated successfully in document ${docId}.`);
         }
@@ -89,17 +89,17 @@ export const afterImportActions = {
         const db = getDatabaseFromConfig(config);
         // Helper function to find a collection ID by name or return the ID if given
         const findCollectionId = async (collectionIdentifier) => {
-            const collections = await db.listCollections(dbId, [
+            const collections = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
                 Query.equal("name", collectionIdentifier),
                 Query.limit(1),
-            ]);
+            ]));
             return collections.total > 0
                 ? collections.collections[0].$id
                 : collectionIdentifier;
         };
         // Function to check if the target field is an array
         const isTargetFieldArray = async (collectionId, fieldName) => {
-            const collection = await db.getCollection(dbId, collectionId);
+            const collection = await tryAwaitWithRetry(async () => await db.getCollection(dbId, collectionId));
             const attribute = collection.attributes.find((attr) => attr.key === fieldName);
             // @ts-ignore
             return attribute?.array === true;
@@ -119,7 +119,7 @@ export const afterImportActions = {
                 queries.push(Query.cursorAfter(cursor));
             }
             queries.push(Query.limit(docLimit));
-            const response = await db.listDocuments(dbId, otherCollectionId, queries);
+            const response = await tryAwaitWithRetry(async () => await db.listDocuments(dbId, otherCollectionId, queries));
             const documents = response.documents;
             if (documents.length === 0 || documents.length < docLimit) {
                 return documents;
@@ -134,7 +134,7 @@ export const afterImportActions = {
             const updatePayload = targetFieldIsArray
                 ? { [fieldName]: documentIds }
                 : { [fieldName]: documentIds[0] };
-            await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+            await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, updatePayload));
             console.log(`Field ${fieldName} updated successfully in document ${docId} with ${documentIds.length} document IDs.`);
         }
     }
@@ -145,10 +145,10 @@ export const afterImportActions = {
     setTargetFieldFromOtherCollectionDocumentsByMatchingField: async (config, dbId, collIdOrName, docId, fieldName, otherCollIdOrName, matchingFieldName, matchingFieldValue, targetField) => {
         const db = getDatabaseFromConfig(config);
         const findCollectionId = async (collectionIdentifier) => {
-            const collections = await db.listCollections(dbId, [
+            const collections = await tryAwaitWithRetry(async () => await db.listCollections(dbId, [
                 Query.equal("name", collectionIdentifier),
                 Query.limit(1),
-            ]);
+            ]));
             return collections.total > 0
                 ? collections.collections[0].$id
                 : collectionIdentifier;
@@ -172,7 +172,7 @@ export const afterImportActions = {
             if (cursor) {
                 queries.push(Query.cursorAfter(cursor));
             }
-            const response = await db.listDocuments(dbId, otherCollectionId, queries);
+            const response = await tryAwaitWithRetry(async () => await db.listDocuments(dbId, otherCollectionId, queries));
             const documents = response.documents;
             if (documents.length === 0 || documents.length < docLimit) {
                 return documents;
@@ -188,7 +188,7 @@ export const afterImportActions = {
             const updatePayload = targetFieldIsArray
                 ? { [fieldName]: targetFieldValues }
                 : { [fieldName]: targetFieldValues[0] };
-            await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+            await tryAwaitWithRetry(async () => await db.updateDocument(dbId, targetCollectionId, docId, updatePayload));
             console.log(`Field ${fieldName} updated successfully in document ${docId} with values from field ${targetField}.`);
         }
     }
@@ -199,22 +199,20 @@ export const afterImportActions = {
     createOrGetBucket: async (config, bucketName, bucketId, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus) => {
         try {
             const storage = getStorageFromConfig(config);
-            const bucket = await storage.listBuckets([
-                Query.equal("name", bucketName),
-            ]);
+            const bucket = await tryAwaitWithRetry(async () => await storage.listBuckets([Query.equal("name", bucketName)]));
             if (bucket.buckets.length > 0) {
                 return bucket.buckets[0];
             }
             else if (bucketId) {
                 try {
-                    return await storage.getBucket(bucketId);
+                    return await tryAwaitWithRetry(async () => await storage.getBucket(bucketId));
                 }
                 catch (error) {
-                    return await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus);
+                    return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId, bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus));
                 }
             }
             else {
-                return await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus);
+                return await tryAwaitWithRetry(async () => await storage.createBucket(bucketId || ID.unique(), bucketName, permissions, fileSecurity, enabled, maxFileSize, allowedExtensions, compression, encryption, antivirus));
             }
         }
         catch (error) {
@@ -225,7 +223,7 @@ export const afterImportActions = {
         try {
             const db = getDatabaseFromConfig(config);
             const storage = getStorageFromConfig(config);
-            const collection = await db.getCollection(dbId, collId);
+            const collection = await tryAwaitWithRetry(async () => await db.getCollection(dbId, collId));
             const attributes = collection.attributes;
             const attribute = attributes.find((a) => a.key === fieldName);
             // console.log(
@@ -255,7 +253,7 @@ export const afterImportActions = {
             const tempDir = fs.mkdtempSync(path.join(os.tmpdir(), "appwrite_tmp"));
             const tempFilePath = path.join(tempDir, fileName);
             // Download the file using fetch
-            const response = await fetch(filePath);
+            const response = await tryAwaitWithRetry(async () => await fetch(filePath));
             if (!response.ok)
                 console.error(`Failed to fetch ${filePath}: ${response.statusText} for document ${docId} with field ${fieldName}`);
             // Use arrayBuffer if buffer is not available
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "appwrite-utils-cli",
   "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-  "version": "0.0.60",
+  "version": "0.0.62",
   "main": "src/main.ts",
   "type": "module",
   "repository": {
@@ -33,7 +33,7 @@
   },
   "dependencies": {
     "@types/inquirer": "^9.0.7",
-    "appwrite-utils": "^0.3.
+    "appwrite-utils": "^0.3.1",
     "commander": "^12.0.0",
     "inquirer": "^9.2.20",
     "js-yaml": "^4.1.0",
@@ -47,7 +47,9 @@ export const afterImportActions = {
   ) => {
     try {
       const db = getDatabaseFromConfig(config);
-      await db.updateDocument(dbId, collId, docId, data);
+      await tryAwaitWithRetry(
+        async () => await db.updateDocument(dbId, collId, docId, data)
+      );
     } catch (error) {
       console.error("Error updating document: ", error);
     }
@@ -63,11 +65,16 @@ export const afterImportActions = {
   ) => {
     try {
       const db = getDatabaseFromConfig(config);
-      const doc = await db.getDocument(dbId, collId, docId);
+      const doc = await tryAwaitWithRetry(
+        async () => await db.getDocument(dbId, collId, docId)
+      );
       if (doc[fieldName as keyof typeof doc] == oldFieldValue) {
-        await db.updateDocument(dbId, collId, docId, {
-          [fieldName]: newFieldValue,
-        });
+        await tryAwaitWithRetry(
+          async () =>
+            await db.updateDocument(dbId, collId, docId, {
+              [fieldName]: newFieldValue,
+            })
+        );
       }
     } catch (error) {
       console.error("Error updating document: ", error);
@@ -87,10 +94,13 @@ export const afterImportActions = {

     // Helper function to find a collection ID by name or return the ID if given
     const findCollectionId = async (collectionIdentifier: string) => {
-      const collectionsPulled = await db.listCollections(dbId, [
-        Query.limit(25),
-        Query.equal("name", collectionIdentifier),
-      ]);
+      const collectionsPulled = await tryAwaitWithRetry(
+        async () =>
+          await db.listCollections(dbId, [
+            Query.limit(25),
+            Query.equal("name", collectionIdentifier),
+          ])
+      );
       if (collectionsPulled.total > 0) {
         return collectionsPulled.collections[0].$id;
       } else {
@@ -114,9 +124,12 @@ export const afterImportActions = {

       if (valueToSet) {
         // Update the target document
-        await db.updateDocument(dbId, targetCollectionId, docId, {
-          [fieldName]: valueToSet,
-        });
+        await tryAwaitWithRetry(
+          async () =>
+            await db.updateDocument(dbId, targetCollectionId, docId, {
+              [fieldName]: valueToSet,
+            })
+        );
       }

       console.log(
@@ -148,10 +161,13 @@ export const afterImportActions = {

     // Helper function to find a collection ID by name or return the ID if given
     const findCollectionId = async (collectionIdentifier: string) => {
-      const collections = await db.listCollections(dbId, [
-        Query.equal("name", collectionIdentifier),
-        Query.limit(1),
-      ]);
+      const collections = await tryAwaitWithRetry(
+        async () =>
+          await db.listCollections(dbId, [
+            Query.equal("name", collectionIdentifier),
+            Query.limit(1),
+          ])
+      );
       return collections.total > 0
         ? collections.collections[0].$id
         : collectionIdentifier;
@@ -162,7 +178,9 @@ export const afterImportActions = {
       collectionId: string,
       fieldName: string
     ) => {
-      const collection = await db.getCollection(dbId, collectionId);
+      const collection = await tryAwaitWithRetry(
+        async () => await db.getCollection(dbId, collectionId)
+      );
       const attribute = collection.attributes.find(
         (attr: any) => attr.key === fieldName
       );
@@ -191,10 +209,8 @@ export const afterImportActions = {
        queries.push(Query.cursorAfter(cursor));
      }
      queries.push(Query.limit(docLimit));
-      const response = await db.listDocuments(
-        dbId,
-        otherCollectionId,
-        queries
+      const response = await tryAwaitWithRetry(
+        async () => await db.listDocuments(dbId, otherCollectionId, queries)
       );
       const documents = response.documents;
       if (documents.length === 0 || documents.length < docLimit) {
@@ -212,7 +228,15 @@ export const afterImportActions = {
       const updatePayload = targetFieldIsArray
         ? { [fieldName]: documentIds }
         : { [fieldName]: documentIds[0] };
-      await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+      await tryAwaitWithRetry(
+        async () =>
+          await db.updateDocument(
+            dbId,
+            targetCollectionId,
+            docId,
+            updatePayload
+          )
+      );

       console.log(
         `Field ${fieldName} updated successfully in document ${docId} with ${documentIds.length} document IDs.`
@@ -239,10 +263,13 @@ export const afterImportActions = {
     const db = getDatabaseFromConfig(config);

     const findCollectionId = async (collectionIdentifier: string) => {
-      const collections = await db.listCollections(dbId, [
-        Query.equal("name", collectionIdentifier),
-        Query.limit(1),
-      ]);
+      const collections = await tryAwaitWithRetry(
+        async () =>
+          await db.listCollections(dbId, [
+            Query.equal("name", collectionIdentifier),
+            Query.limit(1),
+          ])
+      );
       return collections.total > 0
         ? collections.collections[0].$id
         : collectionIdentifier;
@@ -279,10 +306,8 @@ export const afterImportActions = {
       if (cursor) {
         queries.push(Query.cursorAfter(cursor));
       }
-      const response = await db.listDocuments(
-        dbId,
-        otherCollectionId,
-        queries
+      const response = await tryAwaitWithRetry(
+        async () => await db.listDocuments(dbId, otherCollectionId, queries)
       );
       const documents = response.documents;
       if (documents.length === 0 || documents.length < docLimit) {
@@ -303,7 +328,15 @@ export const afterImportActions = {
       const updatePayload = targetFieldIsArray
         ? { [fieldName]: targetFieldValues }
         : { [fieldName]: targetFieldValues[0] };
-      await db.updateDocument(dbId, targetCollectionId, docId, updatePayload);
+      await tryAwaitWithRetry(
+        async () =>
+          await db.updateDocument(
+            dbId,
+            targetCollectionId,
+            docId,
+            updatePayload
+          )
+      );

       console.log(
         `Field ${fieldName} updated successfully in document ${docId} with values from field ${targetField}.`
@@ -331,40 +364,48 @@ export const afterImportActions = {
   ) => {
     try {
       const storage = getStorageFromConfig(config);
-      const bucket = await storage.listBuckets([
-        Query.equal("name", bucketName)
-      ]);
+      const bucket = await tryAwaitWithRetry(
+        async () => await storage.listBuckets([Query.equal("name", bucketName)])
+      );
       if (bucket.buckets.length > 0) {
         return bucket.buckets[0];
       } else if (bucketId) {
         try {
-          return await storage.getBucket(bucketId);
+          return await tryAwaitWithRetry(
+            async () => await storage.getBucket(bucketId)
+          );
         } catch (error) {
-          return await storage.createBucket(
-            bucketId,
-            bucketName,
-            permissions,
-            fileSecurity,
-            enabled,
-            maxFileSize,
-            allowedExtensions,
-            compression,
-            encryption,
-            antivirus
+          return await tryAwaitWithRetry(
+            async () =>
+              await storage.createBucket(
+                bucketId,
+                bucketName,
+                permissions,
+                fileSecurity,
+                enabled,
+                maxFileSize,
+                allowedExtensions,
+                compression,
+                encryption,
+                antivirus
+              )
           );
         }
       } else {
-        return await storage.createBucket(
-          bucketId || ID.unique(),
-          bucketName,
-          permissions,
-          fileSecurity,
-          enabled,
-          maxFileSize,
-          allowedExtensions,
-          compression,
-          encryption,
-          antivirus
+        return await tryAwaitWithRetry(
+          async () =>
+            await storage.createBucket(
+              bucketId || ID.unique(),
+              bucketName,
+              permissions,
+              fileSecurity,
+              enabled,
+              maxFileSize,
+              allowedExtensions,
+              compression,
+              encryption,
+              antivirus
+            )
         );
       }
     } catch (error) {
@@ -384,7 +425,9 @@ export const afterImportActions = {
     try {
       const db = getDatabaseFromConfig(config);
       const storage = getStorageFromConfig(config);
-      const collection = await db.getCollection(dbId, collId);
+      const collection = await tryAwaitWithRetry(
+        async () => await db.getCollection(dbId, collId)
+      );
       const attributes = collection.attributes as any[];
       const attribute = attributes.find((a) => a.key === fieldName);
       // console.log(
@@ -423,7 +466,9 @@ export const afterImportActions = {
       const tempFilePath = path.join(tempDir, fileName);

       // Download the file using fetch
-      const response = await fetch(filePath);
+      const response = await tryAwaitWithRetry(
+        async () => await fetch(filePath)
+      );
       if (!response.ok)
         console.error(
           `Failed to fetch ${filePath}: ${response.statusText} for document ${docId} with field ${fieldName}`