appwrite-utils-cli 0.9.982 → 0.9.984
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/main.js +56 -42
- package/dist/migrations/dataLoader.d.ts +2 -42
- package/dist/migrations/dataLoader.js +57 -12
- package/dist/migrations/importController.js +24 -9
- package/dist/migrations/migrationHelper.d.ts +2 -2
- package/dist/migrations/transfer.d.ts +2 -2
- package/dist/utilsController.js +39 -26
- package/package.json +1 -1
- package/src/appwrite.zip +0 -0
- package/src/main.ts +77 -52
- package/src/migrations/dataLoader.ts +74 -23
- package/src/migrations/importController.ts +37 -22
- package/src/migrations/transfer.ts +2 -2
- package/src/utilsController.ts +62 -53
- package/zlogs/album.json +0 -4
- package/zlogs/announcements.json +0 -397
- package/zlogs/announcementscomments.json +0 -36
- package/zlogs/articles.json +0 -138
- package/zlogs/articlescomments.json +0 -4
- package/zlogs/artist.json +0 -4
- package/zlogs/businesscategories.json +0 -7097
- package/zlogs/contacts.json +0 -517063
- package/zlogs/contactscouncils.json +0 -61905
- package/zlogs/contactssociallinks.json +0 -13776
- package/zlogs/councils.json +0 -5076
- package/zlogs/documents.json +0 -917
- package/zlogs/emails.json +0 -4
- package/zlogs/events.json +0 -132625
- package/zlogs/genre.json +0 -4
- package/zlogs/knowledgebase.json +0 -333
- package/zlogs/knowledgebasecomments.json +0 -4
- package/zlogs/linkcategories.json +0 -180
- package/zlogs/links.json +0 -4364
- package/zlogs/memberrequests.json +0 -83
- package/zlogs/memberrequestscomments.json +0 -65
- package/zlogs/mergedUserMap.json +0 -1
- package/zlogs/oldIdToNewIdPerCollectionMap.json +0 -1
- package/zlogs/playlist.json +0 -4
- package/zlogs/regions.json +0 -145
- package/zlogs/song.json +0 -4
- package/zlogs/testimonials.json +0 -335
- package/zlogs/useractivity.json +0 -4
- package/zlogs/userdata.json +0 -4
- package/zlogs/users.json +0 -4
package/README.md
CHANGED
@@ -125,6 +125,7 @@ This updated CLI ensures that developers have robust tools at their fingertips t
 
 ## Changelog
 
+- 0.9.983: Fixed `afterImportActions` not resolving
 - 0.9.981: Try fixing `tryAwaitWithRetry` to catch `522` errors from Cloudflare, they were appearing for some users, also added a 1000ms delay to `tryAwaitWithRetry`
 - 0.9.98: Fixing some import errors reported by users
 - 0.9.95: Updated to include new version of `appwrite-utils`
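The 0.9.981 entry describes a retry helper that waits 1000ms between attempts and tolerates transient Cloudflare 522 responses. A minimal sketch of that pattern, for illustration only; the function name, retry count, and signature below are assumptions, not the package's actual tryAwaitWithRetry:

// Illustrative retry-with-delay wrapper; not the package's real implementation.
const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms));

async function retryWithDelay<T>(fn: () => Promise<T>, attempts = 3, waitMs = 1000): Promise<T> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= attempts; attempt++) {
    try {
      return await fn();
    } catch (err) {
      lastError = err; // e.g. a 522 returned by Cloudflare
      if (attempt < attempts) await delay(waitMs); // back off before retrying
    }
  }
  throw lastError;
}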
package/dist/main.js
CHANGED
@@ -217,52 +217,66 @@ async function main() {
 await controller.importData(options);
 }
 if (parsedArgv.transfer) {
-
-
-
-
-
-
-if (
-
-
-
+if (parsedArgv.transfer) {
+const isRemote = !!parsedArgv.remoteEndpoint;
+let fromDb, toDb;
+let targetDatabases;
+let targetStorage;
+// Only fetch databases if database IDs are provided
+if (parsedArgv.fromDbId && parsedArgv.toDbId) {
+fromDb = (await controller.getDatabasesByIds([parsedArgv.fromDbId]))[0];
+if (isRemote) {
+if (!parsedArgv.remoteEndpoint ||
+!parsedArgv.remoteProjectId ||
+!parsedArgv.remoteApiKey) {
+throw new Error("Remote transfer details are missing");
+}
+const remoteClient = getClient(parsedArgv.remoteEndpoint, parsedArgv.remoteProjectId, parsedArgv.remoteApiKey);
+targetDatabases = new Databases(remoteClient);
+targetStorage = new Storage(remoteClient);
+const remoteDbs = await fetchAllDatabases(targetDatabases);
+toDb = remoteDbs.find((db) => db.$id === parsedArgv.toDbId);
+}
+else {
+toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId]))[0];
+}
+if (!fromDb || !toDb) {
+throw new Error("Source or target database not found");
+}
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-if (parsedArgv.toBucketId) {
-if (isRemote) {
-targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
+// Handle storage setup
+let sourceBucket, targetBucket;
+if (parsedArgv.fromBucketId) {
+sourceBucket = await controller.storage?.getBucket(parsedArgv.fromBucketId);
+}
+if (parsedArgv.toBucketId) {
+if (isRemote) {
+if (!targetStorage) {
+const remoteClient = getClient(parsedArgv.remoteEndpoint, parsedArgv.remoteProjectId, parsedArgv.remoteApiKey);
+targetStorage = new Storage(remoteClient);
+}
+targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
+}
+else {
+targetBucket = await controller.storage?.getBucket(parsedArgv.toBucketId);
+}
 }
-
-
+// Validate that at least one transfer type is specified
+if (!fromDb && !sourceBucket) {
+throw new Error("No source database or bucket specified for transfer");
 }
+const transferOptions = {
+isRemote,
+fromDb,
+targetDb: toDb,
+transferEndpoint: parsedArgv.remoteEndpoint,
+transferProject: parsedArgv.remoteProjectId,
+transferKey: parsedArgv.remoteApiKey,
+sourceBucket: sourceBucket,
+targetBucket: targetBucket,
+};
+await controller.transferData(transferOptions);
 }
-const transferOptions = {
-isRemote,
-fromDb: fromDb[0],
-targetDb: toDb,
-transferEndpoint: parsedArgv.remoteEndpoint,
-transferProject: parsedArgv.remoteProjectId,
-transferKey: parsedArgv.remoteApiKey,
-sourceBucket: sourceBucket,
-targetBucket: targetBucket,
-};
-await controller.transferData(transferOptions);
 }
 }
 }
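The rewritten transfer branch above keys everything off whether a remote endpoint was supplied (isRemote = !!parsedArgv.remoteEndpoint) and only constructs remote Databases/Storage services in that case. A self-contained sketch of that selection logic; buildTargetClients and its return shape are hypothetical stand-ins, not the package's getClient helper:

import { Client, Databases, Storage } from "node-appwrite";

// Hypothetical helper mirroring the remote-vs-local decision in the diff.
function buildTargetClients(remoteEndpoint?: string, remoteProjectId?: string, remoteApiKey?: string) {
  if (!remoteEndpoint) {
    // No remote endpoint: the caller keeps using its local Databases/Storage services.
    return { isRemote: false, targetDatabases: undefined, targetStorage: undefined };
  }
  if (!remoteProjectId || !remoteApiKey) {
    throw new Error("Remote transfer details are missing");
  }
  const client = new Client()
    .setEndpoint(remoteEndpoint)
    .setProject(remoteProjectId)
    .setKey(remoteApiKey);
  return { isRemote: true, targetDatabases: new Databases(client), targetStorage: new Storage(client) };
}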
package/dist/migrations/dataLoader.d.ts
CHANGED
@@ -708,27 +708,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
 attributes?: ({
 key: string;
 type?: "string" | undefined;
-format
-/**
-* Generates attribute mappings with post-import actions based on the provided attribute mappings.
-* This method checks each mapping for a fileData attribute and adds a post-import action to create a file
-* and update the field with the file's ID if necessary.
-*
-* @param attributeMappings - The attribute mappings from the import definition.
-* @param context - The context object containing information about the database, collection, and document.
-* @param item - The item being imported, used for resolving template paths in fileData mappings.
-* @returns The attribute mappings updated with any necessary post-import actions.
-*/
-? /**
-* Generates attribute mappings with post-import actions based on the provided attribute mappings.
-* This method checks each mapping for a fileData attribute and adds a post-import action to create a file
-* and update the field with the file's ID if necessary.
-*
-* @param attributeMappings - The attribute mappings from the import definition.
-* @param context - The context object containing information about the database, collection, and document.
-* @param item - The item being imported, used for resolving template paths in fileData mappings.
-* @returns The attribute mappings updated with any necessary post-import actions.
-*/: string | null | undefined;
+format?: string | null | undefined;
 description?: string | Record<string, string> | undefined;
 required?: boolean | undefined;
 array?: boolean | undefined;
@@ -1392,27 +1372,7 @@ export declare const CollectionImportDataSchema: z.ZodObject<{
 attributes?: ({
 key: string;
 type?: "string" | undefined;
-format
-/**
-* Generates attribute mappings with post-import actions based on the provided attribute mappings.
-* This method checks each mapping for a fileData attribute and adds a post-import action to create a file
-* and update the field with the file's ID if necessary.
-*
-* @param attributeMappings - The attribute mappings from the import definition.
-* @param context - The context object containing information about the database, collection, and document.
-* @param item - The item being imported, used for resolving template paths in fileData mappings.
-* @returns The attribute mappings updated with any necessary post-import actions.
-*/
-? /**
-* Generates attribute mappings with post-import actions based on the provided attribute mappings.
-* This method checks each mapping for a fileData attribute and adds a post-import action to create a file
-* and update the field with the file's ID if necessary.
-*
-* @param attributeMappings - The attribute mappings from the import definition.
-* @param context - The context object containing information about the database, collection, and document.
-* @param item - The item being imported, used for resolving template paths in fileData mappings.
-* @returns The attribute mappings updated with any necessary post-import actions.
-*/: string | null | undefined;
+format?: string | null | undefined;
 description?: string | Record<string, string> | undefined;
 required?: boolean | undefined;
 array?: boolean | undefined;
package/dist/migrations/dataLoader.js
CHANGED
@@ -842,6 +842,7 @@ export class DataLoader {
 }
 }
 // Update the attribute mappings with any actions that need to be performed post-import
+// We added the basePath to get the folder from the filePath
 const mappingsWithActions = this.getAttributeMappingsWithActions(importDef.attributeMappings, context, transformedItem);
 // Update the import definition with the new attribute mappings
 const newImportDef = {
@@ -947,6 +948,7 @@ export class DataLoader {
 continue;
 }
 // Update the attribute mappings with any actions that need to be performed post-import
+// We added the basePath to get the folder from the filePath
 const mappingsWithActions = this.getAttributeMappingsWithActions(importDef.attributeMappings, context, transformedData);
 // Update the import definition with the new attribute mappings
 const newImportDef = {
@@ -1068,6 +1070,7 @@ export class DataLoader {
 continue;
 }
 // Update the attribute mappings with any actions that need to be performed post-import
+// We added the basePath to get the folder from the filePath
 const mappingsWithActions = this.getAttributeMappingsWithActions(importDef.attributeMappings, context, transformedData);
 // Update the import definition with the new attribute mappings
 const newImportDef = {
@@ -1077,20 +1080,29 @@ export class DataLoader {
 if (itemDataToUpdate) {
 itemDataToUpdate.finalData = this.mergeObjects(itemDataToUpdate.finalData, transformedData);
 itemDataToUpdate.context = context;
-//
-
-
-
-...
+// Fix: Ensure we properly merge the attribute mappings and their actions
+const mergedAttributeMappings = newImportDef.attributeMappings.map((newMapping) => {
+const existingMapping = itemDataToUpdate.importDef?.attributeMappings.find((m) => m.targetKey === newMapping.targetKey);
+return {
+...newMapping,
 postImportActions: [
-...(
-...(
-?.postImportActions || []),
+...(existingMapping?.postImportActions || []),
+...(newMapping.postImportActions || []),
 ],
-}
+};
+});
+itemDataToUpdate.importDef = {
+...newImportDef,
+attributeMappings: mergedAttributeMappings,
 };
-
-
+// Debug logging
+if (mergedAttributeMappings.some((m) => m.postImportActions?.length > 0)) {
+logger.info(`Post-import actions for ${collection.name}: ${JSON.stringify(mergedAttributeMappings
+.filter((m) => m.postImportActions?.length > 0)
+.map((m) => ({
+targetKey: m.targetKey,
+actions: m.postImportActions,
+})), null, 2)}`);
 }
 }
 else {
@@ -1165,7 +1177,40 @@ export class DataLoader {
 let mappingFilePath = this.importDataActions.resolveTemplate(mapping.fileData.path, context, item);
 // Ensure the file path is absolute if it doesn't start with "http"
 if (!mappingFilePath.toLowerCase().startsWith("http")) {
-
+// First try the direct path
+let fullPath = path.resolve(this.appwriteFolderPath, mappingFilePath);
+// If file doesn't exist, search in subdirectories
+if (!fs.existsSync(fullPath)) {
+const findFileInDir = (dir) => {
+const files = fs.readdirSync(dir);
+for (const file of files) {
+const filePath = path.join(dir, file);
+const stat = fs.statSync(filePath);
+if (stat.isDirectory()) {
+// Recursively search subdirectories
+const found = findFileInDir(filePath);
+if (found)
+return found;
+}
+else if (file === path.basename(mappingFilePath)) {
+return filePath;
+}
+}
+return null;
+};
+const foundPath = findFileInDir(this.appwriteFolderPath);
+if (foundPath) {
+mappingFilePath = foundPath;
+}
+else {
+logger.warn(`File not found in any subdirectory: ${mappingFilePath}`);
+// Keep the original resolved path as fallback
+mappingFilePath = fullPath;
+}
+}
+else {
+mappingFilePath = fullPath;
+}
 }
 // Define the after-import action to create a file and update the field
 const afterImportAction = {
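The fileData lookup added above falls back to a recursive directory walk when the directly resolved path does not exist. A condensed, typed restatement of that idea, standalone and with a hypothetical function name; the real code lives inside DataLoader and resolves against its appwriteFolderPath:

import fs from "node:fs";
import path from "node:path";

// Resolve a mapped file path against a base folder; if nothing exists there,
// search subdirectories recursively for a file with the same basename.
function resolveMappedFile(baseDir: string, mappedPath: string): string {
  const direct = path.resolve(baseDir, mappedPath);
  if (fs.existsSync(direct)) return direct;

  const wanted = path.basename(mappedPath);
  const findFileInDir = (dir: string): string | null => {
    for (const entry of fs.readdirSync(dir)) {
      const full = path.join(dir, entry);
      if (fs.statSync(full).isDirectory()) {
        const found = findFileInDir(full); // recurse into subdirectories
        if (found) return found;
      } else if (entry === wanted) {
        return full;
      }
    }
    return null;
  };
  // Fall back to the directly resolved path if the search finds nothing.
  return findFileInDir(baseDir) ?? direct;
}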
package/dist/migrations/importController.js
CHANGED
@@ -68,13 +68,22 @@ export class ImportController {
 }
 }
 async updateOthersToFinalData(updatedDb, targetDb) {
-
-
-
-
-
-
-
+if (this.database) {
+await transferDatabaseLocalToLocal(this.database, updatedDb.$id, targetDb.$id);
+}
+if (this.storage) {
+// Find the corresponding database configs
+const updatedDbConfig = this.config.databases.find((db) => db.$id === updatedDb.$id);
+const targetDbConfig = this.config.databases.find((db) => db.$id === targetDb.$id);
+const sourceBucketId = updatedDbConfig?.bucket?.$id ||
+(this.config.documentBucketId &&
+`${this.config.documentBucketId}_${updatedDb.$id.toLowerCase().trim().replace(" ", "")}`);
+const targetBucketId = targetDbConfig?.bucket?.$id ||
+(this.config.documentBucketId &&
+`${this.config.documentBucketId}_${targetDb.$id.toLowerCase().trim().replace(" ", "")}`);
+if (sourceBucketId && targetBucketId) {
+await transferStorageLocalToLocal(this.storage, sourceBucketId, targetBucketId);
+}
 }
 }
 async importCollections(db, dataLoader, specificCollections) {
@@ -198,10 +207,13 @@ export class ImportController {
 }
 }
 async executePostImportActions(dbId, dataLoader, specificCollections) {
-
+console.log("Executing post-import actions...");
+const collectionsToProcess = specificCollections && specificCollections.length > 0 ? specificCollections : (this.config.collections ? this.config.collections.map(c => c.name) : Array.from(dataLoader.importMap.keys()));
+console.log("Collections to process:", collectionsToProcess);
 // Iterate over each collection in the importMap
 for (const [collectionKey, collectionData] of dataLoader.importMap.entries()) {
-
+const allCollectionKeys = collectionsToProcess.map(c => dataLoader.getCollectionKey(c));
+if (allCollectionKeys.includes(collectionKey)) {
 console.log(`Processing post-import actions for collection: ${collectionKey}`);
 // Iterate over each item in the collectionData.data
 for (const item of collectionData.data) {
@@ -221,6 +233,9 @@ export class ImportController {
 }
 }
 }
+else {
+console.log(`Skipping collection: ${collectionKey} because it's not valid for post-import actions`);
+}
 }
 }
 }
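When a database config has no explicit bucket, the fallback bucket ID above is derived from documentBucketId plus the database $id, lowercased and trimmed. A small worked example with made-up values:

// Hypothetical values; shows only the string derivation used as the fallback.
const documentBucketId = "documents";
const dbId = "MainDb";
const derivedBucketId = `${documentBucketId}_${dbId.toLowerCase().trim().replace(" ", "")}`;
console.log(derivedBucketId); // "documents_maindb"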
package/dist/migrations/migrationHelper.d.ts
CHANGED
@@ -91,7 +91,6 @@ export declare const ContextObject: z.ZodObject<{
 context: z.ZodAny;
 }, "strip", z.ZodTypeAny, {
 collectionId: string;
-dbId: string;
 attributeMappings: {
 targetKey: string;
 oldKey?: string | undefined;
@@ -111,11 +110,11 @@ export declare const ContextObject: z.ZodObject<{
 action: string;
 }[] | undefined;
 }[];
+dbId: string;
 context?: any;
 finalItem?: any;
 }, {
 collectionId: string;
-dbId: string;
 attributeMappings: {
 targetKey: string;
 oldKey?: string | undefined;
@@ -135,6 +134,7 @@ export declare const ContextObject: z.ZodObject<{
 action: string;
 }[] | undefined;
 }[];
+dbId: string;
 context?: any;
 finalItem?: any;
 }>;
package/dist/migrations/transfer.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import { Databases, Storage, type Models } from "node-appwrite";
 export interface TransferOptions {
-fromDb: Models.Database;
-targetDb: Models.Database;
+fromDb: Models.Database | undefined;
+targetDb: Models.Database | undefined;
 isRemote: boolean;
 collections?: string[];
 transferEndpoint?: string;
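With fromDb and targetDb widened to allow undefined, TransferOptions can describe a bucket-only transfer. A hypothetical sketch; it assumes the interface also carries the sourceBucket/targetBucket fields that main.ts passes, and the import path and placeholder buckets are illustrative:

import type { Models } from "node-appwrite";
import type { TransferOptions } from "./migrations/transfer.js"; // illustrative path

// Placeholders standing in for buckets returned by storage.getBucket(...).
declare const sourceBucket: Models.Bucket;
declare const targetBucket: Models.Bucket;

const storageOnlyTransfer: TransferOptions = {
  isRemote: false,
  fromDb: undefined,
  targetDb: undefined,
  sourceBucket,
  targetBucket,
};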
package/dist/utilsController.js
CHANGED
@@ -96,6 +96,8 @@ export class UtilsController {
 await this.init();
 if (!this.database)
 throw new Error("Database not initialized");
+if (ids.length === 0)
+return [];
 const dbs = await this.database.list([
 Query.limit(500),
 Query.equal("$id", ids),
@@ -199,13 +201,11 @@ export class UtilsController {
 return this.appwriteFolderPath;
 }
 async transferData(options) {
-
-throw new Error("Database is not initialized, is the config file correct & created?");
-}
+// Remove database requirement check
 let sourceClient = this.database;
 let targetClient;
-let sourceDatabases;
-let targetDatabases;
+let sourceDatabases = [];
+let targetDatabases = [];
 if (options.isRemote) {
 if (!options.transferEndpoint ||
 !options.transferProject ||
@@ -213,34 +213,47 @@ export class UtilsController {
 throw new Error("Remote transfer options are missing");
 }
 const remoteClient = getClient(options.transferEndpoint, options.transferProject, options.transferKey);
-
-
-
+if (this.database) {
+targetClient = new Databases(remoteClient);
+sourceDatabases = await fetchAllDatabases(sourceClient);
+targetDatabases = await fetchAllDatabases(targetClient);
+}
 }
-else {
+else if (this.database) {
 targetClient = sourceClient;
 sourceDatabases = targetDatabases = await fetchAllDatabases(sourceClient);
 }
-//
-
-
-
-
-
-
-
-
-
+// Only validate databases if they're provided in options
+if (options.fromDb && options.targetDb) {
+const fromDb = sourceDatabases.find((db) => db.$id === options.fromDb.$id);
+const targetDb = targetDatabases.find((db) => db.$id === options.targetDb.$id);
+if (!fromDb || !targetDb) {
+throw new Error("Source or target database not found");
+}
+if (options.isRemote && targetClient) {
+await transferDatabaseLocalToRemote(sourceClient, options.transferEndpoint, options.transferProject, options.transferKey, fromDb.$id, targetDb.$id);
+}
+else if (targetClient) {
+await transferDatabaseLocalToLocal(sourceClient, fromDb.$id, targetDb.$id);
 }
 }
-
-
-
-
-
+// Handle storage transfer separately
+if (this.storage && (options.sourceBucket || options.fromDb)) {
+const sourceBucketId = options.sourceBucket?.$id ||
+(options.fromDb && this.config?.documentBucketId &&
+`${this.config.documentBucketId}_${options.fromDb.$id.toLowerCase().trim().replace(" ", "")}`);
+const targetBucketId = options.targetBucket?.$id ||
+(options.targetDb && this.config?.documentBucketId &&
+`${this.config.documentBucketId}_${options.targetDb.$id.toLowerCase().trim().replace(" ", "")}`);
+if (sourceBucketId && targetBucketId) {
+if (options.isRemote) {
+await transferStorageLocalToRemote(this.storage, options.transferEndpoint, options.transferProject, options.transferKey, sourceBucketId, targetBucketId);
+}
+else {
+await transferStorageLocalToLocal(this.storage, sourceBucketId, targetBucketId);
+}
 }
 }
-console.log("
+console.log("Transfer completed");
 }
 }
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
 "name": "appwrite-utils-cli",
 "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-"version": "0.9.
+"version": "0.9.984",
 "main": "src/main.ts",
 "type": "module",
 "repository": {
package/src/appwrite.zip
ADDED
Binary file
package/src/main.ts
CHANGED
@@ -271,7 +271,6 @@ async function main() {
 }
 }
 
-
 if (options.generateSchemas) {
 await controller.generateSchemas();
 }
@@ -281,65 +280,91 @@ async function main() {
 }
 
 if (parsedArgv.transfer) {
-
-
-
-
-
+if (parsedArgv.transfer) {
+const isRemote = !!parsedArgv.remoteEndpoint;
+let fromDb, toDb: Models.Database | undefined;
+let targetDatabases: Databases | undefined;
+let targetStorage: Storage | undefined;
+
+// Only fetch databases if database IDs are provided
+if (parsedArgv.fromDbId && parsedArgv.toDbId) {
+fromDb = (
+await controller.getDatabasesByIds([parsedArgv.fromDbId])
+)[0];
+
+if (isRemote) {
+if (
+!parsedArgv.remoteEndpoint ||
+!parsedArgv.remoteProjectId ||
+!parsedArgv.remoteApiKey
+) {
+throw new Error("Remote transfer details are missing");
+}
+const remoteClient = getClient(
+parsedArgv.remoteEndpoint,
+parsedArgv.remoteProjectId,
+parsedArgv.remoteApiKey
+);
+targetDatabases = new Databases(remoteClient);
+targetStorage = new Storage(remoteClient);
+const remoteDbs = await fetchAllDatabases(targetDatabases);
+toDb = remoteDbs.find((db) => db.$id === parsedArgv.toDbId);
+} else {
+toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId]))[0];
+}
 
-
-
-
-!parsedArgv.remoteProjectId ||
-!parsedArgv.remoteApiKey
-) {
-throw new Error("Remote transfer details are missing");
+if (!fromDb || !toDb) {
+throw new Error("Source or target database not found");
+}
 }
-const remoteClient = getClient(
-parsedArgv.remoteEndpoint,
-parsedArgv.remoteProjectId,
-parsedArgv.remoteApiKey
-);
-targetDatabases = new Databases(remoteClient);
-targetStorage = new Storage(remoteClient);
-const remoteDbs = await fetchAllDatabases(targetDatabases);
-toDb = remoteDbs.find((db) => db.$id === parsedArgv.toDbId);
-} else {
-toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId!]))[0];
-}
 
-
-
-
+// Handle storage setup
+let sourceBucket, targetBucket;
+if (parsedArgv.fromBucketId) {
+sourceBucket = await controller.storage?.getBucket(
+parsedArgv.fromBucketId
+);
+}
+if (parsedArgv.toBucketId) {
+if (isRemote) {
+if (!targetStorage) {
+const remoteClient = getClient(
+parsedArgv.remoteEndpoint!,
+parsedArgv.remoteProjectId!,
+parsedArgv.remoteApiKey!
+);
+targetStorage = new Storage(remoteClient);
+}
+targetBucket = await targetStorage?.getBucket(
+parsedArgv.toBucketId
+);
+} else {
+targetBucket = await controller.storage?.getBucket(
+parsedArgv.toBucketId
+);
+}
+}
 
-
-
-
-
-);
-}
-if (parsedArgv.toBucketId) {
-if (isRemote) {
-targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
-} else {
-targetBucket = await controller.storage?.getBucket(
-parsedArgv.toBucketId
+// Validate that at least one transfer type is specified
+if (!fromDb && !sourceBucket) {
+throw new Error(
+"No source database or bucket specified for transfer"
 );
 }
-}
 
-
-
-
-
-
-
-
-
-
-
+const transferOptions: TransferOptions = {
+isRemote,
+fromDb,
+targetDb: toDb,
+transferEndpoint: parsedArgv.remoteEndpoint,
+transferProject: parsedArgv.remoteProjectId,
+transferKey: parsedArgv.remoteApiKey,
+sourceBucket: sourceBucket,
+targetBucket: targetBucket,
+};
 
-
+await controller.transferData(transferOptions);
+}
 }
 }
 }