appwrite-utils-cli 0.9.983 → 0.9.990
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +1 -0
- package/dist/main.js +58 -44
- package/dist/migrations/dataLoader.js +44 -8
- package/dist/migrations/importController.js +30 -7
- package/dist/migrations/transfer.d.ts +2 -2
- package/dist/migrations/transfer.js +14 -2
- package/dist/utilsController.d.ts +2 -4
- package/dist/utilsController.js +64 -28
- package/package.json +1 -1
- package/src/main.ts +80 -55
- package/src/migrations/dataLoader.ts +65 -25
- package/src/migrations/importController.ts +46 -19
- package/src/migrations/transfer.ts +18 -8
- package/src/utilsController.ts +88 -55
- package/zlogs/album.json +0 -4
- package/zlogs/announcements.json +0 -397
- package/zlogs/announcementscomments.json +0 -36
- package/zlogs/articles.json +0 -138
- package/zlogs/articlescomments.json +0 -4
- package/zlogs/artist.json +0 -4
- package/zlogs/businesscategories.json +0 -7097
- package/zlogs/contacts.json +0 -517063
- package/zlogs/contactscouncils.json +0 -61905
- package/zlogs/contactssociallinks.json +0 -13776
- package/zlogs/councils.json +0 -5076
- package/zlogs/documents.json +0 -917
- package/zlogs/emails.json +0 -4
- package/zlogs/events.json +0 -132625
- package/zlogs/genre.json +0 -4
- package/zlogs/knowledgebase.json +0 -333
- package/zlogs/knowledgebasecomments.json +0 -4
- package/zlogs/linkcategories.json +0 -180
- package/zlogs/links.json +0 -4364
- package/zlogs/memberrequests.json +0 -83
- package/zlogs/memberrequestscomments.json +0 -65
- package/zlogs/mergedUserMap.json +0 -1
- package/zlogs/oldIdToNewIdPerCollectionMap.json +0 -1
- package/zlogs/playlist.json +0 -4
- package/zlogs/regions.json +0 -145
- package/zlogs/song.json +0 -4
- package/zlogs/testimonials.json +0 -335
- package/zlogs/useractivity.json +0 -4
- package/zlogs/userdata.json +0 -4
- package/zlogs/users.json +0 -4
package/README.md
CHANGED
@@ -125,6 +125,7 @@ This updated CLI ensures that developers have robust tools at their fingertips t
 
 ## Changelog
 
+- 0.9.990: Fixed `transferFilesLocalToLocal` and `remote` if a document exists with that `$id`, also fixed wipe `"all"` option also wiping the associated buckets
 - 0.9.983: Fixed `afterImportActions` not resolving
 - 0.9.981: Try fixing `tryAwaitWithRetry` to catch `522` errors from Cloudflare, they were appearing for some users, also added a 1000ms delay to `tryAwaitWithRetry`
 - 0.9.98: Fixing some import errors reported by users
package/dist/main.js
CHANGED
@@ -141,7 +141,7 @@ async function main() {
         collections: parsedArgv.collectionIds?.split(","),
         doBackup: parsedArgv.backup,
         wipeDatabase: parsedArgv.wipe === "all" || parsedArgv.wipe === "docs",
-        wipeDocumentStorage: parsedArgv.wipe === "all",
+        wipeDocumentStorage: parsedArgv.wipe === "all" || parsedArgv.wipe === "storage",
         wipeUsers: parsedArgv.wipe === "all" || parsedArgv.wipe === "users",
         generateSchemas: parsedArgv.generate,
         importData: parsedArgv.import,
@@ -173,7 +173,7 @@ async function main() {
         options.wipeCollections) {
         if (options.wipeDatabase && options.databases) {
             for (const db of options.databases) {
-                await controller.wipeDatabase(db);
+                await controller.wipeDatabase(db, options.wipeDocumentStorage);
             }
         }
         if (options.wipeDocumentStorage && parsedArgv.bucketIds) {
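Taken together, the two hunks above (and their `src/main.ts` counterparts further down) change how the `--wipe` choice is interpreted. A minimal sketch of the resulting mapping, using the names from the diff; the surrounding option plumbing is simplified and not part of the package:

```typescript
// Sketch only: how each wipe choice maps onto the controller flags in 0.9.990.
type WipeChoice = "all" | "storage" | "docs" | "users";

function wipeFlagsFor(wipe?: WipeChoice) {
  return {
    wipeDatabase: wipe === "all" || wipe === "docs",
    // New: "storage" (or "all") also wipes document storage, and the flag is now
    // forwarded to wipeDatabase so the database's associated buckets go with it.
    wipeDocumentStorage: wipe === "all" || wipe === "storage",
    wipeUsers: wipe === "all" || wipe === "users",
  };
}

// wipeFlagsFor("storage") -> { wipeDatabase: false, wipeDocumentStorage: true, wipeUsers: false }
```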
@@ -217,52 +217,66 @@ async function main() {
     await controller.importData(options);
     }
     if (parsedArgv.transfer) {
-        … (removed lines 220-229 not shown in this view)
+        if (parsedArgv.transfer) {
+            const isRemote = !!parsedArgv.remoteEndpoint;
+            let fromDb, toDb;
+            let targetDatabases;
+            let targetStorage;
+            // Only fetch databases if database IDs are provided
+            if (parsedArgv.fromDbId && parsedArgv.toDbId) {
+                fromDb = (await controller.getDatabasesByIds([parsedArgv.fromDbId]))[0];
+                if (isRemote) {
+                    if (!parsedArgv.remoteEndpoint ||
+                        !parsedArgv.remoteProjectId ||
+                        !parsedArgv.remoteApiKey) {
+                        throw new Error("Remote transfer details are missing");
+                    }
+                    const remoteClient = getClient(parsedArgv.remoteEndpoint, parsedArgv.remoteProjectId, parsedArgv.remoteApiKey);
+                    targetDatabases = new Databases(remoteClient);
+                    targetStorage = new Storage(remoteClient);
+                    const remoteDbs = await fetchAllDatabases(targetDatabases);
+                    toDb = remoteDbs.find((db) => db.$id === parsedArgv.toDbId);
+                }
+                else {
+                    toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId]))[0];
+                }
+                if (!fromDb || !toDb) {
+                    throw new Error("Source or target database not found");
+                }
             }
-        … (removed lines 231-246 not shown in this view)
-            if (parsedArgv.toBucketId) {
-                if (isRemote) {
-                    targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
+            // Handle storage setup
+            let sourceBucket, targetBucket;
+            if (parsedArgv.fromBucketId) {
+                sourceBucket = await controller.storage?.getBucket(parsedArgv.fromBucketId);
+            }
+            if (parsedArgv.toBucketId) {
+                if (isRemote) {
+                    if (!targetStorage) {
+                        const remoteClient = getClient(parsedArgv.remoteEndpoint, parsedArgv.remoteProjectId, parsedArgv.remoteApiKey);
+                        targetStorage = new Storage(remoteClient);
+                    }
+                    targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
+                }
+                else {
+                    targetBucket = await controller.storage?.getBucket(parsedArgv.toBucketId);
+                }
             }
-        … (removed lines 251-252 not shown in this view)
+            // Validate that at least one transfer type is specified
+            if (!fromDb && !sourceBucket) {
+                throw new Error("No source database or bucket specified for transfer");
             }
+            const transferOptions = {
+                isRemote,
+                fromDb,
+                targetDb: toDb,
+                transferEndpoint: parsedArgv.remoteEndpoint,
+                transferProject: parsedArgv.remoteProjectId,
+                transferKey: parsedArgv.remoteApiKey,
+                sourceBucket: sourceBucket,
+                targetBucket: targetBucket,
+            };
+            await controller.transferData(transferOptions);
         }
-        const transferOptions = {
-            isRemote,
-            fromDb: fromDb[0],
-            targetDb: toDb,
-            transferEndpoint: parsedArgv.remoteEndpoint,
-            transferProject: parsedArgv.remoteProjectId,
-            transferKey: parsedArgv.remoteApiKey,
-            sourceBucket: sourceBucket,
-            targetBucket: targetBucket,
-        };
-        await controller.transferData(transferOptions);
     }
 }
 }
package/dist/migrations/dataLoader.js
CHANGED
@@ -842,6 +842,7 @@ export class DataLoader {
         }
         }
         // Update the attribute mappings with any actions that need to be performed post-import
+        // We added the basePath to get the folder from the filePath
         const mappingsWithActions = this.getAttributeMappingsWithActions(importDef.attributeMappings, context, transformedItem);
         // Update the import definition with the new attribute mappings
         const newImportDef = {
@@ -947,6 +948,7 @@ export class DataLoader {
         continue;
         }
         // Update the attribute mappings with any actions that need to be performed post-import
+        // We added the basePath to get the folder from the filePath
         const mappingsWithActions = this.getAttributeMappingsWithActions(importDef.attributeMappings, context, transformedData);
         // Update the import definition with the new attribute mappings
         const newImportDef = {
@@ -1068,6 +1070,7 @@ export class DataLoader {
         continue;
         }
         // Update the attribute mappings with any actions that need to be performed post-import
+        // We added the basePath to get the folder from the filePath
         const mappingsWithActions = this.getAttributeMappingsWithActions(importDef.attributeMappings, context, transformedData);
         // Update the import definition with the new attribute mappings
         const newImportDef = {
@@ -1084,21 +1087,21 @@ export class DataLoader {
             ...newMapping,
             postImportActions: [
                 ...(existingMapping?.postImportActions || []),
-                ...(newMapping.postImportActions || [])
-            ]
+                ...(newMapping.postImportActions || []),
+            ],
         };
     });
     itemDataToUpdate.importDef = {
         ...newImportDef,
-        attributeMappings: mergedAttributeMappings
+        attributeMappings: mergedAttributeMappings,
     };
     // Debug logging
-    if (mergedAttributeMappings.some(m => m.postImportActions?.length > 0)) {
+    if (mergedAttributeMappings.some((m) => m.postImportActions?.length > 0)) {
         logger.info(`Post-import actions for ${collection.name}: ${JSON.stringify(mergedAttributeMappings
-            .filter(m => m.postImportActions?.length > 0)
-            .map(m => ({
+            .filter((m) => m.postImportActions?.length > 0)
+            .map((m) => ({
             targetKey: m.targetKey,
-            actions: m.postImportActions
+            actions: m.postImportActions,
         })), null, 2)}`);
     }
 }
@@ -1174,7 +1177,40 @@ export class DataLoader {
     let mappingFilePath = this.importDataActions.resolveTemplate(mapping.fileData.path, context, item);
     // Ensure the file path is absolute if it doesn't start with "http"
     if (!mappingFilePath.toLowerCase().startsWith("http")) {
-        … (removed line 1177 not shown in this view)
+        // First try the direct path
+        let fullPath = path.resolve(this.appwriteFolderPath, mappingFilePath);
+        // If file doesn't exist, search in subdirectories
+        if (!fs.existsSync(fullPath)) {
+            const findFileInDir = (dir) => {
+                const files = fs.readdirSync(dir);
+                for (const file of files) {
+                    const filePath = path.join(dir, file);
+                    const stat = fs.statSync(filePath);
+                    if (stat.isDirectory()) {
+                        // Recursively search subdirectories
+                        const found = findFileInDir(filePath);
+                        if (found)
+                            return found;
+                    }
+                    else if (file === path.basename(mappingFilePath)) {
+                        return filePath;
+                    }
+                }
+                return null;
+            };
+            const foundPath = findFileInDir(this.appwriteFolderPath);
+            if (foundPath) {
+                mappingFilePath = foundPath;
+            }
+            else {
+                logger.warn(`File not found in any subdirectory: ${mappingFilePath}`);
+                // Keep the original resolved path as fallback
+                mappingFilePath = fullPath;
+            }
+        }
+        else {
+            mappingFilePath = fullPath;
+        }
     }
     // Define the after-import action to create a file and update the field
     const afterImportAction = {
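The last hunk adds a fallback: when a mapped file path does not resolve directly under the appwrite folder, the loader now walks the folder tree for a file with the same basename. A self-contained sketch of that lookup; the folder and mapping values are placeholders, and the helper mirrors the compiled code rather than being part of the public API:

```typescript
import fs from "node:fs";
import path from "node:path";

// Recursively look for a file with the given basename under rootDir.
// Returns the first match, or null when nothing is found.
function findFileInDir(rootDir: string, fileName: string): string | null {
  for (const entry of fs.readdirSync(rootDir)) {
    const entryPath = path.join(rootDir, entry);
    if (fs.statSync(entryPath).isDirectory()) {
      const found = findFileInDir(entryPath, fileName);
      if (found) return found;
    } else if (entry === fileName) {
      return entryPath;
    }
  }
  return null;
}

// Usage sketch: try the direct path first, then fall back to the recursive search.
const appwriteFolderPath = "./appwrite";   // placeholder
const mappingFilePath = "images/logo.png"; // placeholder mapping value
const direct = path.resolve(appwriteFolderPath, mappingFilePath);
const resolved = fs.existsSync(direct)
  ? direct
  : findFileInDir(appwriteFolderPath, path.basename(mappingFilePath)) ?? direct;
```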
package/dist/migrations/importController.js
CHANGED
@@ -68,13 +68,36 @@ export class ImportController {
     }
     }
     async updateOthersToFinalData(updatedDb, targetDb) {
-        … (removed lines 71-77 not shown in this view)
+        if (this.database) {
+            await transferDatabaseLocalToLocal(this.database, updatedDb.$id, targetDb.$id);
+        }
+        if (this.storage) {
+            // Find the corresponding database configs
+            const updatedDbConfig = this.config.databases.find((db) => db.$id === updatedDb.$id);
+            const targetDbConfig = this.config.databases.find((db) => db.$id === targetDb.$id);
+            const allBuckets = await this.storage.listBuckets([Query.limit(1000)]);
+            const bucketsWithDbIdInThem = allBuckets.buckets.filter(bucket => bucket.name.toLowerCase().includes(updatedDb.$id.toLowerCase()));
+            const configuredUpdatedBucketId = `${this.config.documentBucketId}_${updatedDb.$id.toLowerCase().trim().replace(" ", "")}`;
+            const configuredTargetBucketId = `${this.config.documentBucketId}_${targetDb.$id.toLowerCase().trim().replace(" ", "")}`;
+            let sourceBucketId;
+            let targetBucketId;
+            if (bucketsWithDbIdInThem.find(bucket => bucket.$id === configuredUpdatedBucketId)) {
+                sourceBucketId = configuredUpdatedBucketId;
+            }
+            else if (bucketsWithDbIdInThem.find(bucket => bucket.$id === configuredTargetBucketId)) {
+                targetBucketId = configuredTargetBucketId;
+            }
+            if (!sourceBucketId) {
+                sourceBucketId = updatedDbConfig?.bucket?.$id ||
+                    bucketsWithDbIdInThem[0]?.$id;
+            }
+            if (!targetBucketId) {
+                targetBucketId = targetDbConfig?.bucket?.$id ||
+                    bucketsWithDbIdInThem[0]?.$id;
+            }
+            if (sourceBucketId && targetBucketId) {
+                await transferStorageLocalToLocal(this.storage, sourceBucketId, targetBucketId);
+            }
     }
     }
     async importCollections(db, dataLoader, specificCollections) {
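The bucket pairing above leans on a naming convention: the configured `documentBucketId`, an underscore, and the lowercased, trimmed database `$id` with spaces removed. A small sketch of that convention (note this file uses `.replace(" ", "")`, which only strips the first space, while the `utilsController.js` hunk below uses `/\s+/g`):

```typescript
// Sketch of the document-bucket naming convention used to pair a database with its bucket.
// The argument values are illustrative, not taken from the package.
function documentBucketIdFor(documentBucketId: string, dbId: string): string {
  // Same expression as in updateOthersToFinalData above.
  return `${documentBucketId}_${dbId.toLowerCase().trim().replace(" ", "")}`;
}

// documentBucketIdFor("documents", "Main DB") === "documents_maindb"
```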
package/dist/migrations/transfer.d.ts
CHANGED
@@ -1,7 +1,7 @@
 import { Databases, Storage, type Models } from "node-appwrite";
 export interface TransferOptions {
-    fromDb: Models.Database;
-    targetDb: Models.Database;
+    fromDb: Models.Database | undefined;
+    targetDb: Models.Database | undefined;
     isRemote: boolean;
     collections?: string[];
     transferEndpoint?: string;
package/dist/migrations/transfer.js
CHANGED
@@ -33,7 +33,13 @@ export const transferStorageLocalToLocal = async (storage, fromBucketId, toBucke
         }
         const fileToCreate = InputFile.fromBuffer(new Uint8Array(fileData), file.name);
         console.log(`Creating file: ${file.name}`);
-        … (removed line 36 not shown in this view)
+        try {
+            await tryAwaitWithRetry(async () => await storage.createFile(toBucketId, file.$id, fileToCreate, file.$permissions));
+        }
+        catch (error) {
+            // File already exists, so we can skip it
+            continue;
+        }
         numberOfFiles++;
     }
 }
@@ -90,7 +96,13 @@ export const transferStorageLocalToRemote = async (localStorage, endpoint, proje
     for (const file of allFromFiles) {
         const fileData = await tryAwaitWithRetry(async () => await localStorage.getFileDownload(file.bucketId, file.$id));
         const fileToCreate = InputFile.fromBuffer(new Uint8Array(fileData), file.name);
-        … (removed line 93 not shown in this view)
+        try {
+            await tryAwaitWithRetry(async () => await remoteStorage.createFile(toBucketId, file.$id, fileToCreate, file.$permissions));
+        }
+        catch (error) {
+            // File already exists, so we can skip it
+            continue;
+        }
         numberOfFiles++;
     }
     console.log(`Transferred ${numberOfFiles} files from ${fromBucketId} to ${toBucketId}`);
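Both storage transfer paths now wrap `createFile` in a try/catch and skip the file on failure, which covers the case where a file with that `$id` already exists in the target bucket. A hedged sketch of the pattern in isolation, with the package's `tryAwaitWithRetry` helper replaced by a plain call:

```typescript
import { Storage } from "node-appwrite";

// Attempt the create and treat a failure (typically a conflict on a reused $id) as a skip.
async function createFileOrSkip(
  storage: Storage,
  toBucketId: string,
  fileId: string,
  fileToCreate: Parameters<Storage["createFile"]>[2],
  permissions: string[]
): Promise<boolean> {
  try {
    await storage.createFile(toBucketId, fileId, fileToCreate, permissions);
    return true; // transferred
  } catch {
    return false; // assume the file already exists, as the transfer code does
  }
}
```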
package/dist/utilsController.d.ts
CHANGED
@@ -36,10 +36,8 @@ export declare class UtilsController {
     wipeOtherDatabases(databasesToKeep: Models.Database[]): Promise<void>;
     wipeUsers(): Promise<void>;
     backupDatabase(database: Models.Database): Promise<void>;
-    wipeDatabase(database: Models.Database): Promise<
-    … (removed line 40 not shown in this view)
-        collectionName: string;
-    }[]>;
+    wipeDatabase(database: Models.Database, wipeBucket?: boolean): Promise<void>;
+    wipeBucketFromDatabase(database: Models.Database): Promise<void>;
     wipeCollection(database: Models.Database, collection: Models.Collection): Promise<void>;
     wipeDocumentStorage(bucketId: string): Promise<void>;
     createOrUpdateCollectionsForDatabases(databases: Models.Database[], collections?: Models.Collection[]): Promise<void>;
package/dist/utilsController.js
CHANGED
@@ -96,6 +96,8 @@ export class UtilsController {
         await this.init();
         if (!this.database)
             throw new Error("Database not initialized");
+        if (ids.length === 0)
+            return [];
         const dbs = await this.database.list([
             Query.limit(500),
             Query.equal("$id", ids),
@@ -121,11 +123,34 @@ export class UtilsController {
            throw new Error("Database, storage, or config not initialized");
        await backupDatabase(this.config, this.database, database.$id, this.storage);
    }
-    async wipeDatabase(database) {
+    async wipeDatabase(database, wipeBucket = false) {
        await this.init();
        if (!this.database)
            throw new Error("Database not initialized");
-        … (removed line 128 not shown in this view)
+        await wipeDatabase(this.database, database.$id);
+        if (wipeBucket) {
+            await this.wipeBucketFromDatabase(database);
+        }
+    }
+    async wipeBucketFromDatabase(database) {
+        // Check configured bucket in database config
+        const configuredBucket = this.config?.databases?.find(db => db.$id === database.$id)?.bucket;
+        if (configuredBucket?.$id) {
+            await this.wipeDocumentStorage(configuredBucket.$id);
+        }
+        // Also check for document bucket ID pattern
+        if (this.config?.documentBucketId) {
+            const documentBucketId = `${this.config.documentBucketId}_${database.$id.toLowerCase().trim().replace(/\s+/g, "")}`;
+            try {
+                await this.wipeDocumentStorage(documentBucketId);
+            }
+            catch (error) {
+                // Ignore if bucket doesn't exist
+                if (error?.type !== 'storage_bucket_not_found') {
+                    throw error;
+                }
+            }
+        }
    }
    async wipeCollection(database, collection) {
        await this.init();
@@ -199,13 +224,11 @@ export class UtilsController {
        return this.appwriteFolderPath;
    }
    async transferData(options) {
-        … (removed line 202 not shown in this view)
-            throw new Error("Database is not initialized, is the config file correct & created?");
-        }
+        // Remove database requirement check
        let sourceClient = this.database;
        let targetClient;
-        let sourceDatabases;
-        let targetDatabases;
+        let sourceDatabases = [];
+        let targetDatabases = [];
        if (options.isRemote) {
            if (!options.transferEndpoint ||
                !options.transferProject ||
@@ -213,34 +236,47 @@ export class UtilsController {
                throw new Error("Remote transfer options are missing");
            }
            const remoteClient = getClient(options.transferEndpoint, options.transferProject, options.transferKey);
-            … (removed lines 216-218 not shown in this view)
+            if (this.database) {
+                targetClient = new Databases(remoteClient);
+                sourceDatabases = await fetchAllDatabases(sourceClient);
+                targetDatabases = await fetchAllDatabases(targetClient);
+            }
        }
-        else {
+        else if (this.database) {
            targetClient = sourceClient;
            sourceDatabases = targetDatabases = await fetchAllDatabases(sourceClient);
        }
-        //
-        … (removed lines 225-234 not shown in this view)
+        // Only validate databases if they're provided in options
+        if (options.fromDb && options.targetDb) {
+            const fromDb = sourceDatabases.find((db) => db.$id === options.fromDb.$id);
+            const targetDb = targetDatabases.find((db) => db.$id === options.targetDb.$id);
+            if (!fromDb || !targetDb) {
+                throw new Error("Source or target database not found");
+            }
+            if (options.isRemote && targetClient) {
+                await transferDatabaseLocalToRemote(sourceClient, options.transferEndpoint, options.transferProject, options.transferKey, fromDb.$id, targetDb.$id);
+            }
+            else if (targetClient) {
+                await transferDatabaseLocalToLocal(sourceClient, fromDb.$id, targetDb.$id);
            }
        }
-        … (removed lines 237-241 not shown in this view)
+        // Handle storage transfer separately
+        if (this.storage && (options.sourceBucket || options.fromDb)) {
+            const sourceBucketId = options.sourceBucket?.$id ||
+                (options.fromDb && this.config?.documentBucketId &&
+                    `${this.config.documentBucketId}_${options.fromDb.$id.toLowerCase().trim().replace(" ", "")}`);
+            const targetBucketId = options.targetBucket?.$id ||
+                (options.targetDb && this.config?.documentBucketId &&
+                    `${this.config.documentBucketId}_${options.targetDb.$id.toLowerCase().trim().replace(" ", "")}`);
+            if (sourceBucketId && targetBucketId) {
+                if (options.isRemote) {
+                    await transferStorageLocalToRemote(this.storage, options.transferEndpoint, options.transferProject, options.transferKey, sourceBucketId, targetBucketId);
+                }
+                else {
+                    await transferStorageLocalToLocal(this.storage, sourceBucketId, targetBucketId);
+                }
            }
        }
-        console.log("
+        console.log("Transfer completed");
    }
 }
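For callers, the visible change is the `wipeDatabase` signature: it now accepts a `wipeBucket` flag and resolves to void, with bucket cleanup handled by `wipeBucketFromDatabase`. A sketch of how a caller might use it, against a minimal interface mirroring the updated `utilsController.d.ts`:

```typescript
import type { Models } from "node-appwrite";

// Minimal slice of the controller surface relevant here; the real class has many more members.
interface WipeApi {
  wipeDatabase(database: Models.Database, wipeBucket?: boolean): Promise<void>;
  wipeBucketFromDatabase(database: Models.Database): Promise<void>;
}

async function wipeDatabases(
  controller: WipeApi,
  databases: Models.Database[],
  wipeStorage: boolean
): Promise<void> {
  for (const db of databases) {
    // Passing true also removes the configured bucket and the derived
    // "<documentBucketId>_<dbid>" bucket, per wipeBucketFromDatabase above.
    await controller.wipeDatabase(db, wipeStorage);
  }
}
```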
package/package.json
CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "appwrite-utils-cli",
   "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-  "version": "0.9.983",
+  "version": "0.9.990",
   "main": "src/main.ts",
   "type": "module",
   "repository": {
package/src/main.ts
CHANGED
@@ -16,7 +16,7 @@ interface CliOptions {
   dbIds?: string;
   collectionIds?: string;
   bucketIds?: string;
-  wipe?: "all" | "docs" | "users";
+  wipe?: "all" | "storage" | "docs" | "users";
   wipeCollections?: boolean;
   generate?: boolean;
   import?: boolean;
@@ -181,7 +181,7 @@ async function main() {
       collections: parsedArgv.collectionIds?.split(","),
       doBackup: parsedArgv.backup,
       wipeDatabase: parsedArgv.wipe === "all" || parsedArgv.wipe === "docs",
-      wipeDocumentStorage: parsedArgv.wipe === "all",
+      wipeDocumentStorage: parsedArgv.wipe === "all" || parsedArgv.wipe === "storage",
       wipeUsers: parsedArgv.wipe === "all" || parsedArgv.wipe === "users",
       generateSchemas: parsedArgv.generate,
       importData: parsedArgv.import,
@@ -220,7 +220,7 @@ async function main() {
     ) {
       if (options.wipeDatabase && options.databases) {
         for (const db of options.databases) {
-          await controller.wipeDatabase(db);
+          await controller.wipeDatabase(db, options.wipeDocumentStorage);
         }
       }
       if (options.wipeDocumentStorage && parsedArgv.bucketIds) {
@@ -271,7 +271,6 @@ async function main() {
       }
     }
 
-
     if (options.generateSchemas) {
       await controller.generateSchemas();
     }
@@ -281,65 +280,91 @@ async function main() {
   }
 
   if (parsedArgv.transfer) {
-    … (removed lines 284-288 not shown in this view)
+    if (parsedArgv.transfer) {
+      const isRemote = !!parsedArgv.remoteEndpoint;
+      let fromDb, toDb: Models.Database | undefined;
+      let targetDatabases: Databases | undefined;
+      let targetStorage: Storage | undefined;
+
+      // Only fetch databases if database IDs are provided
+      if (parsedArgv.fromDbId && parsedArgv.toDbId) {
+        fromDb = (
+          await controller.getDatabasesByIds([parsedArgv.fromDbId])
+        )[0];
+
+        if (isRemote) {
+          if (
+            !parsedArgv.remoteEndpoint ||
+            !parsedArgv.remoteProjectId ||
+            !parsedArgv.remoteApiKey
+          ) {
+            throw new Error("Remote transfer details are missing");
+          }
+          const remoteClient = getClient(
+            parsedArgv.remoteEndpoint,
+            parsedArgv.remoteProjectId,
+            parsedArgv.remoteApiKey
+          );
+          targetDatabases = new Databases(remoteClient);
+          targetStorage = new Storage(remoteClient);
+          const remoteDbs = await fetchAllDatabases(targetDatabases);
+          toDb = remoteDbs.find((db) => db.$id === parsedArgv.toDbId);
+        } else {
+          toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId]))[0];
+        }
 
-    … (removed lines 290-292 not shown in this view)
-      !parsedArgv.remoteProjectId ||
-      !parsedArgv.remoteApiKey
-    ) {
-      throw new Error("Remote transfer details are missing");
+        if (!fromDb || !toDb) {
+          throw new Error("Source or target database not found");
+        }
     }
-    const remoteClient = getClient(
-      parsedArgv.remoteEndpoint,
-      parsedArgv.remoteProjectId,
-      parsedArgv.remoteApiKey
-    );
-    targetDatabases = new Databases(remoteClient);
-    targetStorage = new Storage(remoteClient);
-    const remoteDbs = await fetchAllDatabases(targetDatabases);
-    toDb = remoteDbs.find((db) => db.$id === parsedArgv.toDbId);
-    } else {
-      toDb = (await controller.getDatabasesByIds([parsedArgv.toDbId!]))[0];
-    }
 
-    … (removed lines 311-313 not shown in this view)
+      // Handle storage setup
+      let sourceBucket, targetBucket;
+      if (parsedArgv.fromBucketId) {
+        sourceBucket = await controller.storage?.getBucket(
+          parsedArgv.fromBucketId
+        );
+      }
+      if (parsedArgv.toBucketId) {
+        if (isRemote) {
+          if (!targetStorage) {
+            const remoteClient = getClient(
+              parsedArgv.remoteEndpoint!,
+              parsedArgv.remoteProjectId!,
+              parsedArgv.remoteApiKey!
+            );
+            targetStorage = new Storage(remoteClient);
+          }
+          targetBucket = await targetStorage?.getBucket(
+            parsedArgv.toBucketId
+          );
+        } else {
+          targetBucket = await controller.storage?.getBucket(
+            parsedArgv.toBucketId
+          );
+        }
+      }
 
-    … (removed lines 315-318 not shown in this view)
-      );
-    }
-    if (parsedArgv.toBucketId) {
-      if (isRemote) {
-        targetBucket = await targetStorage?.getBucket(parsedArgv.toBucketId);
-      } else {
-        targetBucket = await controller.storage?.getBucket(
-          parsedArgv.toBucketId
+      // Validate that at least one transfer type is specified
+      if (!fromDb && !sourceBucket) {
+        throw new Error(
+          "No source database or bucket specified for transfer"
         );
       }
-    }
 
-    … (removed lines 331-340 not shown in this view)
+      const transferOptions: TransferOptions = {
+        isRemote,
+        fromDb,
+        targetDb: toDb,
+        transferEndpoint: parsedArgv.remoteEndpoint,
+        transferProject: parsedArgv.remoteProjectId,
+        transferKey: parsedArgv.remoteApiKey,
+        sourceBucket: sourceBucket,
+        targetBucket: targetBucket,
+      };
 
-
+      await controller.transferData(transferOptions);
+    }
   }
 }
 }
|