appwrite-utils-cli 1.5.2 → 1.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (233)
  1. package/CHANGELOG.md +199 -0
  2. package/README.md +251 -29
  3. package/dist/adapters/AdapterFactory.d.ts +10 -3
  4. package/dist/adapters/AdapterFactory.js +213 -17
  5. package/dist/adapters/TablesDBAdapter.js +60 -17
  6. package/dist/backups/operations/bucketBackup.d.ts +19 -0
  7. package/dist/backups/operations/bucketBackup.js +197 -0
  8. package/dist/backups/operations/collectionBackup.d.ts +30 -0
  9. package/dist/backups/operations/collectionBackup.js +201 -0
  10. package/dist/backups/operations/comprehensiveBackup.d.ts +25 -0
  11. package/dist/backups/operations/comprehensiveBackup.js +238 -0
  12. package/dist/backups/schemas/bucketManifest.d.ts +93 -0
  13. package/dist/backups/schemas/bucketManifest.js +33 -0
  14. package/dist/backups/schemas/comprehensiveManifest.d.ts +108 -0
  15. package/dist/backups/schemas/comprehensiveManifest.js +32 -0
  16. package/dist/backups/tracking/centralizedTracking.d.ts +34 -0
  17. package/dist/backups/tracking/centralizedTracking.js +274 -0
  18. package/dist/cli/commands/configCommands.d.ts +8 -0
  19. package/dist/cli/commands/configCommands.js +160 -0
  20. package/dist/cli/commands/databaseCommands.d.ts +13 -0
  21. package/dist/cli/commands/databaseCommands.js +479 -0
  22. package/dist/cli/commands/functionCommands.d.ts +7 -0
  23. package/dist/cli/commands/functionCommands.js +289 -0
  24. package/dist/cli/commands/schemaCommands.d.ts +7 -0
  25. package/dist/cli/commands/schemaCommands.js +134 -0
  26. package/dist/cli/commands/transferCommands.d.ts +5 -0
  27. package/dist/cli/commands/transferCommands.js +384 -0
  28. package/dist/collections/attributes.d.ts +5 -4
  29. package/dist/collections/attributes.js +539 -246
  30. package/dist/collections/indexes.js +39 -37
  31. package/dist/collections/methods.d.ts +2 -16
  32. package/dist/collections/methods.js +90 -538
  33. package/dist/collections/transferOperations.d.ts +7 -0
  34. package/dist/collections/transferOperations.js +331 -0
  35. package/dist/collections/wipeOperations.d.ts +16 -0
  36. package/dist/collections/wipeOperations.js +328 -0
  37. package/dist/config/configMigration.d.ts +87 -0
  38. package/dist/config/configMigration.js +390 -0
  39. package/dist/config/configValidation.d.ts +66 -0
  40. package/dist/config/configValidation.js +358 -0
  41. package/dist/config/yamlConfig.d.ts +455 -1
  42. package/dist/config/yamlConfig.js +145 -52
  43. package/dist/databases/methods.js +3 -2
  44. package/dist/databases/setup.d.ts +1 -2
  45. package/dist/databases/setup.js +9 -87
  46. package/dist/examples/yamlTerminologyExample.d.ts +42 -0
  47. package/dist/examples/yamlTerminologyExample.js +269 -0
  48. package/dist/functions/deployments.js +11 -10
  49. package/dist/functions/methods.d.ts +1 -1
  50. package/dist/functions/methods.js +5 -4
  51. package/dist/init.js +9 -9
  52. package/dist/interactiveCLI.d.ts +8 -17
  53. package/dist/interactiveCLI.js +209 -1172
  54. package/dist/main.js +364 -21
  55. package/dist/migrations/afterImportActions.js +22 -30
  56. package/dist/migrations/appwriteToX.js +71 -25
  57. package/dist/migrations/dataLoader.js +35 -26
  58. package/dist/migrations/importController.js +29 -30
  59. package/dist/migrations/relationships.js +13 -12
  60. package/dist/migrations/services/ImportOrchestrator.js +16 -19
  61. package/dist/migrations/transfer.js +46 -46
  62. package/dist/migrations/yaml/YamlImportConfigLoader.d.ts +3 -1
  63. package/dist/migrations/yaml/YamlImportConfigLoader.js +6 -3
  64. package/dist/migrations/yaml/YamlImportIntegration.d.ts +9 -3
  65. package/dist/migrations/yaml/YamlImportIntegration.js +22 -11
  66. package/dist/migrations/yaml/generateImportSchemas.d.ts +14 -1
  67. package/dist/migrations/yaml/generateImportSchemas.js +736 -7
  68. package/dist/schemas/authUser.d.ts +1 -1
  69. package/dist/setupController.js +3 -2
  70. package/dist/shared/backupMetadataSchema.d.ts +94 -0
  71. package/dist/shared/backupMetadataSchema.js +38 -0
  72. package/dist/shared/backupTracking.d.ts +18 -0
  73. package/dist/shared/backupTracking.js +176 -0
  74. package/dist/shared/confirmationDialogs.js +15 -15
  75. package/dist/shared/errorUtils.d.ts +54 -0
  76. package/dist/shared/errorUtils.js +95 -0
  77. package/dist/shared/functionManager.js +20 -19
  78. package/dist/shared/indexManager.js +12 -11
  79. package/dist/shared/jsonSchemaGenerator.js +10 -26
  80. package/dist/shared/logging.d.ts +51 -0
  81. package/dist/shared/logging.js +70 -0
  82. package/dist/shared/messageFormatter.d.ts +2 -0
  83. package/dist/shared/messageFormatter.js +10 -0
  84. package/dist/shared/migrationHelpers.d.ts +6 -16
  85. package/dist/shared/migrationHelpers.js +24 -21
  86. package/dist/shared/operationLogger.d.ts +8 -1
  87. package/dist/shared/operationLogger.js +11 -24
  88. package/dist/shared/operationQueue.d.ts +28 -1
  89. package/dist/shared/operationQueue.js +268 -66
  90. package/dist/shared/operationsTable.d.ts +26 -0
  91. package/dist/shared/operationsTable.js +286 -0
  92. package/dist/shared/operationsTableSchema.d.ts +48 -0
  93. package/dist/shared/operationsTableSchema.js +35 -0
  94. package/dist/shared/relationshipExtractor.d.ts +56 -0
  95. package/dist/shared/relationshipExtractor.js +138 -0
  96. package/dist/shared/schemaGenerator.d.ts +19 -1
  97. package/dist/shared/schemaGenerator.js +56 -75
  98. package/dist/storage/backupCompression.d.ts +20 -0
  99. package/dist/storage/backupCompression.js +67 -0
  100. package/dist/storage/methods.d.ts +16 -2
  101. package/dist/storage/methods.js +98 -14
  102. package/dist/users/methods.js +9 -8
  103. package/dist/utils/configDiscovery.d.ts +78 -0
  104. package/dist/utils/configDiscovery.js +430 -0
  105. package/dist/utils/directoryUtils.d.ts +22 -0
  106. package/dist/utils/directoryUtils.js +59 -0
  107. package/dist/utils/getClientFromConfig.d.ts +17 -8
  108. package/dist/utils/getClientFromConfig.js +162 -17
  109. package/dist/utils/helperFunctions.d.ts +16 -2
  110. package/dist/utils/helperFunctions.js +19 -5
  111. package/dist/utils/loadConfigs.d.ts +34 -9
  112. package/dist/utils/loadConfigs.js +236 -316
  113. package/dist/utils/pathResolvers.d.ts +53 -0
  114. package/dist/utils/pathResolvers.js +72 -0
  115. package/dist/utils/projectConfig.d.ts +119 -0
  116. package/dist/utils/projectConfig.js +171 -0
  117. package/dist/utils/retryFailedPromises.js +4 -2
  118. package/dist/utils/sessionAuth.d.ts +48 -0
  119. package/dist/utils/sessionAuth.js +164 -0
  120. package/dist/utils/sessionPreservationExample.d.ts +1666 -0
  121. package/dist/utils/sessionPreservationExample.js +101 -0
  122. package/dist/utils/setupFiles.js +301 -41
  123. package/dist/utils/typeGuards.d.ts +35 -0
  124. package/dist/utils/typeGuards.js +57 -0
  125. package/dist/utils/versionDetection.js +145 -9
  126. package/dist/utils/yamlConverter.d.ts +53 -3
  127. package/dist/utils/yamlConverter.js +232 -13
  128. package/dist/utils/yamlLoader.d.ts +70 -0
  129. package/dist/utils/yamlLoader.js +263 -0
  130. package/dist/utilsController.d.ts +36 -3
  131. package/dist/utilsController.js +186 -56
  132. package/package.json +12 -2
  133. package/src/adapters/AdapterFactory.ts +263 -35
  134. package/src/adapters/TablesDBAdapter.ts +225 -36
  135. package/src/backups/operations/bucketBackup.ts +277 -0
  136. package/src/backups/operations/collectionBackup.ts +310 -0
  137. package/src/backups/operations/comprehensiveBackup.ts +342 -0
  138. package/src/backups/schemas/bucketManifest.ts +78 -0
  139. package/src/backups/schemas/comprehensiveManifest.ts +76 -0
  140. package/src/backups/tracking/centralizedTracking.ts +352 -0
  141. package/src/cli/commands/configCommands.ts +194 -0
  142. package/src/cli/commands/databaseCommands.ts +635 -0
  143. package/src/cli/commands/functionCommands.ts +379 -0
  144. package/src/cli/commands/schemaCommands.ts +163 -0
  145. package/src/cli/commands/transferCommands.ts +457 -0
  146. package/src/collections/attributes.ts +900 -621
  147. package/src/collections/attributes.ts.backup +1555 -0
  148. package/src/collections/indexes.ts +116 -114
  149. package/src/collections/methods.ts +295 -968
  150. package/src/collections/transferOperations.ts +516 -0
  151. package/src/collections/wipeOperations.ts +501 -0
  152. package/src/config/README.md +274 -0
  153. package/src/config/configMigration.ts +575 -0
  154. package/src/config/configValidation.ts +445 -0
  155. package/src/config/yamlConfig.ts +168 -55
  156. package/src/databases/methods.ts +3 -2
  157. package/src/databases/setup.ts +11 -138
  158. package/src/examples/yamlTerminologyExample.ts +341 -0
  159. package/src/functions/deployments.ts +14 -12
  160. package/src/functions/methods.ts +11 -11
  161. package/src/functions/templates/hono-typescript/README.md +286 -0
  162. package/src/functions/templates/hono-typescript/package.json +26 -0
  163. package/src/functions/templates/hono-typescript/src/adapters/request.ts +74 -0
  164. package/src/functions/templates/hono-typescript/src/adapters/response.ts +106 -0
  165. package/src/functions/templates/hono-typescript/src/app.ts +180 -0
  166. package/src/functions/templates/hono-typescript/src/context.ts +103 -0
  167. package/src/functions/templates/hono-typescript/src/index.ts +54 -0
  168. package/src/functions/templates/hono-typescript/src/middleware/appwrite.ts +119 -0
  169. package/src/functions/templates/hono-typescript/tsconfig.json +20 -0
  170. package/src/functions/templates/typescript-node/package.json +2 -1
  171. package/src/functions/templates/typescript-node/src/context.ts +103 -0
  172. package/src/functions/templates/typescript-node/src/index.ts +18 -12
  173. package/src/functions/templates/uv/pyproject.toml +1 -0
  174. package/src/functions/templates/uv/src/context.py +125 -0
  175. package/src/functions/templates/uv/src/index.py +35 -5
  176. package/src/init.ts +9 -11
  177. package/src/interactiveCLI.ts +274 -1563
  178. package/src/main.ts +418 -24
  179. package/src/migrations/afterImportActions.ts +71 -44
  180. package/src/migrations/appwriteToX.ts +100 -34
  181. package/src/migrations/dataLoader.ts +48 -34
  182. package/src/migrations/importController.ts +44 -39
  183. package/src/migrations/relationships.ts +28 -18
  184. package/src/migrations/services/ImportOrchestrator.ts +24 -27
  185. package/src/migrations/transfer.ts +159 -121
  186. package/src/migrations/yaml/YamlImportConfigLoader.ts +11 -4
  187. package/src/migrations/yaml/YamlImportIntegration.ts +47 -20
  188. package/src/migrations/yaml/generateImportSchemas.ts +751 -12
  189. package/src/setupController.ts +3 -2
  190. package/src/shared/backupMetadataSchema.ts +93 -0
  191. package/src/shared/backupTracking.ts +211 -0
  192. package/src/shared/confirmationDialogs.ts +19 -19
  193. package/src/shared/errorUtils.ts +110 -0
  194. package/src/shared/functionManager.ts +21 -20
  195. package/src/shared/indexManager.ts +12 -11
  196. package/src/shared/jsonSchemaGenerator.ts +38 -52
  197. package/src/shared/logging.ts +75 -0
  198. package/src/shared/messageFormatter.ts +14 -1
  199. package/src/shared/migrationHelpers.ts +45 -38
  200. package/src/shared/operationLogger.ts +11 -36
  201. package/src/shared/operationQueue.ts +322 -93
  202. package/src/shared/operationsTable.ts +338 -0
  203. package/src/shared/operationsTableSchema.ts +60 -0
  204. package/src/shared/relationshipExtractor.ts +214 -0
  205. package/src/shared/schemaGenerator.ts +179 -219
  206. package/src/storage/backupCompression.ts +88 -0
  207. package/src/storage/methods.ts +131 -34
  208. package/src/users/methods.ts +11 -9
  209. package/src/utils/configDiscovery.ts +502 -0
  210. package/src/utils/directoryUtils.ts +61 -0
  211. package/src/utils/getClientFromConfig.ts +205 -22
  212. package/src/utils/helperFunctions.ts +23 -5
  213. package/src/utils/loadConfigs.ts +313 -345
  214. package/src/utils/pathResolvers.ts +81 -0
  215. package/src/utils/projectConfig.ts +299 -0
  216. package/src/utils/retryFailedPromises.ts +4 -2
  217. package/src/utils/sessionAuth.ts +230 -0
  218. package/src/utils/setupFiles.ts +322 -54
  219. package/src/utils/typeGuards.ts +65 -0
  220. package/src/utils/versionDetection.ts +218 -64
  221. package/src/utils/yamlConverter.ts +296 -13
  222. package/src/utils/yamlLoader.ts +364 -0
  223. package/src/utilsController.ts +314 -110
  224. package/tests/README.md +497 -0
  225. package/tests/adapters/AdapterFactory.test.ts +277 -0
  226. package/tests/integration/syncOperations.test.ts +463 -0
  227. package/tests/jest.config.js +25 -0
  228. package/tests/migration/configMigration.test.ts +546 -0
  229. package/tests/setup.ts +62 -0
  230. package/tests/testUtils.ts +340 -0
  231. package/tests/utils/loadConfigs.test.ts +350 -0
  232. package/tests/validation/configValidation.test.ts +412 -0
  233. package/src/utils/schemaStrings.ts +0 -517
package/dist/collections/methods.js
@@ -1,16 +1,20 @@
- import { Client, Databases, ID, Permission, Query, } from "node-appwrite";
+ import { Databases, ID, Permission, Query, } from "node-appwrite";
  import { getAdapterFromConfig } from "../utils/getClientFromConfig.js";
- import { nameToIdMapping, processQueue, queuedOperations } from "../shared/operationQueue.js";
- import { createUpdateCollectionAttributes, createUpdateCollectionAttributesWithStatusCheck } from "./attributes.js";
- import { createOrUpdateIndexes, createOrUpdateIndexesWithStatusCheck } from "./indexes.js";
+ import { nameToIdMapping, processQueue, queuedOperations, clearProcessingState, isCollectionProcessed, markCollectionProcessed } from "../shared/operationQueue.js";
+ import { logger } from "../shared/logging.js";
+ import { createUpdateCollectionAttributesWithStatusCheck } from "./attributes.js";
+ import { createOrUpdateIndexesWithStatusCheck } from "./indexes.js";
  import { SchemaGenerator } from "../shared/schemaGenerator.js";
- import { isNull, isUndefined, isNil, isPlainObject, isString, isJSONValue, chunk, } from "es-toolkit";
+ import { isNull, isUndefined, isNil, isPlainObject, isString, } from "es-toolkit";
  import { delay, tryAwaitWithRetry } from "../utils/helperFunctions.js";
  import { MessageFormatter } from "../shared/messageFormatter.js";
- import { ProgressManager } from "../shared/progressManager.js";
- import chalk from "chalk";
+ import { isLegacyDatabases } from "../utils/typeGuards.js";
+ // Re-export wipe operations
+ export { wipeDatabase, wipeCollection, wipeAllTables, wipeTableRows, } from "./wipeOperations.js";
+ // Re-export transfer operations
+ export { transferDocumentsBetweenDbsLocalToLocal, transferDocumentsBetweenDbsLocalToRemote, } from "./transferOperations.js";
  export const documentExists = async (db, dbId, targetCollectionId, toCreateObject) => {
- const collection = await (db instanceof Databases ?
+ const collection = await (isLegacyDatabases(db) ?
  db.getCollection(dbId, targetCollectionId) :
  db.getTable({ databaseId: dbId, tableId: targetCollectionId }));
  const attributes = collection.attributes;
@@ -49,19 +53,19 @@ export const documentExists = async (db, dbId, targetCollectionId, toCreateObjec
  // Convert to Query parameters
  const validQueryParams = validMappedEntries.map(([key, value]) => Query.equal(key, value));
  // Execute the query with the validated and prepared parameters
- const result = await (db instanceof Databases ?
+ const result = await (isLegacyDatabases(db) ?
  db.listDocuments(dbId, targetCollectionId, validQueryParams) :
  db.listRows({ databaseId: dbId, tableId: targetCollectionId, queries: validQueryParams }));
- const items = db instanceof Databases ? result.documents : (result.rows || result.documents);
+ const items = isLegacyDatabases(db) ? result.documents : (result.rows || result.documents);
  return items?.[0] || null;
  };
  export const checkForCollection = async (db, dbId, collection) => {
  try {
  MessageFormatter.progress(`Checking for collection with name: ${collection.name}`, { prefix: "Collections" });
- const response = await tryAwaitWithRetry(async () => db instanceof Databases ?
+ const response = await tryAwaitWithRetry(async () => isLegacyDatabases(db) ?
  await db.listCollections(dbId, [Query.equal("name", collection.name)]) :
  await db.listTables({ databaseId: dbId, queries: [Query.equal("name", collection.name)] }));
- const items = db instanceof Databases ? response.collections : (response.tables || response.collections);
+ const items = isLegacyDatabases(db) ? response.collections : (response.tables || response.collections);
  if (items && items.length > 0) {
  MessageFormatter.info(`Collection found: ${items[0].$id}`, { prefix: "Collections" });
  return { ...collection, ...items[0] };
@@ -72,7 +76,14 @@ export const checkForCollection = async (db, dbId, collection) => {
  }
  }
  catch (error) {
+ const errorMessage = error instanceof Error ? error.message : String(error);
  MessageFormatter.error(`Error checking for collection: ${collection.name}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Collections" });
+ logger.error('Collection check failed', {
+ collectionName: collection.name,
+ dbId,
+ error: errorMessage,
+ operation: 'checkForCollection'
+ });
  return null;
  }
  };
@@ -81,16 +92,16 @@ export const fetchAndCacheCollectionByName = async (db, dbId, collectionName) =>
  if (nameToIdMapping.has(collectionName)) {
  const collectionId = nameToIdMapping.get(collectionName);
  MessageFormatter.debug(`Collection found in cache: ${collectionId}`, undefined, { prefix: "Collections" });
- return await tryAwaitWithRetry(async () => db instanceof Databases ?
+ return await tryAwaitWithRetry(async () => isLegacyDatabases(db) ?
  await db.getCollection(dbId, collectionId) :
  await db.getTable({ databaseId: dbId, tableId: collectionId }));
  }
  else {
  MessageFormatter.progress(`Fetching collection by name: ${collectionName}`, { prefix: "Collections" });
- const collectionsPulled = await tryAwaitWithRetry(async () => db instanceof Databases ?
+ const collectionsPulled = await tryAwaitWithRetry(async () => isLegacyDatabases(db) ?
  await db.listCollections(dbId, [Query.equal("name", collectionName)]) :
  await db.listTables({ databaseId: dbId, queries: [Query.equal("name", collectionName)] }));
- const items = db instanceof Databases ? collectionsPulled.collections : (collectionsPulled.tables || collectionsPulled.collections);
+ const items = isLegacyDatabases(db) ? collectionsPulled.collections : (collectionsPulled.tables || collectionsPulled.collections);
  if ((collectionsPulled.total || items?.length) > 0) {
  const collection = items[0];
  MessageFormatter.info(`Collection found: ${collection.$id}`, { prefix: "Collections" });
@@ -103,173 +114,13 @@ export const fetchAndCacheCollectionByName = async (db, dbId, collectionName) =>
  }
  }
  };
- async function wipeDocumentsFromCollection(database, databaseId, collectionId) {
- try {
- const initialDocuments = await database.listDocuments(databaseId, collectionId, [Query.limit(1000)]);
- let documents = initialDocuments.documents;
- let totalDocuments = documents.length;
- let cursor = initialDocuments.documents.length >= 1000
- ? initialDocuments.documents[initialDocuments.documents.length - 1].$id
- : undefined;
- while (cursor) {
- const docsResponse = await database.listDocuments(databaseId, collectionId, [Query.limit(1000), ...(cursor ? [Query.cursorAfter(cursor)] : [])]);
- documents.push(...docsResponse.documents);
- totalDocuments = documents.length;
- cursor =
- docsResponse.documents.length >= 1000
- ? docsResponse.documents[docsResponse.documents.length - 1].$id
- : undefined;
- if (totalDocuments % 10000 === 0) {
- MessageFormatter.progress(`Found ${totalDocuments} documents...`, { prefix: "Wipe" });
- }
- }
- MessageFormatter.info(`Found ${totalDocuments} documents to delete`, { prefix: "Wipe" });
- if (totalDocuments === 0) {
- MessageFormatter.info("No documents to delete", { prefix: "Wipe" });
- return;
- }
- // Create progress tracker for deletion
- const progress = ProgressManager.create(`delete-${collectionId}`, totalDocuments, { title: "Deleting documents" });
- const maxStackSize = 50; // Reduced batch size
- const docBatches = chunk(documents, maxStackSize);
- let documentsProcessed = 0;
- for (let i = 0; i < docBatches.length; i++) {
- const batch = docBatches[i];
- const deletePromises = batch.map(async (doc) => {
- try {
- await tryAwaitWithRetry(async () => database.deleteDocument(databaseId, collectionId, doc.$id));
- documentsProcessed++;
- progress.update(documentsProcessed);
- }
- catch (error) {
- // Skip if document doesn't exist or other non-critical errors
- if (!error.message?.includes("Document with the requested ID could not be found")) {
- MessageFormatter.error(`Failed to delete document ${doc.$id}`, error.message, { prefix: "Wipe" });
- }
- documentsProcessed++;
- progress.update(documentsProcessed);
- }
- });
- await Promise.all(deletePromises);
- await delay(50); // Increased delay between batches
- // Progress is now handled by ProgressManager automatically
- }
- progress.stop();
- MessageFormatter.success(`Completed deletion of ${totalDocuments} documents from collection ${collectionId}`, { prefix: "Wipe" });
- }
- catch (error) {
- MessageFormatter.error(`Error wiping documents from collection ${collectionId}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Wipe" });
- throw error;
- }
- }
- export const wipeDatabase = async (database, databaseId) => {
- MessageFormatter.info(`Wiping database: ${databaseId}`, { prefix: "Wipe" });
- const existingCollections = await fetchAllCollections(databaseId, database);
- let collectionsDeleted = [];
- if (existingCollections.length === 0) {
- MessageFormatter.info("No collections to delete", { prefix: "Wipe" });
- return collectionsDeleted;
- }
- const progress = ProgressManager.create(`wipe-db-${databaseId}`, existingCollections.length, { title: "Deleting collections" });
- let processed = 0;
- for (const { $id: collectionId, name: name } of existingCollections) {
- MessageFormatter.progress(`Deleting collection: ${collectionId}`, { prefix: "Wipe" });
- collectionsDeleted.push({
- collectionId: collectionId,
- collectionName: name,
- });
- tryAwaitWithRetry(async () => await database.deleteCollection(databaseId, collectionId)); // Try to delete the collection and ignore errors if it doesn't exist or if it's already being deleted
- processed++;
- progress.update(processed);
- await delay(100);
- }
- progress.stop();
- MessageFormatter.success(`Deleted ${collectionsDeleted.length} collections from database`, { prefix: "Wipe" });
- return collectionsDeleted;
- };
- export const wipeCollection = async (database, databaseId, collectionId) => {
- const collections = await database.listCollections(databaseId, [
- Query.equal("$id", collectionId),
- ]);
- if (collections.total === 0) {
- MessageFormatter.warning(`Collection ${collectionId} not found`, { prefix: "Wipe" });
- return;
- }
- const collection = collections.collections[0];
- await wipeDocumentsFromCollection(database, databaseId, collection.$id);
- };
- // TablesDB helpers for wiping
- export const wipeAllTables = async (adapter, databaseId) => {
- MessageFormatter.info(`Wiping tables in database: ${databaseId}`, { prefix: 'Wipe' });
- const res = await adapter.listTables({ databaseId, queries: [Query.limit(500)] });
- const tables = res.tables || [];
- const deleted = [];
- const progress = ProgressManager.create(`wipe-db-${databaseId}`, tables.length, { title: 'Deleting tables' });
- let processed = 0;
- for (const t of tables) {
- try {
- await adapter.deleteTable({ databaseId, tableId: t.$id });
- deleted.push({ tableId: t.$id, tableName: t.name });
- }
- catch (e) {
- MessageFormatter.error(`Failed deleting table ${t.$id}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Wipe' });
- }
- processed++;
- progress.update(processed);
- await delay(100);
- }
- progress.stop();
- return deleted;
- };
- export const wipeTableRows = async (adapter, databaseId, tableId) => {
- try {
- const initial = await adapter.listRows({ databaseId, tableId, queries: [Query.limit(1000)] });
- let rows = initial.rows || [];
- let total = rows.length;
- let cursor = rows.length >= 1000 ? rows[rows.length - 1].$id : undefined;
- while (cursor) {
- const resp = await adapter.listRows({ databaseId, tableId, queries: [Query.limit(1000), ...(cursor ? [Query.cursorAfter(cursor)] : [])] });
- const more = resp.rows || [];
- rows.push(...more);
- total = rows.length;
- cursor = more.length >= 1000 ? more[more.length - 1].$id : undefined;
- if (total % 10000 === 0) {
- MessageFormatter.progress(`Found ${total} rows...`, { prefix: 'Wipe' });
- }
- }
- MessageFormatter.info(`Found ${total} rows to delete`, { prefix: 'Wipe' });
- if (total === 0)
- return;
- const progress = ProgressManager.create(`delete-${tableId}`, total, { title: 'Deleting rows' });
- let processed = 0;
- const maxStackSize = 50;
- const batches = chunk(rows, maxStackSize);
- for (const batch of batches) {
- await Promise.all(batch.map(async (row) => {
- try {
- await adapter.deleteRow({ databaseId, tableId, id: row.$id });
- }
- catch (e) {
- // ignore missing rows
- }
- processed++;
- progress.update(processed);
- }));
- await delay(50);
- }
- progress.stop();
- MessageFormatter.success(`Completed deletion of ${total} rows from table ${tableId}`, { prefix: 'Wipe' });
- }
- catch (error) {
- MessageFormatter.error(`Error wiping rows from table ${tableId}`, error instanceof Error ? error : new Error(String(error)), { prefix: 'Wipe' });
- throw error;
- }
- };
  export const generateSchemas = async (config, appwriteFolderPath) => {
  const schemaGenerator = new SchemaGenerator(config, appwriteFolderPath);
  schemaGenerator.generateSchemas();
  };
  export const createOrUpdateCollections = async (database, databaseId, config, deletedCollections, selectedCollections = []) => {
+ // Clear processing state at the start of a new operation
+ clearProcessingState();
  // If API mode is tablesdb, route to adapter-based implementation
  try {
  const { adapter, apiMode } = await getAdapterFromConfig(config);
@@ -286,8 +137,14 @@ export const createOrUpdateCollections = async (database, databaseId, config, de
  return;
  }
  const usedIds = new Set();
+ MessageFormatter.info(`Processing ${collectionsToProcess.length} collections with intelligent state management`, { prefix: "Collections" });
  for (const collection of collectionsToProcess) {
  const { attributes, indexes, ...collectionData } = collection;
+ // Check if this collection has already been processed in this session
+ if (collectionData.$id && isCollectionProcessed(collectionData.$id)) {
+ MessageFormatter.info(`Collection '${collectionData.name}' already processed, skipping`, { prefix: "Collections" });
+ continue;
+ }
  // Prepare permissions for the collection
  const permissions = [];
  if (collection.$permissions && collection.$permissions.length > 0) {
@@ -354,6 +211,8 @@ export const createOrUpdateCollections = async (database, databaseId, config, de
  else {
  MessageFormatter.info(`Collection ${collectionData.name} exists, updating it`, { prefix: "Collections" });
  await tryAwaitWithRetry(async () => await database.updateCollection(databaseId, collectionToUse.$id, collectionData.name, permissions, collectionData.documentSecurity ?? false, collectionData.enabled ?? true));
+ // Cache the existing collection ID
+ nameToIdMapping.set(collectionData.name, collectionToUse.$id);
  }
  // Add delay after creating/updating collection
  await delay(250);
@@ -370,24 +229,28 @@ export const createOrUpdateCollections = async (database, databaseId, config, de
  ?.indexes ?? [];
  MessageFormatter.progress("Creating Indexes", { prefix: "Collections" });
  await createOrUpdateIndexesWithStatusCheck(databaseId, database, collectionToUse.$id, collectionToUse, indexesToUse);
+ // Mark this collection as fully processed to prevent re-processing
+ markCollectionProcessed(collectionToUse.$id, collectionData.name);
  // Add delay after creating indexes
  await delay(250);
  }
- // Process any remaining tasks in the queue (only if there are operations to process)
+ // Process any remaining relationship attributes in the queue
+ // This surgical approach only processes specific attributes, not entire collections
  if (queuedOperations.length > 0) {
- MessageFormatter.info(`Processing ${queuedOperations.length} queued operations (relationship dependencies)`, { prefix: "Collections" });
+ MessageFormatter.info(`🔧 Processing ${queuedOperations.length} queued relationship attributes (surgical approach)`, { prefix: "Collections" });
  await processQueue(database, databaseId);
  }
  else {
- MessageFormatter.info("No queued operations to process", { prefix: "Collections" });
+ MessageFormatter.info("No queued relationship attributes to process", { prefix: "Collections" });
  }
  };
- // New: Adapter-based implementation for TablesDB
+ // New: Adapter-based implementation for TablesDB with state management
  export const createOrUpdateCollectionsViaAdapter = async (adapter, databaseId, config, deletedCollections, selectedCollections = []) => {
  const collectionsToProcess = selectedCollections.length > 0 ? selectedCollections : (config.collections || []);
  if (!collectionsToProcess || collectionsToProcess.length === 0)
  return;
  const usedIds = new Set();
+ MessageFormatter.info(`Processing ${collectionsToProcess.length} tables via adapter with intelligent state management`, { prefix: "Tables" });
  // Helper: create attributes through adapter
  const createAttr = async (tableId, attr) => {
  const base = {
@@ -417,6 +280,11 @@ export const createOrUpdateCollectionsViaAdapter = async (adapter, databaseId, c
  const relQueue = [];
  for (const collection of collectionsToProcess) {
  const { attributes, indexes, ...collectionData } = collection;
+ // Check if this table has already been processed in this session
+ if (collectionData.$id && isCollectionProcessed(collectionData.$id)) {
+ MessageFormatter.info(`Table '${collectionData.name}' already processed, skipping`, { prefix: "Tables" });
+ continue;
+ }
  // Prepare permissions as strings (reuse Permission helper)
  const permissions = [];
  if (collection.$permissions && collection.$permissions.length > 0) {
@@ -481,6 +349,8 @@ export const createOrUpdateCollectionsViaAdapter = async (adapter, databaseId, c
  documentSecurity: !!collectionData.documentSecurity,
  enabled: collectionData.enabled !== false
  });
+ // Cache the existing table ID
+ nameToIdMapping.set(collectionData.name, tableId);
  }
  // Add small delay after table create/update
  await delay(250);
@@ -535,39 +405,48 @@ export const createOrUpdateCollectionsViaAdapter = async (adapter, databaseId, c
  MessageFormatter.error(`Failed to create index ${idx.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Indexes' });
  }
  }
+ // Mark this table as fully processed to prevent re-processing
+ markCollectionProcessed(tableId, collectionData.name);
  }
  // Process queued relationships once mapping likely populated
- for (const { tableId, attr } of relQueue) {
- const relNameOrId = attr.relatedCollection;
- if (!relNameOrId)
- continue;
- const relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
- if (relId) {
- attr.relatedCollection = relId;
- try {
- await adapter.createAttribute({
- databaseId,
- tableId,
- key: attr.key,
- type: attr.type,
- size: attr.size,
- required: !!attr.required,
- default: attr.xdefault,
- array: !!attr.array,
- min: attr.min,
- max: attr.max,
- elements: attr.elements,
- relatedCollection: relId,
- relationType: attr.relationType,
- twoWay: attr.twoWay,
- twoWayKey: attr.twoWayKey,
- onDelete: attr.onDelete,
- side: attr.side
- });
- await delay(150);
+ if (relQueue.length > 0) {
+ MessageFormatter.info(`🔧 Processing ${relQueue.length} queued relationship attributes for tables`, { prefix: "Tables" });
+ for (const { tableId, attr } of relQueue) {
+ const relNameOrId = attr.relatedCollection;
+ if (!relNameOrId)
+ continue;
+ const relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
+ if (relId) {
+ attr.relatedCollection = relId;
+ try {
+ await adapter.createAttribute({
+ databaseId,
+ tableId,
+ key: attr.key,
+ type: attr.type,
+ size: attr.size,
+ required: !!attr.required,
+ default: attr.xdefault,
+ array: !!attr.array,
+ min: attr.min,
+ max: attr.max,
+ elements: attr.elements,
+ relatedCollection: relId,
+ relationType: attr.relationType,
+ twoWay: attr.twoWay,
+ twoWayKey: attr.twoWayKey,
+ onDelete: attr.onDelete,
+ side: attr.side
+ });
+ await delay(150);
+ MessageFormatter.info(`✅ Successfully processed queued relationship: ${attr.key}`, { prefix: "Tables" });
+ }
+ catch (e) {
+ MessageFormatter.error(`Failed queued relationship ${attr.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Attributes' });
+ }
  }
- catch (e) {
- MessageFormatter.error(`Failed queued relationship ${attr.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Attributes' });
+ else {
+ MessageFormatter.warning(`Could not resolve relationship ${attr.key} -> ${relNameOrId}`, { prefix: "Tables" });
  }
  }
  }
@@ -604,330 +483,3 @@ export const fetchAllCollections = async (dbId, database) => {
  MessageFormatter.success(`Fetched a total of ${collections.length} collections`, { prefix: "Collections" });
  return collections;
  };
- /**
- * Transfers all documents from one collection to another in a different database
- * within the same Appwrite Project
- */
- export const transferDocumentsBetweenDbsLocalToLocal = async (db, fromDbId, toDbId, fromCollId, toCollId) => {
- let fromCollDocs = await tryAwaitWithRetry(async () => db.listDocuments(fromDbId, fromCollId, [Query.limit(50)]));
- let totalDocumentsTransferred = 0;
- if (fromCollDocs.documents.length === 0) {
- MessageFormatter.info(`No documents found in collection ${fromCollId}`, { prefix: "Transfer" });
- return;
- }
- else if (fromCollDocs.documents.length < 50) {
- const batchedPromises = fromCollDocs.documents.map((doc) => {
- const toCreateObject = {
- ...doc,
- };
- delete toCreateObject.$databaseId;
- delete toCreateObject.$collectionId;
- delete toCreateObject.$createdAt;
- delete toCreateObject.$updatedAt;
- delete toCreateObject.$id;
- delete toCreateObject.$permissions;
- return tryAwaitWithRetry(async () => await db.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
- });
- await Promise.all(batchedPromises);
- totalDocumentsTransferred += fromCollDocs.documents.length;
- }
- else {
- const batchedPromises = fromCollDocs.documents.map((doc) => {
- const toCreateObject = {
- ...doc,
- };
- delete toCreateObject.$databaseId;
- delete toCreateObject.$collectionId;
- delete toCreateObject.$createdAt;
- delete toCreateObject.$updatedAt;
- delete toCreateObject.$id;
- delete toCreateObject.$permissions;
- return tryAwaitWithRetry(async () => db.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
- });
- await Promise.all(batchedPromises);
- totalDocumentsTransferred += fromCollDocs.documents.length;
- while (fromCollDocs.documents.length === 50) {
- fromCollDocs = await tryAwaitWithRetry(async () => await db.listDocuments(fromDbId, fromCollId, [
- Query.limit(50),
- Query.cursorAfter(fromCollDocs.documents[fromCollDocs.documents.length - 1].$id),
- ]));
- const batchedPromises = fromCollDocs.documents.map((doc) => {
- const toCreateObject = {
- ...doc,
- };
- delete toCreateObject.$databaseId;
- delete toCreateObject.$collectionId;
- delete toCreateObject.$createdAt;
- delete toCreateObject.$updatedAt;
- delete toCreateObject.$id;
- delete toCreateObject.$permissions;
- return tryAwaitWithRetry(async () => await db.createDocument(toDbId, toCollId, doc.$id, toCreateObject, doc.$permissions));
- });
- await Promise.all(batchedPromises);
- totalDocumentsTransferred += fromCollDocs.documents.length;
- }
- }
- MessageFormatter.success(`Transferred ${totalDocumentsTransferred} documents from database ${fromDbId} to database ${toDbId} -- collection ${fromCollId} to collection ${toCollId}`, { prefix: "Transfer" });
- };
- /**
- * Enhanced document transfer with fault tolerance and exponential backoff
- */
- const transferDocumentWithRetry = async (db, dbId, collectionId, documentId, documentData, permissions, maxRetries = 3, retryCount = 0) => {
- try {
- await db.createDocument(dbId, collectionId, documentId, documentData, permissions);
- return true;
- }
- catch (error) {
- // Check if document already exists
- if (error.code === 409 || error.message?.toLowerCase().includes('already exists')) {
- await db.updateDocument(dbId, collectionId, documentId, documentData, permissions);
- }
- if (retryCount < maxRetries) {
- // Calculate exponential backoff: 1s, 2s, 4s
- const exponentialDelay = Math.min(1000 * Math.pow(2, retryCount), 8000);
- console.log(chalk.yellow(`Retrying document ${documentId} (attempt ${retryCount + 1}/${maxRetries}, backoff: ${exponentialDelay}ms)`));
- await delay(exponentialDelay);
- return await transferDocumentWithRetry(db, dbId, collectionId, documentId, documentData, permissions, maxRetries, retryCount + 1);
- }
- console.log(chalk.red(`Failed to transfer document ${documentId} after ${maxRetries} retries: ${error.message}`));
- return false;
- }
- };
- /**
- * Check if endpoint supports bulk operations (cloud.appwrite.io)
- */
- const supportsBulkOperations = (endpoint) => {
- return endpoint.includes('cloud.appwrite.io');
- };
- /**
- * Direct HTTP implementation of bulk upsert API
- */
- const bulkUpsertDocuments = async (client, dbId, collectionId, documents) => {
- const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
- const url = new URL(client.config.endpoint + apiPath);
- const headers = {
- 'Content-Type': 'application/json',
- 'X-Appwrite-Project': client.config.project,
- 'X-Appwrite-Key': client.config.key
- };
- const response = await fetch(url.toString(), {
- method: 'PUT',
- headers,
- body: JSON.stringify({ documents })
- });
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({ message: 'Unknown error' }));
- throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
- }
- return await response.json();
- };
- /**
- * Direct HTTP implementation of bulk create API
- */
- const bulkCreateDocuments = async (client, dbId, collectionId, documents) => {
- const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
- const url = new URL(client.config.endpoint + apiPath);
- const headers = {
- 'Content-Type': 'application/json',
- 'X-Appwrite-Project': client.config.project,
- 'X-Appwrite-Key': client.config.key
- };
- const response = await fetch(url.toString(), {
- method: 'POST',
- headers,
- body: JSON.stringify({ documents })
- });
- if (!response.ok) {
- const errorData = await response.json().catch(() => ({ message: 'Unknown error' }));
- throw new Error(`Bulk create failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
- }
- return await response.json();
- };
- /**
- * Enhanced bulk document creation using direct HTTP calls
- */
- const transferDocumentsBulkUpsert = async (client, dbId, collectionId, documents, maxBatchSize = 1000) => {
- let successful = 0;
- let failed = 0;
- // Prepare documents for bulk upsert
- const preparedDocs = documents.map(doc => {
- const toCreateObject = { ...doc };
- delete toCreateObject.$databaseId;
- delete toCreateObject.$collectionId;
- delete toCreateObject.$createdAt;
- delete toCreateObject.$updatedAt;
- // Keep $id and $permissions for upsert functionality
- return toCreateObject;
- });
- // Process in batches based on plan limits
- const documentBatches = chunk(preparedDocs, maxBatchSize);
- for (const batch of documentBatches) {
- console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
- try {
- // Try bulk upsert with direct HTTP call
- const result = await bulkUpsertDocuments(client, dbId, collectionId, batch);
- successful += result.documents?.length || batch.length;
- console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
- }
- catch (error) {
- console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
- // If bulk upsert fails, try with smaller batch size (Pro plan limit)
- if (maxBatchSize > 100) {
- const smallerBatches = chunk(batch, 100);
- for (const smallBatch of smallerBatches) {
- try {
- const result = await bulkUpsertDocuments(client, dbId, collectionId, smallBatch);
- successful += result.documents?.length || smallBatch.length;
- console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
- }
- catch (smallBatchError) {
- console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
- // Fall back to individual document transfer for this batch
- const db = new Databases(client);
- const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, smallBatch.map((doc, index) => ({
- ...doc,
- $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
- $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
- })));
- successful += indivSuccessful;
- failed += indivFailed;
- }
- // Add delay between batches
- await delay(200);
- }
- }
- else {
- // Fall back to individual document transfer
- const db = new Databases(client);
- const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, batch.map((doc, index) => ({
- ...doc,
- $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
- $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
- })));
- successful += indivSuccessful;
- failed += indivFailed;
- }
- }
- // Add delay between major batches
- if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
- await delay(500);
- }
- }
- return { successful, failed };
- };
- /**
- * Fallback batch document transfer with individual retry logic
- */
- const transferDocumentBatchWithRetryFallback = async (db, dbId, collectionId, documents, batchSize = 10) => {
- let successful = 0;
- let failed = 0;
- // Process documents in smaller batches to avoid overwhelming the server
- const documentBatches = chunk(documents, batchSize);
- for (const batch of documentBatches) {
- console.log(chalk.blue(`Processing batch of ${batch.length} documents...`));
- const batchPromises = batch.map(async (doc) => {
- const toCreateObject = { ...doc };
- delete toCreateObject.$databaseId;
- delete toCreateObject.$collectionId;
- delete toCreateObject.$createdAt;
- delete toCreateObject.$updatedAt;
- delete toCreateObject.$id;
- delete toCreateObject.$permissions;
- const result = await transferDocumentWithRetry(db, dbId, collectionId, doc.$id, toCreateObject, doc.$permissions || []);
- return { docId: doc.$id, success: result };
- });
- const results = await Promise.allSettled(batchPromises);
- results.forEach((result, index) => {
- if (result.status === 'fulfilled') {
- if (result.value.success) {
- successful++;
- }
- else {
- failed++;
- }
- }
- else {
- console.log(chalk.red(`Batch promise rejected for document ${batch[index].$id}: ${result.reason}`));
- failed++;
- }
- });
- // Add delay between batches to avoid rate limiting
- if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
- await delay(500);
- }
- }
- return { successful, failed };
- };
- /**
- * Enhanced batch document transfer with fault tolerance and bulk API support
- */
- const transferDocumentBatchWithRetry = async (db, client, dbId, collectionId, documents, batchSize = 10) => {
- // Check if we can use bulk operations
- if (supportsBulkOperations(client.config.endpoint)) {
- console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
- // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
- const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
- for (const maxBatchSize of batchSizes) {
- try {
- return await transferDocumentsBulkUpsert(client, dbId, collectionId, documents, maxBatchSize);
- }
- catch (error) {
- console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
- continue;
- }
- }
- // If all bulk operations fail, fall back to individual transfers
- console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
- }
- // Fall back to individual document transfer
- return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
- };
- export const transferDocumentsBetweenDbsLocalToRemote = async (localDb, endpoint, projectId, apiKey, fromDbId, toDbId, fromCollId, toCollId) => {
- console.log(chalk.blue(`Starting enhanced document transfer from ${fromCollId} to ${toCollId}...`));
- const client = new Client()
- .setEndpoint(endpoint)
- .setProject(projectId)
- .setKey(apiKey);
- const remoteDb = new Databases(client);
- let totalDocumentsProcessed = 0;
- let totalSuccessful = 0;
- let totalFailed = 0;
- // Fetch documents in larger batches (1000 at a time)
- let hasMoreDocuments = true;
- let lastDocumentId;
- while (hasMoreDocuments) {
- const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
- if (lastDocumentId) {
- queries.push(Query.cursorAfter(lastDocumentId));
- }
- const fromCollDocs = await tryAwaitWithRetry(async () => localDb.listDocuments(fromDbId, fromCollId, queries));
- if (fromCollDocs.documents.length === 0) {
- hasMoreDocuments = false;
- break;
- }
- console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));
- const { successful, failed } = await transferDocumentBatchWithRetry(remoteDb, client, toDbId, toCollId, fromCollDocs.documents);
- totalDocumentsProcessed += fromCollDocs.documents.length;
- totalSuccessful += successful;
- totalFailed += failed;
- // Check if we have more documents to process
- if (fromCollDocs.documents.length < 1000) {
- hasMoreDocuments = false;
- }
- else {
- lastDocumentId = fromCollDocs.documents[fromCollDocs.documents.length - 1].$id;
- }
- console.log(chalk.gray(`Batch complete: ${successful} successful, ${failed} failed`));
- }
- if (totalDocumentsProcessed === 0) {
- MessageFormatter.info(`No documents found in collection ${fromCollId}`, { prefix: "Transfer" });
- return;
- }
- const message = `Total documents processed: ${totalDocumentsProcessed}, successful: ${totalSuccessful}, failed: ${totalFailed}`;
- if (totalFailed > 0) {
- MessageFormatter.warning(message, { prefix: "Transfer" });
- }
- else {
- MessageFormatter.success(message, { prefix: "Transfer" });
- }
- };