appwrite-utils-cli 1.5.2 → 1.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (233)
  1. package/CHANGELOG.md +199 -0
  2. package/README.md +251 -29
  3. package/dist/adapters/AdapterFactory.d.ts +10 -3
  4. package/dist/adapters/AdapterFactory.js +213 -17
  5. package/dist/adapters/TablesDBAdapter.js +60 -17
  6. package/dist/backups/operations/bucketBackup.d.ts +19 -0
  7. package/dist/backups/operations/bucketBackup.js +197 -0
  8. package/dist/backups/operations/collectionBackup.d.ts +30 -0
  9. package/dist/backups/operations/collectionBackup.js +201 -0
  10. package/dist/backups/operations/comprehensiveBackup.d.ts +25 -0
  11. package/dist/backups/operations/comprehensiveBackup.js +238 -0
  12. package/dist/backups/schemas/bucketManifest.d.ts +93 -0
  13. package/dist/backups/schemas/bucketManifest.js +33 -0
  14. package/dist/backups/schemas/comprehensiveManifest.d.ts +108 -0
  15. package/dist/backups/schemas/comprehensiveManifest.js +32 -0
  16. package/dist/backups/tracking/centralizedTracking.d.ts +34 -0
  17. package/dist/backups/tracking/centralizedTracking.js +274 -0
  18. package/dist/cli/commands/configCommands.d.ts +8 -0
  19. package/dist/cli/commands/configCommands.js +160 -0
  20. package/dist/cli/commands/databaseCommands.d.ts +13 -0
  21. package/dist/cli/commands/databaseCommands.js +478 -0
  22. package/dist/cli/commands/functionCommands.d.ts +7 -0
  23. package/dist/cli/commands/functionCommands.js +289 -0
  24. package/dist/cli/commands/schemaCommands.d.ts +7 -0
  25. package/dist/cli/commands/schemaCommands.js +134 -0
  26. package/dist/cli/commands/transferCommands.d.ts +5 -0
  27. package/dist/cli/commands/transferCommands.js +384 -0
  28. package/dist/collections/attributes.d.ts +5 -4
  29. package/dist/collections/attributes.js +539 -246
  30. package/dist/collections/indexes.js +39 -37
  31. package/dist/collections/methods.d.ts +2 -16
  32. package/dist/collections/methods.js +90 -538
  33. package/dist/collections/transferOperations.d.ts +7 -0
  34. package/dist/collections/transferOperations.js +331 -0
  35. package/dist/collections/wipeOperations.d.ts +16 -0
  36. package/dist/collections/wipeOperations.js +328 -0
  37. package/dist/config/configMigration.d.ts +87 -0
  38. package/dist/config/configMigration.js +390 -0
  39. package/dist/config/configValidation.d.ts +66 -0
  40. package/dist/config/configValidation.js +358 -0
  41. package/dist/config/yamlConfig.d.ts +455 -1
  42. package/dist/config/yamlConfig.js +145 -52
  43. package/dist/databases/methods.js +3 -2
  44. package/dist/databases/setup.d.ts +1 -2
  45. package/dist/databases/setup.js +9 -87
  46. package/dist/examples/yamlTerminologyExample.d.ts +42 -0
  47. package/dist/examples/yamlTerminologyExample.js +269 -0
  48. package/dist/functions/deployments.js +11 -10
  49. package/dist/functions/methods.d.ts +1 -1
  50. package/dist/functions/methods.js +5 -4
  51. package/dist/init.js +9 -9
  52. package/dist/interactiveCLI.d.ts +8 -17
  53. package/dist/interactiveCLI.js +181 -1172
  54. package/dist/main.js +364 -21
  55. package/dist/migrations/afterImportActions.js +22 -30
  56. package/dist/migrations/appwriteToX.js +71 -25
  57. package/dist/migrations/dataLoader.js +35 -26
  58. package/dist/migrations/importController.js +29 -30
  59. package/dist/migrations/relationships.js +13 -12
  60. package/dist/migrations/services/ImportOrchestrator.js +16 -19
  61. package/dist/migrations/transfer.js +46 -46
  62. package/dist/migrations/yaml/YamlImportConfigLoader.d.ts +3 -1
  63. package/dist/migrations/yaml/YamlImportConfigLoader.js +6 -3
  64. package/dist/migrations/yaml/YamlImportIntegration.d.ts +9 -3
  65. package/dist/migrations/yaml/YamlImportIntegration.js +22 -11
  66. package/dist/migrations/yaml/generateImportSchemas.d.ts +14 -1
  67. package/dist/migrations/yaml/generateImportSchemas.js +736 -7
  68. package/dist/schemas/authUser.d.ts +1 -1
  69. package/dist/setupController.js +3 -2
  70. package/dist/shared/backupMetadataSchema.d.ts +94 -0
  71. package/dist/shared/backupMetadataSchema.js +38 -0
  72. package/dist/shared/backupTracking.d.ts +18 -0
  73. package/dist/shared/backupTracking.js +176 -0
  74. package/dist/shared/confirmationDialogs.js +15 -15
  75. package/dist/shared/errorUtils.d.ts +54 -0
  76. package/dist/shared/errorUtils.js +95 -0
  77. package/dist/shared/functionManager.js +20 -19
  78. package/dist/shared/indexManager.js +12 -11
  79. package/dist/shared/jsonSchemaGenerator.js +10 -26
  80. package/dist/shared/logging.d.ts +51 -0
  81. package/dist/shared/logging.js +70 -0
  82. package/dist/shared/messageFormatter.d.ts +2 -0
  83. package/dist/shared/messageFormatter.js +10 -0
  84. package/dist/shared/migrationHelpers.d.ts +6 -16
  85. package/dist/shared/migrationHelpers.js +24 -21
  86. package/dist/shared/operationLogger.d.ts +8 -1
  87. package/dist/shared/operationLogger.js +11 -24
  88. package/dist/shared/operationQueue.d.ts +28 -1
  89. package/dist/shared/operationQueue.js +268 -66
  90. package/dist/shared/operationsTable.d.ts +26 -0
  91. package/dist/shared/operationsTable.js +286 -0
  92. package/dist/shared/operationsTableSchema.d.ts +48 -0
  93. package/dist/shared/operationsTableSchema.js +35 -0
  94. package/dist/shared/relationshipExtractor.d.ts +56 -0
  95. package/dist/shared/relationshipExtractor.js +138 -0
  96. package/dist/shared/schemaGenerator.d.ts +19 -1
  97. package/dist/shared/schemaGenerator.js +56 -75
  98. package/dist/storage/backupCompression.d.ts +20 -0
  99. package/dist/storage/backupCompression.js +67 -0
  100. package/dist/storage/methods.d.ts +16 -2
  101. package/dist/storage/methods.js +98 -14
  102. package/dist/users/methods.js +9 -8
  103. package/dist/utils/configDiscovery.d.ts +78 -0
  104. package/dist/utils/configDiscovery.js +430 -0
  105. package/dist/utils/directoryUtils.d.ts +22 -0
  106. package/dist/utils/directoryUtils.js +59 -0
  107. package/dist/utils/getClientFromConfig.d.ts +17 -8
  108. package/dist/utils/getClientFromConfig.js +162 -17
  109. package/dist/utils/helperFunctions.d.ts +16 -2
  110. package/dist/utils/helperFunctions.js +19 -5
  111. package/dist/utils/loadConfigs.d.ts +34 -9
  112. package/dist/utils/loadConfigs.js +236 -316
  113. package/dist/utils/pathResolvers.d.ts +53 -0
  114. package/dist/utils/pathResolvers.js +72 -0
  115. package/dist/utils/projectConfig.d.ts +119 -0
  116. package/dist/utils/projectConfig.js +171 -0
  117. package/dist/utils/retryFailedPromises.js +4 -2
  118. package/dist/utils/sessionAuth.d.ts +48 -0
  119. package/dist/utils/sessionAuth.js +164 -0
  120. package/dist/utils/sessionPreservationExample.d.ts +1666 -0
  121. package/dist/utils/sessionPreservationExample.js +101 -0
  122. package/dist/utils/setupFiles.js +301 -41
  123. package/dist/utils/typeGuards.d.ts +35 -0
  124. package/dist/utils/typeGuards.js +57 -0
  125. package/dist/utils/versionDetection.js +145 -9
  126. package/dist/utils/yamlConverter.d.ts +53 -3
  127. package/dist/utils/yamlConverter.js +232 -13
  128. package/dist/utils/yamlLoader.d.ts +70 -0
  129. package/dist/utils/yamlLoader.js +263 -0
  130. package/dist/utilsController.d.ts +36 -3
  131. package/dist/utilsController.js +186 -56
  132. package/package.json +12 -2
  133. package/src/adapters/AdapterFactory.ts +263 -35
  134. package/src/adapters/TablesDBAdapter.ts +225 -36
  135. package/src/backups/operations/bucketBackup.ts +277 -0
  136. package/src/backups/operations/collectionBackup.ts +310 -0
  137. package/src/backups/operations/comprehensiveBackup.ts +342 -0
  138. package/src/backups/schemas/bucketManifest.ts +78 -0
  139. package/src/backups/schemas/comprehensiveManifest.ts +76 -0
  140. package/src/backups/tracking/centralizedTracking.ts +352 -0
  141. package/src/cli/commands/configCommands.ts +194 -0
  142. package/src/cli/commands/databaseCommands.ts +635 -0
  143. package/src/cli/commands/functionCommands.ts +379 -0
  144. package/src/cli/commands/schemaCommands.ts +163 -0
  145. package/src/cli/commands/transferCommands.ts +457 -0
  146. package/src/collections/attributes.ts +900 -621
  147. package/src/collections/attributes.ts.backup +1555 -0
  148. package/src/collections/indexes.ts +116 -114
  149. package/src/collections/methods.ts +295 -968
  150. package/src/collections/transferOperations.ts +516 -0
  151. package/src/collections/wipeOperations.ts +501 -0
  152. package/src/config/README.md +274 -0
  153. package/src/config/configMigration.ts +575 -0
  154. package/src/config/configValidation.ts +445 -0
  155. package/src/config/yamlConfig.ts +168 -55
  156. package/src/databases/methods.ts +3 -2
  157. package/src/databases/setup.ts +11 -138
  158. package/src/examples/yamlTerminologyExample.ts +341 -0
  159. package/src/functions/deployments.ts +14 -12
  160. package/src/functions/methods.ts +11 -11
  161. package/src/functions/templates/hono-typescript/README.md +286 -0
  162. package/src/functions/templates/hono-typescript/package.json +26 -0
  163. package/src/functions/templates/hono-typescript/src/adapters/request.ts +74 -0
  164. package/src/functions/templates/hono-typescript/src/adapters/response.ts +106 -0
  165. package/src/functions/templates/hono-typescript/src/app.ts +180 -0
  166. package/src/functions/templates/hono-typescript/src/context.ts +103 -0
  167. package/src/functions/templates/hono-typescript/src/index.ts +54 -0
  168. package/src/functions/templates/hono-typescript/src/middleware/appwrite.ts +119 -0
  169. package/src/functions/templates/hono-typescript/tsconfig.json +20 -0
  170. package/src/functions/templates/typescript-node/package.json +2 -1
  171. package/src/functions/templates/typescript-node/src/context.ts +103 -0
  172. package/src/functions/templates/typescript-node/src/index.ts +18 -12
  173. package/src/functions/templates/uv/pyproject.toml +1 -0
  174. package/src/functions/templates/uv/src/context.py +125 -0
  175. package/src/functions/templates/uv/src/index.py +35 -5
  176. package/src/init.ts +9 -11
  177. package/src/interactiveCLI.ts +278 -1596
  178. package/src/main.ts +418 -24
  179. package/src/migrations/afterImportActions.ts +71 -44
  180. package/src/migrations/appwriteToX.ts +100 -34
  181. package/src/migrations/dataLoader.ts +48 -34
  182. package/src/migrations/importController.ts +44 -39
  183. package/src/migrations/relationships.ts +28 -18
  184. package/src/migrations/services/ImportOrchestrator.ts +24 -27
  185. package/src/migrations/transfer.ts +159 -121
  186. package/src/migrations/yaml/YamlImportConfigLoader.ts +11 -4
  187. package/src/migrations/yaml/YamlImportIntegration.ts +47 -20
  188. package/src/migrations/yaml/generateImportSchemas.ts +751 -12
  189. package/src/setupController.ts +3 -2
  190. package/src/shared/backupMetadataSchema.ts +93 -0
  191. package/src/shared/backupTracking.ts +211 -0
  192. package/src/shared/confirmationDialogs.ts +19 -19
  193. package/src/shared/errorUtils.ts +110 -0
  194. package/src/shared/functionManager.ts +21 -20
  195. package/src/shared/indexManager.ts +12 -11
  196. package/src/shared/jsonSchemaGenerator.ts +38 -52
  197. package/src/shared/logging.ts +75 -0
  198. package/src/shared/messageFormatter.ts +14 -1
  199. package/src/shared/migrationHelpers.ts +45 -38
  200. package/src/shared/operationLogger.ts +11 -36
  201. package/src/shared/operationQueue.ts +322 -93
  202. package/src/shared/operationsTable.ts +338 -0
  203. package/src/shared/operationsTableSchema.ts +60 -0
  204. package/src/shared/relationshipExtractor.ts +214 -0
  205. package/src/shared/schemaGenerator.ts +179 -219
  206. package/src/storage/backupCompression.ts +88 -0
  207. package/src/storage/methods.ts +131 -34
  208. package/src/users/methods.ts +11 -9
  209. package/src/utils/configDiscovery.ts +502 -0
  210. package/src/utils/directoryUtils.ts +61 -0
  211. package/src/utils/getClientFromConfig.ts +205 -22
  212. package/src/utils/helperFunctions.ts +23 -5
  213. package/src/utils/loadConfigs.ts +313 -345
  214. package/src/utils/pathResolvers.ts +81 -0
  215. package/src/utils/projectConfig.ts +299 -0
  216. package/src/utils/retryFailedPromises.ts +4 -2
  217. package/src/utils/sessionAuth.ts +230 -0
  218. package/src/utils/setupFiles.ts +322 -54
  219. package/src/utils/typeGuards.ts +65 -0
  220. package/src/utils/versionDetection.ts +218 -64
  221. package/src/utils/yamlConverter.ts +296 -13
  222. package/src/utils/yamlLoader.ts +364 -0
  223. package/src/utilsController.ts +314 -110
  224. package/tests/README.md +497 -0
  225. package/tests/adapters/AdapterFactory.test.ts +277 -0
  226. package/tests/integration/syncOperations.test.ts +463 -0
  227. package/tests/jest.config.js +25 -0
  228. package/tests/migration/configMigration.test.ts +546 -0
  229. package/tests/setup.ts +62 -0
  230. package/tests/testUtils.ts +340 -0
  231. package/tests/utils/loadConfigs.test.ts +350 -0
  232. package/tests/validation/configValidation.test.ts +412 -0
  233. package/src/utils/schemaStrings.ts +0 -517
@@ -1,17 +1,24 @@
1
1
  import {
2
- Client,
3
2
  Databases,
4
3
  ID,
5
4
  Permission,
6
5
  Query,
7
6
  type Models,
8
7
  } from "node-appwrite";
9
- import type { AppwriteConfig, CollectionCreate, Indexes, Attribute } from "appwrite-utils";
10
- import type { DatabaseAdapter } from "../adapters/DatabaseAdapter.js";
11
- import { getAdapterFromConfig } from "../utils/getClientFromConfig.js";
12
- import { nameToIdMapping, processQueue, queuedOperations } from "../shared/operationQueue.js";
13
- import { createUpdateCollectionAttributes, createUpdateCollectionAttributesWithStatusCheck } from "./attributes.js";
14
- import { createOrUpdateIndexes, createOrUpdateIndexesWithStatusCheck } from "./indexes.js";
8
+ import type { AppwriteConfig, CollectionCreate, Indexes, Attribute } from "appwrite-utils";
9
+ import type { DatabaseAdapter } from "../adapters/DatabaseAdapter.js";
10
+ import { getAdapterFromConfig } from "../utils/getClientFromConfig.js";
11
+ import {
12
+ nameToIdMapping,
13
+ processQueue,
14
+ queuedOperations,
15
+ clearProcessingState,
16
+ isCollectionProcessed,
17
+ markCollectionProcessed
18
+ } from "../shared/operationQueue.js";
19
+ import { logger } from "../shared/logging.js";
20
+ import { createUpdateCollectionAttributesWithStatusCheck } from "./attributes.js";
21
+ import { createOrUpdateIndexesWithStatusCheck } from "./indexes.js";
15
22
  import { SchemaGenerator } from "../shared/schemaGenerator.js";
16
23
  import {
17
24
  isNull,
@@ -19,13 +26,24 @@ import {
19
26
  isNil,
20
27
  isPlainObject,
21
28
  isString,
22
- isJSONValue,
23
- chunk,
24
29
  } from "es-toolkit";
25
30
  import { delay, tryAwaitWithRetry } from "../utils/helperFunctions.js";
26
31
  import { MessageFormatter } from "../shared/messageFormatter.js";
27
- import { ProgressManager } from "../shared/progressManager.js";
28
- import chalk from "chalk";
32
+ import { isLegacyDatabases } from "../utils/typeGuards.js";
33
+
34
+ // Re-export wipe operations
35
+ export {
36
+ wipeDatabase,
37
+ wipeCollection,
38
+ wipeAllTables,
39
+ wipeTableRows,
40
+ } from "./wipeOperations.js";
41
+
42
+ // Re-export transfer operations
43
+ export {
44
+ transferDocumentsBetweenDbsLocalToLocal,
45
+ transferDocumentsBetweenDbsLocalToRemote,
46
+ } from "./transferOperations.js";
29
47
 
30
48
  export const documentExists = async (
31
49
  db: Databases | DatabaseAdapter,
@@ -33,7 +51,7 @@ export const documentExists = async (
33
51
  targetCollectionId: string,
34
52
  toCreateObject: any
35
53
  ): Promise<Models.Document | null> => {
36
- const collection = await (db instanceof Databases ?
54
+ const collection = await (isLegacyDatabases(db) ?
37
55
  db.getCollection(dbId, targetCollectionId) :
38
56
  db.getTable({ databaseId: dbId, tableId: targetCollectionId }));
39
57
  const attributes = (collection as any).attributes as any[];
@@ -81,11 +99,11 @@ export const documentExists = async (
81
99
  );
82
100
 
83
101
  // Execute the query with the validated and prepared parameters
84
- const result = await (db instanceof Databases ?
102
+ const result = await (isLegacyDatabases(db) ?
85
103
  db.listDocuments(dbId, targetCollectionId, validQueryParams) :
86
104
  db.listRows({ databaseId: dbId, tableId: targetCollectionId, queries: validQueryParams }));
87
-
88
- const items = db instanceof Databases ? result.documents : ((result as any).rows || result.documents);
105
+
106
+ const items = isLegacyDatabases(db) ? result.documents : ((result as any).rows || result.documents);
89
107
  return items?.[0] || null;
90
108
  };
91
109
 
@@ -97,11 +115,11 @@ export const checkForCollection = async (
97
115
  try {
98
116
  MessageFormatter.progress(`Checking for collection with name: ${collection.name}`, { prefix: "Collections" });
99
117
  const response = await tryAwaitWithRetry(
100
- async () => db instanceof Databases ?
118
+ async () => isLegacyDatabases(db) ?
101
119
  await db.listCollections(dbId, [Query.equal("name", collection.name!)]) :
102
120
  await db.listTables({ databaseId: dbId, queries: [Query.equal("name", collection.name!)] })
103
121
  );
104
- const items = db instanceof Databases ? response.collections : ((response as any).tables || response.collections);
122
+ const items = isLegacyDatabases(db) ? response.collections : ((response as any).tables || response.collections);
105
123
  if (items && items.length > 0) {
106
124
  MessageFormatter.info(`Collection found: ${items[0].$id}`, { prefix: "Collections" });
107
125
  return { ...collection, ...items[0] } as Models.Collection;
@@ -110,7 +128,14 @@ export const checkForCollection = async (
110
128
  return null;
111
129
  }
112
130
  } catch (error) {
131
+ const errorMessage = error instanceof Error ? error.message : String(error);
113
132
  MessageFormatter.error(`Error checking for collection: ${collection.name}`, error instanceof Error ? error : new Error(String(error)), { prefix: "Collections" });
133
+ logger.error('Collection check failed', {
134
+ collectionName: collection.name,
135
+ dbId,
136
+ error: errorMessage,
137
+ operation: 'checkForCollection'
138
+ });
114
139
  return null;
115
140
  }
116
141
  };
@@ -125,18 +150,18 @@ export const fetchAndCacheCollectionByName = async (
125
150
  const collectionId = nameToIdMapping.get(collectionName);
126
151
  MessageFormatter.debug(`Collection found in cache: ${collectionId}`, undefined, { prefix: "Collections" });
127
152
  return await tryAwaitWithRetry(
128
- async () => db instanceof Databases ?
153
+ async () => isLegacyDatabases(db) ?
129
154
  await db.getCollection(dbId, collectionId!) :
130
155
  await db.getTable({ databaseId: dbId, tableId: collectionId! })
131
156
  ) as Models.Collection;
132
157
  } else {
133
158
  MessageFormatter.progress(`Fetching collection by name: ${collectionName}`, { prefix: "Collections" });
134
159
  const collectionsPulled = await tryAwaitWithRetry(
135
- async () => db instanceof Databases ?
160
+ async () => isLegacyDatabases(db) ?
136
161
  await db.listCollections(dbId, [Query.equal("name", collectionName)]) :
137
162
  await db.listTables({ databaseId: dbId, queries: [Query.equal("name", collectionName)] })
138
163
  );
139
- const items = db instanceof Databases ? collectionsPulled.collections : ((collectionsPulled as any).tables || collectionsPulled.collections);
164
+ const items = isLegacyDatabases(db) ? collectionsPulled.collections : ((collectionsPulled as any).tables || collectionsPulled.collections);
140
165
  if ((collectionsPulled.total || items?.length) > 0) {
141
166
  const collection = items[0];
142
167
  MessageFormatter.info(`Collection found: ${collection.$id}`, { prefix: "Collections" });
@@ -149,233 +174,6 @@ export const fetchAndCacheCollectionByName = async (
149
174
  }
150
175
  };
151
176
 
152
- async function wipeDocumentsFromCollection(
153
- database: Databases,
154
- databaseId: string,
155
- collectionId: string
156
- ) {
157
- try {
158
- const initialDocuments = await database.listDocuments(
159
- databaseId,
160
- collectionId,
161
- [Query.limit(1000)]
162
- );
163
- let documents = initialDocuments.documents;
164
- let totalDocuments = documents.length;
165
- let cursor =
166
- initialDocuments.documents.length >= 1000
167
- ? initialDocuments.documents[initialDocuments.documents.length - 1].$id
168
- : undefined;
169
-
170
- while (cursor) {
171
- const docsResponse = await database.listDocuments(
172
- databaseId,
173
- collectionId,
174
- [Query.limit(1000), ...(cursor ? [Query.cursorAfter(cursor)] : [])]
175
- );
176
- documents.push(...docsResponse.documents);
177
- totalDocuments = documents.length;
178
- cursor =
179
- docsResponse.documents.length >= 1000
180
- ? docsResponse.documents[docsResponse.documents.length - 1].$id
181
- : undefined;
182
- if (totalDocuments % 10000 === 0) {
183
- MessageFormatter.progress(`Found ${totalDocuments} documents...`, { prefix: "Wipe" });
184
- }
185
- }
186
-
187
- MessageFormatter.info(`Found ${totalDocuments} documents to delete`, { prefix: "Wipe" });
188
-
189
- if (totalDocuments === 0) {
190
- MessageFormatter.info("No documents to delete", { prefix: "Wipe" });
191
- return;
192
- }
193
-
194
- // Create progress tracker for deletion
195
- const progress = ProgressManager.create(
196
- `delete-${collectionId}`,
197
- totalDocuments,
198
- { title: "Deleting documents" }
199
- );
200
-
201
- const maxStackSize = 50; // Reduced batch size
202
- const docBatches = chunk(documents, maxStackSize);
203
- let documentsProcessed = 0;
204
-
205
- for (let i = 0; i < docBatches.length; i++) {
206
- const batch = docBatches[i];
207
- const deletePromises = batch.map(async (doc) => {
208
- try {
209
- await tryAwaitWithRetry(async () =>
210
- database.deleteDocument(databaseId, collectionId, doc.$id)
211
- );
212
- documentsProcessed++;
213
- progress.update(documentsProcessed);
214
- } catch (error: any) {
215
- // Skip if document doesn't exist or other non-critical errors
216
- if (
217
- !error.message?.includes(
218
- "Document with the requested ID could not be found"
219
- )
220
- ) {
221
- MessageFormatter.error(
222
- `Failed to delete document ${doc.$id}`,
223
- error.message,
224
- { prefix: "Wipe" }
225
- );
226
- }
227
- documentsProcessed++;
228
- progress.update(documentsProcessed);
229
- }
230
- });
231
-
232
- await Promise.all(deletePromises);
233
- await delay(50); // Increased delay between batches
234
-
235
- // Progress is now handled by ProgressManager automatically
236
- }
237
-
238
- progress.stop();
239
- MessageFormatter.success(
240
- `Completed deletion of ${totalDocuments} documents from collection ${collectionId}`,
241
- { prefix: "Wipe" }
242
- );
243
- } catch (error) {
244
- MessageFormatter.error(
245
- `Error wiping documents from collection ${collectionId}`,
246
- error instanceof Error ? error : new Error(String(error)),
247
- { prefix: "Wipe" }
248
- );
249
- throw error;
250
- }
251
- }
252
-
253
- export const wipeDatabase = async (
254
- database: Databases,
255
- databaseId: string
256
- ): Promise<{ collectionId: string; collectionName: string }[]> => {
257
- MessageFormatter.info(`Wiping database: ${databaseId}`, { prefix: "Wipe" });
258
- const existingCollections = await fetchAllCollections(databaseId, database);
259
- let collectionsDeleted: { collectionId: string; collectionName: string }[] =
260
- [];
261
-
262
- if (existingCollections.length === 0) {
263
- MessageFormatter.info("No collections to delete", { prefix: "Wipe" });
264
- return collectionsDeleted;
265
- }
266
-
267
- const progress = ProgressManager.create(
268
- `wipe-db-${databaseId}`,
269
- existingCollections.length,
270
- { title: "Deleting collections" }
271
- );
272
-
273
- let processed = 0;
274
- for (const { $id: collectionId, name: name } of existingCollections) {
275
- MessageFormatter.progress(`Deleting collection: ${collectionId}`, { prefix: "Wipe" });
276
- collectionsDeleted.push({
277
- collectionId: collectionId,
278
- collectionName: name,
279
- });
280
- tryAwaitWithRetry(
281
- async () => await database.deleteCollection(databaseId, collectionId)
282
- ); // Try to delete the collection and ignore errors if it doesn't exist or if it's already being deleted
283
- processed++;
284
- progress.update(processed);
285
- await delay(100);
286
- }
287
-
288
- progress.stop();
289
- MessageFormatter.success(`Deleted ${collectionsDeleted.length} collections from database`, { prefix: "Wipe" });
290
- return collectionsDeleted;
291
- };
292
-
293
- export const wipeCollection = async (
294
- database: Databases,
295
- databaseId: string,
296
- collectionId: string
297
- ): Promise<void> => {
298
- const collections = await database.listCollections(databaseId, [
299
- Query.equal("$id", collectionId),
300
- ]);
301
- if (collections.total === 0) {
302
- MessageFormatter.warning(`Collection ${collectionId} not found`, { prefix: "Wipe" });
303
- return;
304
- }
305
- const collection = collections.collections[0];
306
- await wipeDocumentsFromCollection(database, databaseId, collection.$id);
307
- };
308
-
309
- // TablesDB helpers for wiping
310
- export const wipeAllTables = async (
311
- adapter: DatabaseAdapter,
312
- databaseId: string
313
- ): Promise<{ tableId: string; tableName: string }[]> => {
314
- MessageFormatter.info(`Wiping tables in database: ${databaseId}`, { prefix: 'Wipe' });
315
- const res = await adapter.listTables({ databaseId, queries: [Query.limit(500)] });
316
- const tables: any[] = (res as any).tables || [];
317
- const deleted: { tableId: string; tableName: string }[] = [];
318
- const progress = ProgressManager.create(`wipe-db-${databaseId}`, tables.length, { title: 'Deleting tables' });
319
- let processed = 0;
320
- for (const t of tables) {
321
- try {
322
- await adapter.deleteTable({ databaseId, tableId: t.$id });
323
- deleted.push({ tableId: t.$id, tableName: t.name });
324
- } catch (e) {
325
- MessageFormatter.error(`Failed deleting table ${t.$id}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Wipe' });
326
- }
327
- processed++; progress.update(processed);
328
- await delay(100);
329
- }
330
- progress.stop();
331
- return deleted;
332
- };
333
-
334
- export const wipeTableRows = async (
335
- adapter: DatabaseAdapter,
336
- databaseId: string,
337
- tableId: string
338
- ): Promise<void> => {
339
- try {
340
- const initial = await adapter.listRows({ databaseId, tableId, queries: [Query.limit(1000)] });
341
- let rows: any[] = (initial as any).rows || [];
342
- let total = rows.length;
343
- let cursor = rows.length >= 1000 ? rows[rows.length - 1].$id : undefined;
344
- while (cursor) {
345
- const resp = await adapter.listRows({ databaseId, tableId, queries: [Query.limit(1000), ...(cursor ? [Query.cursorAfter(cursor)] : [])] });
346
- const more: any[] = (resp as any).rows || [];
347
- rows.push(...more);
348
- total = rows.length;
349
- cursor = more.length >= 1000 ? more[more.length - 1].$id : undefined;
350
- if (total % 10000 === 0) {
351
- MessageFormatter.progress(`Found ${total} rows...`, { prefix: 'Wipe' });
352
- }
353
- }
354
- MessageFormatter.info(`Found ${total} rows to delete`, { prefix: 'Wipe' });
355
- if (total === 0) return;
356
- const progress = ProgressManager.create(`delete-${tableId}`, total, { title: 'Deleting rows' });
357
- let processed = 0;
358
- const maxStackSize = 50;
359
- const batches = chunk(rows, maxStackSize);
360
- for (const batch of batches) {
361
- await Promise.all(batch.map(async (row: any) => {
362
- try {
363
- await adapter.deleteRow({ databaseId, tableId, id: row.$id });
364
- } catch (e: any) {
365
- // ignore missing rows
366
- }
367
- processed++; progress.update(processed);
368
- }));
369
- await delay(50);
370
- }
371
- progress.stop();
372
- MessageFormatter.success(`Completed deletion of ${total} rows from table ${tableId}`, { prefix: 'Wipe' });
373
- } catch (error) {
374
- MessageFormatter.error(`Error wiping rows from table ${tableId}`, error instanceof Error ? error : new Error(String(error)), { prefix: 'Wipe' });
375
- throw error;
376
- }
377
- };
378
-
379
177
  export const generateSchemas = async (
380
178
  config: AppwriteConfig,
381
179
  appwriteFolderPath: string
@@ -384,23 +182,26 @@ export const generateSchemas = async (
384
182
  schemaGenerator.generateSchemas();
385
183
  };
386
184
 
387
- export const createOrUpdateCollections = async (
388
- database: Databases,
389
- databaseId: string,
390
- config: AppwriteConfig,
391
- deletedCollections?: { collectionId: string; collectionName: string }[],
392
- selectedCollections: Models.Collection[] = []
393
- ): Promise<void> => {
394
- // If API mode is tablesdb, route to adapter-based implementation
395
- try {
396
- const { adapter, apiMode } = await getAdapterFromConfig(config);
397
- if (apiMode === 'tablesdb') {
398
- await createOrUpdateCollectionsViaAdapter(adapter, databaseId, config, deletedCollections, selectedCollections);
399
- return;
400
- }
401
- } catch {
402
- // Fallback to legacy path below
403
- }
185
+ export const createOrUpdateCollections = async (
186
+ database: Databases,
187
+ databaseId: string,
188
+ config: AppwriteConfig,
189
+ deletedCollections?: { collectionId: string; collectionName: string }[],
190
+ selectedCollections: Models.Collection[] = []
191
+ ): Promise<void> => {
192
+ // Clear processing state at the start of a new operation
193
+ clearProcessingState();
194
+
195
+ // If API mode is tablesdb, route to adapter-based implementation
196
+ try {
197
+ const { adapter, apiMode } = await getAdapterFromConfig(config);
198
+ if (apiMode === 'tablesdb') {
199
+ await createOrUpdateCollectionsViaAdapter(adapter, databaseId, config, deletedCollections, selectedCollections);
200
+ return;
201
+ }
202
+ } catch {
203
+ // Fallback to legacy path below
204
+ }
404
205
  const collectionsToProcess =
405
206
  selectedCollections.length > 0 ? selectedCollections : config.collections;
406
207
  if (!collectionsToProcess) {
@@ -408,9 +209,17 @@ export const createOrUpdateCollections = async (
408
209
  }
409
210
  const usedIds = new Set();
410
211
 
212
+ MessageFormatter.info(`Processing ${collectionsToProcess.length} collections with intelligent state management`, { prefix: "Collections" });
213
+
411
214
  for (const collection of collectionsToProcess) {
412
215
  const { attributes, indexes, ...collectionData } = collection;
413
216
 
217
+ // Check if this collection has already been processed in this session
218
+ if (collectionData.$id && isCollectionProcessed(collectionData.$id)) {
219
+ MessageFormatter.info(`Collection '${collectionData.name}' already processed, skipping`, { prefix: "Collections" });
220
+ continue;
221
+ }
222
+
414
223
  // Prepare permissions for the collection
415
224
  const permissions: string[] = [];
416
225
  if (collection.$permissions && collection.$permissions.length > 0) {
@@ -509,6 +318,8 @@ export const createOrUpdateCollections = async (
509
318
  collectionData.enabled ?? true
510
319
  )
511
320
  );
321
+ // Cache the existing collection ID
322
+ nameToIdMapping.set(collectionData.name, collectionToUse.$id);
512
323
  }
513
324
 
514
325
  // Add delay after creating/updating collection
@@ -542,210 +353,233 @@ export const createOrUpdateCollections = async (
542
353
  indexesToUse as Indexes
543
354
  );
544
355
 
356
+ // Mark this collection as fully processed to prevent re-processing
357
+ markCollectionProcessed(collectionToUse!.$id, collectionData.name);
358
+
545
359
  // Add delay after creating indexes
546
360
  await delay(250);
547
361
  }
548
- // Process any remaining tasks in the queue (only if there are operations to process)
362
+
363
+ // Process any remaining relationship attributes in the queue
364
+ // This surgical approach only processes specific attributes, not entire collections
549
365
  if (queuedOperations.length > 0) {
550
- MessageFormatter.info(`Processing ${queuedOperations.length} queued operations (relationship dependencies)`, { prefix: "Collections" });
366
+ MessageFormatter.info(`🔧 Processing ${queuedOperations.length} queued relationship attributes (surgical approach)`, { prefix: "Collections" });
551
367
  await processQueue(database, databaseId);
552
368
  } else {
553
- MessageFormatter.info("No queued operations to process", { prefix: "Collections" });
369
+ MessageFormatter.info("No queued relationship attributes to process", { prefix: "Collections" });
554
370
  }
555
- };
556
-
557
- // New: Adapter-based implementation for TablesDB
558
- export const createOrUpdateCollectionsViaAdapter = async (
559
- adapter: DatabaseAdapter,
560
- databaseId: string,
561
- config: AppwriteConfig,
562
- deletedCollections?: { collectionId: string; collectionName: string }[],
563
- selectedCollections: Models.Collection[] = []
564
- ): Promise<void> => {
565
- const collectionsToProcess =
566
- selectedCollections.length > 0 ? selectedCollections : (config.collections || []);
567
- if (!collectionsToProcess || collectionsToProcess.length === 0) return;
568
-
569
- const usedIds = new Set<string>();
570
-
571
- // Helper: create attributes through adapter
572
- const createAttr = async (tableId: string, attr: Attribute) => {
573
- const base: any = {
574
- databaseId,
575
- tableId,
576
- key: attr.key,
577
- type: (attr as any).type,
578
- size: (attr as any).size,
579
- required: !!(attr as any).required,
580
- default: (attr as any).xdefault,
581
- array: !!(attr as any).array,
582
- min: (attr as any).min,
583
- max: (attr as any).max,
584
- elements: (attr as any).elements,
585
- encrypt: (attr as any).encrypted,
586
- relatedCollection: (attr as any).relatedCollection,
587
- relationType: (attr as any).relationType,
588
- twoWay: (attr as any).twoWay,
589
- twoWayKey: (attr as any).twoWayKey,
590
- onDelete: (attr as any).onDelete,
591
- side: (attr as any).side,
592
- };
593
- await adapter.createAttribute(base);
594
- await delay(150);
595
- };
596
-
597
- // Local queue for unresolved relationships
598
- const relQueue: { tableId: string; attr: Attribute }[] = [];
599
-
600
- for (const collection of collectionsToProcess) {
601
- const { attributes, indexes, ...collectionData } = collection as any;
602
-
603
- // Prepare permissions as strings (reuse Permission helper)
604
- const permissions: string[] = [];
605
- if (collection.$permissions && collection.$permissions.length > 0) {
606
- for (const p of collection.$permissions as any[]) {
607
- if (typeof p === 'string') permissions.push(p);
608
- else {
609
- switch (p.permission) {
610
- case 'read': permissions.push(Permission.read(p.target)); break;
611
- case 'create': permissions.push(Permission.create(p.target)); break;
612
- case 'update': permissions.push(Permission.update(p.target)); break;
613
- case 'delete': permissions.push(Permission.delete(p.target)); break;
614
- case 'write': permissions.push(Permission.write(p.target)); break;
615
- default: break;
616
- }
617
- }
618
- }
619
- }
620
-
621
- // Find existing table by name
622
- const list = await adapter.listTables({ databaseId, queries: [Query.equal('name', collectionData.name)] });
623
- const items: any[] = (list as any).tables || [];
624
- let table = items[0];
625
- let tableId: string;
626
-
627
- if (!table) {
628
- // Determine ID (prefer provided $id or re-use deleted one)
629
- let foundColl = deletedCollections?.find(
630
- (coll) => coll.collectionName.toLowerCase().trim().replace(" ", "") === collectionData.name.toLowerCase().trim().replace(" ", "")
631
- );
632
- if (collectionData.$id) tableId = collectionData.$id;
633
- else if (foundColl && !usedIds.has(foundColl.collectionId)) tableId = foundColl.collectionId;
634
- else tableId = ID.unique();
635
- usedIds.add(tableId);
636
-
637
- const res = await adapter.createTable({
638
- databaseId,
639
- id: tableId,
640
- name: collectionData.name,
641
- permissions,
642
- documentSecurity: !!collectionData.documentSecurity,
643
- enabled: collectionData.enabled !== false
644
- });
645
- table = (res as any).data || res;
646
- nameToIdMapping.set(collectionData.name, tableId);
647
- } else {
648
- tableId = table.$id;
649
- await adapter.updateTable({
650
- databaseId,
651
- id: tableId,
652
- name: collectionData.name,
653
- permissions,
654
- documentSecurity: !!collectionData.documentSecurity,
655
- enabled: collectionData.enabled !== false
656
- });
657
- }
658
-
659
- // Add small delay after table create/update
660
- await delay(250);
661
-
662
- // Create attributes: non-relationship first
663
- const nonRel = (attributes || []).filter((a: Attribute) => a.type !== 'relationship');
664
- for (const attr of nonRel) {
665
- await createAttr(tableId, attr as Attribute);
666
- }
667
-
668
- // Relationship attributes resolve relatedCollection to ID
669
- const rels = (attributes || []).filter((a: Attribute) => a.type === 'relationship');
670
- for (const attr of rels as any[]) {
671
- const relNameOrId = attr.relatedCollection as string | undefined;
672
- if (!relNameOrId) continue;
673
- let relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
674
-
675
- // If looks like a name (not ULID) and not in cache, try query by name
676
- if (!nameToIdMapping.has(relNameOrId)) {
677
- try {
678
- const relList = await adapter.listTables({ databaseId, queries: [Query.equal('name', relNameOrId)] });
679
- const relItems: any[] = (relList as any).tables || [];
680
- if (relItems[0]?.$id) {
681
- relId = relItems[0].$id;
682
- nameToIdMapping.set(relNameOrId, relId);
683
- }
684
- } catch {}
685
- }
686
-
687
- if (relId && typeof relId === 'string') {
688
- attr.relatedCollection = relId;
689
- await createAttr(tableId, attr as Attribute);
690
- } else {
691
- // Defer if unresolved
692
- relQueue.push({ tableId, attr: attr as Attribute });
693
- }
694
- }
695
-
696
- // Indexes
697
- const idxs = (indexes || []) as any[];
698
- for (const idx of idxs) {
699
- try {
700
- await adapter.createIndex({
701
- databaseId,
702
- tableId,
703
- key: idx.key,
704
- type: idx.type,
705
- attributes: idx.attributes,
706
- orders: idx.orders || []
707
- });
708
- await delay(150);
709
- } catch (e) {
710
- MessageFormatter.error(`Failed to create index ${idx.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Indexes' });
711
- }
712
- }
713
- }
714
-
715
- // Process queued relationships once mapping likely populated
716
- for (const { tableId, attr } of relQueue) {
717
- const relNameOrId = (attr as any).relatedCollection as string | undefined;
718
- if (!relNameOrId) continue;
719
- const relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
720
- if (relId) {
721
- (attr as any).relatedCollection = relId;
722
- try {
723
- await adapter.createAttribute({
724
- databaseId,
725
- tableId,
726
- key: (attr as any).key,
727
- type: (attr as any).type,
728
- size: (attr as any).size,
729
- required: !!(attr as any).required,
730
- default: (attr as any).xdefault,
731
- array: !!(attr as any).array,
732
- min: (attr as any).min,
733
- max: (attr as any).max,
734
- elements: (attr as any).elements,
735
- relatedCollection: relId,
736
- relationType: (attr as any).relationType,
737
- twoWay: (attr as any).twoWay,
738
- twoWayKey: (attr as any).twoWayKey,
739
- onDelete: (attr as any).onDelete,
740
- side: (attr as any).side
741
- });
742
- await delay(150);
743
- } catch (e) {
744
- MessageFormatter.error(`Failed queued relationship ${attr.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Attributes' });
745
- }
746
- }
747
- }
748
- };
371
+ };
372
+
373
+ // New: Adapter-based implementation for TablesDB with state management
374
+ export const createOrUpdateCollectionsViaAdapter = async (
375
+ adapter: DatabaseAdapter,
376
+ databaseId: string,
377
+ config: AppwriteConfig,
378
+ deletedCollections?: { collectionId: string; collectionName: string }[],
379
+ selectedCollections: Models.Collection[] = []
380
+ ): Promise<void> => {
381
+ const collectionsToProcess =
382
+ selectedCollections.length > 0 ? selectedCollections : (config.collections || []);
383
+ if (!collectionsToProcess || collectionsToProcess.length === 0) return;
384
+
385
+ const usedIds = new Set<string>();
386
+ MessageFormatter.info(`Processing ${collectionsToProcess.length} tables via adapter with intelligent state management`, { prefix: "Tables" });
387
+
388
+ // Helper: create attributes through adapter
389
+ const createAttr = async (tableId: string, attr: Attribute) => {
390
+ const base: any = {
391
+ databaseId,
392
+ tableId,
393
+ key: attr.key,
394
+ type: (attr as any).type,
395
+ size: (attr as any).size,
396
+ required: !!(attr as any).required,
397
+ default: (attr as any).xdefault,
398
+ array: !!(attr as any).array,
399
+ min: (attr as any).min,
400
+ max: (attr as any).max,
401
+ elements: (attr as any).elements,
402
+ encrypt: (attr as any).encrypted,
403
+ relatedCollection: (attr as any).relatedCollection,
404
+ relationType: (attr as any).relationType,
405
+ twoWay: (attr as any).twoWay,
406
+ twoWayKey: (attr as any).twoWayKey,
407
+ onDelete: (attr as any).onDelete,
408
+ side: (attr as any).side,
409
+ };
410
+ await adapter.createAttribute(base);
411
+ await delay(150);
412
+ };
413
+
414
+ // Local queue for unresolved relationships
415
+ const relQueue: { tableId: string; attr: Attribute }[] = [];
416
+
417
+ for (const collection of collectionsToProcess) {
418
+ const { attributes, indexes, ...collectionData } = collection as any;
419
+
420
+ // Check if this table has already been processed in this session
421
+ if (collectionData.$id && isCollectionProcessed(collectionData.$id)) {
422
+ MessageFormatter.info(`Table '${collectionData.name}' already processed, skipping`, { prefix: "Tables" });
423
+ continue;
424
+ }
425
+
426
+ // Prepare permissions as strings (reuse Permission helper)
427
+ const permissions: string[] = [];
428
+ if (collection.$permissions && collection.$permissions.length > 0) {
429
+ for (const p of collection.$permissions as any[]) {
430
+ if (typeof p === 'string') permissions.push(p);
431
+ else {
432
+ switch (p.permission) {
433
+ case 'read': permissions.push(Permission.read(p.target)); break;
434
+ case 'create': permissions.push(Permission.create(p.target)); break;
435
+ case 'update': permissions.push(Permission.update(p.target)); break;
436
+ case 'delete': permissions.push(Permission.delete(p.target)); break;
437
+ case 'write': permissions.push(Permission.write(p.target)); break;
438
+ default: break;
439
+ }
440
+ }
441
+ }
442
+ }
443
+
444
+ // Find existing table by name
445
+ const list = await adapter.listTables({ databaseId, queries: [Query.equal('name', collectionData.name)] });
446
+ const items: any[] = (list as any).tables || [];
447
+ let table = items[0];
448
+ let tableId: string;
449
+
450
+ if (!table) {
451
+ // Determine ID (prefer provided $id or re-use deleted one)
452
+ let foundColl = deletedCollections?.find(
453
+ (coll) => coll.collectionName.toLowerCase().trim().replace(" ", "") === collectionData.name.toLowerCase().trim().replace(" ", "")
454
+ );
455
+ if (collectionData.$id) tableId = collectionData.$id;
456
+ else if (foundColl && !usedIds.has(foundColl.collectionId)) tableId = foundColl.collectionId;
457
+ else tableId = ID.unique();
458
+ usedIds.add(tableId);
459
+
460
+ const res = await adapter.createTable({
461
+ databaseId,
462
+ id: tableId,
463
+ name: collectionData.name,
464
+ permissions,
465
+ documentSecurity: !!collectionData.documentSecurity,
466
+ enabled: collectionData.enabled !== false
467
+ });
468
+ table = (res as any).data || res;
469
+ nameToIdMapping.set(collectionData.name, tableId);
470
+ } else {
471
+ tableId = table.$id;
472
+ await adapter.updateTable({
473
+ databaseId,
474
+ id: tableId,
475
+ name: collectionData.name,
476
+ permissions,
477
+ documentSecurity: !!collectionData.documentSecurity,
478
+ enabled: collectionData.enabled !== false
479
+ });
480
+ // Cache the existing table ID
481
+ nameToIdMapping.set(collectionData.name, tableId);
482
+ }
483
+
484
+ // Add small delay after table create/update
485
+ await delay(250);
486
+
487
+ // Create attributes: non-relationship first
488
+ const nonRel = (attributes || []).filter((a: Attribute) => a.type !== 'relationship');
489
+ for (const attr of nonRel) {
490
+ await createAttr(tableId, attr as Attribute);
491
+ }
492
+
493
+ // Relationship attributes — resolve relatedCollection to ID
494
+ const rels = (attributes || []).filter((a: Attribute) => a.type === 'relationship');
495
+ for (const attr of rels as any[]) {
496
+ const relNameOrId = attr.relatedCollection as string | undefined;
497
+ if (!relNameOrId) continue;
498
+ let relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
499
+
500
+ // If looks like a name (not ULID) and not in cache, try query by name
501
+ if (!nameToIdMapping.has(relNameOrId)) {
502
+ try {
503
+ const relList = await adapter.listTables({ databaseId, queries: [Query.equal('name', relNameOrId)] });
504
+ const relItems: any[] = (relList as any).tables || [];
505
+ if (relItems[0]?.$id) {
506
+ relId = relItems[0].$id;
507
+ nameToIdMapping.set(relNameOrId, relId);
508
+ }
509
+ } catch {}
510
+ }
511
+
512
+ if (relId && typeof relId === 'string') {
513
+ attr.relatedCollection = relId;
514
+ await createAttr(tableId, attr as Attribute);
515
+ } else {
516
+ // Defer if unresolved
517
+ relQueue.push({ tableId, attr: attr as Attribute });
518
+ }
519
+ }
520
+
521
+ // Indexes
522
+ const idxs = (indexes || []) as any[];
523
+ for (const idx of idxs) {
524
+ try {
525
+ await adapter.createIndex({
526
+ databaseId,
527
+ tableId,
528
+ key: idx.key,
529
+ type: idx.type,
530
+ attributes: idx.attributes,
531
+ orders: idx.orders || []
532
+ });
533
+ await delay(150);
534
+ } catch (e) {
535
+ MessageFormatter.error(`Failed to create index ${idx.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Indexes' });
536
+ }
537
+ }
538
+
539
+ // Mark this table as fully processed to prevent re-processing
540
+ markCollectionProcessed(tableId, collectionData.name);
541
+ }
542
+
543
+ // Process queued relationships once mapping likely populated
544
+ if (relQueue.length > 0) {
545
+ MessageFormatter.info(`🔧 Processing ${relQueue.length} queued relationship attributes for tables`, { prefix: "Tables" });
546
+ for (const { tableId, attr } of relQueue) {
547
+ const relNameOrId = (attr as any).relatedCollection as string | undefined;
548
+ if (!relNameOrId) continue;
549
+ const relId = nameToIdMapping.get(relNameOrId) || relNameOrId;
550
+ if (relId) {
551
+ (attr as any).relatedCollection = relId;
552
+ try {
553
+ await adapter.createAttribute({
554
+ databaseId,
555
+ tableId,
556
+ key: (attr as any).key,
557
+ type: (attr as any).type,
558
+ size: (attr as any).size,
559
+ required: !!(attr as any).required,
560
+ default: (attr as any).xdefault,
561
+ array: !!(attr as any).array,
562
+ min: (attr as any).min,
563
+ max: (attr as any).max,
564
+ elements: (attr as any).elements,
565
+ relatedCollection: relId,
566
+ relationType: (attr as any).relationType,
567
+ twoWay: (attr as any).twoWay,
568
+ twoWayKey: (attr as any).twoWayKey,
569
+ onDelete: (attr as any).onDelete,
570
+ side: (attr as any).side
571
+ });
572
+ await delay(150);
573
+ MessageFormatter.info(`✅ Successfully processed queued relationship: ${attr.key}`, { prefix: "Tables" });
574
+ } catch (e) {
575
+ MessageFormatter.error(`Failed queued relationship ${attr.key}`, e instanceof Error ? e : new Error(String(e)), { prefix: 'Attributes' });
576
+ }
577
+ } else {
578
+ MessageFormatter.warning(`Could not resolve relationship ${attr.key} -> ${relNameOrId}`, { prefix: "Tables" });
579
+ }
580
+ }
581
+ }
582
+ };
749
583
 
750
584
  export const generateMockData = async (
751
585
  database: Databases,
@@ -796,510 +630,3 @@ export const fetchAllCollections = async (
796
630
  MessageFormatter.success(`Fetched a total of ${collections.length} collections`, { prefix: "Collections" });
797
631
  return collections;
798
632
  };
799
-
800
- /**
801
- * Transfers all documents from one collection to another in a different database
802
- * within the same Appwrite Project
803
- */
804
- export const transferDocumentsBetweenDbsLocalToLocal = async (
805
- db: Databases,
806
- fromDbId: string,
807
- toDbId: string,
808
- fromCollId: string,
809
- toCollId: string
810
- ) => {
811
- let fromCollDocs = await tryAwaitWithRetry(async () =>
812
- db.listDocuments(fromDbId, fromCollId, [Query.limit(50)])
813
- );
814
- let totalDocumentsTransferred = 0;
815
-
816
- if (fromCollDocs.documents.length === 0) {
817
- MessageFormatter.info(`No documents found in collection ${fromCollId}`, { prefix: "Transfer" });
818
- return;
819
- } else if (fromCollDocs.documents.length < 50) {
820
- const batchedPromises = fromCollDocs.documents.map((doc) => {
821
- const toCreateObject: any = {
822
- ...doc,
823
- };
824
- delete toCreateObject.$databaseId;
825
- delete toCreateObject.$collectionId;
826
- delete toCreateObject.$createdAt;
827
- delete toCreateObject.$updatedAt;
828
- delete toCreateObject.$id;
829
- delete toCreateObject.$permissions;
830
- return tryAwaitWithRetry(
831
- async () =>
832
- await db.createDocument(
833
- toDbId,
834
- toCollId,
835
- doc.$id,
836
- toCreateObject,
837
- doc.$permissions
838
- )
839
- );
840
- });
841
- await Promise.all(batchedPromises);
842
- totalDocumentsTransferred += fromCollDocs.documents.length;
843
- } else {
844
- const batchedPromises = fromCollDocs.documents.map((doc) => {
845
- const toCreateObject: any = {
846
- ...doc,
847
- };
848
- delete toCreateObject.$databaseId;
849
- delete toCreateObject.$collectionId;
850
- delete toCreateObject.$createdAt;
851
- delete toCreateObject.$updatedAt;
852
- delete toCreateObject.$id;
853
- delete toCreateObject.$permissions;
854
- return tryAwaitWithRetry(async () =>
855
- db.createDocument(
856
- toDbId,
857
- toCollId,
858
- doc.$id,
859
- toCreateObject,
860
- doc.$permissions
861
- )
862
- );
863
- });
864
- await Promise.all(batchedPromises);
865
- totalDocumentsTransferred += fromCollDocs.documents.length;
866
- while (fromCollDocs.documents.length === 50) {
867
- fromCollDocs = await tryAwaitWithRetry(
868
- async () =>
869
- await db.listDocuments(fromDbId, fromCollId, [
870
- Query.limit(50),
871
- Query.cursorAfter(
872
- fromCollDocs.documents[fromCollDocs.documents.length - 1].$id
873
- ),
874
- ])
875
- );
876
- const batchedPromises = fromCollDocs.documents.map((doc) => {
877
- const toCreateObject: any = {
878
- ...doc,
879
- };
880
- delete toCreateObject.$databaseId;
881
- delete toCreateObject.$collectionId;
882
- delete toCreateObject.$createdAt;
883
- delete toCreateObject.$updatedAt;
884
- delete toCreateObject.$id;
885
- delete toCreateObject.$permissions;
886
- return tryAwaitWithRetry(
887
- async () =>
888
- await db.createDocument(
889
- toDbId,
890
- toCollId,
891
- doc.$id,
892
- toCreateObject,
893
- doc.$permissions
894
- )
895
- );
896
- });
897
- await Promise.all(batchedPromises);
898
- totalDocumentsTransferred += fromCollDocs.documents.length;
899
- }
900
- }
901
-
902
- MessageFormatter.success(
903
- `Transferred ${totalDocumentsTransferred} documents from database ${fromDbId} to database ${toDbId} -- collection ${fromCollId} to collection ${toCollId}`,
904
- { prefix: "Transfer" }
905
- );
906
- };
907
-
908
- /**
909
- * Enhanced document transfer with fault tolerance and exponential backoff
910
- */
911
- const transferDocumentWithRetry = async (
912
- db: Databases,
913
- dbId: string,
914
- collectionId: string,
915
- documentId: string,
916
- documentData: any,
917
- permissions: string[],
918
- maxRetries: number = 3,
919
- retryCount: number = 0
920
- ): Promise<boolean> => {
921
- try {
922
- await db.createDocument(
923
- dbId,
924
- collectionId,
925
- documentId,
926
- documentData,
927
- permissions
928
- );
929
- return true;
930
- } catch (error: any) {
931
- // Check if document already exists
932
- if (error.code === 409 || error.message?.toLowerCase().includes('already exists')) {
933
- await db.updateDocument(
934
- dbId,
935
- collectionId,
936
- documentId,
937
- documentData,
938
- permissions
939
- );
940
- }
941
-
942
- if (retryCount < maxRetries) {
943
- // Calculate exponential backoff: 1s, 2s, 4s
944
- const exponentialDelay = Math.min(1000 * Math.pow(2, retryCount), 8000);
945
- console.log(chalk.yellow(`Retrying document ${documentId} (attempt ${retryCount + 1}/${maxRetries}, backoff: ${exponentialDelay}ms)`));
946
-
947
- await delay(exponentialDelay);
948
-
949
- return await transferDocumentWithRetry(
950
- db,
951
- dbId,
952
- collectionId,
953
- documentId,
954
- documentData,
955
- permissions,
956
- maxRetries,
957
- retryCount + 1
958
- );
959
- }
960
-
961
- console.log(chalk.red(`Failed to transfer document ${documentId} after ${maxRetries} retries: ${error.message}`));
962
- return false;
963
- }
964
- };
965
-
966
- /**
967
- * Check if endpoint supports bulk operations (cloud.appwrite.io)
968
- */
969
- const supportsBulkOperations = (endpoint: string): boolean => {
970
- return endpoint.includes('cloud.appwrite.io');
971
- };
972
-
973
- /**
974
- * Direct HTTP implementation of bulk upsert API
975
- */
976
- const bulkUpsertDocuments = async (
977
- client: any,
978
- dbId: string,
979
- collectionId: string,
980
- documents: any[]
981
- ): Promise<any> => {
982
- const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
983
- const url = new URL(client.config.endpoint + apiPath);
984
-
985
- const headers = {
986
- 'Content-Type': 'application/json',
987
- 'X-Appwrite-Project': client.config.project,
988
- 'X-Appwrite-Key': client.config.key
989
- };
990
-
991
- const response = await fetch(url.toString(), {
992
- method: 'PUT',
993
- headers,
994
- body: JSON.stringify({ documents })
995
- });
996
-
997
- if (!response.ok) {
998
- const errorData: any = await response.json().catch(() => ({ message: 'Unknown error' }));
999
- throw new Error(`Bulk upsert failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
1000
- }
1001
-
1002
- return await response.json();
1003
- };
1004
-
1005
- /**
1006
- * Direct HTTP implementation of bulk create API
1007
- */
1008
- const bulkCreateDocuments = async (
1009
- client: any,
1010
- dbId: string,
1011
- collectionId: string,
1012
- documents: any[]
1013
- ): Promise<any> => {
1014
- const apiPath = `/databases/${dbId}/collections/${collectionId}/documents`;
1015
- const url = new URL(client.config.endpoint + apiPath);
1016
-
1017
- const headers = {
1018
- 'Content-Type': 'application/json',
1019
- 'X-Appwrite-Project': client.config.project,
1020
- 'X-Appwrite-Key': client.config.key
1021
- };
1022
-
1023
- const response = await fetch(url.toString(), {
1024
- method: 'POST',
1025
- headers,
1026
- body: JSON.stringify({ documents })
1027
- });
1028
-
1029
- if (!response.ok) {
1030
- const errorData: any = await response.json().catch(() => ({ message: 'Unknown error' }));
1031
- throw new Error(`Bulk create failed: ${response.status} - ${errorData.message || 'Unknown error'}`);
1032
- }
1033
-
1034
- return await response.json();
1035
- };
1036
-
1037
- /**
1038
- * Enhanced bulk document creation using direct HTTP calls
1039
- */
1040
- const transferDocumentsBulkUpsert = async (
1041
- client: any,
1042
- dbId: string,
1043
- collectionId: string,
1044
- documents: any[],
1045
- maxBatchSize: number = 1000
1046
- ): Promise<{ successful: number; failed: number }> => {
1047
- let successful = 0;
1048
- let failed = 0;
1049
-
1050
- // Prepare documents for bulk upsert
1051
- const preparedDocs = documents.map(doc => {
1052
- const toCreateObject: any = { ...doc };
1053
- delete toCreateObject.$databaseId;
1054
- delete toCreateObject.$collectionId;
1055
- delete toCreateObject.$createdAt;
1056
- delete toCreateObject.$updatedAt;
1057
-
1058
- // Keep $id and $permissions for upsert functionality
1059
- return toCreateObject;
1060
- });
1061
-
1062
- // Process in batches based on plan limits
1063
- const documentBatches = chunk(preparedDocs, maxBatchSize);
1064
-
1065
- for (const batch of documentBatches) {
1066
- console.log(chalk.blue(`Bulk upserting ${batch.length} documents...`));
1067
-
1068
- try {
1069
- // Try bulk upsert with direct HTTP call
1070
- const result = await bulkUpsertDocuments(client, dbId, collectionId, batch);
1071
- successful += result.documents?.length || batch.length;
1072
- console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || batch.length} documents`));
1073
-
1074
- } catch (error: any) {
1075
- console.log(chalk.yellow(`Bulk upsert failed, trying smaller batch size...`));
1076
-
1077
- // If bulk upsert fails, try with smaller batch size (Pro plan limit)
1078
- if (maxBatchSize > 100) {
1079
- const smallerBatches = chunk(batch, 100);
1080
-
1081
- for (const smallBatch of smallerBatches) {
1082
- try {
1083
- const result = await bulkUpsertDocuments(client, dbId, collectionId, smallBatch);
1084
- successful += result.documents?.length || smallBatch.length;
1085
- console.log(chalk.green(`✅ Bulk upserted ${result.documents?.length || smallBatch.length} documents (smaller batch)`));
1086
- } catch (smallBatchError: any) {
1087
- console.log(chalk.yellow(`Smaller batch failed, falling back to individual transfers...`));
1088
-
1089
- // Fall back to individual document transfer for this batch
1090
- const db = new Databases(client);
1091
- const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
1092
- db, dbId, collectionId, smallBatch.map((doc, index) => ({
1093
- ...doc,
1094
- $id: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$id || ID.unique(),
1095
- $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + smallerBatches.indexOf(smallBatch) * 100 + index]?.$permissions || []
1096
- }))
1097
- );
1098
- successful += indivSuccessful;
1099
- failed += indivFailed;
1100
- }
1101
-
1102
- // Add delay between batches
1103
- await delay(200);
1104
- }
1105
- } else {
1106
- // Fall back to individual document transfer
1107
- const db = new Databases(client);
1108
- const { successful: indivSuccessful, failed: indivFailed } = await transferDocumentBatchWithRetryFallback(
1109
- db, dbId, collectionId, batch.map((doc, index) => ({
1110
- ...doc,
1111
- $id: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$id || ID.unique(),
1112
- $permissions: documents[documentBatches.indexOf(batch) * maxBatchSize + index]?.$permissions || []
1113
- }))
1114
- );
1115
- successful += indivSuccessful;
1116
- failed += indivFailed;
1117
- }
1118
- }
1119
-
1120
- // Add delay between major batches
1121
- if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
1122
- await delay(500);
1123
- }
1124
- }
1125
-
1126
- return { successful, failed };
1127
- };
1128
-
1129
- /**
1130
- * Fallback batch document transfer with individual retry logic
1131
- */
1132
- const transferDocumentBatchWithRetryFallback = async (
1133
- db: Databases,
1134
- dbId: string,
1135
- collectionId: string,
1136
- documents: any[],
1137
- batchSize: number = 10
1138
- ): Promise<{ successful: number; failed: number }> => {
1139
- let successful = 0;
1140
- let failed = 0;
1141
-
1142
- // Process documents in smaller batches to avoid overwhelming the server
1143
- const documentBatches = chunk(documents, batchSize);
1144
-
1145
- for (const batch of documentBatches) {
1146
- console.log(chalk.blue(`Processing batch of ${batch.length} documents...`));
1147
-
1148
- const batchPromises = batch.map(async (doc) => {
1149
- const toCreateObject: Partial<typeof doc> = { ...doc };
1150
- delete toCreateObject.$databaseId;
1151
- delete toCreateObject.$collectionId;
1152
- delete toCreateObject.$createdAt;
1153
- delete toCreateObject.$updatedAt;
1154
- delete toCreateObject.$id;
1155
- delete toCreateObject.$permissions;
1156
-
1157
- const result = await transferDocumentWithRetry(
1158
- db,
1159
- dbId,
1160
- collectionId,
1161
- doc.$id,
1162
- toCreateObject,
1163
- doc.$permissions || []
1164
- );
1165
-
1166
- return { docId: doc.$id, success: result };
1167
- });
1168
-
1169
- const results = await Promise.allSettled(batchPromises);
1170
-
1171
- results.forEach((result, index) => {
1172
- if (result.status === 'fulfilled') {
1173
- if (result.value.success) {
1174
- successful++;
1175
- } else {
1176
- failed++;
1177
- }
1178
- } else {
1179
- console.log(chalk.red(`Batch promise rejected for document ${batch[index].$id}: ${result.reason}`));
1180
- failed++;
1181
- }
1182
- });
1183
-
1184
- // Add delay between batches to avoid rate limiting
1185
- if (documentBatches.indexOf(batch) < documentBatches.length - 1) {
1186
- await delay(500);
1187
- }
1188
- }
1189
-
1190
- return { successful, failed };
1191
- };
1192
-
1193
- /**
1194
- * Enhanced batch document transfer with fault tolerance and bulk API support
1195
- */
1196
- const transferDocumentBatchWithRetry = async (
1197
- db: Databases,
1198
- client: any,
1199
- dbId: string,
1200
- collectionId: string,
1201
- documents: any[],
1202
- batchSize: number = 10
1203
- ): Promise<{ successful: number; failed: number }> => {
1204
- // Check if we can use bulk operations
1205
- if (supportsBulkOperations(client.config.endpoint)) {
1206
- console.log(chalk.green(`🚀 Using bulk upsert API for faster document transfer`));
1207
-
1208
- // Try with Scale plan limit first (2500), then Pro (1000), then Free (100)
1209
- const batchSizes = [1000, 100]; // Start with Pro plan, fallback to Free
1210
-
1211
- for (const maxBatchSize of batchSizes) {
1212
- try {
1213
- return await transferDocumentsBulkUpsert(client, dbId, collectionId, documents, maxBatchSize);
1214
- } catch (error: any) {
1215
- console.log(chalk.yellow(`Bulk upsert with batch size ${maxBatchSize} failed, trying smaller size...`));
1216
- continue;
1217
- }
1218
- }
1219
-
1220
- // If all bulk operations fail, fall back to individual transfers
1221
- console.log(chalk.yellow(`All bulk operations failed, falling back to individual document transfers`));
1222
- }
1223
-
1224
- // Fall back to individual document transfer
1225
- return await transferDocumentBatchWithRetryFallback(db, dbId, collectionId, documents, batchSize);
1226
- };
1227
-
1228
- export const transferDocumentsBetweenDbsLocalToRemote = async (
1229
- localDb: Databases,
1230
- endpoint: string,
1231
- projectId: string,
1232
- apiKey: string,
1233
- fromDbId: string,
1234
- toDbId: string,
1235
- fromCollId: string,
1236
- toCollId: string
1237
- ) => {
1238
- console.log(chalk.blue(`Starting enhanced document transfer from ${fromCollId} to ${toCollId}...`));
1239
-
1240
- const client = new Client()
1241
- .setEndpoint(endpoint)
1242
- .setProject(projectId)
1243
- .setKey(apiKey);
1244
-
1245
- const remoteDb = new Databases(client);
1246
- let totalDocumentsProcessed = 0;
1247
- let totalSuccessful = 0;
1248
- let totalFailed = 0;
1249
-
1250
- // Fetch documents in larger batches (1000 at a time)
1251
- let hasMoreDocuments = true;
1252
- let lastDocumentId: string | undefined;
1253
-
1254
- while (hasMoreDocuments) {
1255
- const queries = [Query.limit(1000)]; // Fetch 1000 documents at a time
1256
- if (lastDocumentId) {
1257
- queries.push(Query.cursorAfter(lastDocumentId));
1258
- }
1259
-
1260
- const fromCollDocs = await tryAwaitWithRetry(async () =>
1261
- localDb.listDocuments(fromDbId, fromCollId, queries)
1262
- );
1263
-
1264
- if (fromCollDocs.documents.length === 0) {
1265
- hasMoreDocuments = false;
1266
- break;
1267
- }
1268
-
1269
- console.log(chalk.blue(`Fetched ${fromCollDocs.documents.length} documents, processing for transfer...`));
1270
-
1271
- const { successful, failed } = await transferDocumentBatchWithRetry(
1272
- remoteDb,
1273
- client,
1274
- toDbId,
1275
- toCollId,
1276
- fromCollDocs.documents
1277
- );
1278
-
1279
- totalDocumentsProcessed += fromCollDocs.documents.length;
1280
- totalSuccessful += successful;
1281
- totalFailed += failed;
1282
-
1283
- // Check if we have more documents to process
1284
- if (fromCollDocs.documents.length < 1000) {
1285
- hasMoreDocuments = false;
1286
- } else {
1287
- lastDocumentId = fromCollDocs.documents[fromCollDocs.documents.length - 1].$id;
1288
- }
1289
-
1290
- console.log(chalk.gray(`Batch complete: ${successful} successful, ${failed} failed`));
1291
- }
1292
-
1293
- if (totalDocumentsProcessed === 0) {
1294
- MessageFormatter.info(`No documents found in collection ${fromCollId}`, { prefix: "Transfer" });
1295
- return;
1296
- }
1297
-
1298
- const message = `Total documents processed: ${totalDocumentsProcessed}, successful: ${totalSuccessful}, failed: ${totalFailed}`;
1299
-
1300
- if (totalFailed > 0) {
1301
- MessageFormatter.warning(message, { prefix: "Transfer" });
1302
- } else {
1303
- MessageFormatter.success(message, { prefix: "Transfer" });
1304
- }
1305
- };