appwrite-utils-cli 1.7.6 → 1.7.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -72,6 +72,7 @@ export interface BulkDeleteRowsParams {
   databaseId: string;
   tableId: string;
   rowIds: string[];
+  batchSize?: number;
 }
 export interface CreateIndexParams {
   databaseId: string;
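
The new optional `batchSize` field enables a "wipe mode" alongside the existing ID-based deletion. A minimal sketch of the two call shapes, assuming a hypothetical `adapter` instance; the interface is redeclared locally so the snippet stands alone:

```ts
// Sketch only: `adapter` in the commented calls is a placeholder, not part of this diff.
interface BulkDeleteRowsParams {
  databaseId: string;
  tableId: string;
  rowIds: string[];   // empty array = wipe mode
  batchSize?: number; // wipe-mode batch size (adapters default to 250)
}

// Wipe mode: no IDs; the adapter deletes Query.limit(batchSize) rows per call.
const wipeBatch: BulkDeleteRowsParams = {
  databaseId: "main",
  tableId: "orders",
  rowIds: [],
  batchSize: 250,
};

// Targeted mode: specific IDs; the adapter chunks them into Query.equal batches.
const targeted: BulkDeleteRowsParams = {
  databaseId: "main",
  tableId: "orders",
  rowIds: ["row_1", "row_2", "row_3"],
};

// await adapter.bulkDeleteRows(wipeBatch);
// await adapter.bulkDeleteRows(targeted);
```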
@@ -7,6 +7,7 @@
  * older Appwrite instances.
  */
 import { Query } from "node-appwrite";
+import { chunk } from "es-toolkit";
 import { BaseAdapter, AdapterError, UnsupportedOperationError } from './DatabaseAdapter.js';
 /**
  * LegacyAdapter - Translates TablesDB calls to legacy Databases API
@@ -310,13 +311,24 @@ export class LegacyAdapter extends BaseAdapter {
     }
     async bulkDeleteRows(params) {
         try {
-            // Try to use deleteDocuments with queries first (more efficient)
-            const queries = params.rowIds.map(id => Query.equal('$id', id));
+            let queries;
+            // Wipe mode: use Query.limit for deleting without fetching
+            if (params.rowIds.length === 0) {
+                const batchSize = params.batchSize || 250;
+                queries = [Query.limit(batchSize)];
+            }
+            // Specific IDs mode: chunk into batches of 80-90 to stay within Appwrite limits
+            // (max 100 IDs per Query.equal, and queries must be < 4096 chars total)
+            else {
+                const ID_BATCH_SIZE = 85; // Safe batch size for Query.equal
+                const idBatches = chunk(params.rowIds, ID_BATCH_SIZE);
+                queries = idBatches.map(batch => Query.equal('$id', batch));
+            }
             const result = await this.databases.deleteDocuments(params.databaseId, params.tableId, // Maps tableId to collectionId
             queries);
             return {
                 data: result,
-                total: params.rowIds.length
+                total: params.rowIds.length || result.total || 0
             };
         }
         catch (error) {
@@ -6,6 +6,7 @@
  * and returns Models.Row instead of Models.Document.
  */
 import { Query } from "node-appwrite";
+import { chunk } from "es-toolkit";
 import { BaseAdapter, AdapterError } from './DatabaseAdapter.js';
 /**
  * TablesDBAdapter implementation for native TablesDB API
@@ -270,8 +271,19 @@ export class TablesDBAdapter extends BaseAdapter {
     }
     async bulkDeleteRows(params) {
         try {
-            // Convert rowIds to queries for the deleteRows API
-            const queries = params.rowIds.map(id => Query.equal('$id', id));
+            let queries;
+            // Wipe mode: use Query.limit for deleting without fetching
+            if (params.rowIds.length === 0) {
+                const batchSize = params.batchSize || 250;
+                queries = [Query.limit(batchSize)];
+            }
+            // Specific IDs mode: chunk into batches of 80-90 to stay within Appwrite limits
+            // (max 100 IDs per Query.equal, and queries must be < 4096 chars total)
+            else {
+                const ID_BATCH_SIZE = 85; // Safe batch size for Query.equal
+                const idBatches = chunk(params.rowIds, ID_BATCH_SIZE);
+                queries = idBatches.map(batch => Query.equal('$id', batch));
+            }
             const result = await this.tablesDB.deleteRows({
                 databaseId: params.databaseId,
                 tableId: params.tableId,
@@ -279,7 +291,7 @@ export class TablesDBAdapter extends BaseAdapter {
             });
             return {
                 data: result,
-                total: params.rowIds.length
+                total: params.rowIds.length || result.total || 0
             };
         }
         catch (error) {
@@ -10,7 +10,7 @@ export declare const wipeAllTables: (adapter: DatabaseAdapter, databaseId: strin
     tableName: string;
 }[]>;
 /**
- * Optimized streaming deletion of all rows from a table
- * Uses bulk deletion when available, falls back to optimized individual deletion
+ * Optimized deletion of all rows from a table using direct bulk deletion
+ * Uses Query.limit() to delete rows without fetching IDs first
 */
 export declare const wipeTableRows: (adapter: DatabaseAdapter, databaseId: string, tableId: string) => Promise<void>;
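
Per the updated declaration, callers still invoke `wipeTableRows(adapter, databaseId, tableId)`; internally (see the implementation hunks below) it now loops on empty-`rowIds` bulk deletes until the adapter reports nothing left. A condensed sketch of that loop, with a plain function type standing in for the adapter:

```ts
// Condensed sketch of the new wipe loop; `BulkDeleteFn` is a stand-in, not the package's type.
type BulkDeleteFn = (params: {
  databaseId: string;
  tableId: string;
  rowIds: string[];
  batchSize?: number;
}) => Promise<{ total: number }>;

async function wipeLoop(
  bulkDeleteRows: BulkDeleteFn,
  databaseId: string,
  tableId: string
): Promise<number> {
  const DELETE_BATCH_SIZE = 250;
  let totalDeleted = 0;
  while (true) {
    // Empty rowIds = wipe mode; the adapter builds [Query.limit(250)] internally.
    const { total } = await bulkDeleteRows({
      databaseId,
      tableId,
      rowIds: [],
      batchSize: DELETE_BATCH_SIZE,
    });
    if (total === 0) break; // nothing left to delete
    totalDeleted += total;
  }
  return totalDeleted;
}
```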
@@ -2,7 +2,7 @@ import { Databases, Query, } from "node-appwrite";
 import { tryAwaitWithRetry } from "../utils/helperFunctions.js";
 import { MessageFormatter } from "../shared/messageFormatter.js";
 import { ProgressManager } from "../shared/progressManager.js";
-import { isRetryableError, isBulkNotSupportedError, isCriticalError } from "../shared/errorUtils.js";
+import { isRetryableError, isCriticalError } from "../shared/errorUtils.js";
 import { delay } from "../utils/helperFunctions.js";
 import { chunk } from "es-toolkit";
 import pLimit from "p-limit";
@@ -167,82 +167,54 @@ export const wipeAllTables = async (adapter, databaseId) => {
     return deleted;
 };
 /**
- * Optimized streaming deletion of all rows from a table
- * Uses bulk deletion when available, falls back to optimized individual deletion
+ * Optimized deletion of all rows from a table using direct bulk deletion
+ * Uses Query.limit() to delete rows without fetching IDs first
 */
 export const wipeTableRows = async (adapter, databaseId, tableId) => {
     try {
-        // Configuration for optimized deletion
-        const FETCH_BATCH_SIZE = 1000; // How many to fetch per query
-        const BULK_DELETE_BATCH_SIZE = 500; // How many to bulk delete at once
-        const INDIVIDUAL_DELETE_BATCH_SIZE = 200; // For fallback individual deletion
-        const MAX_CONCURRENT_OPERATIONS = 10; // Concurrent bulk/individual operations
+        // Check if bulk deletion is available
+        if (!adapter.bulkDeleteRows) {
+            MessageFormatter.error("Bulk deletion not available for this adapter - wipe operation not supported", new Error("bulkDeleteRows not available"), { prefix: "Wipe" });
+            throw new Error("Bulk deletion required for wipe operations");
+        }
+        const DELETE_BATCH_SIZE = 250; // How many rows to delete per batch
         let totalDeleted = 0;
-        let cursor;
         let hasMoreRows = true;
         MessageFormatter.info("Starting optimized table row deletion...", { prefix: "Wipe" });
-        // Create progress tracker (we'll update the total as we discover more rows)
-        const progress = ProgressManager.create(`delete-${tableId}`, 1, // Start with 1, will update as we go
+        const progress = ProgressManager.create(`delete-${tableId}`, 1, // Start with 1, will update as we discover more
        { title: "Deleting table rows" });
        while (hasMoreRows) {
-            // Fetch next batch of rows
-            const queries = [Query.limit(FETCH_BATCH_SIZE)];
-            if (cursor) {
-                queries.push(Query.cursorAfter(cursor));
-            }
-            const response = await adapter.listRows({ databaseId, tableId, queries });
-            const rows = response.rows || [];
-            if (rows.length === 0) {
-                hasMoreRows = false;
-                break;
-            }
-            // Update progress total as we discover more rows
-            if (rows.length === FETCH_BATCH_SIZE) {
-                // There might be more rows, update progress total
-                progress.setTotal(totalDeleted + rows.length + 1000); // Estimate more
-            }
-            MessageFormatter.progress(`Processing batch: ${rows.length} rows (${totalDeleted + rows.length} total so far)`, { prefix: "Wipe" });
-            // Try to use bulk deletion first, fall back to individual deletion
-            const rowIds = rows.map((row) => row.$id);
-            // Check if bulk deletion is available and try it first
-            if (adapter.bulkDeleteRows) {
-                try {
-                    // Attempt bulk deletion (available in TablesDB)
-                    const deletedCount = await tryBulkDeletion(adapter, databaseId, tableId, rowIds, BULK_DELETE_BATCH_SIZE, MAX_CONCURRENT_OPERATIONS);
-                    totalDeleted += deletedCount;
-                    progress.update(totalDeleted);
-                }
-                catch (bulkError) {
-                    // Enhanced error handling: categorize the error and decide on fallback strategy
-                    const errorMessage = bulkError instanceof Error ? bulkError.message : String(bulkError);
-                    if (isRetryableError(errorMessage)) {
-                        MessageFormatter.progress(`Bulk deletion encountered retryable error, retrying with individual deletion for ${rows.length} rows`, { prefix: "Wipe" });
-                    }
-                    else if (isBulkNotSupportedError(errorMessage)) {
-                        MessageFormatter.progress(`Bulk deletion not supported by server, switching to individual deletion for ${rows.length} rows`, { prefix: "Wipe" });
-                    }
-                    else {
-                        MessageFormatter.progress(`Bulk deletion failed (${errorMessage}), falling back to individual deletion for ${rows.length} rows`, { prefix: "Wipe" });
-                    }
-                    const deletedCount = await tryIndividualDeletion(adapter, databaseId, tableId, rows, INDIVIDUAL_DELETE_BATCH_SIZE, MAX_CONCURRENT_OPERATIONS, progress, totalDeleted);
-                    totalDeleted += deletedCount;
+            try {
+                // Delete next batch using Query.limit() - no fetching needed!
+                const result = await tryAwaitWithRetry(async () => adapter.bulkDeleteRows({
+                    databaseId,
+                    tableId,
+                    rowIds: [], // Empty array signals we want to use Query.limit instead
+                    batchSize: DELETE_BATCH_SIZE
+                }));
+                const deletedCount = result.total || 0;
+                if (deletedCount === 0) {
+                    hasMoreRows = false;
+                    break;
                 }
-            }
-            else {
-                // Bulk deletion not available, use optimized individual deletion
-                MessageFormatter.progress(`Using individual deletion for ${rows.length} rows (bulk deletion not available)`, { prefix: "Wipe" });
-                const deletedCount = await tryIndividualDeletion(adapter, databaseId, tableId, rows, INDIVIDUAL_DELETE_BATCH_SIZE, MAX_CONCURRENT_OPERATIONS, progress, totalDeleted);
                 totalDeleted += deletedCount;
+                progress.setTotal(totalDeleted + 100); // Estimate more rows exist
+                progress.update(totalDeleted);
+                MessageFormatter.progress(`Deleted ${deletedCount} rows (${totalDeleted} total so far)`, { prefix: "Wipe" });
+                // Small delay between batches to be respectful to the API
+                await delay(10);
            }
-            // Set up cursor for next iteration
-            if (rows.length < FETCH_BATCH_SIZE) {
-                hasMoreRows = false;
-            }
-            else {
-                cursor = rows[rows.length - 1].$id;
+            catch (error) {
+                const errorMessage = error.message || String(error);
+                if (isCriticalError(errorMessage)) {
+                    MessageFormatter.error(`Critical error during bulk deletion: ${errorMessage}`, error, { prefix: "Wipe" });
+                    throw error;
+                }
+                else {
+                    MessageFormatter.error(`Error during deletion batch: ${errorMessage}`, error, { prefix: "Wipe" });
+                    // Continue trying with next batch
+                }
            }
-            // Small delay between fetch cycles to be respectful to the API
-            await delay(10);
        }
        // Update final progress total
        progress.setTotal(totalDeleted);
@@ -259,77 +231,3 @@ export const wipeTableRows = async (adapter, databaseId, tableId) => {
        throw error;
    }
 };
-/**
- * Helper function to attempt bulk deletion of row IDs
- */
-async function tryBulkDeletion(adapter, databaseId, tableId, rowIds, batchSize, maxConcurrent) {
-    if (!adapter.bulkDeleteRows) {
-        throw new Error("Bulk deletion not available on this adapter");
-    }
-    const limit = pLimit(maxConcurrent);
-    const batches = chunk(rowIds, batchSize);
-    let successfullyDeleted = 0;
-    const deletePromises = batches.map((batch) => limit(async () => {
-        try {
-            const result = await tryAwaitWithRetry(async () => adapter.bulkDeleteRows({ databaseId, tableId, rowIds: batch }));
-            successfullyDeleted += batch.length; // Assume success if no error thrown
-        }
-        catch (error) {
-            const errorMessage = error.message || String(error);
-            // Enhanced error handling for bulk deletion
-            if (isCriticalError(errorMessage)) {
-                MessageFormatter.error(`Critical error in bulk deletion batch: ${errorMessage}`, error, { prefix: "Wipe" });
-                throw error;
-            }
-            else {
-                // For non-critical errors in bulk deletion, re-throw to trigger fallback
-                throw new Error(`Bulk deletion batch failed: ${errorMessage}`);
-            }
-        }
-    }));
-    await Promise.all(deletePromises);
-    return successfullyDeleted;
-}
-/**
- * Helper function for fallback individual deletion
- */
-async function tryIndividualDeletion(adapter, databaseId, tableId, rows, batchSize, maxConcurrent, progress, baseDeleted) {
-    const limit = pLimit(maxConcurrent);
-    const batches = chunk(rows, batchSize);
-    let processedInBatch = 0;
-    let successfullyDeleted = 0;
-    const deletePromises = batches.map((batch) => limit(async () => {
-        const batchDeletePromises = batch.map(async (row) => {
-            try {
-                await tryAwaitWithRetry(async () => adapter.deleteRow({ databaseId, tableId, id: row.$id }));
-                successfullyDeleted++;
-            }
-            catch (error) {
-                const errorMessage = error.message || String(error);
-                // Enhanced error handling for row deletion
-                if (errorMessage.includes("Row with the requested ID could not be found")) {
-                    // Row already deleted, count as success since it's gone
-                    successfullyDeleted++;
-                }
-                else if (isCriticalError(errorMessage)) {
-                    // Critical error, log and rethrow to stop operation
-                    MessageFormatter.error(`Critical error deleting row ${row.$id}: ${errorMessage}`, error, { prefix: "Wipe" });
-                    throw error;
-                }
-                else if (isRetryableError(errorMessage)) {
-                    // Retryable error, will be handled by tryAwaitWithRetry
-                    MessageFormatter.progress(`Retryable error for row ${row.$id}, will retry`, { prefix: "Wipe" });
-                }
-                else {
-                    // Other non-critical errors, log but continue
-                    MessageFormatter.error(`Failed to delete row ${row.$id}: ${errorMessage}`, error, { prefix: "Wipe" });
-                }
-            }
-            processedInBatch++;
-            progress.update(baseDeleted + successfullyDeleted);
-        });
-        await Promise.all(batchDeletePromises);
-    }));
-    await Promise.all(deletePromises);
-    return successfullyDeleted;
-}
package/package.json CHANGED
@@ -1,7 +1,7 @@
 {
   "name": "appwrite-utils-cli",
   "description": "Appwrite Utility Functions to help with database management, data conversion, data import, migrations, and much more. Meant to be used as a CLI tool, I do not recommend installing this in frontend environments.",
-  "version": "1.7.6",
+  "version": "1.7.7",
   "main": "src/main.ts",
   "type": "module",
   "repository": {
@@ -86,7 +86,8 @@ export interface BulkUpsertRowsParams {
 export interface BulkDeleteRowsParams {
   databaseId: string;
   tableId: string;
-  rowIds: string[];
+  rowIds: string[]; // Empty array = wipe mode (use Query.limit), otherwise specific IDs to delete
+  batchSize?: number; // Optional batch size for wipe mode (default 250)
 }

 // Index operation parameters
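
The inline comments above describe why non-empty `rowIds` are chunked at 85 per `Query.equal`. As an illustration (made-up IDs, not from the package), es-toolkit's `chunk` plus `Query.equal` with an array value splits 1,000 IDs into 12 equality filters:

```ts
import { Query } from "node-appwrite";
import { chunk } from "es-toolkit";

// Made-up IDs purely to illustrate the batching arithmetic.
const rowIds = Array.from({ length: 1000 }, (_, i) => `row_${i}`);

const ID_BATCH_SIZE = 85; // stays under the 100-value cap on a single Query.equal
const queries = chunk(rowIds, ID_BATCH_SIZE) // 1,000 IDs -> 11 batches of 85 + 1 of 65
  .map((batch) => Query.equal("$id", batch)); // one equality filter per batch

console.log(queries.length); // 12
```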
@@ -7,32 +7,33 @@
  * older Appwrite instances.
  */

-import { Query } from "node-appwrite";
-import {
-  BaseAdapter,
-  type CreateRowParams,
-  type UpdateRowParams,
-  type ListRowsParams,
-  type DeleteRowParams,
-  type CreateTableParams,
-  type UpdateTableParams,
-  type ListTablesParams,
-  type DeleteTableParams,
-  type GetTableParams,
-  type BulkCreateRowsParams,
-  type BulkUpsertRowsParams,
-  type BulkDeleteRowsParams,
-  type CreateIndexParams,
-  type ListIndexesParams,
-  type DeleteIndexParams,
-  type CreateAttributeParams,
-  type UpdateAttributeParams,
-  type DeleteAttributeParams,
-  type ApiResponse,
-  type AdapterMetadata,
-  AdapterError,
-  UnsupportedOperationError
-} from './DatabaseAdapter.js';
+import { Query } from "node-appwrite";
+import { chunk } from "es-toolkit";
+import {
+  BaseAdapter,
+  type CreateRowParams,
+  type UpdateRowParams,
+  type ListRowsParams,
+  type DeleteRowParams,
+  type CreateTableParams,
+  type UpdateTableParams,
+  type ListTablesParams,
+  type DeleteTableParams,
+  type GetTableParams,
+  type BulkCreateRowsParams,
+  type BulkUpsertRowsParams,
+  type BulkDeleteRowsParams,
+  type CreateIndexParams,
+  type ListIndexesParams,
+  type DeleteIndexParams,
+  type CreateAttributeParams,
+  type UpdateAttributeParams,
+  type DeleteAttributeParams,
+  type ApiResponse,
+  type AdapterMetadata,
+  AdapterError,
+  UnsupportedOperationError
+} from './DatabaseAdapter.js';

 /**
  * LegacyAdapter - Translates TablesDB calls to legacy Databases API
@@ -587,62 +588,74 @@ export class LegacyAdapter extends BaseAdapter {
     throw new UnsupportedOperationError('bulkUpsertRows', 'legacy');
   }

-  async bulkDeleteRows(params: BulkDeleteRowsParams): Promise<ApiResponse> {
-    try {
-      // Try to use deleteDocuments with queries first (more efficient)
-      const queries = params.rowIds.map(id => Query.equal('$id', id));
-
-      const result = await this.databases.deleteDocuments(
-        params.databaseId,
-        params.tableId, // Maps tableId to collectionId
-        queries
-      );
-
-      return {
-        data: result,
-        total: params.rowIds.length
-      };
-    } catch (error) {
-      // If deleteDocuments with queries fails, fall back to individual deletes
-      const errorMessage = error instanceof Error ? error.message : String(error);
-
-      // Check if the error indicates that deleteDocuments with queries is not supported
-      if (errorMessage.includes('not supported') || errorMessage.includes('invalid') || errorMessage.includes('queries')) {
-        // Fall back to individual deletions
-        const results = [];
-        const errors = [];
-
-        for (const rowId of params.rowIds) {
-          try {
-            await this.deleteRow({
-              databaseId: params.databaseId,
-              tableId: params.tableId,
-              id: rowId
-            });
-            results.push({ id: rowId, deleted: true });
-          } catch (individualError) {
-            errors.push({
-              rowId,
-              error: individualError instanceof Error ? individualError.message : 'Unknown error'
-            });
-          }
-        }
-
-        return {
-          data: results,
-          total: results.length,
-          errors: errors.length > 0 ? errors : undefined
-        };
-      } else {
-        // Re-throw the original error if it's not a support issue
-        throw new AdapterError(
-          `Failed to bulk delete rows (legacy): ${errorMessage}`,
-          'BULK_DELETE_ROWS_FAILED',
-          error instanceof Error ? error : undefined
-        );
-      }
-    }
-  }
+  async bulkDeleteRows(params: BulkDeleteRowsParams): Promise<ApiResponse> {
+    try {
+      let queries: string[];
+
+      // Wipe mode: use Query.limit for deleting without fetching
+      if (params.rowIds.length === 0) {
+        const batchSize = params.batchSize || 250;
+        queries = [Query.limit(batchSize)];
+      }
+      // Specific IDs mode: chunk into batches of 80-90 to stay within Appwrite limits
+      // (max 100 IDs per Query.equal, and queries must be < 4096 chars total)
+      else {
+        const ID_BATCH_SIZE = 85; // Safe batch size for Query.equal
+        const idBatches = chunk(params.rowIds, ID_BATCH_SIZE);
+        queries = idBatches.map(batch => Query.equal('$id', batch));
+      }
+
+      const result = await this.databases.deleteDocuments(
+        params.databaseId,
+        params.tableId, // Maps tableId to collectionId
+        queries
+      );
+
+      return {
+        data: result,
+        total: params.rowIds.length || (result as any).total || 0
+      };
+    } catch (error) {
+      // If deleteDocuments with queries fails, fall back to individual deletes
+      const errorMessage = error instanceof Error ? error.message : String(error);
+
+      // Check if the error indicates that deleteDocuments with queries is not supported
+      if (errorMessage.includes('not supported') || errorMessage.includes('invalid') || errorMessage.includes('queries')) {
+        // Fall back to individual deletions
+        const results = [];
+        const errors = [];
+
+        for (const rowId of params.rowIds) {
+          try {
+            await this.deleteRow({
+              databaseId: params.databaseId,
+              tableId: params.tableId,
+              id: rowId
+            });
+            results.push({ id: rowId, deleted: true });
+          } catch (individualError) {
+            errors.push({
+              rowId,
+              error: individualError instanceof Error ? individualError.message : 'Unknown error'
+            });
+          }
+        }
+
+        return {
+          data: results,
+          total: results.length,
+          errors: errors.length > 0 ? errors : undefined
+        };
+      } else {
+        // Re-throw the original error if it's not a support issue
+        throw new AdapterError(
+          `Failed to bulk delete rows (legacy): ${errorMessage}`,
+          'BULK_DELETE_ROWS_FAILED',
+          error instanceof Error ? error : undefined
+        );
+      }
+    }
+  }

   // Metadata and Capabilities

@@ -6,32 +6,33 @@
  * and returns Models.Row instead of Models.Document.
  */

-import { Query } from "node-appwrite";
-import {
-  BaseAdapter,
-  type DatabaseAdapter,
-  type CreateRowParams,
-  type UpdateRowParams,
-  type ListRowsParams,
-  type DeleteRowParams,
-  type CreateTableParams,
-  type UpdateTableParams,
-  type ListTablesParams,
-  type DeleteTableParams,
-  type GetTableParams,
-  type BulkCreateRowsParams,
-  type BulkUpsertRowsParams,
-  type BulkDeleteRowsParams,
-  type CreateIndexParams,
-  type ListIndexesParams,
-  type DeleteIndexParams,
-  type CreateAttributeParams,
-  type UpdateAttributeParams,
-  type DeleteAttributeParams,
-  type ApiResponse,
-  type AdapterMetadata,
-  AdapterError
-} from './DatabaseAdapter.js';
+import { Query } from "node-appwrite";
+import { chunk } from "es-toolkit";
+import {
+  BaseAdapter,
+  type DatabaseAdapter,
+  type CreateRowParams,
+  type UpdateRowParams,
+  type ListRowsParams,
+  type DeleteRowParams,
+  type CreateTableParams,
+  type UpdateTableParams,
+  type ListTablesParams,
+  type DeleteTableParams,
+  type GetTableParams,
+  type BulkCreateRowsParams,
+  type BulkUpsertRowsParams,
+  type BulkDeleteRowsParams,
+  type CreateIndexParams,
+  type ListIndexesParams,
+  type DeleteIndexParams,
+  type CreateAttributeParams,
+  type UpdateAttributeParams,
+  type DeleteAttributeParams,
+  type ApiResponse,
+  type AdapterMetadata,
+  AdapterError
+} from './DatabaseAdapter.js';

 /**
  * TablesDBAdapter implementation for native TablesDB API
@@ -514,27 +515,41 @@ export class TablesDBAdapter extends BaseAdapter {
     }
   }

-  async bulkDeleteRows(params: BulkDeleteRowsParams): Promise<ApiResponse> {
-    try {
-      // Convert rowIds to queries for the deleteRows API
-      const queries = params.rowIds.map(id => Query.equal('$id', id));
-      const result = await this.tablesDB.deleteRows({
-        databaseId: params.databaseId,
-        tableId: params.tableId,
-        queries: queries
-      });
-      return {
-        data: result,
-        total: params.rowIds.length
-      };
-    } catch (error) {
-      throw new AdapterError(
-        `Failed to bulk delete rows: ${error instanceof Error ? error.message : 'Unknown error'}`,
-        'BULK_DELETE_ROWS_FAILED',
-        error instanceof Error ? error : undefined
-      );
-    }
-  }
+  async bulkDeleteRows(params: BulkDeleteRowsParams): Promise<ApiResponse> {
+    try {
+      let queries: string[];
+
+      // Wipe mode: use Query.limit for deleting without fetching
+      if (params.rowIds.length === 0) {
+        const batchSize = params.batchSize || 250;
+        queries = [Query.limit(batchSize)];
+      }
+      // Specific IDs mode: chunk into batches of 80-90 to stay within Appwrite limits
+      // (max 100 IDs per Query.equal, and queries must be < 4096 chars total)
+      else {
+        const ID_BATCH_SIZE = 85; // Safe batch size for Query.equal
+        const idBatches = chunk(params.rowIds, ID_BATCH_SIZE);
+        queries = idBatches.map(batch => Query.equal('$id', batch));
+      }
+
+      const result = await this.tablesDB.deleteRows({
+        databaseId: params.databaseId,
+        tableId: params.tableId,
+        queries: queries
+      });
+
+      return {
+        data: result,
+        total: params.rowIds.length || (result as any).total || 0
+      };
+    } catch (error) {
+      throw new AdapterError(
+        `Failed to bulk delete rows: ${error instanceof Error ? error.message : 'Unknown error'}`,
+        'BULK_DELETE_ROWS_FAILED',
+        error instanceof Error ? error : undefined
+      );
+    }
+  }

   // Metadata and Capabilities
   getMetadata(): AdapterMetadata {
@@ -7,7 +7,7 @@ import type { DatabaseAdapter } from "../adapters/DatabaseAdapter.js";
 import { tryAwaitWithRetry } from "../utils/helperFunctions.js";
 import { MessageFormatter } from "../shared/messageFormatter.js";
 import { ProgressManager } from "../shared/progressManager.js";
-import { isRetryableError, isBulkNotSupportedError, isCriticalError } from "../shared/errorUtils.js";
+import { isRetryableError, isCriticalError } from "../shared/errorUtils.js";
 import { delay } from "../utils/helperFunctions.js";
 import { chunk } from "es-toolkit";
 import pLimit from "p-limit";
@@ -239,8 +239,8 @@ export const wipeAllTables = async (
 };

 /**
- * Optimized streaming deletion of all rows from a table
- * Uses bulk deletion when available, falls back to optimized individual deletion
+ * Optimized deletion of all rows from a table using direct bulk deletion
+ * Uses Query.limit() to delete rows without fetching IDs first
 */
 export const wipeTableRows = async (
@@ -248,123 +248,78 @@ export const wipeTableRows = async (
   tableId: string
 ): Promise<void> => {
   try {
-    // Configuration for optimized deletion
-    const FETCH_BATCH_SIZE = 1000; // How many to fetch per query
-    const BULK_DELETE_BATCH_SIZE = 500; // How many to bulk delete at once
-    const INDIVIDUAL_DELETE_BATCH_SIZE = 200; // For fallback individual deletion
-    const MAX_CONCURRENT_OPERATIONS = 10; // Concurrent bulk/individual operations
+    // Check if bulk deletion is available
+    if (!adapter.bulkDeleteRows) {
+      MessageFormatter.error(
+        "Bulk deletion not available for this adapter - wipe operation not supported",
+        new Error("bulkDeleteRows not available"),
+        { prefix: "Wipe" }
+      );
+      throw new Error("Bulk deletion required for wipe operations");
+    }

+    const DELETE_BATCH_SIZE = 250; // How many rows to delete per batch
     let totalDeleted = 0;
-    let cursor: string | undefined;
     let hasMoreRows = true;

     MessageFormatter.info("Starting optimized table row deletion...", { prefix: "Wipe" });

-    // Create progress tracker (we'll update the total as we discover more rows)
     const progress = ProgressManager.create(
       `delete-${tableId}`,
-      1, // Start with 1, will update as we go
+      1, // Start with 1, will update as we discover more
       { title: "Deleting table rows" }
     );

     while (hasMoreRows) {
-      // Fetch next batch of rows
-      const queries = [Query.limit(FETCH_BATCH_SIZE)];
-      if (cursor) {
-        queries.push(Query.cursorAfter(cursor));
+      try {
+        // Delete next batch using Query.limit() - no fetching needed!
+        const result = await tryAwaitWithRetry(async () =>
+          adapter.bulkDeleteRows!({
+            databaseId,
+            tableId,
+            rowIds: [], // Empty array signals we want to use Query.limit instead
+            batchSize: DELETE_BATCH_SIZE
+          })
+        );
+
+        const deletedCount = (result as any).total || 0;
+
+        if (deletedCount === 0) {
+          hasMoreRows = false;
+          break;
+        }
+
+        totalDeleted += deletedCount;
+        progress.setTotal(totalDeleted + 100); // Estimate more rows exist
+        progress.update(totalDeleted);
+
+        MessageFormatter.progress(
+          `Deleted ${deletedCount} rows (${totalDeleted} total so far)`,
+          { prefix: "Wipe" }
+        );
+
+        // Small delay between batches to be respectful to the API
+        await delay(10);
+
+      } catch (error: any) {
+        const errorMessage = error.message || String(error);
+
+        if (isCriticalError(errorMessage)) {
+          MessageFormatter.error(
+            `Critical error during bulk deletion: ${errorMessage}`,
+            error,
+            { prefix: "Wipe" }
+          );
+          throw error;
+        } else {
+          MessageFormatter.error(
+            `Error during deletion batch: ${errorMessage}`,
+            error,
+            { prefix: "Wipe" }
+          );
+          // Continue trying with next batch
+        }
       }
-
-      const response = await adapter.listRows({ databaseId, tableId, queries });
-      const rows: any[] = (response as any).rows || [];
-
-      if (rows.length === 0) {
-        hasMoreRows = false;
-        break;
-      }
-
-      // Update progress total as we discover more rows
-      if (rows.length === FETCH_BATCH_SIZE) {
-        // There might be more rows, update progress total
-        progress.setTotal(totalDeleted + rows.length + 1000); // Estimate more
-      }
-
-      MessageFormatter.progress(
-        `Processing batch: ${rows.length} rows (${totalDeleted + rows.length} total so far)`,
-        { prefix: "Wipe" }
-      );
-
-      // Try to use bulk deletion first, fall back to individual deletion
-      const rowIds = rows.map((row: any) => row.$id);
-
-      // Check if bulk deletion is available and try it first
-      if (adapter.bulkDeleteRows) {
-        try {
-          // Attempt bulk deletion (available in TablesDB)
-          const deletedCount = await tryBulkDeletion(adapter, databaseId, tableId, rowIds, BULK_DELETE_BATCH_SIZE, MAX_CONCURRENT_OPERATIONS);
-          totalDeleted += deletedCount;
-          progress.update(totalDeleted);
-        } catch (bulkError) {
-          // Enhanced error handling: categorize the error and decide on fallback strategy
-          const errorMessage = bulkError instanceof Error ? bulkError.message : String(bulkError);
-
-          if (isRetryableError(errorMessage)) {
-            MessageFormatter.progress(
-              `Bulk deletion encountered retryable error, retrying with individual deletion for ${rows.length} rows`,
-              { prefix: "Wipe" }
-            );
-          } else if (isBulkNotSupportedError(errorMessage)) {
-            MessageFormatter.progress(
-              `Bulk deletion not supported by server, switching to individual deletion for ${rows.length} rows`,
-              { prefix: "Wipe" }
-            );
-          } else {
-            MessageFormatter.progress(
-              `Bulk deletion failed (${errorMessage}), falling back to individual deletion for ${rows.length} rows`,
-              { prefix: "Wipe" }
-            );
-          }
-
-          const deletedCount = await tryIndividualDeletion(
-            adapter,
-            databaseId,
-            tableId,
-            rows,
-            INDIVIDUAL_DELETE_BATCH_SIZE,
-            MAX_CONCURRENT_OPERATIONS,
-            progress,
-            totalDeleted
-          );
-          totalDeleted += deletedCount;
-        }
-      } else {
-        // Bulk deletion not available, use optimized individual deletion
-        MessageFormatter.progress(
-          `Using individual deletion for ${rows.length} rows (bulk deletion not available)`,
-          { prefix: "Wipe" }
-        );
-
-        const deletedCount = await tryIndividualDeletion(
-          adapter,
-          databaseId,
-          tableId,
-          rows,
-          INDIVIDUAL_DELETE_BATCH_SIZE,
-          MAX_CONCURRENT_OPERATIONS,
-          progress,
-          totalDeleted
-        );
-        totalDeleted += deletedCount;
-      }
-
-      // Set up cursor for next iteration
-      if (rows.length < FETCH_BATCH_SIZE) {
-        hasMoreRows = false;
-      } else {
-        cursor = rows[rows.length - 1].$id;
-      }
-
-      // Small delay between fetch cycles to be respectful to the API
-      await delay(10);
     }

     // Update final progress total
@@ -389,120 +344,3 @@ export const wipeTableRows = async (
     throw error;
   }
 };
-
-/**
- * Helper function to attempt bulk deletion of row IDs
- */
-async function tryBulkDeletion(
-  adapter: DatabaseAdapter,
-  databaseId: string,
-  tableId: string,
-  rowIds: string[],
-  batchSize: number,
-  maxConcurrent: number
-): Promise<number> {
-  if (!adapter.bulkDeleteRows) {
-    throw new Error("Bulk deletion not available on this adapter");
-  }
-
-  const limit = pLimit(maxConcurrent);
-  const batches = chunk(rowIds, batchSize);
-  let successfullyDeleted = 0;
-
-  const deletePromises = batches.map((batch) =>
-    limit(async () => {
-      try {
-        const result = await tryAwaitWithRetry(async () =>
-          adapter.bulkDeleteRows!({ databaseId, tableId, rowIds: batch })
-        );
-        successfullyDeleted += batch.length; // Assume success if no error thrown
-      } catch (error: any) {
-        const errorMessage = error.message || String(error);
-
-        // Enhanced error handling for bulk deletion
-        if (isCriticalError(errorMessage)) {
-          MessageFormatter.error(
-            `Critical error in bulk deletion batch: ${errorMessage}`,
-            error,
-            { prefix: "Wipe" }
-          );
-          throw error;
-        } else {
-          // For non-critical errors in bulk deletion, re-throw to trigger fallback
-          throw new Error(`Bulk deletion batch failed: ${errorMessage}`);
-        }
-      }
-    })
-  );
-
-  await Promise.all(deletePromises);
-  return successfullyDeleted;
-}
-
-/**
- * Helper function for fallback individual deletion
- */
-async function tryIndividualDeletion(
-  adapter: DatabaseAdapter,
-  databaseId: string,
-  tableId: string,
-  rows: any[],
-  batchSize: number,
-  maxConcurrent: number,
-  progress: any,
-  baseDeleted: number
-): Promise<number> {
-  const limit = pLimit(maxConcurrent);
-  const batches = chunk(rows, batchSize);
-  let processedInBatch = 0;
-  let successfullyDeleted = 0;
-
-  const deletePromises = batches.map((batch) =>
-    limit(async () => {
-      const batchDeletePromises = batch.map(async (row: any) => {
-        try {
-          await tryAwaitWithRetry(async () =>
-            adapter.deleteRow({ databaseId, tableId, id: row.$id })
-          );
-          successfullyDeleted++;
-        } catch (error: any) {
-          const errorMessage = error.message || String(error);
-
-          // Enhanced error handling for row deletion
-          if (errorMessage.includes("Row with the requested ID could not be found")) {
-            // Row already deleted, count as success since it's gone
-            successfullyDeleted++;
-          } else if (isCriticalError(errorMessage)) {
-            // Critical error, log and rethrow to stop operation
-            MessageFormatter.error(
-              `Critical error deleting row ${row.$id}: ${errorMessage}`,
-              error,
-              { prefix: "Wipe" }
-            );
-            throw error;
-          } else if (isRetryableError(errorMessage)) {
-            // Retryable error, will be handled by tryAwaitWithRetry
-            MessageFormatter.progress(
-              `Retryable error for row ${row.$id}, will retry`,
-              { prefix: "Wipe" }
-            );
-          } else {
-            // Other non-critical errors, log but continue
-            MessageFormatter.error(
-              `Failed to delete row ${row.$id}: ${errorMessage}`,
-              error,
-              { prefix: "Wipe" }
-            );
-          }
-        }
-        processedInBatch++;
-        progress.update(baseDeleted + successfullyDeleted);
-      });
-
-      await Promise.all(batchDeletePromises);
-    })
-  );
-
-  await Promise.all(deletePromises);
-  return successfullyDeleted;
-}