@fachkraftfreund/n8n-nodes-supabase 1.2.16 → 1.2.18

This diff shows the changes between two publicly released versions of the package as published to a supported registry. The information is provided for informational purposes only and reflects the package contents as they appear in the public registry.
@@ -924,6 +924,21 @@ class Supabase {
924
924
  }
925
925
  }
926
926
  }
927
+ else if (resource === 'database' && operation === 'read') {
928
+ try {
929
+ const operationResults = await database_1.executeDatabaseOperation.call(this, supabase, operation, 0, credentials.host);
930
+ returnData.push(...operationResults);
931
+ }
932
+ catch (error) {
933
+ const errorMessage = error instanceof Error ? error.message : 'Unknown error occurred';
934
+ if (this.continueOnFail()) {
935
+ returnData.push({ json: { error: errorMessage } });
936
+ }
937
+ else {
938
+ throw new n8n_workflow_1.NodeOperationError(this.getNode(), errorMessage);
939
+ }
940
+ }
941
+ }
927
942
  else {
928
943
  for (let itemIndex = 0; itemIndex < items.length; itemIndex++) {
929
944
  try {
@@ -206,35 +206,44 @@ async function handleRead(supabase, itemIndex, hostUrl) {
206
206
  const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters, maxInChars, maxItems);
207
207
  console.log(`[Supabase READ] item=${itemIndex} table=${table} returnAll=${returnAll} chunks=${filterChunks.length} maxItems=${maxItems} maxInChars=${maxInChars}`);
208
208
  const returnData = [];
209
- if (returnAll) {
210
- for (let ci = 0; ci < filterChunks.length; ci++) {
211
- const chunkFilters = filterChunks[ci];
212
- const inFilter = chunkFilters.find(f => f.operator === 'in');
213
- const chunkIds = inFilter && Array.isArray(inFilter.value) ? inFilter.value.length : '?';
214
- console.log(`[Supabase READ] chunk ${ci + 1}/${filterChunks.length} (${chunkIds} IDs) - starting...`);
215
- const chunkStart = Date.now();
216
- const batchSize = 1000;
217
- let batchOffset = 0;
218
- let hasMore = true;
219
- while (hasMore) {
220
- const batchQuery = buildReadQuery(supabase, table, returnFields, chunkFilters, sort, { count: 'exact' });
221
- const { data: batchData, error: batchError } = await batchQuery.range(batchOffset, batchOffset + batchSize - 1);
222
- if (batchError) {
223
- console.log(`[Supabase READ] chunk ${ci + 1} FAILED after ${Date.now() - chunkStart}ms: ${(0, supabaseClient_1.formatSupabaseError)(batchError)}`);
224
- throw new Error((0, supabaseClient_1.formatSupabaseError)(batchError));
225
- }
226
- if (Array.isArray(batchData)) {
227
- for (const row of batchData) {
228
- returnData.push({ json: row });
229
- }
230
- hasMore = batchData.length === batchSize;
231
- }
232
- else {
233
- hasMore = false;
209
+ async function executeChunk(chunkFilters, chunkIndex, totalChunks) {
210
+ const inFilter = chunkFilters.find(f => f.operator === 'in');
211
+ const chunkIds = inFilter && Array.isArray(inFilter.value) ? inFilter.value.length : '?';
212
+ console.log(`[Supabase READ] chunk ${chunkIndex + 1}/${totalChunks} (${chunkIds} IDs) - starting...`);
213
+ const chunkStart = Date.now();
214
+ const rows = [];
215
+ const batchSize = 1000;
216
+ let batchOffset = 0;
217
+ let hasMore = true;
218
+ while (hasMore) {
219
+ const batchQuery = buildReadQuery(supabase, table, returnFields, chunkFilters, sort);
220
+ const { data: batchData, error: batchError } = await batchQuery.range(batchOffset, batchOffset + batchSize - 1);
221
+ if (batchError) {
222
+ console.log(`[Supabase READ] chunk ${chunkIndex + 1} FAILED after ${Date.now() - chunkStart}ms: ${(0, supabaseClient_1.formatSupabaseError)(batchError)}`);
223
+ throw new Error((0, supabaseClient_1.formatSupabaseError)(batchError));
224
+ }
225
+ if (Array.isArray(batchData)) {
226
+ for (const row of batchData) {
227
+ rows.push({ json: row });
234
228
  }
235
- batchOffset += batchSize;
229
+ hasMore = batchData.length === batchSize;
230
+ }
231
+ else {
232
+ hasMore = false;
233
+ }
234
+ batchOffset += batchSize;
235
+ }
236
+ console.log(`[Supabase READ] chunk ${chunkIndex + 1}/${totalChunks} done in ${Date.now() - chunkStart}ms, rows: ${rows.length}`);
237
+ return rows;
238
+ }
239
+ if (returnAll) {
240
+ const CONCURRENCY = 3;
241
+ for (let i = 0; i < filterChunks.length; i += CONCURRENCY) {
242
+ const batch = filterChunks.slice(i, i + CONCURRENCY);
243
+ const results = await Promise.all(batch.map((chunkFilters, j) => executeChunk(chunkFilters, i + j, filterChunks.length)));
244
+ for (const rows of results) {
245
+ returnData.push(...rows);
236
246
  }
237
- console.log(`[Supabase READ] chunk ${ci + 1}/${filterChunks.length} done in ${Date.now() - chunkStart}ms, rows so far: ${returnData.length}`);
238
247
  }
239
248
  }
240
249
  else {
@@ -9,6 +9,13 @@ function createSupabaseClient(credentials) {
9
9
  persistSession: false,
10
10
  detectSessionInUrl: false,
11
11
  },
12
+ global: {
13
+ fetch: (url, init) => {
14
+ const controller = new AbortController();
15
+ const timeout = setTimeout(() => controller.abort(), 60000);
16
+ return fetch(url, { ...init, signal: controller.signal }).finally(() => clearTimeout(timeout));
17
+ },
18
+ },
12
19
  });
13
20
  return client;
14
21
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fachkraftfreund/n8n-nodes-supabase",
3
- "version": "1.2.16",
3
+ "version": "1.2.18",
4
4
  "description": "Comprehensive n8n community node for Supabase with database and storage operations",
5
5
  "keywords": [
6
6
  "n8n-community-node-package",