@fachkraftfreund/n8n-nodes-supabase 1.2.14 → 1.2.16

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -891,6 +891,7 @@ class Supabase {
891
891
  async execute() {
892
892
  const items = this.getInputData();
893
893
  const returnData = [];
894
+ console.log(`[Supabase] execute: ${items.length} input items`);
894
895
  const credentials = await this.getCredentials('supabaseExtendedApi');
895
896
  try {
896
897
  (0, supabaseClient_1.validateCredentials)(credentials);
@@ -195,12 +195,24 @@ async function handleRead(supabase, itemIndex, hostUrl) {
195
195
  const returnAll = this.getNodeParameter('returnAll', itemIndex, false);
196
196
  const filters = getFilters(this, itemIndex);
197
197
  const sort = this.getNodeParameter('sort.sortField', itemIndex, []);
198
+ for (const f of filters) {
199
+ const valType = Array.isArray(f.value) ? `array[${f.value.length}]` : typeof f.value;
200
+ const valLen = typeof f.value === 'string' ? f.value.length : Array.isArray(f.value) ? f.value.length : 0;
201
+ console.log(`[Supabase READ] item=${itemIndex} filter: ${f.column} ${f.operator} (${valType}, len=${valLen})`);
202
+ }
198
203
  const overhead = (0, supabaseClient_1.estimateUrlOverhead)(hostUrl, table, returnFields, filters, sort);
199
204
  const maxInChars = Math.max(500, supabaseClient_1.MAX_SAFE_URL_LENGTH - overhead);
200
- const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters, maxInChars);
205
+ const maxItems = (0, supabaseClient_1.computeMaxIdsPerChunk)(returnFields);
206
+ const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters, maxInChars, maxItems);
207
+ console.log(`[Supabase READ] item=${itemIndex} table=${table} returnAll=${returnAll} chunks=${filterChunks.length} maxItems=${maxItems} maxInChars=${maxInChars}`);
201
208
  const returnData = [];
202
209
  if (returnAll) {
203
- for (const chunkFilters of filterChunks) {
210
+ for (let ci = 0; ci < filterChunks.length; ci++) {
211
+ const chunkFilters = filterChunks[ci];
212
+ const inFilter = chunkFilters.find(f => f.operator === 'in');
213
+ const chunkIds = inFilter && Array.isArray(inFilter.value) ? inFilter.value.length : '?';
214
+ console.log(`[Supabase READ] chunk ${ci + 1}/${filterChunks.length} (${chunkIds} IDs) - starting...`);
215
+ const chunkStart = Date.now();
204
216
  const batchSize = 1000;
205
217
  let batchOffset = 0;
206
218
  let hasMore = true;
@@ -208,6 +220,7 @@ async function handleRead(supabase, itemIndex, hostUrl) {
208
220
  const batchQuery = buildReadQuery(supabase, table, returnFields, chunkFilters, sort, { count: 'exact' });
209
221
  const { data: batchData, error: batchError } = await batchQuery.range(batchOffset, batchOffset + batchSize - 1);
210
222
  if (batchError) {
223
+ console.log(`[Supabase READ] chunk ${ci + 1} FAILED after ${Date.now() - chunkStart}ms: ${(0, supabaseClient_1.formatSupabaseError)(batchError)}`);
211
224
  throw new Error((0, supabaseClient_1.formatSupabaseError)(batchError));
212
225
  }
213
226
  if (Array.isArray(batchData)) {
@@ -221,6 +234,7 @@ async function handleRead(supabase, itemIndex, hostUrl) {
221
234
  }
222
235
  batchOffset += batchSize;
223
236
  }
237
+ console.log(`[Supabase READ] chunk ${ci + 1}/${filterChunks.length} done in ${Date.now() - chunkStart}ms, rows so far: ${returnData.length}`);
224
238
  }
225
239
  }
226
240
  else {
@@ -13,6 +13,7 @@ export declare function validateColumnName(columnName: string): void;
13
13
  export declare function convertFilterOperator(operator: string): string;
14
14
  export declare function normalizeFilterValue(operator: string, value: string | number | boolean | null | unknown[]): string | number | boolean | null;
15
15
  export declare const MAX_SAFE_URL_LENGTH = 7500;
16
+ export declare function computeMaxIdsPerChunk(selectFields?: string): number;
16
17
  export declare function estimateUrlOverhead(hostUrl: string, table: string, selectFields?: string, filters?: IRowFilter[], sort?: IRowSort[]): number;
17
- export declare function chunkInFilterValues(values: unknown[], maxChars: number): unknown[][];
18
- export declare function expandChunkedFilters(filters: IRowFilter[], maxInChars?: number): IRowFilter[][];
18
+ export declare function chunkInFilterValues(values: unknown[], maxChars: number, maxItems?: number): unknown[][];
19
+ export declare function expandChunkedFilters(filters: IRowFilter[], maxInChars?: number, maxItems?: number): IRowFilter[][];
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.expandChunkedFilters = exports.chunkInFilterValues = exports.estimateUrlOverhead = exports.MAX_SAFE_URL_LENGTH = exports.normalizeFilterValue = exports.convertFilterOperator = exports.validateColumnName = exports.validateTableName = exports.sanitizeColumnName = exports.isNetworkError = exports.isAuthError = exports.formatSupabaseError = exports.getDatabaseUrl = exports.getStorageUrl = exports.validateCredentials = exports.createSupabaseClient = void 0;
3
+ exports.expandChunkedFilters = exports.chunkInFilterValues = exports.estimateUrlOverhead = exports.computeMaxIdsPerChunk = exports.MAX_SAFE_URL_LENGTH = exports.normalizeFilterValue = exports.convertFilterOperator = exports.validateColumnName = exports.validateTableName = exports.sanitizeColumnName = exports.isNetworkError = exports.isAuthError = exports.formatSupabaseError = exports.getDatabaseUrl = exports.getStorageUrl = exports.validateCredentials = exports.createSupabaseClient = void 0;
4
4
  const supabase_js_1 = require("@supabase/supabase-js");
5
5
  function createSupabaseClient(credentials) {
6
6
  const client = (0, supabase_js_1.createClient)(credentials.host, credentials.serviceKey, {
@@ -154,6 +154,16 @@ function normalizeFilterValue(operator, value) {
154
154
  exports.normalizeFilterValue = normalizeFilterValue;
155
155
  exports.MAX_SAFE_URL_LENGTH = 7500;
156
156
  const MIN_IN_CHUNK_CHARS = 500;
157
/**
 * Upper bound on how many IDs a single chunked `in` filter may carry.
 *
 * With no select string, or a plain `'*'` select, the full base limit of
 * 2000 IDs applies. Every `(` in the select string marks an embedded
 * (joined) resource in PostgREST select syntax; each join multiplies the
 * per-row response cost, so the limit is scaled down accordingly, but is
 * never reduced below a floor of 100 IDs per chunk.
 *
 * @param {string} [selectFields] PostgREST select expression, e.g. `id,orders(id)`.
 * @returns {number} Maximum number of IDs to place in one `in` chunk.
 */
function computeMaxIdsPerChunk(selectFields) {
    const baseLimit = 2000;
    if (!selectFields || selectFields === '*') {
        return baseLimit;
    }
    // Count embedded resources: each '(' opens one join in the select string.
    let embeddedResources = 0;
    for (const ch of selectFields) {
        if (ch === '(') {
            embeddedResources += 1;
        }
    }
    if (embeddedResources === 0) {
        return baseLimit;
    }
    // Scale the base limit down by join count, clamped to a sane minimum.
    const scaled = Math.floor(baseLimit / (1 + embeddedResources * 1.5));
    return Math.max(100, scaled);
}
exports.computeMaxIdsPerChunk = computeMaxIdsPerChunk;
157
167
  function estimateUrlOverhead(hostUrl, table, selectFields, filters, sort) {
158
168
  let overhead = hostUrl.length + '/rest/v1/'.length + table.length + 1;
159
169
  if (selectFields) {
@@ -179,14 +189,14 @@ function estimateUrlOverhead(hostUrl, table, selectFields, filters, sort) {
179
189
  return overhead;
180
190
  }
181
191
  exports.estimateUrlOverhead = estimateUrlOverhead;
182
- function chunkInFilterValues(values, maxChars) {
192
+ function chunkInFilterValues(values, maxChars, maxItems = Infinity) {
183
193
  const chunks = [];
184
194
  let currentChunk = [];
185
195
  let currentLength = 0;
186
196
  for (const value of values) {
187
197
  const valueStr = String(value);
188
198
  const addedLength = currentChunk.length === 0 ? valueStr.length : valueStr.length + 1;
189
- if (currentLength + addedLength > maxChars && currentChunk.length > 0) {
199
+ if ((currentLength + addedLength > maxChars || currentChunk.length >= maxItems) && currentChunk.length > 0) {
190
200
  chunks.push(currentChunk);
191
201
  currentChunk = [value];
192
202
  currentLength = valueStr.length;
@@ -202,7 +212,7 @@ function chunkInFilterValues(values, maxChars) {
202
212
  return chunks;
203
213
  }
204
214
  exports.chunkInFilterValues = chunkInFilterValues;
205
- function expandChunkedFilters(filters, maxInChars) {
215
+ function expandChunkedFilters(filters, maxInChars, maxItems) {
206
216
  const staticFilters = [];
207
217
  const inEntries = [];
208
218
  for (const filter of filters) {
@@ -236,11 +246,12 @@ function expandChunkedFilters(filters, maxInChars) {
236
246
  const totalBudget = maxInChars !== null && maxInChars !== void 0 ? maxInChars : defaultBudget;
237
247
  const perFilterBudget = Math.max(MIN_IN_CHUNK_CHARS, Math.floor(totalBudget / inEntries.length));
238
248
  const chunkedEntries = [];
249
+ const itemCap = maxItems !== null && maxItems !== void 0 ? maxItems : Infinity;
239
250
  for (const entry of inEntries) {
240
- if (entry.serializedLength > perFilterBudget) {
251
+ if (entry.serializedLength > perFilterBudget || entry.values.length > itemCap) {
241
252
  chunkedEntries.push({
242
253
  filter: entry.filter,
243
- chunks: chunkInFilterValues(entry.values, perFilterBudget),
254
+ chunks: chunkInFilterValues(entry.values, perFilterBudget, itemCap),
244
255
  });
245
256
  }
246
257
  else {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fachkraftfreund/n8n-nodes-supabase",
3
- "version": "1.2.14",
3
+ "version": "1.2.16",
4
4
  "description": "Comprehensive n8n community node for Supabase with database and storage operations",
5
5
  "keywords": [
6
6
  "n8n-community-node-package",