@fachkraftfreund/n8n-nodes-supabase 1.2.12 → 1.2.14

@@ -928,7 +928,7 @@ class Supabase {
 try {
     let operationResults = [];
     if (resource === 'database') {
-        operationResults = await database_1.executeDatabaseOperation.call(this, supabase, operation, itemIndex);
+        operationResults = await database_1.executeDatabaseOperation.call(this, supabase, operation, itemIndex, credentials.host);
     }
     else if (resource === 'storage') {
        operationResults = await storage_1.executeStorageOperation.call(this, supabase, operation, itemIndex);
@@ -1,5 +1,5 @@
 import { SupabaseClient } from '@supabase/supabase-js';
 import { IExecuteFunctions, INodeExecutionData } from 'n8n-workflow';
 import { DatabaseOperation } from '../../types';
-export declare function executeDatabaseOperation(this: IExecuteFunctions, supabase: SupabaseClient, operation: DatabaseOperation, itemIndex: number): Promise<INodeExecutionData[]>;
+export declare function executeDatabaseOperation(this: IExecuteFunctions, supabase: SupabaseClient, operation: DatabaseOperation, itemIndex: number, hostUrl: string): Promise<INodeExecutionData[]>;
 export declare function executeBulkDatabaseOperation(this: IExecuteFunctions, supabase: SupabaseClient, operation: DatabaseOperation, itemCount: number): Promise<INodeExecutionData[]>;
@@ -2,15 +2,15 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.executeBulkDatabaseOperation = exports.executeDatabaseOperation = void 0;
 const supabaseClient_1 = require("../../utils/supabaseClient");
-async function executeDatabaseOperation(supabase, operation, itemIndex) {
+async function executeDatabaseOperation(supabase, operation, itemIndex, hostUrl) {
     const returnData = [];
     try {
         switch (operation) {
             case 'read':
-                returnData.push(...await handleRead.call(this, supabase, itemIndex));
+                returnData.push(...await handleRead.call(this, supabase, itemIndex, hostUrl));
                 break;
             case 'delete':
-                returnData.push(...await handleDelete.call(this, supabase, itemIndex));
+                returnData.push(...await handleDelete.call(this, supabase, itemIndex, hostUrl));
                 break;
             case 'createTable':
                 returnData.push(...await handleCreateTable.call(this, supabase, itemIndex));
@@ -188,23 +188,25 @@ function buildReadQuery(supabase, table, returnFields, filters, sort, options) {
     }
     return query;
 }
-async function handleRead(supabase, itemIndex) {
+async function handleRead(supabase, itemIndex, hostUrl) {
     const table = this.getNodeParameter('table', itemIndex);
     (0, supabaseClient_1.validateTableName)(table);
     const returnFields = this.getNodeParameter('returnFields', itemIndex, '*');
     const returnAll = this.getNodeParameter('returnAll', itemIndex, false);
     const filters = getFilters(this, itemIndex);
     const sort = this.getNodeParameter('sort.sortField', itemIndex, []);
-    const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters);
+    const overhead = (0, supabaseClient_1.estimateUrlOverhead)(hostUrl, table, returnFields, filters, sort);
+    const maxInChars = Math.max(500, supabaseClient_1.MAX_SAFE_URL_LENGTH - overhead);
+    const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters, maxInChars);
     const returnData = [];
-    for (const chunkFilters of filterChunks) {
-        if (returnAll) {
+    if (returnAll) {
+        for (const chunkFilters of filterChunks) {
             const batchSize = 1000;
-            let offset = 0;
+            let batchOffset = 0;
             let hasMore = true;
             while (hasMore) {
                 const batchQuery = buildReadQuery(supabase, table, returnFields, chunkFilters, sort, { count: 'exact' });
-                const { data: batchData, error: batchError } = await batchQuery.range(offset, offset + batchSize - 1);
+                const { data: batchData, error: batchError } = await batchQuery.range(batchOffset, batchOffset + batchSize - 1);
                 if (batchError) {
                     throw new Error((0, supabaseClient_1.formatSupabaseError)(batchError));
                 }
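
Note on the hunk above: the read path no longer relies on a fixed character cap for `in` filter values. It asks estimateUrlOverhead how many characters the host, table, select list, other filters, and sort already consume, and hands the remainder of MAX_SAFE_URL_LENGTH (never less than 500 characters) to expandChunkedFilters as the budget for `in` values. A rough standalone sketch of that budget derivation follows; it is not part of the diff and simplifies the overhead estimate:

// Simplified illustration of the budget handed to expandChunkedFilters.
// The real estimateUrlOverhead (see the utils/supabaseClient hunk further down)
// also counts each filter's column name and operator, the in.() wrapper, and sort clauses.
const MAX_SAFE_URL_LENGTH = 7500;

function inValueBudget(hostUrl: string, table: string, selectFields: string): number {
    const fixedParts =
        hostUrl.length + '/rest/v1/'.length + table.length + 1 +   // endpoint path
        'select='.length + selectFields.length + 1 +               // select list
        230;                                                        // fixed safety margin used by the package
    return Math.max(500, MAX_SAFE_URL_LENGTH - fixedParts);
}

// Example: a typical project URL and a short select list leave most of the
// 7500-character allowance for the in.(...) value list.
console.log(inValueBudget('https://example-project.supabase.co', 'orders', 'id,customer_id,total'));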
@@ -217,18 +219,26 @@ async function handleRead(supabase, itemIndex) {
                 else {
                     hasMore = false;
                 }
-                offset += batchSize;
+                batchOffset += batchSize;
             }
         }
-        else {
-            const limit = this.getNodeParameter('limit', itemIndex, undefined);
-            const offset = this.getNodeParameter('offset', itemIndex, undefined);
+    }
+    else {
+        const limit = this.getNodeParameter('limit', itemIndex, 100);
+        const userOffset = this.getNodeParameter('offset', itemIndex, 0);
+        const isMultiChunk = filterChunks.length > 1;
+        for (const chunkFilters of filterChunks) {
             let query = buildReadQuery(supabase, table, returnFields, chunkFilters, sort);
-            if (limit !== undefined) {
-                query = query.limit(limit);
+            if (isMultiChunk) {
+                query = query.limit(userOffset + limit);
             }
-            if (offset !== undefined) {
-                query = query.range(offset, offset + (limit || 1000) - 1);
+            else {
+                if (userOffset > 0) {
+                    query = query.range(userOffset, userOffset + limit - 1);
+                }
+                else {
+                    query = query.limit(limit);
+                }
             }
             const { data, error } = await query;
             if (error) {
@@ -240,6 +250,11 @@ async function handleRead(supabase, itemIndex) {
                 }
             }
         }
+        if (isMultiChunk && (userOffset > 0 || returnData.length > limit)) {
+            const sliced = returnData.slice(userOffset, userOffset + limit);
+            returnData.length = 0;
+            returnData.push(...sliced);
+        }
     }
     if (returnData.length === 0) {
         returnData.push({
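
Also from the hunks above: once the `in` values are split across several requests, limit and offset can no longer be delegated to PostgREST directly. Each chunked query is therefore capped at userOffset + limit rows, and the offset/limit window is applied once, client-side, over the merged result (the isMultiChunk slice). A small sketch of that merge-and-slice step, with the per-chunk fetch stubbed out; the names mirror the diff, but this is an illustration rather than the shipped code:

// Apply a user-facing limit/offset across results gathered from several filter chunks.
async function readAcrossChunks<T>(
    filterChunks: unknown[][],
    fetchChunk: (chunkFilters: unknown[], rowCap: number) => Promise<T[]>,
    limit: number,
    userOffset: number,
): Promise<T[]> {
    const merged: T[] = [];
    for (const chunkFilters of filterChunks) {
        // Any chunk may hold rows that fall inside the requested window,
        // so each one is allowed to return up to userOffset + limit rows.
        merged.push(...await fetchChunk(chunkFilters, userOffset + limit));
    }
    // The window is applied exactly once, over the merged rows.
    return merged.slice(userOffset, userOffset + limit);
}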
@@ -254,14 +269,16 @@ async function handleRead(supabase, itemIndex) {
     }
     return returnData;
 }
-async function handleDelete(supabase, itemIndex) {
+async function handleDelete(supabase, itemIndex, hostUrl) {
     const table = this.getNodeParameter('table', itemIndex);
     (0, supabaseClient_1.validateTableName)(table);
     const filters = this.getNodeParameter('filters.filter', itemIndex, []);
     if (filters.length === 0) {
         throw new Error('At least one filter is required for delete operations to prevent accidental data loss');
     }
-    const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters);
+    const overhead = (0, supabaseClient_1.estimateUrlOverhead)(hostUrl, table, undefined, filters);
+    const maxInChars = Math.max(500, supabaseClient_1.MAX_SAFE_URL_LENGTH - overhead);
+    const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters, maxInChars);
     const allDeleted = [];
     for (const chunkFilters of filterChunks) {
         let query = supabase.from(table).delete();
@@ -1,5 +1,5 @@
 import { SupabaseClient } from '@supabase/supabase-js';
-import { ISupabaseCredentials, IRowFilter } from '../types';
+import { ISupabaseCredentials, IRowFilter, IRowSort } from '../types';
 export declare function createSupabaseClient(credentials: ISupabaseCredentials): SupabaseClient;
 export declare function validateCredentials(credentials: ISupabaseCredentials): void;
 export declare function getStorageUrl(projectUrl: string): string;
@@ -12,6 +12,7 @@ export declare function validateTableName(tableName: string): void;
 export declare function validateColumnName(columnName: string): void;
 export declare function convertFilterOperator(operator: string): string;
 export declare function normalizeFilterValue(operator: string, value: string | number | boolean | null | unknown[]): string | number | boolean | null;
-export declare const IN_FILTER_MAX_CHAR_LENGTH = 4000;
-export declare function chunkInFilterValues(values: unknown[]): unknown[][];
-export declare function expandChunkedFilters(filters: IRowFilter[]): IRowFilter[][];
+export declare const MAX_SAFE_URL_LENGTH = 7500;
+export declare function estimateUrlOverhead(hostUrl: string, table: string, selectFields?: string, filters?: IRowFilter[], sort?: IRowSort[]): number;
+export declare function chunkInFilterValues(values: unknown[], maxChars: number): unknown[][];
+export declare function expandChunkedFilters(filters: IRowFilter[], maxInChars?: number): IRowFilter[][];
@@ -1,6 +1,6 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.expandChunkedFilters = exports.chunkInFilterValues = exports.IN_FILTER_MAX_CHAR_LENGTH = exports.normalizeFilterValue = exports.convertFilterOperator = exports.validateColumnName = exports.validateTableName = exports.sanitizeColumnName = exports.isNetworkError = exports.isAuthError = exports.formatSupabaseError = exports.getDatabaseUrl = exports.getStorageUrl = exports.validateCredentials = exports.createSupabaseClient = void 0;
+exports.expandChunkedFilters = exports.chunkInFilterValues = exports.estimateUrlOverhead = exports.MAX_SAFE_URL_LENGTH = exports.normalizeFilterValue = exports.convertFilterOperator = exports.validateColumnName = exports.validateTableName = exports.sanitizeColumnName = exports.isNetworkError = exports.isAuthError = exports.formatSupabaseError = exports.getDatabaseUrl = exports.getStorageUrl = exports.validateCredentials = exports.createSupabaseClient = void 0;
 const supabase_js_1 = require("@supabase/supabase-js");
 function createSupabaseClient(credentials) {
     const client = (0, supabase_js_1.createClient)(credentials.host, credentials.serviceKey, {
@@ -152,15 +152,41 @@ function normalizeFilterValue(operator, value) {
     return value;
 }
 exports.normalizeFilterValue = normalizeFilterValue;
-exports.IN_FILTER_MAX_CHAR_LENGTH = 4000;
-function chunkInFilterValues(values) {
+exports.MAX_SAFE_URL_LENGTH = 7500;
+const MIN_IN_CHUNK_CHARS = 500;
+function estimateUrlOverhead(hostUrl, table, selectFields, filters, sort) {
+    let overhead = hostUrl.length + '/rest/v1/'.length + table.length + 1;
+    if (selectFields) {
+        overhead += 'select='.length + selectFields.length + 1;
+    }
+    if (filters) {
+        for (const f of filters) {
+            if (f.operator === 'in') {
+                overhead += f.column.length + '=in.()&'.length;
+            }
+            else {
+                const val = normalizeFilterValue(f.operator, f.value);
+                overhead += f.column.length + 1 + f.operator.length + 1 + String(val).length + 1;
+            }
+        }
+    }
+    if (sort) {
+        for (const s of sort) {
+            overhead += 'order='.length + s.column.length + 1 + (s.ascending ? 3 : 4) + 1;
+        }
+    }
+    overhead += 230;
+    return overhead;
+}
+exports.estimateUrlOverhead = estimateUrlOverhead;
+function chunkInFilterValues(values, maxChars) {
     const chunks = [];
     let currentChunk = [];
     let currentLength = 0;
     for (const value of values) {
         const valueStr = String(value);
         const addedLength = currentChunk.length === 0 ? valueStr.length : valueStr.length + 1;
-        if (currentLength + addedLength > exports.IN_FILTER_MAX_CHAR_LENGTH && currentChunk.length > 0) {
+        if (currentLength + addedLength > maxChars && currentChunk.length > 0) {
            chunks.push(currentChunk);
            currentChunk = [value];
            currentLength = valueStr.length;
@@ -176,25 +202,57 @@ function chunkInFilterValues(values) {
     return chunks;
 }
 exports.chunkInFilterValues = chunkInFilterValues;
-function expandChunkedFilters(filters) {
+function expandChunkedFilters(filters, maxInChars) {
     const staticFilters = [];
-    const chunkedEntries = [];
+    const inEntries = [];
     for (const filter of filters) {
-        if (filter.operator === 'in' && Array.isArray(filter.value)) {
-            const serialized = filter.value.map(String).join(',');
-            if (serialized.length > exports.IN_FILTER_MAX_CHAR_LENGTH) {
-                chunkedEntries.push({
-                    filter,
-                    chunks: chunkInFilterValues(filter.value),
-                });
+        if (filter.operator === 'in') {
+            let values;
+            if (Array.isArray(filter.value)) {
+                values = filter.value;
+            }
+            else if (typeof filter.value === 'string') {
+                let str = filter.value.trim();
+                if (str.startsWith('(') && str.endsWith(')'))
+                    str = str.slice(1, -1);
+                if (str.startsWith('[') && str.endsWith(']'))
+                    str = str.slice(1, -1);
+                values = str.split(',').map(v => v.trim()).filter(v => v.length > 0);
+            }
+            else {
+                staticFilters.push(filter);
                 continue;
             }
+            const serializedLength = values.map(String).join(',').length;
+            inEntries.push({ filter: { ...filter, value: values }, values, serializedLength });
+            continue;
         }
         staticFilters.push(filter);
     }
-    if (chunkedEntries.length === 0) {
+    if (inEntries.length === 0) {
         return [filters];
     }
+    const defaultBudget = exports.MAX_SAFE_URL_LENGTH - 500;
+    const totalBudget = maxInChars !== null && maxInChars !== void 0 ? maxInChars : defaultBudget;
+    const perFilterBudget = Math.max(MIN_IN_CHUNK_CHARS, Math.floor(totalBudget / inEntries.length));
+    const chunkedEntries = [];
+    for (const entry of inEntries) {
+        if (entry.serializedLength > perFilterBudget) {
+            chunkedEntries.push({
+                filter: entry.filter,
+                chunks: chunkInFilterValues(entry.values, perFilterBudget),
+            });
+        }
+        else {
+            staticFilters.push(entry.filter);
+        }
+    }
+    if (chunkedEntries.length === 0) {
+        return [filters.map(f => {
+            const inEntry = inEntries.find(e => e.filter.column === f.column && f.operator === 'in');
+            return inEntry ? inEntry.filter : f;
+        })];
+    }
     let combinations = [staticFilters];
     for (const { filter, chunks } of chunkedEntries) {
         const newCombinations = [];
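
expandChunkedFilters now takes the character budget computed by its caller, splits it evenly across all `in` filters (with a 500-character floor per filter via MIN_IN_CHUNK_CHARS), chunks only the filters that exceed their share, and then expands the chunks into filter combinations (the combinations/newCombinations loop whose start is visible above; its body is unchanged and not shown in the diff). A hypothetical illustration of the resulting shape, assuming the expansion behaves as a plain cross product of the chunks:

// Illustration only: two oversized 'in' filters, each split into two value chunks,
// yield 2 x 2 = 4 filter sets, i.e. four separate PostgREST requests.
type InFilter = { column: string; operator: 'in'; value: unknown[] };

function combine(staticFilters: InFilter[], chunked: { filter: InFilter; chunks: unknown[][] }[]): InFilter[][] {
    let combinations: InFilter[][] = [staticFilters];
    for (const { filter, chunks } of chunked) {
        const next: InFilter[][] = [];
        for (const base of combinations) {
            for (const chunk of chunks) {
                next.push([...base, { ...filter, value: chunk }]);
            }
        }
        combinations = next;
    }
    return combinations;
}

const sets = combine([], [
    { filter: { column: 'id', operator: 'in', value: [] }, chunks: [[1, 2], [3, 4]] },
    { filter: { column: 'code', operator: 'in', value: [] }, chunks: [['a'], ['b']] },
]);
console.log(sets.length); // 4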
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fachkraftfreund/n8n-nodes-supabase",
-  "version": "1.2.12",
+  "version": "1.2.14",
   "description": "Comprehensive n8n community node for Supabase with database and storage operations",
   "keywords": [
     "n8n-community-node-package",