@fachkraftfreund/n8n-nodes-supabase 1.2.9 → 1.2.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -150,38 +150,39 @@ async function handleBulkUpdate(supabase, itemCount) {
150
150
  }
151
151
  return [{ json: { data, operation: 'update', table } }];
152
152
  }
153
- function buildReadQuery(context, supabase, table, returnFields, itemIndex, options) {
154
- const selectFields = returnFields && returnFields !== '*' ? returnFields : '*';
155
- let query = supabase.from(table).select(selectFields, options);
153
function getFilters(context, itemIndex) {
    // Resolve the row filters for the current item from either the simple
    // filter UI ("filters.filter" collection) or the advanced JSON editor.
    // Returns an array of { column, operator, value } filter objects.
    const uiMode = context.getNodeParameter('uiMode', itemIndex, 'simple');
    if (uiMode === 'simple') {
        return context.getNodeParameter('filters.filter', itemIndex, []);
    }
    const advancedFilters = context.getNodeParameter('advancedFilters', itemIndex, '');
    if (!advancedFilters)
        return [];
    try {
        const parsed = JSON.parse(advancedFilters);
        const filters = [];
        for (const [column, condition] of Object.entries(parsed)) {
            if (typeof condition === 'object' && condition !== null) {
                const entries = Object.entries(condition);
                if (entries.length === 0) {
                    // An empty condition object carries no operator; treat it as
                    // malformed input (previously the destructuring of
                    // entries[0] threw here and was relabeled by the catch).
                    throw new Error('empty condition');
                }
                // Apply EVERY operator of the condition, e.g.
                // {"age": {"gte": 18, "lte": 65}} yields two filters.
                // Previously only the first entry was used and the rest were
                // silently dropped.
                for (const [operator, value] of entries) {
                    filters.push({ column, operator, value });
                }
            }
            else {
                // Scalar shorthand: {"name": "bob"} means name eq 'bob'.
                filters.push({ column, operator: 'eq', value: condition });
            }
        }
        return filters;
    }
    catch {
        throw new Error('Invalid advanced filters JSON');
    }
}
179
+ function buildReadQuery(supabase, table, returnFields, filters, sort, options) {
180
+ const selectFields = returnFields && returnFields !== '*' ? returnFields : '*';
181
+ let query = supabase.from(table).select(selectFields, options);
182
+ for (const filter of filters) {
183
+ const operator = (0, supabaseClient_1.convertFilterOperator)(filter.operator);
184
+ query = query.filter(filter.column, operator, (0, supabaseClient_1.normalizeFilterValue)(filter.operator, filter.value));
183
185
  }
184
- const sort = context.getNodeParameter('sort.sortField', itemIndex, []);
185
186
  for (const sortField of sort) {
186
187
  query = query.order(sortField.column, { ascending: sortField.ascending });
187
188
  }
@@ -192,46 +193,51 @@ async function handleRead(supabase, itemIndex) {
192
193
  (0, supabaseClient_1.validateTableName)(table);
193
194
  const returnFields = this.getNodeParameter('returnFields', itemIndex, '*');
194
195
  const returnAll = this.getNodeParameter('returnAll', itemIndex, false);
196
+ const filters = getFilters(this, itemIndex);
197
+ const sort = this.getNodeParameter('sort.sortField', itemIndex, []);
198
+ const filterChunks = (0, supabaseClient_1.expandChunkedFilters)(filters);
195
199
  const returnData = [];
196
- if (returnAll) {
197
- const batchSize = 1000;
198
- let offset = 0;
199
- let hasMore = true;
200
- while (hasMore) {
201
- const batchQuery = buildReadQuery(this, supabase, table, returnFields, itemIndex, { count: 'exact' });
202
- const { data: batchData, error: batchError } = await batchQuery.range(offset, offset + batchSize - 1);
203
- if (batchError) {
204
- throw new Error((0, supabaseClient_1.formatSupabaseError)(batchError));
205
- }
206
- if (Array.isArray(batchData)) {
207
- for (const row of batchData) {
208
- returnData.push({ json: row });
200
+ for (const chunkFilters of filterChunks) {
201
+ if (returnAll) {
202
+ const batchSize = 1000;
203
+ let offset = 0;
204
+ let hasMore = true;
205
+ while (hasMore) {
206
+ const batchQuery = buildReadQuery(supabase, table, returnFields, chunkFilters, sort, { count: 'exact' });
207
+ const { data: batchData, error: batchError } = await batchQuery.range(offset, offset + batchSize - 1);
208
+ if (batchError) {
209
+ throw new Error((0, supabaseClient_1.formatSupabaseError)(batchError));
209
210
  }
210
- hasMore = batchData.length === batchSize;
211
- }
212
- else {
213
- hasMore = false;
211
+ if (Array.isArray(batchData)) {
212
+ for (const row of batchData) {
213
+ returnData.push({ json: row });
214
+ }
215
+ hasMore = batchData.length === batchSize;
216
+ }
217
+ else {
218
+ hasMore = false;
219
+ }
220
+ offset += batchSize;
214
221
  }
215
- offset += batchSize;
216
- }
217
- }
218
- else {
219
- const limit = this.getNodeParameter('limit', itemIndex, undefined);
220
- const offset = this.getNodeParameter('offset', itemIndex, undefined);
221
- let query = buildReadQuery(this, supabase, table, returnFields, itemIndex);
222
- if (limit !== undefined) {
223
- query = query.limit(limit);
224
- }
225
- if (offset !== undefined) {
226
- query = query.range(offset, offset + (limit || 1000) - 1);
227
- }
228
- const { data, error } = await query;
229
- if (error) {
230
- throw new Error((0, supabaseClient_1.formatSupabaseError)(error));
231
222
  }
232
- if (Array.isArray(data)) {
233
- for (const row of data) {
234
- returnData.push({ json: row });
223
+ else {
224
+ const limit = this.getNodeParameter('limit', itemIndex, undefined);
225
+ const offset = this.getNodeParameter('offset', itemIndex, undefined);
226
+ let query = buildReadQuery(supabase, table, returnFields, chunkFilters, sort);
227
+ if (limit !== undefined) {
228
+ query = query.limit(limit);
229
+ }
230
+ if (offset !== undefined) {
231
+ query = query.range(offset, offset + (limit || 1000) - 1);
232
+ }
233
+ const { data, error } = await query;
234
+ if (error) {
235
+ throw new Error((0, supabaseClient_1.formatSupabaseError)(error));
236
+ }
237
+ if (Array.isArray(data)) {
238
+ for (const row of data) {
239
+ returnData.push({ json: row });
240
+ }
235
241
  }
236
242
  }
237
243
  }
@@ -251,20 +257,27 @@ async function handleRead(supabase, itemIndex) {
251
257
async function handleDelete(supabase, itemIndex) {
    // Delete all rows matching the configured filters. Oversized `in`
    // filters are expanded into several chunked delete queries; the deleted
    // rows from every chunk are merged into a single result item.
    const table = this.getNodeParameter('table', itemIndex);
    (0, supabaseClient_1.validateTableName)(table);
    const filters = this.getNodeParameter('filters.filter', itemIndex, []);
    // Refuse an unfiltered delete — it would wipe the whole table.
    if (filters.length === 0) {
        throw new Error('At least one filter is required for delete operations to prevent accidental data loss');
    }
    const allDeleted = [];
    for (const chunk of (0, supabaseClient_1.expandChunkedFilters)(filters)) {
        let query = supabase.from(table).delete();
        for (const f of chunk) {
            const op = (0, supabaseClient_1.convertFilterOperator)(f.operator);
            query = query.filter(f.column, op, (0, supabaseClient_1.normalizeFilterValue)(f.operator, f.value));
        }
        // .select() makes Supabase return the rows that were removed.
        const { data, error } = await query.select();
        if (error) {
            throw new Error((0, supabaseClient_1.formatSupabaseError)(error));
        }
        if (Array.isArray(data)) {
            for (const row of data) {
                allDeleted.push(row);
            }
        }
    }
    return [{ json: { data: allDeleted, operation: 'delete', table, deleted: allDeleted.length } }];
}
269
282
  async function handleCreateTable(supabase, itemIndex) {
270
283
  const tableName = this.getNodeParameter('tableName', itemIndex);
@@ -8,7 +8,7 @@ export interface ISupabaseCredentials {
8
8
  export interface IRowFilter {
9
9
  column: string;
10
10
  operator: 'eq' | 'neq' | 'gt' | 'gte' | 'lt' | 'lte' | 'like' | 'ilike' | 'is' | 'in' | 'cs' | 'cd';
11
- value: string | number | boolean | null;
11
+ value: string | number | boolean | null | unknown[];
12
12
  }
13
13
  export interface IRowSort {
14
14
  column: string;
@@ -1,5 +1,5 @@
1
1
  import { SupabaseClient } from '@supabase/supabase-js';
2
- import { ISupabaseCredentials } from '../types';
2
+ import { ISupabaseCredentials, IRowFilter } from '../types';
3
3
  export declare function createSupabaseClient(credentials: ISupabaseCredentials): SupabaseClient;
4
4
  export declare function validateCredentials(credentials: ISupabaseCredentials): void;
5
5
  export declare function getStorageUrl(projectUrl: string): string;
@@ -11,4 +11,7 @@ export declare function sanitizeColumnName(columnName: string): string;
11
11
  export declare function validateTableName(tableName: string): void;
12
12
  export declare function validateColumnName(columnName: string): void;
13
13
  export declare function convertFilterOperator(operator: string): string;
14
- export declare function normalizeFilterValue(operator: string, value: string | number | boolean | null): string | number | boolean | null;
14
+ export declare function normalizeFilterValue(operator: string, value: string | number | boolean | null | unknown[]): string | number | boolean | null;
15
+ export declare const IN_FILTER_CHUNK_SIZE = 200;
16
+ export declare function chunkArray<T>(arr: T[], size: number): T[][];
17
+ export declare function expandChunkedFilters(filters: IRowFilter[]): IRowFilter[][];
@@ -1,6 +1,6 @@
1
1
  "use strict";
2
2
  Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.normalizeFilterValue = exports.convertFilterOperator = exports.validateColumnName = exports.validateTableName = exports.sanitizeColumnName = exports.isNetworkError = exports.isAuthError = exports.formatSupabaseError = exports.getDatabaseUrl = exports.getStorageUrl = exports.validateCredentials = exports.createSupabaseClient = void 0;
3
+ exports.expandChunkedFilters = exports.chunkArray = exports.IN_FILTER_CHUNK_SIZE = exports.normalizeFilterValue = exports.convertFilterOperator = exports.validateColumnName = exports.validateTableName = exports.sanitizeColumnName = exports.isNetworkError = exports.isAuthError = exports.formatSupabaseError = exports.getDatabaseUrl = exports.getStorageUrl = exports.validateCredentials = exports.createSupabaseClient = void 0;
4
4
  const supabase_js_1 = require("@supabase/supabase-js");
5
5
  function createSupabaseClient(credentials) {
6
6
  const client = (0, supabase_js_1.createClient)(credentials.host, credentials.serviceKey, {
@@ -137,13 +137,60 @@ function convertFilterOperator(operator) {
137
137
  }
138
138
  exports.convertFilterOperator = convertFilterOperator;
139
139
function normalizeFilterValue(operator, value) {
    // PostgREST expects `in` filter values as a parenthesised CSV list,
    // e.g. `(1,2,3)`. Every other operator passes its value through as-is.
    if (operator !== 'in') {
        return value;
    }
    if (Array.isArray(value)) {
        return `(${value.join(',')})`;
    }
    if (typeof value === 'string') {
        const trimmed = value.trim();
        return trimmed.startsWith('(') ? trimmed : `(${trimmed})`;
    }
    // Non-string, non-array `in` values are left untouched.
    return value;
}
149
154
  exports.normalizeFilterValue = normalizeFilterValue;
155
+ exports.IN_FILTER_CHUNK_SIZE = 200;
156
function chunkArray(arr, size) {
    // Split `arr` into consecutive slices of at most `size` elements.
    // The final chunk may be shorter; an empty input yields no chunks.
    const out = [];
    let start = 0;
    while (start < arr.length) {
        out.push(arr.slice(start, start + size));
        start += size;
    }
    return out;
}
163
+ exports.chunkArray = chunkArray;
164
function expandChunkedFilters(filters) {
    // Split any oversized `in` filter (more than IN_FILTER_CHUNK_SIZE array
    // values) into smaller chunks, and return the list of filter sets that
    // must each be queried separately to cover the full value range.
    // When nothing needs chunking, the original filter list is returned
    // as the single combination.
    const plain = [];
    const oversized = [];
    for (const f of filters) {
        const needsChunking = f.operator === 'in'
            && Array.isArray(f.value)
            && f.value.length > exports.IN_FILTER_CHUNK_SIZE;
        if (needsChunking) {
            oversized.push({ filter: f, chunks: chunkArray(f.value, exports.IN_FILTER_CHUNK_SIZE) });
        }
        else {
            plain.push(f);
        }
    }
    if (oversized.length === 0) {
        return [filters];
    }
    // Cartesian product: one query per combination of chunks, with the
    // non-chunked filters repeated in every combination.
    let result = [plain];
    for (const { filter, chunks } of oversized) {
        result = result.flatMap((base) => chunks.map((chunk) => [...base, { ...filter, value: chunk }]));
    }
    return result;
}
196
+ exports.expandChunkedFilters = expandChunkedFilters;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@fachkraftfreund/n8n-nodes-supabase",
3
- "version": "1.2.9",
3
+ "version": "1.2.10",
4
4
  "description": "Comprehensive n8n community node for Supabase with database and storage operations",
5
5
  "keywords": [
6
6
  "n8n-community-node-package",