@fachkraftfreund/n8n-nodes-supabase 1.3.0 → 1.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,6 +4,9 @@ exports.SupabaseCsvExport = void 0;
  const n8n_workflow_1 = require("n8n-workflow");
  const supabaseClient_1 = require("./utils/supabaseClient");
  const supabaseClient_2 = require("./utils/supabaseClient");
+ function escapeRegExp(s) {
+     return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
+ }
  function escapeCsvField(value, delimiter, quoteChar) {
      if (value === null || value === undefined)
          return '';
@@ -12,31 +15,28 @@ function escapeCsvField(value, delimiter, quoteChar) {
          str.includes(quoteChar) ||
          str.includes('\n') ||
          str.includes('\r')) {
-         return quoteChar + str.replace(new RegExp(escapeRegExp(quoteChar), 'g'), quoteChar + quoteChar) + quoteChar;
+         const escaped = quoteChar + str.replace(new RegExp(escapeRegExp(quoteChar), 'g'), quoteChar + quoteChar) + quoteChar;
+         return escaped;
      }
      return str;
  }
- function escapeRegExp(s) {
-     return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
- }
- function generateCsv(rows, options) {
-     if (rows.length === 0)
-         return '';
-     const { delimiter, quoteChar, includeHeaders } = options;
-     const headerSet = new Set();
+ function discoverHeaders(rows) {
+     const set = new Set();
      for (const row of rows) {
          for (const key of Object.keys(row))
-             headerSet.add(key);
-     }
-     const headers = [...headerSet];
-     const lines = [];
-     if (includeHeaders) {
-         lines.push(headers.map((h) => escapeCsvField(h, delimiter, quoteChar)).join(delimiter));
+             set.add(key);
      }
-     for (const row of rows) {
-         lines.push(headers.map((h) => escapeCsvField(row[h], delimiter, quoteChar)).join(delimiter));
+     return [...set];
+ }
+ function batchToCsvBuffer(rows, headers, delimiter, quoteChar) {
+     const lines = new Array(rows.length);
+     for (let i = 0; i < rows.length; i++) {
+         const row = rows[i];
+         lines[i] = headers
+             .map((h) => escapeCsvField(row[h], delimiter, quoteChar))
+             .join(delimiter);
      }
-     return lines.join('\n');
+     return Buffer.from(lines.join('\n'), 'utf-8');
  }
  function buildSelectQuery(supabase, table, selectFields, filters, sort) {
      let query = supabase.from(table).select(selectFields);
@@ -75,95 +75,90 @@ function parseFilters(context, itemIndex) {
          throw new Error('Invalid advanced filters JSON');
      }
  }
- async function fetchAllRows(supabase, table, selectFields, filters, sort, hostUrl, returnAll, limit) {
+ const BATCH_SIZE = 1000;
+ async function* fetchBatches(supabase, table, selectFields, filters, sort, hostUrl, returnAll, limit) {
      const overhead = (0, supabaseClient_2.estimateUrlOverhead)(hostUrl, table, selectFields, filters, sort);
      const maxInChars = Math.max(500, supabaseClient_2.MAX_SAFE_URL_LENGTH - overhead);
      const maxItems = (0, supabaseClient_2.computeMaxIdsPerChunk)(selectFields);
      const filterChunks = (0, supabaseClient_2.expandChunkedFilters)(filters, maxInChars, maxItems);
-     const allRows = [];
-     if (returnAll) {
-         const batchSize = 1000;
-         const hasIdColumn = selectFields === '*' || selectFields.split(',').some((f) => f.trim() === 'id');
-         for (const chunkFilters of filterChunks) {
+     const hasIdColumn = selectFields === '*' || selectFields.split(',').some((f) => f.trim() === 'id');
+     let totalYielded = 0;
+     const maxRows = returnAll ? Infinity : limit;
+     const startTime = Date.now();
+     console.log(`[Supabase CSV] starting export table=${table} returnAll=${returnAll} chunks=${filterChunks.length} keyset=${hasIdColumn}`);
+     for (let ci = 0; ci < filterChunks.length; ci++) {
+         const chunkFilters = filterChunks[ci];
+         if (totalYielded >= maxRows)
+             break;
+         if (returnAll) {
              let hasMore = true;
+             let batchNum = 0;
              if (hasIdColumn) {
                  let lastId = null;
                  while (hasMore) {
                      let query = buildSelectQuery(supabase, table, selectFields, chunkFilters, []);
-                     if (lastId !== null) {
+                     if (lastId !== null)
                          query = query.gt('id', lastId);
-                     }
-                     query = query.order('id', { ascending: true }).limit(batchSize);
+                     query = query.order('id', { ascending: true }).limit(BATCH_SIZE);
                      const { data, error } = await query;
                      if (error)
                          throw new Error((0, supabaseClient_2.formatSupabaseError)(error));
                      if (Array.isArray(data) && data.length > 0) {
-                         for (const row of data)
-                             allRows.push(row);
+                         yield data;
+                         totalYielded += data.length;
                          lastId = data[data.length - 1].id;
-                         hasMore = data.length === batchSize;
+                         hasMore = data.length === BATCH_SIZE;
                      }
                      else {
                          hasMore = false;
                      }
+                     batchNum++;
+                     if (batchNum % 50 === 0) {
+                         const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
+                         console.log(`[Supabase CSV] chunk ${ci + 1}/${filterChunks.length} batch ${batchNum} — ${totalYielded} rows fetched (${elapsed}s)`);
+                     }
                  }
              }
              else {
                  let offset = 0;
                  while (hasMore) {
                      const query = buildSelectQuery(supabase, table, selectFields, chunkFilters, sort);
-                     const { data, error } = await query.range(offset, offset + batchSize - 1);
+                     const { data, error } = await query.range(offset, offset + BATCH_SIZE - 1);
                      if (error)
                          throw new Error((0, supabaseClient_2.formatSupabaseError)(error));
                      if (Array.isArray(data) && data.length > 0) {
-                         for (const row of data)
-                             allRows.push(row);
-                         hasMore = data.length === batchSize;
+                         yield data;
+                         totalYielded += data.length;
+                         hasMore = data.length === BATCH_SIZE;
                      }
                      else {
                          hasMore = false;
                      }
-                     offset += batchSize;
+                     offset += BATCH_SIZE;
+                     batchNum++;
+                     if (batchNum % 50 === 0) {
+                         const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
+                         console.log(`[Supabase CSV] chunk ${ci + 1}/${filterChunks.length} batch ${batchNum} (offset) — ${totalYielded} rows fetched (${elapsed}s)`);
+                     }
                  }
              }
          }
-         if (hasIdColumn && sort.length > 0) {
-             allRows.sort((a, b) => {
-                 var _a, _b;
-                 for (const s of sort) {
-                     const aVal = ((_a = a[s.column]) !== null && _a !== void 0 ? _a : null);
-                     const bVal = ((_b = b[s.column]) !== null && _b !== void 0 ? _b : null);
-                     if (aVal === bVal)
-                         continue;
-                     if (aVal === null)
-                         return 1;
-                     if (bVal === null)
-                         return -1;
-                     if (aVal < bVal)
-                         return s.ascending ? -1 : 1;
-                     if (aVal > bVal)
-                         return s.ascending ? 1 : -1;
-                 }
-                 return 0;
-             });
-         }
-     }
-     else {
-         for (const chunkFilters of filterChunks) {
+         else {
+             const remaining = maxRows - totalYielded;
+             if (remaining <= 0)
+                 break;
              const query = buildSelectQuery(supabase, table, selectFields, chunkFilters, sort);
-             const { data, error } = await query.limit(limit);
+             const { data, error } = await query.limit(remaining);
              if (error)
                  throw new Error((0, supabaseClient_2.formatSupabaseError)(error));
-             if (Array.isArray(data)) {
-                 for (const row of data)
-                     allRows.push(row);
+             if (Array.isArray(data) && data.length > 0) {
+                 yield data;
+                 totalYielded += data.length;
              }
          }
-         if (allRows.length > limit) {
-             allRows.length = limit;
-         }
      }
-     return allRows;
+     const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
+     console.log(`[Supabase CSV] fetch complete — ${totalYielded} rows in ${elapsed}s`);
  }
  class SupabaseCsvExport {
      constructor() {
@@ -366,7 +361,9 @@ class SupabaseCsvExport {
                      name: 'enableTransform',
                      type: 'boolean',
                      default: false,
-                     description: 'Whether to apply a JavaScript transform before generating the CSV',
+                     description: 'Whether to apply a JavaScript transform before generating the CSV. ' +
+                         'The transform runs per batch (~1000 rows) so it stays memory-efficient ' +
+                         'even for very large exports. Use .filter() and .map() to shape your data.',
                  },
                  {
                      displayName: 'Transform Parameters',
@@ -564,6 +561,15 @@ class SupabaseCsvExport {
          const limit = returnAll ? 0 : this.getNodeParameter('limit', 0, 100);
          const filters = parseFilters(this, 0);
          const sort = this.getNodeParameter('sort.sortField', 0, []);
+         const enableTransform = this.getNodeParameter('enableTransform', 0, false);
+         const idColumn = this.getNodeParameter('idColumn', 0, 'id');
+         const csvOpts = this.getNodeParameter('csvOptions', 0, {});
+         const csvOptions = {
+             delimiter: csvOpts.delimiter || ',',
+             quoteChar: csvOpts.quoteChar || '"',
+             includeHeaders: csvOpts.includeHeaders !== false,
+             fileName: csvOpts.fileName || 'export.csv',
+         };
          const joins = this.getNodeParameter('joins.join', 0, []);
          let selectWithJoins = returnFields;
          for (const j of joins) {
@@ -573,66 +579,95 @@ class SupabaseCsvExport {
              const hint = j.joinType === 'inner' ? `${j.table}!inner` : j.table;
              selectWithJoins += `,${hint}(${cols})`;
          }
-         let rows;
+         const { delimiter, quoteChar } = csvOptions;
+         const csvChunks = [];
+         const ids = [];
+         let rowCount = 0;
+         let headers = null;
          try {
-             rows = await fetchAllRows(supabase, table, selectWithJoins, filters, sort, credentials.host, returnAll, limit);
-         }
-         catch (error) {
-             const msg = error instanceof Error ? error.message : 'Unknown error';
-             throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Failed to fetch data: ${msg}`);
-         }
-         const enableTransform = this.getNodeParameter('enableTransform', 0, false);
-         if (enableTransform) {
-             const paramEntries = this.getNodeParameter('transformParams.param', 0, []);
-             const params = {};
-             for (const entry of paramEntries) {
-                 if (entry.name) {
-                     params[entry.name] = entry.value;
+             let transformFn = null;
+             let params = {};
+             if (enableTransform) {
+                 const paramEntries = this.getNodeParameter('transformParams.param', 0, []);
+                 for (const entry of paramEntries) {
+                     if (entry.name)
+                         params[entry.name] = entry.value;
+                 }
+                 const code = this.getNodeParameter('transformCode', 0, 'return rows;');
+                 try {
+                     transformFn = new Function('rows', 'params', code);
+                 }
+                 catch (error) {
+                     const msg = error instanceof Error ? error.message : 'Unknown error';
+                     throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Transform code syntax error: ${msg}`);
                  }
              }
-             const code = this.getNodeParameter('transformCode', 0, 'return rows;');
-             try {
-                 const transformFn = new Function('rows', 'params', code);
-                 const result = transformFn(rows, params);
-                 if (!Array.isArray(result)) {
-                     throw new Error('Transform code must return an array. Got: ' + typeof result);
+             for await (const batch of fetchBatches(supabase, table, selectWithJoins, filters, sort, credentials.host, returnAll, limit)) {
+                 let rows = batch;
+                 if (transformFn) {
+                     try {
+                         const result = transformFn(batch, params);
+                         if (!Array.isArray(result)) {
+                             throw new Error('Transform code must return an array. Got: ' + typeof result);
+                         }
+                         rows = result;
+                     }
+                     catch (error) {
+                         if (error instanceof n8n_workflow_1.NodeOperationError)
+                             throw error;
+                         const msg = error instanceof Error ? error.message : 'Unknown error';
+                         throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Transform code error: ${msg}`);
+                     }
                  }
-                 rows = result;
+                 if (rows.length === 0)
+                     continue;
+                 if (headers === null) {
+                     headers = discoverHeaders(rows);
+                     if (csvOptions.includeHeaders) {
+                         const headerLine = headers
+                             .map((h) => escapeCsvField(h, delimiter, quoteChar))
+                             .join(delimiter);
+                         csvChunks.push(Buffer.from(headerLine + '\n', 'utf-8'));
+                     }
+                 }
+                 for (const row of rows) {
+                     if (row[idColumn] != null)
+                         ids.push(row[idColumn]);
+                 }
+                 const buf = batchToCsvBuffer(rows, headers, delimiter, quoteChar);
+                 csvChunks.push(buf);
+                 csvChunks.push(Buffer.from('\n', 'utf-8'));
+                 rowCount += rows.length;
              }
-             catch (error) {
-                 const msg = error instanceof Error ? error.message : 'Unknown error';
-                 throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Transform code error: ${msg}`);
+             if (csvChunks.length > 0) {
+                 const last = csvChunks[csvChunks.length - 1];
+                 if (last.length === 1 && last[0] === 0x0a) {
+                     csvChunks.pop();
+                 }
              }
          }
-         const idColumn = this.getNodeParameter('idColumn', 0, 'id');
-         const ids = [];
-         for (const row of rows) {
-             if (row[idColumn] !== undefined && row[idColumn] !== null) {
-                 ids.push(row[idColumn]);
-             }
+         catch (error) {
+             const msg = error instanceof Error ? error.message : 'Unknown error';
+             if (error instanceof n8n_workflow_1.NodeOperationError)
+                 throw error;
+             throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Export failed: ${msg}`);
          }
-         const csvOpts = this.getNodeParameter('csvOptions', 0, {});
-         const csvOptions = {
-             delimiter: csvOpts.delimiter || ',',
-             quoteChar: csvOpts.quoteChar || '"',
-             includeHeaders: csvOpts.includeHeaders !== false,
-             fileName: csvOpts.fileName || 'export.csv',
-         };
-         const csvContent = generateCsv(rows, csvOptions);
-         const csvBuffer = Buffer.from(csvContent, 'utf-8');
+         const csvBuffer = Buffer.concat(csvChunks);
+         csvChunks.length = 0;
          const binaryData = await this.helpers.prepareBinaryData(csvBuffer, csvOptions.fileName, 'text/csv');
-         const returnItem = {
-             json: {
-                 table,
-                 rowCount: rows.length,
-                 ids,
-                 fileName: csvOptions.fileName,
-             },
-             binary: {
-                 data: binaryData,
-             },
-         };
-         return [[returnItem]];
+         return [[
+             {
+                 json: {
+                     table,
+                     rowCount,
+                     ids,
+                     fileName: csvOptions.fileName,
+                 },
+                 binary: {
+                     data: binaryData,
+                 },
+             },
+         ]];
      }
  }
  exports.SupabaseCsvExport = SupabaseCsvExport;
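
Note on the enableTransform description in the hunk above: the updated code compiles user transform code with new Function('rows', 'params', code) and calls it once per fetched batch, so it sees only that batch's rows and must return an array. A minimal sketch of a transformCode body under those assumptions (the status and email columns and the params.status parameter are hypothetical, for illustration only):

    // Body of the node's `transformCode` parameter.
    // Runs once per ~1000-row batch; `rows` is the current batch and
    // `params` holds the configured Transform Parameters. Must return an array.
    // The `status`/`email` columns and `params.status` are hypothetical.
    return rows
        .filter((row) => row.status === (params.status || 'active'))
        .map((row) => ({ id: row.id, email: row.email }));

Because each batch is transformed in isolation, batch-local filtering and per-row mapping work as expected, but cross-batch operations (global sorting, deduplication, aggregation) cannot be expressed in this hook.
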
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "@fachkraftfreund/n8n-nodes-supabase",
-   "version": "1.3.0",
+   "version": "1.3.2",
    "description": "Comprehensive n8n community node for Supabase with database and storage operations",
    "keywords": [
      "n8n-community-node-package",