@fachkraftfreund/n8n-nodes-supabase 1.3.1 → 1.3.3

@@ -2,6 +2,10 @@
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.SupabaseCsvExport = void 0;
 const n8n_workflow_1 = require("n8n-workflow");
+const fs_1 = require("fs");
+const os_1 = require("os");
+const path_1 = require("path");
+const crypto_1 = require("crypto");
 const supabaseClient_1 = require("./utils/supabaseClient");
 const supabaseClient_2 = require("./utils/supabaseClient");
 function escapeRegExp(s) {
@@ -28,7 +32,7 @@ function discoverHeaders(rows) {
     }
     return [...set];
 }
-function batchToCsvBuffer(rows, headers, delimiter, quoteChar) {
+function batchToCsvLines(rows, headers, delimiter, quoteChar) {
     const lines = new Array(rows.length);
     for (let i = 0; i < rows.length; i++) {
         const row = rows[i];
@@ -36,7 +40,19 @@ function batchToCsvBuffer(rows, headers, delimiter, quoteChar) {
             .map((h) => escapeCsvField(row[h], delimiter, quoteChar))
             .join(delimiter);
     }
-    return Buffer.from(lines.join('\n'), 'utf-8');
+    return lines.join('\n') + '\n';
+}
+function streamWrite(stream, data) {
+    return new Promise((resolve, reject) => {
+        const ok = stream.write(data, 'utf-8');
+        if (ok) {
+            resolve();
+        }
+        else {
+            stream.once('drain', resolve);
+            stream.once('error', reject);
+        }
+    });
 }
 function buildSelectQuery(supabase, table, selectFields, filters, sort) {
     let query = supabase.from(table).select(selectFields);
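
The new `streamWrite` helper turns Node's push-based write API into an awaitable call that honors backpressure: `stream.write()` returns `false` once the internal buffer passes `highWaterMark`, and the helper then waits for `'drain'` before resolving. A minimal self-contained sketch of the same pattern (the file path and loop are illustrative, not from the package):

```ts
import { createWriteStream } from "fs";

// Backpressure-aware write: resolve immediately while the buffer is below
// highWaterMark, otherwise wait for the stream to drain before continuing.
function streamWrite(stream: NodeJS.WritableStream, data: string): Promise<void> {
    return new Promise((resolve, reject) => {
        const ok = stream.write(data, "utf-8");
        if (ok) {
            resolve();
        } else {
            stream.once("drain", resolve);
            stream.once("error", reject);
        }
    });
}

async function demo(): Promise<void> {
    const out = createWriteStream("/tmp/demo.csv", { encoding: "utf-8" });
    for (let i = 0; i < 1_000_000; i++) {
        // Memory stays bounded by highWaterMark no matter how many rows we write.
        await streamWrite(out, `row-${i}\n`);
    }
    await new Promise<void>((resolve, reject) => {
        out.end(() => resolve());
        out.on("error", reject);
    });
}
```
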
@@ -76,21 +92,24 @@ function parseFilters(context, itemIndex) {
     }
 }
 const BATCH_SIZE = 1000;
-async function* fetchBatches(supabase, table, selectFields, filters, sort, hostUrl, returnAll, limit, preferOffset) {
+async function* fetchBatches(supabase, table, selectFields, filters, sort, hostUrl, returnAll, limit) {
     const overhead = (0, supabaseClient_2.estimateUrlOverhead)(hostUrl, table, selectFields, filters, sort);
     const maxInChars = Math.max(500, supabaseClient_2.MAX_SAFE_URL_LENGTH - overhead);
     const maxItems = (0, supabaseClient_2.computeMaxIdsPerChunk)(selectFields);
     const filterChunks = (0, supabaseClient_2.expandChunkedFilters)(filters, maxInChars, maxItems);
+    const hasIdColumn = selectFields === '*' || selectFields.split(',').some((f) => f.trim() === 'id');
     let totalYielded = 0;
     const maxRows = returnAll ? Infinity : limit;
-    for (const chunkFilters of filterChunks) {
+    const startTime = Date.now();
+    console.log(`[Supabase CSV] starting export table=${table} returnAll=${returnAll} chunks=${filterChunks.length} keyset=${hasIdColumn}`);
+    for (let ci = 0; ci < filterChunks.length; ci++) {
+        const chunkFilters = filterChunks[ci];
         if (totalYielded >= maxRows)
             break;
         if (returnAll) {
-            const useKeyset = !preferOffset &&
-                (selectFields === '*' || selectFields.split(',').some((f) => f.trim() === 'id'));
             let hasMore = true;
-            if (useKeyset) {
+            let batchNum = 0;
+            if (hasIdColumn) {
                 let lastId = null;
                 while (hasMore) {
                     let query = buildSelectQuery(supabase, table, selectFields, chunkFilters, []);
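
The `preferOffset` escape hatch is removed: full exports now use keyset pagination whenever the selection includes an `id` column. Keyset pagination filters on the last seen `id` instead of skipping rows, so every page is an indexed range scan and costs the same however deep into the table it reaches, whereas offset pagination re-scans and discards `offset` rows per page. A rough standalone sketch with supabase-js v2 (project URL, key, and the numeric `id` are illustrative assumptions):

```ts
import { createClient } from "@supabase/supabase-js";

const supabase = createClient("https://your-project.supabase.co", "anon-key"); // placeholders

// Yield pages ordered by id, resuming each page from the last id seen.
async function* keysetPages(table: string, pageSize = 1000) {
    let lastId: number | null = null;
    while (true) {
        let query = supabase
            .from(table)
            .select("*")
            .order("id", { ascending: true })
            .limit(pageSize);
        if (lastId !== null)
            query = query.gt("id", lastId); // indexed range scan, no row skipping
        const { data, error } = await query;
        if (error) throw error;
        if (!data || data.length === 0) return;
        lastId = data[data.length - 1].id;
        yield data;
        if (data.length < pageSize) return; // short page means we are done
    }
}
```
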
@@ -109,6 +128,11 @@ async function* fetchBatches(supabase, table, selectFields, filters, sort, hostU
                     else {
                         hasMore = false;
                     }
+                    batchNum++;
+                    if (batchNum % 50 === 0) {
+                        const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
+                        console.log(`[Supabase CSV] chunk ${ci + 1}/${filterChunks.length} batch ${batchNum} — ${totalYielded} rows fetched (${elapsed}s)`);
+                    }
                 }
             }
             else {
@@ -127,6 +151,11 @@ async function* fetchBatches(supabase, table, selectFields, filters, sort, hostU
                        hasMore = false;
                    }
                    offset += BATCH_SIZE;
+                   batchNum++;
+                   if (batchNum % 50 === 0) {
+                       const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
+                       console.log(`[Supabase CSV] chunk ${ci + 1}/${filterChunks.length} batch ${batchNum} (offset) — ${totalYielded} rows fetched (${elapsed}s)`);
+                   }
                }
            }
        }
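
Both pagination loops gain the same throttled progress log: a counter that reports once every 50 batches instead of on each one, so long exports stay observable without flooding the n8n console. With the package's batch size of 1000 rows, a million-row export emits roughly 20 progress lines. The pattern in isolation (names illustrative):

```ts
const LOG_EVERY = 50;
const startTime = Date.now();
let batchNum = 0;
let totalYielded = 0;

// Called once per fetched batch; logs only every LOG_EVERY-th call.
function onBatch(rows: unknown[]): void {
    totalYielded += rows.length;
    batchNum++;
    if (batchNum % LOG_EVERY === 0) {
        const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
        console.log(`batch ${batchNum}: ${totalYielded} rows fetched (${elapsed}s)`);
    }
}
```
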
@@ -144,6 +173,8 @@ async function* fetchBatches(supabase, table, selectFields, filters, sort, hostU
             }
         }
     }
+    const elapsed = ((Date.now() - startTime) / 1000).toFixed(1);
+    console.log(`[Supabase CSV] fetch complete — ${totalYielded} rows in ${elapsed}s`);
 }
 class SupabaseCsvExport {
     constructor() {
@@ -565,7 +596,8 @@ class SupabaseCsvExport {
                 selectWithJoins += `,${hint}(${cols})`;
             }
             const { delimiter, quoteChar } = csvOptions;
-            const csvChunks = [];
+            const tmpPath = (0, path_1.join)((0, os_1.tmpdir)(), `n8n-csv-${(0, crypto_1.randomBytes)(8).toString('hex')}.csv`);
+            const fileStream = (0, fs_1.createWriteStream)(tmpPath, { encoding: 'utf-8' });
             const ids = [];
             let rowCount = 0;
             let headers = null;
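
Instead of accumulating `csvChunks` in memory, 1.3.3 opens a write stream to a uniquely named file in the OS temp directory. Eight random bytes yield a 16-character hex suffix, which keeps concurrent executions of the node from colliding on the shared directory. The naming scheme in isolation:

```ts
import { randomBytes } from "crypto";
import { tmpdir } from "os";
import { join } from "path";

// 8 random bytes -> 16 hex chars; collisions are effectively impossible
// even across parallel workflow executions.
const tmpPath = join(tmpdir(), `n8n-csv-${randomBytes(8).toString("hex")}.csv`);
// e.g. /tmp/n8n-csv-9f2c4e1ab37d6508.csv
```
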
@@ -587,8 +619,7 @@
                     throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Transform code syntax error: ${msg}`);
                 }
             }
-            const preferOffset = sort.length > 0;
-            for await (const batch of fetchBatches(supabase, table, selectWithJoins, filters, sort, credentials.host, returnAll, limit, preferOffset)) {
+            for await (const batch of fetchBatches(supabase, table, selectWithJoins, filters, sort, credentials.host, returnAll, limit)) {
                 let rows = batch;
                 if (transformFn) {
                     try {
@@ -612,25 +643,37 @@
                     if (csvOptions.includeHeaders) {
                         const headerLine = headers
                             .map((h) => escapeCsvField(h, delimiter, quoteChar))
-                            .join(delimiter);
-                        csvChunks.push(Buffer.from(headerLine + '\n', 'utf-8'));
+                            .join(delimiter) + '\n';
+                        await streamWrite(fileStream, headerLine);
                     }
                 }
                 for (const row of rows) {
                     if (row[idColumn] != null)
                         ids.push(row[idColumn]);
                 }
-                const buf = batchToCsvBuffer(rows, headers, delimiter, quoteChar);
-                csvChunks.push(buf);
-                csvChunks.push(Buffer.from('\n', 'utf-8'));
+                await streamWrite(fileStream, batchToCsvLines(rows, headers, delimiter, quoteChar));
                 rowCount += rows.length;
             }
-            if (csvChunks.length > 0) {
-                const last = csvChunks[csvChunks.length - 1];
-                if (last.length === 1 && last[0] === 0x0a) {
-                    csvChunks.pop();
-                }
-            }
+            await new Promise((resolve, reject) => {
+                fileStream.end(() => resolve());
+                fileStream.on('error', reject);
+            });
+            console.log(`[Supabase CSV] wrote ${rowCount} rows to temp file, passing to n8n binary storage`);
+            const readStream = (0, fs_1.createReadStream)(tmpPath);
+            const binaryData = await this.helpers.prepareBinaryData(readStream, csvOptions.fileName, 'text/csv');
+            return [[
+                    {
+                        json: {
+                            table,
+                            rowCount,
+                            ids,
+                            fileName: csvOptions.fileName,
+                        },
+                        binary: {
+                            data: binaryData,
+                        },
+                    },
+                ]];
         }
         catch (error) {
             const msg = error instanceof Error ? error.message : 'Unknown error';
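
The finalize step also moves inside the `try`: the write stream is flushed to disk, then a read stream of the temp file is handed to `this.helpers.prepareBinaryData`, which (as the diff shows) accepts a `Readable` as well as a `Buffer`. The net effect versus 1.3.1, sketched with a hypothetical stand-in for the helper's signature:

```ts
import { createReadStream } from "fs";
import type { Readable } from "stream";

// Hypothetical stand-in for n8n's prepareBinaryData, for illustration only.
type PrepareBinaryData = (
    data: Buffer | Readable,
    fileName?: string,
    mimeType?: string,
) => Promise<unknown>;

// 1.3.1 shape: peak memory scales with the full CSV size.
async function finalizeBuffered(chunks: Buffer[], prepare: PrepareBinaryData) {
    return prepare(Buffer.concat(chunks), "export.csv", "text/csv");
}

// 1.3.3 shape: peak memory is roughly one batch plus the stream's highWaterMark.
async function finalizeStreamed(tmpPath: string, prepare: PrepareBinaryData) {
    return prepare(createReadStream(tmpPath), "export.csv", "text/csv");
}
```
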
@@ -638,22 +681,16 @@
                 throw error;
             throw new n8n_workflow_1.NodeOperationError(this.getNode(), `Export failed: ${msg}`);
         }
-        const csvBuffer = Buffer.concat(csvChunks);
-        csvChunks.length = 0;
-        const binaryData = await this.helpers.prepareBinaryData(csvBuffer, csvOptions.fileName, 'text/csv');
-        return [[
-                {
-                    json: {
-                        table,
-                        rowCount,
-                        ids,
-                        fileName: csvOptions.fileName,
-                    },
-                    binary: {
-                        data: binaryData,
-                    },
-                },
-            ]];
+        finally {
+            try {
+                fileStream.destroy();
+            }
+            catch { }
+            try {
+                (0, fs_1.unlinkSync)(tmpPath);
+            }
+            catch { }
+        }
     }
 }
 exports.SupabaseCsvExport = SupabaseCsvExport;
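
Finally, the buffered epilogue from 1.3.1 gives way to a `finally` block, so the temp file is removed on success and on failure alike. Each cleanup step sits in its own empty `catch`: a stream that is already closed or a file that was never written cannot abort the other step. The contract in isolation (names illustrative):

```ts
import { unlinkSync } from "fs";
import type { WriteStream } from "fs";

// Mirror of the finally block: each step is isolated so one failure
// cannot skip the other.
function cleanupTempFile(fileStream: WriteStream, tmpPath: string): void {
    try {
        fileStream.destroy(); // harmless if end() already completed
    } catch { /* stream already destroyed */ }
    try {
        unlinkSync(tmpPath); // throws ENOENT if the file never materialized
    } catch { /* nothing to delete */ }
}
```
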
package/package.json CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@fachkraftfreund/n8n-nodes-supabase",
-  "version": "1.3.1",
+  "version": "1.3.3",
   "description": "Comprehensive n8n community node for Supabase with database and storage operations",
   "keywords": [
     "n8n-community-node-package",