@fachkraftfreund/n8n-nodes-supabase 1.2.23 → 1.2.25
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
|
@@ -49,6 +49,72 @@ async function executeDatabaseOperation(supabase, operation, itemIndex, hostUrl)
|
|
|
49
49
|
return returnData;
|
|
50
50
|
}
|
|
51
51
|
exports.executeDatabaseOperation = executeDatabaseOperation;
|
|
52
|
+
// Maximum number of rows sent to Supabase in one insert/upsert call;
// larger inputs are sliced into batches of this size.
const BULK_BATCH_SIZE = 500;
// Additional attempts withRetry() makes after the first failure
// (so up to MAX_RETRIES + 1 total calls).
const MAX_RETRIES = 3;
// Base delay for exponential backoff between retries: 1s, 2s, 4s, ...
const RETRY_BASE_DELAY_MS = 1000;
|
|
55
|
+
/**
 * Decide whether a Supabase/Postgres error message describes a transient
 * condition (locking, connection, or rate-limit trouble) that is worth
 * retrying, as opposed to a permanent failure such as a constraint violation.
 *
 * @param {string} msg - Error message to inspect.
 * @returns {boolean} True when the message matches a known transient pattern.
 */
function isRetryableError(msg) {
    const transientPatterns = [
        'lock timeout',
        'canceling statement due to lock',
        'deadlock',
        'too many connections',
        'rate limit',
        'could not serialize access',
        'connection terminated',
        'connection reset',
        'econnreset',
        'timeout',
    ];
    const haystack = msg.toLowerCase();
    return transientPatterns.some((pattern) => haystack.includes(pattern));
}
|
|
68
|
+
/**
 * Run `fn`, retrying up to MAX_RETRIES extra times with exponential backoff
 * when the failure looks transient (see isRetryableError). Non-transient
 * errors — and a transient error on the final attempt — propagate to the
 * caller unchanged.
 *
 * @param {() => Promise<any>} fn - Async operation to execute.
 * @param {string} label - Human-readable tag used in the retry log line.
 * @returns {Promise<any>} Whatever `fn` resolves to.
 */
async function withRetry(fn, label) {
    let lastError;
    for (let attempt = 0; attempt <= MAX_RETRIES; attempt++) {
        try {
            return await fn();
        }
        catch (err) {
            const message = err?.message ?? String(err);
            const canRetry = attempt < MAX_RETRIES && isRetryableError(message);
            if (!canRetry) {
                throw err;
            }
            const waitMs = RETRY_BASE_DELAY_MS * Math.pow(2, attempt);
            console.log(`[Supabase ${label}] transient error (attempt ${attempt + 1}/${MAX_RETRIES + 1}), retrying in ${waitMs}ms: ${message}`);
            await new Promise((resolve) => setTimeout(resolve, waitMs));
            lastError = err instanceof Error ? err : new Error(message);
        }
    }
    // Unreachable in practice (the final attempt either returns or throws);
    // kept as an explicit terminal path.
    throw lastError;
}
|
|
90
|
+
/**
 * Strip characters that Postgres/PostgREST reject in JSON/text payloads:
 * NUL bytes, literal `\uXXXX` escape sequences, and any remaining
 * backslashes. NOTE(review): this is intentionally aggressive — legitimate
 * backslashes (e.g. Windows paths) are removed as well.
 *
 * @param {string} value - Raw string value from an incoming item.
 * @returns {string} Sanitized copy of the string.
 */
function sanitizeString(value) {
    return value
        .replace(/\x00/g, '')
        .replace(/\\u[0-9a-fA-F]{4}/g, '')
        .replace(/\\/g, '');
}
/**
 * Recursively sanitize every string found in a row object, in place.
 * Descends into arrays and nested objects.
 *
 * @param {Record<string, any>} obj - Row data to sanitize (mutated in place).
 * @param {WeakSet<object>} [seen] - Internal guard against circular
 *   references; callers normally omit it.
 * @returns {Record<string, any>} The same object, for chaining.
 */
function sanitizeRow(obj, seen = new WeakSet()) {
    // Guard against circular references, which previously recursed forever
    // and crashed with a RangeError (stack overflow). Re-visiting a shared
    // object is also skipped, which is safe because sanitization is
    // idempotent and performed in place.
    if (seen.has(obj))
        return obj;
    seen.add(obj);
    for (const key of Object.keys(obj)) {
        const val = obj[key];
        if (typeof val === 'string') {
            obj[key] = sanitizeString(val);
        }
        else if (Array.isArray(val)) {
            for (let i = 0; i < val.length; i++) {
                if (typeof val[i] === 'string') {
                    val[i] = sanitizeString(val[i]);
                }
                else if (val[i] && typeof val[i] === 'object') {
                    sanitizeRow(val[i], seen);
                }
            }
        }
        else if (val && typeof val === 'object') {
            sanitizeRow(val, seen);
        }
    }
    return obj;
}
|
|
52
118
|
function collectRowData(context, itemCount) {
|
|
53
119
|
const rows = [];
|
|
54
120
|
for (let i = 0; i < itemCount; i++) {
|
|
@@ -72,7 +138,7 @@ function collectRowData(context, itemCount) {
|
|
|
72
138
|
}
|
|
73
139
|
}
|
|
74
140
|
}
|
|
75
|
-
rows.push(row);
|
|
141
|
+
rows.push(sanitizeRow(row));
|
|
76
142
|
}
|
|
77
143
|
return rows;
|
|
78
144
|
}
|
|
@@ -98,16 +164,26 @@ async function handleBulkCreate(supabase, itemCount) {
|
|
|
98
164
|
const table = this.getNodeParameter('table', 0);
|
|
99
165
|
(0, supabaseClient_1.validateTableName)(table);
|
|
100
166
|
const rows = collectRowData(this, itemCount);
|
|
101
|
-
const
|
|
102
|
-
|
|
103
|
-
.
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
167
|
+
const returnData = [];
|
|
168
|
+
for (let offset = 0; offset < rows.length; offset += BULK_BATCH_SIZE) {
|
|
169
|
+
const batch = rows.slice(offset, offset + BULK_BATCH_SIZE);
|
|
170
|
+
const data = await withRetry(async () => {
|
|
171
|
+
const { data, error } = await supabase
|
|
172
|
+
.from(table)
|
|
173
|
+
.insert(batch)
|
|
174
|
+
.select();
|
|
175
|
+
if (error)
|
|
176
|
+
throw new Error((0, supabaseClient_1.formatSupabaseError)(error));
|
|
177
|
+
return data;
|
|
178
|
+
}, `CREATE ${table} batch ${Math.floor(offset / BULK_BATCH_SIZE) + 1}`);
|
|
179
|
+
if (Array.isArray(data)) {
|
|
180
|
+
for (const row of data)
|
|
181
|
+
returnData.push({ json: row });
|
|
182
|
+
}
|
|
109
183
|
}
|
|
110
|
-
return
|
|
184
|
+
return returnData.length > 0
|
|
185
|
+
? returnData
|
|
186
|
+
: [{ json: { data: [], operation: 'create', table } }];
|
|
111
187
|
}
|
|
112
188
|
async function handleBulkUpsert(supabase, itemCount) {
|
|
113
189
|
const table = this.getNodeParameter('table', 0);
|
|
@@ -117,16 +193,26 @@ async function handleBulkUpsert(supabase, itemCount) {
|
|
|
117
193
|
const options = {};
|
|
118
194
|
if (onConflict)
|
|
119
195
|
options.onConflict = onConflict;
|
|
120
|
-
const
|
|
121
|
-
|
|
122
|
-
.
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
196
|
+
const returnData = [];
|
|
197
|
+
for (let offset = 0; offset < rows.length; offset += BULK_BATCH_SIZE) {
|
|
198
|
+
const batch = rows.slice(offset, offset + BULK_BATCH_SIZE);
|
|
199
|
+
const data = await withRetry(async () => {
|
|
200
|
+
const { data, error } = await supabase
|
|
201
|
+
.from(table)
|
|
202
|
+
.upsert(batch, options)
|
|
203
|
+
.select();
|
|
204
|
+
if (error)
|
|
205
|
+
throw new Error((0, supabaseClient_1.formatSupabaseError)(error));
|
|
206
|
+
return data;
|
|
207
|
+
}, `UPSERT ${table} batch ${Math.floor(offset / BULK_BATCH_SIZE) + 1}`);
|
|
208
|
+
if (Array.isArray(data)) {
|
|
209
|
+
for (const row of data)
|
|
210
|
+
returnData.push({ json: row });
|
|
211
|
+
}
|
|
128
212
|
}
|
|
129
|
-
return
|
|
213
|
+
return returnData.length > 0
|
|
214
|
+
? returnData
|
|
215
|
+
: [{ json: { data: [], operation: 'upsert', table } }];
|
|
130
216
|
}
|
|
131
217
|
async function handleBulkUpdate(supabase, itemCount) {
|
|
132
218
|
const table = this.getNodeParameter('table', 0);
|
|
@@ -142,16 +228,26 @@ async function handleBulkUpdate(supabase, itemCount) {
|
|
|
142
228
|
throw new Error(`Item ${i} is missing the match column "${matchColumn}"`);
|
|
143
229
|
}
|
|
144
230
|
}
|
|
145
|
-
const
|
|
146
|
-
|
|
147
|
-
.
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
231
|
+
const returnData = [];
|
|
232
|
+
for (let offset = 0; offset < rows.length; offset += BULK_BATCH_SIZE) {
|
|
233
|
+
const batch = rows.slice(offset, offset + BULK_BATCH_SIZE);
|
|
234
|
+
const data = await withRetry(async () => {
|
|
235
|
+
const { data, error } = await supabase
|
|
236
|
+
.from(table)
|
|
237
|
+
.upsert(batch, { onConflict: matchColumn })
|
|
238
|
+
.select();
|
|
239
|
+
if (error)
|
|
240
|
+
throw new Error((0, supabaseClient_1.formatSupabaseError)(error));
|
|
241
|
+
return data;
|
|
242
|
+
}, `UPDATE ${table} batch ${Math.floor(offset / BULK_BATCH_SIZE) + 1}`);
|
|
243
|
+
if (Array.isArray(data)) {
|
|
244
|
+
for (const row of data)
|
|
245
|
+
returnData.push({ json: row });
|
|
246
|
+
}
|
|
153
247
|
}
|
|
154
|
-
return
|
|
248
|
+
return returnData.length > 0
|
|
249
|
+
? returnData
|
|
250
|
+
: [{ json: { data: [], operation: 'update', table } }];
|
|
155
251
|
}
|
|
156
252
|
function getFilters(context, itemIndex) {
|
|
157
253
|
const uiMode = context.getNodeParameter('uiMode', itemIndex, 'simple');
|
package/package.json
CHANGED