@openfn/language-motherduck 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +674 -0
- package/LICENSE.LESSER +165 -0
- package/README.md +96 -0
- package/ast.json +1111 -0
- package/configuration-schema.json +27 -0
- package/dist/index.cjs +537 -0
- package/dist/index.js +512 -0
- package/package.json +49 -0
- package/types/Adaptor.d.ts +61 -0
- package/types/index.d.ts +4 -0
- package/types/mock.d.ts +17 -0
- package/types/util.d.ts +45 -0
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$schema": "http://json-schema.org/draft-07/schema#",
|
|
3
|
+
"properties": {
|
|
4
|
+
"token": {
|
|
5
|
+
"title": "MotherDuck Token",
|
|
6
|
+
"type": "string",
|
|
7
|
+
"description": "MotherDuck authentication token",
|
|
8
|
+
"writeOnly": true,
|
|
9
|
+
"examples": ["token_abc123..."]
|
|
10
|
+
},
|
|
11
|
+
"database": {
|
|
12
|
+
"title": "Database Name",
|
|
13
|
+
"type": "string",
|
|
14
|
+
"description": "MotherDuck database name",
|
|
15
|
+
"examples": ["my_database", "analytics_warehouse", "production_db"]
|
|
16
|
+
},
|
|
17
|
+
"sessionHint": {
|
|
18
|
+
"title": "Session Hint",
|
|
19
|
+
"type": "string",
|
|
20
|
+
"description": "Optional session hint for MotherDuck read scaling",
|
|
21
|
+
"examples": ["primary", "readonly", "analytics"]
|
|
22
|
+
}
|
|
23
|
+
},
|
|
24
|
+
"type": "object",
|
|
25
|
+
"additionalProperties": true,
|
|
26
|
+
"required": ["token"]
|
|
27
|
+
}
|
package/dist/index.cjs
ADDED
|
@@ -0,0 +1,537 @@
|
|
|
1
|
+
// esbuild-generated CommonJS/ESM interop helpers (machine-generated preamble).
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
// Defines lazy, enumerable getters on `target` for every key in `all`.
var __export = (target, all) => {
  for (var name in all)
    __defProp(target, name, { get: all[name], enumerable: true });
};
// Copies own properties of `from` onto `to` as getters, skipping `except`
// and keys already present; preserves the source property's enumerability.
var __copyProps = (to, from, except, desc) => {
  if (from && typeof from === "object" || typeof from === "function") {
    for (let key of __getOwnPropNames(from))
      if (!__hasOwnProp.call(to, key) && key !== except)
        __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
  }
  return to;
};
// Wraps a CommonJS module for ES-module consumption (adds a `default` binding
// unless the module is already flagged __esModule).
var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
  isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: true }) : target,
  mod
));
// Marks an export object as __esModule so CJS consumers interop cleanly.
var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
|
|
24
|
+
|
|
25
|
+
// src/index.js
|
|
26
|
+
var src_exports = {};
|
|
27
|
+
__export(src_exports, {
|
|
28
|
+
alterState: () => import_language_common2.alterState,
|
|
29
|
+
arrayToString: () => import_language_common2.arrayToString,
|
|
30
|
+
as: () => import_language_common2.as,
|
|
31
|
+
combine: () => import_language_common2.combine,
|
|
32
|
+
cursor: () => import_language_common2.cursor,
|
|
33
|
+
dataPath: () => import_language_common2.dataPath,
|
|
34
|
+
dataValue: () => import_language_common2.dataValue,
|
|
35
|
+
dateFns: () => import_language_common2.dateFns,
|
|
36
|
+
default: () => src_default,
|
|
37
|
+
each: () => import_language_common2.each,
|
|
38
|
+
execute: () => execute,
|
|
39
|
+
field: () => import_language_common2.field,
|
|
40
|
+
fields: () => import_language_common2.fields,
|
|
41
|
+
fn: () => import_language_common2.fn,
|
|
42
|
+
fnIf: () => import_language_common2.fnIf,
|
|
43
|
+
group: () => import_language_common2.group,
|
|
44
|
+
insert: () => insert,
|
|
45
|
+
lastReferenceValue: () => import_language_common2.lastReferenceValue,
|
|
46
|
+
map: () => import_language_common2.map,
|
|
47
|
+
merge: () => import_language_common2.merge,
|
|
48
|
+
query: () => query,
|
|
49
|
+
sourceValue: () => import_language_common2.sourceValue,
|
|
50
|
+
util: () => util_exports
|
|
51
|
+
});
|
|
52
|
+
module.exports = __toCommonJS(src_exports);
|
|
53
|
+
|
|
54
|
+
// src/Adaptor.js
// Named exports of the Adaptor module (same surface as the package index,
// minus the default export), followed by the module's dependencies.
var Adaptor_exports = {};
__export(Adaptor_exports, {
  alterState: () => import_language_common2.alterState,
  arrayToString: () => import_language_common2.arrayToString,
  as: () => import_language_common2.as,
  combine: () => import_language_common2.combine,
  cursor: () => import_language_common2.cursor,
  dataPath: () => import_language_common2.dataPath,
  dataValue: () => import_language_common2.dataValue,
  dateFns: () => import_language_common2.dateFns,
  each: () => import_language_common2.each,
  execute: () => execute,
  field: () => import_language_common2.field,
  fields: () => import_language_common2.fields,
  fn: () => import_language_common2.fn,
  fnIf: () => import_language_common2.fnIf,
  group: () => import_language_common2.group,
  insert: () => insert,
  lastReferenceValue: () => import_language_common2.lastReferenceValue,
  map: () => import_language_common2.map,
  merge: () => import_language_common2.merge,
  query: () => query,
  sourceValue: () => import_language_common2.sourceValue,
  util: () => util_exports
});
// Runtime dependencies: OpenFn common helpers, the DuckDB Node driver
// (used for the MotherDuck connection), and lodash (used for batching).
var import_language_common = require("@openfn/language-common");
var import_util = require("@openfn/language-common/util");
var import_node_api = require("@duckdb/node-api");
var import_lodash = __toESM(require("lodash"), 1);
|
|
84
|
+
|
|
85
|
+
// src/util.js
// SQL utility helpers, exposed to jobs as the `util` namespace export.
var util_exports = {};
__export(util_exports, {
  convertBigIntToNumber: () => convertBigIntToNumber,
  escapeSqlString: () => escapeSqlString,
  formatSqlValue: () => formatSqlValue,
  queryHandler: () => queryHandler,
  validateSqlIdentifier: () => validateSqlIdentifier
});
|
|
94
|
+
// Reserved words that may not be used verbatim as an identifier segment.
// Substring matching is deliberately avoided: common, legitimate names such
// as "created_at", "updates" or "selected_users" contain these keywords.
// (The former `--`, `/*`, `*/` and `;` checks were dead code: the character
// whitelist below already rules those sequences out.)
var FORBIDDEN_IDENTIFIER_KEYWORDS = new Set([
  "DROP",
  "DELETE",
  "INSERT",
  "UPDATE",
  "ALTER",
  "CREATE",
  "EXEC",
  "UNION",
  "SELECT"
]);
/**
 * Validates a SQL identifier (table or column name, optionally dot-qualified
 * like "schema.table").
 * @param {string} identifier - Identifier to validate.
 * @returns {string} The identifier, unchanged, when valid.
 * @throws {Error} When the identifier is not a string, contains characters
 *   outside [A-Za-z0-9_.], or a dot-separated segment is a reserved keyword.
 */
function validateSqlIdentifier(identifier) {
  if (typeof identifier !== "string") {
    throw new Error("SQL identifier must be a string");
  }
  // Character whitelist: excludes quotes, whitespace, semicolons and comment
  // markers, i.e. all direct string-level injection vectors.
  if (!/^[a-zA-Z_][a-zA-Z0-9_.]*$/.test(identifier)) {
    throw new Error(`Invalid SQL identifier: ${identifier}. Only alphanumeric characters, underscores, and dots are allowed.`);
  }
  // Reject identifiers whose segments ARE reserved keywords, while allowing
  // names that merely contain one (e.g. "created_at").
  for (const segment of identifier.split(".")) {
    const upperSegment = segment.toUpperCase();
    if (FORBIDDEN_IDENTIFIER_KEYWORDS.has(upperSegment)) {
      throw new Error(`SQL identifier contains forbidden pattern: ${upperSegment}`);
    }
  }
  return identifier;
}
|
|
124
|
+
/**
 * Escapes a value for embedding inside a single-quoted SQL literal by
 * doubling every single quote. Non-string inputs pass through untouched.
 * @param {*} value - Value to escape.
 * @returns {*} Escaped string, or the original value when not a string.
 */
function escapeSqlString(value) {
  const isString = typeof value === "string";
  return isString ? value.split("'").join("''") : value;
}
|
|
130
|
+
/**
 * Renders a JS value as a SQL literal fragment.
 * null/undefined -> NULL, strings are quoted and escaped, booleans become
 * TRUE/FALSE, everything else falls back to its toString() representation.
 * @param {*} value - Value to format.
 * @returns {string} SQL literal text.
 */
function formatSqlValue(value) {
  if (value == null) {
    // Matches both null and undefined.
    return "NULL";
  }
  switch (typeof value) {
    case "string":
      return `'${escapeSqlString(value)}'`;
    case "boolean":
      return value ? "TRUE" : "FALSE";
    default:
      return value.toString();
  }
}
|
|
142
|
+
/**
 * Recursively normalizes driver result values so they are JSON-serializable:
 * - BigInts within Number's safe range become numbers; larger ones become strings.
 * - DuckDB DECIMAL objects ({ width, scale, value }) become floats.
 * - Date instances pass through unchanged (fix: previously they fell into the
 *   generic object branch and were flattened to {} by Object.entries).
 * - Arrays and plain objects are converted element-by-element.
 * @param {*} obj - Value to convert (any nesting depth).
 * @returns {*} Converted value with no BigInts remaining.
 */
function convertBigIntToNumber(obj) {
  if (typeof obj === "bigint") {
    if (obj <= Number.MAX_SAFE_INTEGER && obj >= Number.MIN_SAFE_INTEGER) {
      return Number(obj);
    } else {
      // Too large for a double without precision loss; keep as string.
      return obj.toString();
    }
  }
  if (Array.isArray(obj)) {
    return obj.map(convertBigIntToNumber);
  }
  if (obj instanceof Date) {
    // Dates are objects but must not be decomposed via Object.entries.
    return obj;
  }
  if (obj !== null && typeof obj === "object") {
    // DuckDB DECIMAL representation: integer `value` scaled by 10^scale.
    if (obj.width !== void 0 && obj.scale !== void 0 && obj.value !== void 0) {
      const scale = Number(obj.scale);
      const value = typeof obj.value === "bigint" ? Number(obj.value) : obj.value;
      return value / Math.pow(10, scale);
    }
    const converted = {};
    for (const [key, value] of Object.entries(obj)) {
      converted[key] = convertBigIntToNumber(value);
    }
    return converted;
  }
  return obj;
}
|
|
167
|
+
/**
 * Runs a SQL statement on the given connection and folds the result into
 * OpenFn state.
 * @param {Object} connection2 - Active connection (must expose runAndReadAll).
 * @param {Object} state - Current OpenFn state.
 * @param {string} sqlQuery - SQL text to execute.
 * @param {Object} options - { writeSql } — when truthy, echo the SQL in
 *   state.response.query; otherwise it is hidden (queries may embed data).
 * @param {Function} composeNextState2 - composeNextState implementation.
 * @returns {Promise<Object>} Next state with converted rows in data and a
 *   response summary { rows, rowCount, command, query }.
 * @throws {Error} Wrapped query error; original attached as originalError
 *   and as `cause` so the stack is preserved.
 */
async function queryHandler(connection2, state, sqlQuery, options, composeNextState2) {
  if (!connection2) {
    throw new Error("No active MotherDuck connection found. Ensure you are running within an execute() block.");
  }
  try {
    const result = await connection2.runAndReadAll(sqlQuery);
    const rawRows = result.getRowObjects();
    // BigInt values from the driver are not JSON-serializable; normalize them.
    const rows = convertBigIntToNumber(rawRows);
    const nextState = {
      ...composeNextState2(state, rows),
      response: {
        rows,
        rowCount: rows.length,
        // First keyword of the statement. Split on any whitespace (fix: a
        // plain split(" ") returned e.g. "SELECT\n*" for multi-line SQL).
        command: sqlQuery.trim().split(/\s+/)[0].toUpperCase(),
        query: options?.writeSql ? sqlQuery : "[query hidden]"
      }
    };
    return nextState;
  } catch (error) {
    const errorMessage = `MotherDuck query failed: ${error.message}`;
    console.error(errorMessage);
    console.error("Failed query:", sqlQuery.substring(0, 200) + (sqlQuery.length > 200 ? "..." : ""));
    // Preserve the original error (and its stack) for callers.
    const enhancedError = new Error(errorMessage, { cause: error });
    enhancedError.originalError = error;
    enhancedError.query = sqlQuery;
    throw enhancedError;
  }
}
|
|
195
|
+
|
|
196
|
+
// src/mock.js
// Shared in-memory table store backing the mock connection (table name -> rows).
var mockTables = {};
/**
 * Extracts literal column values from a FROM-less SELECT, e.g.
 * "SELECT 'hi' AS greeting, 42 AS n" -> { greeting: 'hi', n: 42 }.
 * Also recognizes count(...) and current_database() pseudo-functions.
 * @param {string} sql - The SELECT statement to inspect.
 * @returns {Object} Map of alias -> literal value (empty when nothing matches).
 */
function parseSelectLiterals(sql) {
  const result = {};
  const selectMatch = sql.match(/SELECT\s+(.+?)(?:\s+FROM|\s*$)/is);
  if (!selectMatch)
    return result;
  const selectClause = selectMatch[1].trim();
  // Split the select list on top-level commas only — commas inside single
  // quotes or parentheses must not break a column expression apart.
  const columns = [];
  let current = "";
  let inQuotes = false;
  let parenDepth = 0;
  for (let i = 0; i < selectClause.length; i++) {
    const char = selectClause[i];
    // Toggle quote state on unescaped single quotes.
    if (char === "'" && (i === 0 || selectClause[i - 1] !== "\\")) {
      inQuotes = !inQuotes;
    }
    if (!inQuotes) {
      if (char === "(")
        parenDepth++;
      if (char === ")")
        parenDepth--;
      if (char === "," && parenDepth === 0) {
        columns.push(current.trim());
        current = "";
        continue;
      }
    }
    current += char;
  }
  if (current.trim())
    columns.push(current.trim());
  columns.forEach((col) => {
    var _a;
    // Recognized shapes: 'literal' [AS] alias | <digits> [AS] alias |
    // func(...) [AS] alias. First match wins, in that order.
    const stringMatch = col.match(/'([^']*)'\s+(?:as\s+)?(\w+)/i);
    const numberMatch = col.match(/^(\d+)\s+(?:as\s+)?(\w+)/i);
    const funcMatch = col.match(/^(\w+)\s*\([^)]*\)\s+(?:as\s+)?(\w+)/i);
    if (stringMatch) {
      result[stringMatch[2]] = stringMatch[1];
    } else if (numberMatch) {
      result[numberMatch[2]] = parseInt(numberMatch[1]);
    } else if (funcMatch) {
      const funcName = funcMatch[1].toLowerCase();
      const alias = funcMatch[2];
      if (funcName === "count") {
        // count(...) is answered with the row count of the FIRST mock table
        // (arbitrary choice — only meaningful when one table exists).
        result[alias] = ((_a = mockTables[Object.keys(mockTables)[0]]) == null ? void 0 : _a.length) || 0;
      } else if (funcName === "current_database") {
        result[alias] = "my_db";
      }
    }
  });
  return result;
}
|
|
249
|
+
/**
 * Builds an in-memory stand-in for a DuckDB/MotherDuck connection, used when
 * testMode is enabled. Emulates a small SQL subset (CREATE TABLE, INSERT,
 * SELECT, UPDATE, DELETE, DROP TABLE) via regexes over the module-level
 * mockTables store. Not a real parser — intended for unit tests only.
 * @returns {{mockInstance: Object, mockConnection: Object}} Objects mimicking
 *   the DuckDBInstance/connection interface (runAndReadAll, connect, close).
 */
function createMockConnection() {
  const mockConnection = {
    runAndReadAll: async (sql) => {
      // Simulate a parser error for obviously malformed statements.
      if (/SELECT\s+FROM/i.test(sql) || /WHERE\s+INVALID/i.test(sql)) {
        throw new Error('Parser Error: syntax error at or near "FROM"');
      }
      let mockData = [];
      if (/CREATE TABLE/i.test(sql)) {
        const tableMatch = sql.match(/CREATE TABLE\s+(\w+)/i);
        if (tableMatch) {
          const tableName = tableMatch[1];
          mockTables[tableName] = [];
        }
        mockData = [{ success: true }];
      } else if (/INSERT INTO/i.test(sql)) {
        const tableMatch = sql.match(/INSERT INTO\s+(\w+)/i);
        const valuesMatch = sql.match(/VALUES\s+(.+)/is);
        if (tableMatch && valuesMatch) {
          const tableName = tableMatch[1];
          if (!mockTables[tableName])
            mockTables[tableName] = [];
          // Column list is the parenthesized group before VALUES (if any).
          const columnsMatch = sql.match(/\(([^)]+)\)\s+VALUES/i);
          const columns = columnsMatch ? columnsMatch[1].split(",").map((c) => c.trim()) : [];
          const valuesSets = valuesMatch[1].match(/\([^)]+\)/g) || [];
          valuesSets.forEach((valueSet) => {
            // Parse each "(v1, v2, ...)" tuple into JS values.
            // NOTE(review): a naive split on "," — commas inside quoted
            // strings would break this; acceptable for mock purposes.
            const values = valueSet.slice(1, -1).split(",").map((v) => {
              v = v.trim();
              if (v.startsWith("'") && v.endsWith("'")) {
                // Strip quotes and un-double escaped single quotes.
                return v.slice(1, -1).replace(/''/g, "'");
              }
              if (v === "NULL")
                return null;
              if (v === "TRUE")
                return true;
              if (v === "FALSE")
                return false;
              if (/^\d+$/.test(v))
                return parseInt(v);
              return v;
            });
            const row = {};
            columns.forEach((col, idx) => {
              row[col] = values[idx];
            });
            mockTables[tableName].push(row);
          });
        }
        mockData = [{ success: true }];
      } else if (/SELECT/i.test(sql)) {
        const fromMatch = sql.match(/FROM\s+(\w+)/i);
        if (fromMatch) {
          const tableName = fromMatch[1];
          const tableData = mockTables[tableName] || [];
          if (/COUNT\s*\(\s*\*\s*\)/i.test(sql)) {
            const aliasMatch = sql.match(/COUNT\s*\(\s*\*\s*\)\s+(?:as\s+)?(\w+)/i);
            const alias = aliasMatch ? aliasMatch[1] : "count";
            mockData = [{ [alias]: tableData.length }];
          } else if (/SELECT\s+\*/i.test(sql)) {
            // Shallow copy so callers cannot mutate the stored rows array.
            mockData = [...tableData];
          } else {
            // Project only the requested columns onto each row.
            const selectMatch = sql.match(/SELECT\s+(.+?)\s+FROM/is);
            if (selectMatch) {
              const columns = selectMatch[1].split(",").map((c) => c.trim());
              mockData = tableData.map((row) => {
                const newRow = {};
                columns.forEach((col) => {
                  if (row[col] !== void 0) {
                    newRow[col] = row[col];
                  }
                });
                return newRow;
              });
            }
          }
        } else {
          // SELECT without FROM: evaluate literal expressions.
          const literals = parseSelectLiterals(sql);
          if (Object.keys(literals).length === 0) {
            mockData = [];
          } else {
            mockData = [literals];
          }
        }
      } else if (/UPDATE/i.test(sql)) {
        // UPDATE/DELETE are acknowledged but do not modify the mock tables.
        mockData = [{ success: true }];
      } else if (/DELETE/i.test(sql)) {
        mockData = [{ success: true }];
      } else if (/DROP TABLE/i.test(sql)) {
        const tableMatch = sql.match(/DROP TABLE\s+(?:IF EXISTS\s+)?(\w+)/i);
        if (tableMatch) {
          delete mockTables[tableMatch[1]];
        }
        mockData = [{ success: true }];
      } else {
        // Unknown statement: return a generic placeholder row.
        mockData = [{ result: "mock_data" }];
      }
      return {
        getRowObjects: () => mockData
      };
    },
    close: () => {
      // Reset shared mock state so subsequent runs are isolated.
      Object.keys(mockTables).forEach((key) => delete mockTables[key]);
    }
  };
  const mockInstance = {
    connect: async () => mockConnection,
    close: () => {
      Object.keys(mockTables).forEach((key) => delete mockTables[key]);
    }
  };
  return { mockInstance, mockConnection };
}
|
|
360
|
+
|
|
361
|
+
// src/Adaptor.js
var import_language_common2 = require("@openfn/language-common");
// Module-level handles for the active MotherDuck instance and connection,
// set by createConnection and cleared by disconnect.
var instance = null;
var connection = null;
/**
 * Wraps job operations in a MotherDuck execution pipeline: a connection is
 * opened first, each operation runs in order, and the connection is closed
 * at the end. On failure the connection is torn down before rethrowing.
 * @param {...Function} operations - Operations to execute in sequence.
 * @returns {Function} Async state transformer for the OpenFn runtime.
 */
function execute(...operations) {
  const initialState = {
    references: [],
    data: null
  };
  return async (state) => {
    const pipeline = (0, import_language_common.execute)(
      createConnection,
      ...operations,
      disconnect
    );
    try {
      return await pipeline({ ...initialState, ...state });
    } catch (error) {
      // Ensure the connection is released even when an operation throws.
      disconnect(state);
      throw error;
    }
  };
}
|
|
383
|
+
/**
 * Opens the MotherDuck connection described by state.configuration and stores
 * the handles in the module-level `instance`/`connection` variables.
 * When testMode is set (or NODE_ENV === "test") a mock connection is used
 * instead and no token is required.
 * @param {Object} state - Runtime state; reads state.configuration
 *   { token, database, sessionHint, testMode }.
 * @returns {Promise<Object>} The unchanged state.
 * @throws {Error} When no token is configured (non-test mode).
 */
async function createConnection(state) {
  const {
    token,
    database,
    sessionHint,
    testMode = false
  } = state.configuration;
  if (testMode || process.env.NODE_ENV === "test") {
    const { mockInstance, mockConnection } = createMockConnection();
    instance = mockInstance;
    connection = mockConnection;
    return state;
  }
  if (!token) {
    throw new Error(
      "MotherDuck token is required. Please provide a token in the configuration."
    );
  }
  // "md:<db>" selects a MotherDuck cloud database; session_hint is an
  // optional routing hint (per the configuration schema, for read scaling).
  let databasePath = `md:${database}`;
  if (sessionHint) {
    databasePath += `?session_hint=${sessionHint}`;
  }
  const config = {
    motherduck_token: token
  };
  console.log(`Connecting to MotherDuck cloud database: ${database}`);
  instance = await import_node_api.DuckDBInstance.create(databasePath, config);
  connection = await instance.connect();
  console.log("Connected successfully to MotherDuck");
  return state;
}
|
|
414
|
+
/**
 * Closes the active MotherDuck connection and instance (when each exposes a
 * close() method) and resets the module-level handles to null.
 * @param {Object} state - Passed through unchanged.
 * @returns {Object} The same state object.
 */
function disconnect(state) {
  // Close the connection first, then the owning instance.
  for (const handle of [connection, instance]) {
    if (handle && typeof handle.close === "function") {
      handle.close();
    }
  }
  connection = null;
  instance = null;
  return state;
}
|
|
425
|
+
/**
 * Executes a SQL query against the active MotherDuck connection.
 * @param {string|Function} sqlQuery - SQL text, or a function of state that
 *   resolves to it (via expandReferences).
 * @param {Object} [options] - { writeSql } — echo the SQL in the response
 *   when truthy.
 * @returns {Function} Operation: state -> Promise<next state>.
 */
function query(sqlQuery, options = {}) {
  return (state) => {
    const resolved = (0, import_util.expandReferences)(state, sqlQuery, options);
    const resolvedQuery = resolved[0];
    const resolvedOptions = resolved[1];
    return queryHandler(
      connection,
      state,
      resolvedQuery,
      resolvedOptions,
      import_language_common.composeNextState
    );
  };
}
|
|
441
|
+
/**
 * Inserts one or more records into a table, batching large datasets into
 * multiple INSERT statements. Table and column names are validated with
 * validateSqlIdentifier; values are rendered with formatSqlValue.
 * @param {string} table - Target table name.
 * @param {Object|Object[]} records - Record or array of records to insert.
 * @param {Object} [options] - { batchSize = 1000, writeSql }.
 * @returns {Function} Operation: state -> Promise<state with
 *   data = { recordsInserted, batches }>.
 */
function insert(table, records, options = {}) {
  return async (state) => {
    const [resolvedTable, resolvedRecords, resolvedOptions] = (0, import_util.expandReferences)(
      state,
      table,
      records,
      options
    );
    const batchSize = resolvedOptions.batchSize || 1e3;
    // Single record is wrapped so the rest of the code handles one shape.
    const recordsArray = Array.isArray(resolvedRecords) ? resolvedRecords : [resolvedRecords];
    if (!recordsArray || recordsArray.length === 0) {
      console.log("No records provided; skipping insert.");
      return {
        ...state,
        data: { recordsInserted: 0, batches: 0 }
      };
    }
    validateSqlIdentifier(resolvedTable);
    const totalRecords = recordsArray.length;
    console.log(
      `Preparing to insert ${totalRecords} record${totalRecords !== 1 ? "s" : ""} into:`,
      resolvedTable
    );
    // Split into batches to keep individual INSERT statements bounded.
    const chunks = import_lodash.default.chunk(recordsArray, batchSize);
    if (chunks.length > 1) {
      console.log(
        `Large dataset detected. Splitting into ${chunks.length} batches of up to ${batchSize} records.`
      );
    }
    let currentState = state;
    let totalInserted = 0;
    for (let i = 0; i < chunks.length; i++) {
      const chunk = chunks[i];
      const batchNumber = i + 1;
      if (chunks.length > 1) {
        console.log(
          `Processing batch ${batchNumber}/${chunks.length}: ${chunk.length} records`
        );
      }
      // NOTE(review): the column list comes from the first record of each
      // chunk only — records with additional keys are silently truncated to
      // that column set; confirm callers always pass homogeneous records.
      const columns = Object.keys(chunk[0]);
      const columnsList = columns.join(", ");
      columns.forEach((col) => validateSqlIdentifier(col));
      const valuesStrings = chunk.map((record) => {
        const values = columns.map((key) => formatSqlValue(record[key]));
        return `(${values.join(", ")})`;
      });
      const sqlQuery = `INSERT INTO ${resolvedTable} (${columnsList}) VALUES ${valuesStrings.join(
        ", "
      )}`;
      currentState = await queryHandler(
        connection,
        currentState,
        sqlQuery,
        resolvedOptions,
        import_language_common.composeNextState
      );
      totalInserted += chunk.length;
    }
    if (chunks.length > 1) {
      console.log(
        `Successfully inserted ${totalInserted} records in ${chunks.length} batches.`
      );
    }
    return {
      ...currentState,
      data: { recordsInserted: totalInserted, batches: chunks.length }
    };
  };
}
|
|
510
|
+
|
|
511
|
+
// src/index.js
// The default export mirrors the Adaptor module's named exports.
var src_default = Adaptor_exports;
// Annotate the CommonJS export names for ESM import in node:
// (dead code by design: `0 &&` never executes; esbuild emits this list so
// Node's CJS named-export detection can expose these names to ESM importers).
0 && (module.exports = {
  alterState,
  arrayToString,
  as,
  combine,
  cursor,
  dataPath,
  dataValue,
  dateFns,
  each,
  execute,
  field,
  fields,
  fn,
  fnIf,
  group,
  insert,
  lastReferenceValue,
  map,
  merge,
  query,
  sourceValue,
  util
});
|