@duckdbfan/drizzle-duckdb 0.0.7 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +344 -62
- package/dist/bin/duckdb-introspect.d.ts +2 -0
- package/dist/client.d.ts +42 -0
- package/dist/columns.d.ts +100 -9
- package/dist/dialect.d.ts +27 -2
- package/dist/driver.d.ts +53 -37
- package/dist/duckdb-introspect.mjs +2890 -0
- package/dist/helpers.d.ts +1 -0
- package/dist/helpers.mjs +360 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.mjs +3015 -228
- package/dist/introspect.d.ts +74 -0
- package/dist/migrator.d.ts +3 -2
- package/dist/olap.d.ts +46 -0
- package/dist/operators.d.ts +8 -0
- package/dist/options.d.ts +7 -0
- package/dist/pool.d.ts +30 -0
- package/dist/select-builder.d.ts +31 -0
- package/dist/session.d.ts +33 -8
- package/dist/sql/ast-transformer.d.ts +33 -0
- package/dist/sql/result-mapper.d.ts +9 -0
- package/dist/sql/selection.d.ts +2 -0
- package/dist/sql/visitors/array-operators.d.ts +5 -0
- package/dist/sql/visitors/column-qualifier.d.ts +10 -0
- package/dist/sql/visitors/generate-series-alias.d.ts +13 -0
- package/dist/sql/visitors/union-with-hoister.d.ts +11 -0
- package/dist/utils.d.ts +2 -5
- package/dist/value-wrappers-core.d.ts +42 -0
- package/dist/value-wrappers.d.ts +8 -0
- package/package.json +53 -16
- package/src/bin/duckdb-introspect.ts +181 -0
- package/src/client.ts +528 -0
- package/src/columns.ts +420 -65
- package/src/dialect.ts +111 -15
- package/src/driver.ts +266 -180
- package/src/helpers.ts +18 -0
- package/src/index.ts +8 -1
- package/src/introspect.ts +935 -0
- package/src/migrator.ts +10 -5
- package/src/olap.ts +190 -0
- package/src/operators.ts +27 -0
- package/src/options.ts +25 -0
- package/src/pool.ts +274 -0
- package/src/select-builder.ts +110 -0
- package/src/session.ts +306 -66
- package/src/sql/ast-transformer.ts +170 -0
- package/src/sql/result-mapper.ts +303 -0
- package/src/sql/selection.ts +60 -0
- package/src/sql/visitors/array-operators.ts +214 -0
- package/src/sql/visitors/column-qualifier.ts +586 -0
- package/src/sql/visitors/generate-series-alias.ts +291 -0
- package/src/sql/visitors/union-with-hoister.ts +106 -0
- package/src/utils.ts +2 -222
- package/src/value-wrappers-core.ts +168 -0
- package/src/value-wrappers.ts +165 -0
|
@@ -0,0 +1,181 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { DuckDBInstance } from '@duckdb/node-api';
|
|
3
|
+
import { mkdir, writeFile } from 'node:fs/promises';
|
|
4
|
+
import path from 'node:path';
|
|
5
|
+
import process from 'node:process';
|
|
6
|
+
import { drizzle } from '../index.ts';
|
|
7
|
+
import { introspect } from '../introspect.ts';
|
|
8
|
+
|
|
9
|
+
/** Options parsed from the duckdb-introspect CLI arguments. */
interface CliOptions {
  /** DuckDB database path or connection string (e.g. ':memory:', './local.duckdb', 'md:'). */
  url?: string;
  /** Database/catalog to introspect; defaults to the current database. */
  database?: string;
  /** When true, introspect every attached database instead of just the current one. */
  allDatabases: boolean;
  /** Schemas to introspect; undefined means all non-system schemas. */
  schemas?: string[];
  /** Absolute path of the generated schema file. */
  outFile: string;
  /** Optional absolute path for the JSON metadata output. */
  outMeta?: string;
  /** Include views in the generated schema. */
  includeViews: boolean;
  /** Use the package's custom DuckDB time helpers instead of pg-core time types. */
  useCustomTimeTypes: boolean;
  /** Override import path for the DuckDB helpers in generated code. */
  importBasePath?: string;
}
|
|
20
|
+
|
|
21
|
+
function parseArgs(argv: string[]): CliOptions {
|
|
22
|
+
const options: CliOptions = {
|
|
23
|
+
outFile: path.resolve(process.cwd(), 'drizzle/schema.ts'),
|
|
24
|
+
outMeta: undefined,
|
|
25
|
+
allDatabases: false,
|
|
26
|
+
includeViews: false,
|
|
27
|
+
useCustomTimeTypes: true,
|
|
28
|
+
};
|
|
29
|
+
|
|
30
|
+
for (let i = 0; i < argv.length; i += 1) {
|
|
31
|
+
const arg = argv[i]!;
|
|
32
|
+
switch (arg) {
|
|
33
|
+
case '--url':
|
|
34
|
+
options.url = argv[++i];
|
|
35
|
+
break;
|
|
36
|
+
case '--database':
|
|
37
|
+
case '--db':
|
|
38
|
+
options.database = argv[++i];
|
|
39
|
+
break;
|
|
40
|
+
case '--all-databases':
|
|
41
|
+
options.allDatabases = true;
|
|
42
|
+
break;
|
|
43
|
+
case '--schema':
|
|
44
|
+
case '--schemas':
|
|
45
|
+
options.schemas = argv[++i]
|
|
46
|
+
?.split(',')
|
|
47
|
+
.map((s) => s.trim())
|
|
48
|
+
.filter(Boolean);
|
|
49
|
+
break;
|
|
50
|
+
case '--out':
|
|
51
|
+
case '--outFile':
|
|
52
|
+
options.outFile = path.resolve(
|
|
53
|
+
process.cwd(),
|
|
54
|
+
argv[++i] ?? 'drizzle/schema.ts'
|
|
55
|
+
);
|
|
56
|
+
break;
|
|
57
|
+
case '--out-json':
|
|
58
|
+
case '--outJson':
|
|
59
|
+
case '--json':
|
|
60
|
+
options.outMeta = path.resolve(
|
|
61
|
+
process.cwd(),
|
|
62
|
+
argv[++i] ?? 'drizzle/schema.meta.json'
|
|
63
|
+
);
|
|
64
|
+
break;
|
|
65
|
+
case '--include-views':
|
|
66
|
+
case '--includeViews':
|
|
67
|
+
options.includeViews = true;
|
|
68
|
+
break;
|
|
69
|
+
case '--use-pg-time':
|
|
70
|
+
options.useCustomTimeTypes = false;
|
|
71
|
+
break;
|
|
72
|
+
case '--import-base':
|
|
73
|
+
options.importBasePath = argv[++i];
|
|
74
|
+
break;
|
|
75
|
+
case '--help':
|
|
76
|
+
case '-h':
|
|
77
|
+
printHelp();
|
|
78
|
+
process.exit(0);
|
|
79
|
+
default:
|
|
80
|
+
if (arg.startsWith('-')) {
|
|
81
|
+
console.warn(`Unknown option ${arg}`);
|
|
82
|
+
}
|
|
83
|
+
}
|
|
84
|
+
}
|
|
85
|
+
|
|
86
|
+
return options;
|
|
87
|
+
}
|
|
88
|
+
|
|
89
|
+
/**
 * Print CLI usage to stdout.
 *
 * The template literal below is user-facing output; keep its wording in sync
 * with the option handling in parseArgs().
 */
function printHelp(): void {
  console.log(`duckdb-introspect

Usage:
  bun x duckdb-introspect --url <duckdb path|md:> [--schema my_schema] [--out ./drizzle/schema.ts]

Options:
  --url              DuckDB database path (e.g. :memory:, ./local.duckdb, md:)
  --database, --db   Database/catalog to introspect (default: current database)
  --all-databases    Introspect all attached databases (not just current)
  --schema           Comma separated schema list (defaults to all non-system schemas)
  --out              Output file (default: ./drizzle/schema.ts)
  --json             Optional JSON metadata output (default: ./drizzle/schema.meta.json)
  --include-views    Include views in the generated schema
  --use-pg-time      Use pg-core timestamp/date/time instead of DuckDB custom helpers
  --import-base      Override import path for duckdb helpers (default: package name)

Database Filtering:
  By default, only tables from the current database are introspected. This prevents
  returning tables from all attached databases in MotherDuck workspaces.

  Use --database to specify a different database, or --all-databases to introspect
  all attached databases.

Examples:
  # Local DuckDB file
  bun x duckdb-introspect --url ./my-database.duckdb --out ./schema.ts

  # MotherDuck (requires MOTHERDUCK_TOKEN env var)
  MOTHERDUCK_TOKEN=xxx bun x duckdb-introspect --url md: --database my_cloud_db --out ./schema.ts
`);
}
|
|
121
|
+
|
|
122
|
+
/**
 * CLI entry point: connect to DuckDB, run introspection, and write the
 * generated schema file (plus optional JSON metadata) to disk.
 *
 * Throws when --url is missing; all other failures propagate to the
 * top-level catch handler.
 */
async function main() {
  const options = parseArgs(process.argv.slice(2));
  if (!options.url) {
    printHelp();
    throw new Error('Missing required --url');
  }

  // MotherDuck URLs authenticate via token; local/in-memory URLs need no
  // instance options.
  const instanceOptions =
    options.url.startsWith('md:') && process.env.MOTHERDUCK_TOKEN
      ? { motherduck_token: process.env.MOTHERDUCK_TOKEN }
      : undefined;

  const instance = await DuckDBInstance.create(options.url, instanceOptions);
  const connection = await instance.connect();
  const db = drizzle(connection);

  try {
    const result = await introspect(db, {
      database: options.database,
      allDatabases: options.allDatabases,
      schemas: options.schemas,
      includeViews: options.includeViews,
      useCustomTimeTypes: options.useCustomTimeTypes,
      importBasePath: options.importBasePath,
    });

    // Ensure output directories exist before writing.
    await mkdir(path.dirname(options.outFile), { recursive: true });
    await writeFile(options.outFile, result.files.schemaTs, 'utf8');
    if (options.outMeta) {
      await mkdir(path.dirname(options.outMeta), { recursive: true });
      await writeFile(
        options.outMeta,
        JSON.stringify(result.files.metaJson, null, 2),
        'utf8'
      );
    }

    console.log(`Wrote schema to ${options.outFile}`);
    if (options.outMeta) {
      console.log(`Wrote metadata to ${options.outMeta}`);
    }
  } finally {
    // Close the connection before the instance. Method availability is
    // probed at runtime because the close API differs across
    // @duckdb/node-api releases.
    if (
      'closeSync' in connection &&
      typeof connection.closeSync === 'function'
    ) {
      connection.closeSync();
    }
    if ('closeSync' in instance && typeof instance.closeSync === 'function') {
      instance.closeSync();
    } else if ('close' in instance && typeof instance.close === 'function') {
      await instance.close();
    }
  }
}
|
|
177
|
+
|
|
178
|
+
// Run the CLI; print a concise message (no stack trace) and exit non-zero on
// failure so shell scripts can detect errors.
main().catch((err) => {
  console.error(err instanceof Error ? err.message : err);
  process.exit(1);
});
|
package/src/client.ts
ADDED
|
@@ -0,0 +1,528 @@
|
|
|
1
|
+
import {
|
|
2
|
+
listValue,
|
|
3
|
+
timestampValue,
|
|
4
|
+
type DuckDBConnection,
|
|
5
|
+
type DuckDBPreparedStatement,
|
|
6
|
+
type DuckDBValue,
|
|
7
|
+
} from '@duckdb/node-api';
|
|
8
|
+
import {
|
|
9
|
+
DUCKDB_VALUE_MARKER,
|
|
10
|
+
wrapperToNodeApiValue,
|
|
11
|
+
type AnyDuckDBValueWrapper,
|
|
12
|
+
} from './value-wrappers.ts';
|
|
13
|
+
import type { PreparedStatementCacheConfig } from './options.ts';
|
|
14
|
+
|
|
15
|
+
/** A raw DuckDB connection or a pool that hands out connections. */
export type DuckDBClientLike = DuckDBConnection | DuckDBConnectionPool;
/** A single result row keyed by column name. */
export type RowData = Record<string, unknown>;

/** Minimal connection-pool contract accepted by the execute helpers. */
export interface DuckDBConnectionPool {
  /** Borrow a connection; callers must return it via release(). */
  acquire(): Promise<DuckDBConnection>;
  /** Return a borrowed connection to the pool. */
  release(connection: DuckDBConnection): void | Promise<void>;
  /** Optionally dispose the pool and its connections. */
  close?(): Promise<void> | void;
}
|
|
23
|
+
|
|
24
|
+
export function isPool(
|
|
25
|
+
client: DuckDBClientLike
|
|
26
|
+
): client is DuckDBConnectionPool {
|
|
27
|
+
return typeof (client as DuckDBConnectionPool).acquire === 'function';
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/** Options threaded through the low-level execute helpers. */
export interface ExecuteClientOptions {
  /** Enable per-connection LRU caching of prepared statements. */
  prepareCache?: PreparedStatementCacheConfig;
}

/** Column names plus row-major values, before object mapping. */
export type ExecuteArraysResult = { columns: string[]; rows: unknown[][] };

// Internal alias: a fully materialized result set.
type MaterializedRows = ExecuteArraysResult;

// One cached prepared statement.
type PreparedCacheEntry = {
  statement: DuckDBPreparedStatement;
};

// Per-connection LRU cache. Map preserves insertion order, so the first key
// is the least recently used entry.
type PreparedStatementCache = {
  size: number;
  entries: Map<string, PreparedCacheEntry>;
};

// Global symbol so the cache attached to a connection object survives across
// module instances.
const PREPARED_CACHE = Symbol.for('drizzle-duckdb:prepared-cache');

/** Controls how string parameters that look like array literals are treated. */
export interface PrepareParamsOptions {
  /** Throw instead of parsing Postgres-style '{...}' string literals. */
  rejectStringArrayLiterals?: boolean;
  /** Invoked when a string array literal is parsed (warning hook). */
  warnOnStringArrayLiteral?: () => void;
}
|
|
53
|
+
|
|
54
|
+
function isPgArrayLiteral(value: string): boolean {
|
|
55
|
+
return value.startsWith('{') && value.endsWith('}');
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
function parsePgArrayLiteral(value: string): unknown {
|
|
59
|
+
const json = value.replace(/{/g, '[').replace(/}/g, ']');
|
|
60
|
+
|
|
61
|
+
try {
|
|
62
|
+
return JSON.parse(json);
|
|
63
|
+
} catch {
|
|
64
|
+
return value;
|
|
65
|
+
}
|
|
66
|
+
}
|
|
67
|
+
|
|
68
|
+
export function prepareParams(
|
|
69
|
+
params: unknown[],
|
|
70
|
+
options: PrepareParamsOptions = {}
|
|
71
|
+
): unknown[] {
|
|
72
|
+
return params.map((param) => {
|
|
73
|
+
if (typeof param === 'string' && param.length > 0) {
|
|
74
|
+
const firstChar = param[0];
|
|
75
|
+
const maybeArrayLiteral =
|
|
76
|
+
firstChar === '{' ||
|
|
77
|
+
firstChar === '[' ||
|
|
78
|
+
firstChar === ' ' ||
|
|
79
|
+
firstChar === '\t';
|
|
80
|
+
|
|
81
|
+
if (maybeArrayLiteral) {
|
|
82
|
+
const trimmed =
|
|
83
|
+
firstChar === '{' || firstChar === '[' ? param : param.trim();
|
|
84
|
+
|
|
85
|
+
if (trimmed && isPgArrayLiteral(trimmed)) {
|
|
86
|
+
if (options.rejectStringArrayLiterals) {
|
|
87
|
+
throw new Error(
|
|
88
|
+
'Stringified array literals are not supported. Use duckDbList()/duckDbArray() or pass native arrays.'
|
|
89
|
+
);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
if (options.warnOnStringArrayLiteral) {
|
|
93
|
+
options.warnOnStringArrayLiteral();
|
|
94
|
+
}
|
|
95
|
+
return parsePgArrayLiteral(trimmed);
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
}
|
|
99
|
+
return param;
|
|
100
|
+
});
|
|
101
|
+
}
|
|
102
|
+
|
|
103
|
+
/**
|
|
104
|
+
* Convert a value to DuckDB Node API value.
|
|
105
|
+
* Handles wrapper types and plain values for backward compatibility.
|
|
106
|
+
* Optimized for the common case (primitives) in the hot path.
|
|
107
|
+
*/
|
|
108
|
+
function toNodeApiValue(value: unknown): DuckDBValue {
|
|
109
|
+
// Fast path 1: null/undefined
|
|
110
|
+
if (value == null) return null;
|
|
111
|
+
|
|
112
|
+
// Fast path 2: primitives (most common)
|
|
113
|
+
const t = typeof value;
|
|
114
|
+
if (t === 'string' || t === 'number' || t === 'bigint' || t === 'boolean') {
|
|
115
|
+
return value as DuckDBValue;
|
|
116
|
+
}
|
|
117
|
+
|
|
118
|
+
// Fast path 3: pre-wrapped DuckDB value (Symbol check ~2-3ns)
|
|
119
|
+
if (t === 'object' && DUCKDB_VALUE_MARKER in (value as object)) {
|
|
120
|
+
return wrapperToNodeApiValue(
|
|
121
|
+
value as AnyDuckDBValueWrapper,
|
|
122
|
+
toNodeApiValue
|
|
123
|
+
);
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
// Legacy path: plain arrays (backward compatibility)
|
|
127
|
+
if (Array.isArray(value)) {
|
|
128
|
+
return listValue(value.map((inner) => toNodeApiValue(inner)));
|
|
129
|
+
}
|
|
130
|
+
|
|
131
|
+
// Date conversion to timestamp
|
|
132
|
+
if (value instanceof Date) {
|
|
133
|
+
return timestampValue(BigInt(value.getTime()) * 1000n);
|
|
134
|
+
}
|
|
135
|
+
|
|
136
|
+
// Fallback for unknown objects
|
|
137
|
+
return value as DuckDBValue;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
function deduplicateColumns(columns: string[]): string[] {
|
|
141
|
+
const counts = new Map<string, number>();
|
|
142
|
+
let hasDuplicates = false;
|
|
143
|
+
|
|
144
|
+
for (const column of columns) {
|
|
145
|
+
const next = (counts.get(column) ?? 0) + 1;
|
|
146
|
+
counts.set(column, next);
|
|
147
|
+
if (next > 1) {
|
|
148
|
+
hasDuplicates = true;
|
|
149
|
+
break;
|
|
150
|
+
}
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
if (!hasDuplicates) {
|
|
154
|
+
return columns;
|
|
155
|
+
}
|
|
156
|
+
|
|
157
|
+
counts.clear();
|
|
158
|
+
return columns.map((column) => {
|
|
159
|
+
const count = counts.get(column) ?? 0;
|
|
160
|
+
counts.set(column, count + 1);
|
|
161
|
+
return count === 0 ? column : `${column}_${count}`;
|
|
162
|
+
});
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
function destroyPreparedStatement(entry: PreparedCacheEntry | undefined): void {
|
|
166
|
+
if (!entry) return;
|
|
167
|
+
try {
|
|
168
|
+
entry.statement.destroySync();
|
|
169
|
+
} catch {
|
|
170
|
+
// Ignore cleanup errors
|
|
171
|
+
}
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
function getPreparedCache(
|
|
175
|
+
connection: DuckDBConnection,
|
|
176
|
+
size: number
|
|
177
|
+
): PreparedStatementCache {
|
|
178
|
+
const store = connection as unknown as Record<
|
|
179
|
+
symbol,
|
|
180
|
+
PreparedStatementCache | undefined
|
|
181
|
+
>;
|
|
182
|
+
const existing = store[PREPARED_CACHE];
|
|
183
|
+
if (existing) {
|
|
184
|
+
existing.size = size;
|
|
185
|
+
return existing;
|
|
186
|
+
}
|
|
187
|
+
|
|
188
|
+
const cache: PreparedStatementCache = { size, entries: new Map() };
|
|
189
|
+
store[PREPARED_CACHE] = cache;
|
|
190
|
+
return cache;
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
function evictOldest(cache: PreparedStatementCache): void {
|
|
194
|
+
const oldest = cache.entries.keys().next();
|
|
195
|
+
if (!oldest.done) {
|
|
196
|
+
const key = oldest.value as string;
|
|
197
|
+
const entry = cache.entries.get(key);
|
|
198
|
+
cache.entries.delete(key);
|
|
199
|
+
destroyPreparedStatement(entry);
|
|
200
|
+
}
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
function evictCacheEntry(cache: PreparedStatementCache, key: string): void {
|
|
204
|
+
const entry = cache.entries.get(key);
|
|
205
|
+
cache.entries.delete(key);
|
|
206
|
+
destroyPreparedStatement(entry);
|
|
207
|
+
}
|
|
208
|
+
|
|
209
|
+
async function getOrPrepareStatement(
|
|
210
|
+
connection: DuckDBConnection,
|
|
211
|
+
query: string,
|
|
212
|
+
cacheConfig: PreparedStatementCacheConfig
|
|
213
|
+
): Promise<DuckDBPreparedStatement> {
|
|
214
|
+
const cache = getPreparedCache(connection, cacheConfig.size);
|
|
215
|
+
const cached = cache.entries.get(query);
|
|
216
|
+
if (cached) {
|
|
217
|
+
cache.entries.delete(query);
|
|
218
|
+
cache.entries.set(query, cached);
|
|
219
|
+
return cached.statement;
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
const statement = await connection.prepare(query);
|
|
223
|
+
cache.entries.set(query, { statement });
|
|
224
|
+
|
|
225
|
+
while (cache.entries.size > cache.size) {
|
|
226
|
+
evictOldest(cache);
|
|
227
|
+
}
|
|
228
|
+
|
|
229
|
+
return statement;
|
|
230
|
+
}
|
|
231
|
+
|
|
232
|
+
async function materializeResultRows(result: {
|
|
233
|
+
getRowsJS: () => Promise<unknown[][] | undefined>;
|
|
234
|
+
columnNames: () => string[];
|
|
235
|
+
deduplicatedColumnNames?: () => string[];
|
|
236
|
+
}): Promise<MaterializedRows> {
|
|
237
|
+
const rows = (await result.getRowsJS()) ?? [];
|
|
238
|
+
const baseColumns =
|
|
239
|
+
typeof result.deduplicatedColumnNames === 'function'
|
|
240
|
+
? result.deduplicatedColumnNames()
|
|
241
|
+
: result.columnNames();
|
|
242
|
+
const columns =
|
|
243
|
+
typeof result.deduplicatedColumnNames === 'function'
|
|
244
|
+
? baseColumns
|
|
245
|
+
: deduplicateColumns(baseColumns);
|
|
246
|
+
|
|
247
|
+
return { columns, rows };
|
|
248
|
+
}
|
|
249
|
+
|
|
250
|
+
/**
 * Execute a query and fully materialize the result.
 *
 * Pool clients: a connection is acquired, the call recurses with it, and the
 * connection is always released in a finally block.
 *
 * When a prepared-statement cache is configured (and the connection supports
 * prepare), the statement is fetched through the LRU cache, bound, and run;
 * on any failure the statement is evicted and destroyed so a broken
 * statement is never reused.
 */
async function materializeRows(
  client: DuckDBClientLike,
  query: string,
  params: unknown[],
  options: ExecuteClientOptions = {}
): Promise<MaterializedRows> {
  if (isPool(client)) {
    const connection = await client.acquire();
    try {
      return await materializeRows(connection, query, params, options);
    } finally {
      await client.release(connection);
    }
  }

  // Convert bind params up front; undefined means "no parameters".
  const values =
    params.length > 0
      ? (params.map((param) => toNodeApiValue(param)) as DuckDBValue[])
      : undefined;

  const connection = client as DuckDBConnection;

  if (options.prepareCache && typeof connection.prepare === 'function') {
    const cache = getPreparedCache(connection, options.prepareCache.size);
    try {
      const statement = await getOrPrepareStatement(
        connection,
        query,
        options.prepareCache
      );
      if (values) {
        statement.bind(values as DuckDBValue[]);
      } else {
        // Clear any bindings left over from a previous cached use.
        statement.clearBindings?.();
      }
      const result = await statement.run();
      // Refresh the statement's LRU position after a successful run.
      cache.entries.delete(query);
      cache.entries.set(query, { statement });
      return await materializeResultRows(result);
    } catch (error) {
      // Evict (and destroy) the statement so a failed one is not reused.
      evictCacheEntry(cache, query);
      throw error;
    }
  }

  const result = await connection.run(query, values);
  return await materializeResultRows(result);
}
|
|
298
|
+
|
|
299
|
+
function clearPreparedCache(connection: DuckDBConnection): void {
|
|
300
|
+
const store = connection as unknown as Record<
|
|
301
|
+
symbol,
|
|
302
|
+
PreparedStatementCache | undefined
|
|
303
|
+
>;
|
|
304
|
+
const cache = store[PREPARED_CACHE];
|
|
305
|
+
if (!cache) return;
|
|
306
|
+
for (const entry of cache.entries.values()) {
|
|
307
|
+
destroyPreparedStatement(entry);
|
|
308
|
+
}
|
|
309
|
+
cache.entries.clear();
|
|
310
|
+
}
|
|
311
|
+
|
|
312
|
+
function mapRowsToObjects(columns: string[], rows: unknown[][]): RowData[] {
|
|
313
|
+
return rows.map((vals) => {
|
|
314
|
+
const obj: Record<string, unknown> = {};
|
|
315
|
+
columns.forEach((col, idx) => {
|
|
316
|
+
obj[col] = vals[idx];
|
|
317
|
+
});
|
|
318
|
+
return obj;
|
|
319
|
+
}) as RowData[];
|
|
320
|
+
}
|
|
321
|
+
|
|
322
|
+
export async function closeClientConnection(
|
|
323
|
+
connection: DuckDBConnection
|
|
324
|
+
): Promise<void> {
|
|
325
|
+
clearPreparedCache(connection);
|
|
326
|
+
|
|
327
|
+
if ('close' in connection && typeof connection.close === 'function') {
|
|
328
|
+
await connection.close();
|
|
329
|
+
return;
|
|
330
|
+
}
|
|
331
|
+
|
|
332
|
+
if ('closeSync' in connection && typeof connection.closeSync === 'function') {
|
|
333
|
+
connection.closeSync();
|
|
334
|
+
return;
|
|
335
|
+
}
|
|
336
|
+
|
|
337
|
+
if (
|
|
338
|
+
'disconnectSync' in connection &&
|
|
339
|
+
typeof connection.disconnectSync === 'function'
|
|
340
|
+
) {
|
|
341
|
+
connection.disconnectSync();
|
|
342
|
+
}
|
|
343
|
+
}
|
|
344
|
+
|
|
345
|
+
export async function executeOnClient(
|
|
346
|
+
client: DuckDBClientLike,
|
|
347
|
+
query: string,
|
|
348
|
+
params: unknown[],
|
|
349
|
+
options: ExecuteClientOptions = {}
|
|
350
|
+
): Promise<RowData[]> {
|
|
351
|
+
const { columns, rows } = await materializeRows(
|
|
352
|
+
client,
|
|
353
|
+
query,
|
|
354
|
+
params,
|
|
355
|
+
options
|
|
356
|
+
);
|
|
357
|
+
|
|
358
|
+
if (!rows || rows.length === 0) {
|
|
359
|
+
return [];
|
|
360
|
+
}
|
|
361
|
+
|
|
362
|
+
return mapRowsToObjects(columns, rows);
|
|
363
|
+
}
|
|
364
|
+
|
|
365
|
+
export async function executeArraysOnClient(
|
|
366
|
+
client: DuckDBClientLike,
|
|
367
|
+
query: string,
|
|
368
|
+
params: unknown[],
|
|
369
|
+
options: ExecuteClientOptions = {}
|
|
370
|
+
): Promise<ExecuteArraysResult> {
|
|
371
|
+
return await materializeRows(client, query, params, options);
|
|
372
|
+
}
|
|
373
|
+
|
|
374
|
+
/** Options for the batched streaming helpers. */
export interface ExecuteInBatchesOptions {
  /** Maximum rows per yielded batch; non-positive/absent falls back to 100_000. */
  rowsPerChunk?: number;
}

/** One raw streamed batch: column names plus row-major values. */
export interface ExecuteBatchesRawChunk {
  columns: string[];
  rows: unknown[][];
}
|
|
382
|
+
|
|
383
|
+
/**
 * Stream results from DuckDB in batches to avoid fully materializing rows in JS.
 *
 * Pool clients are unwrapped (acquire → recurse → release). Streamed chunks
 * are mapped to row objects and re-buffered into batches of `rowsPerChunk`
 * (default 100_000) before being yielded.
 */
export async function* executeInBatches(
  client: DuckDBClientLike,
  query: string,
  params: unknown[],
  options: ExecuteInBatchesOptions = {}
): AsyncGenerator<RowData[], void, void> {
  if (isPool(client)) {
    const connection = await client.acquire();
    try {
      yield* executeInBatches(connection, query, params, options);
      return;
    } finally {
      await client.release(connection);
    }
  }

  const rowsPerChunk =
    options.rowsPerChunk && options.rowsPerChunk > 0
      ? options.rowsPerChunk
      : 100_000;
  const values =
    params.length > 0
      ? (params.map((param) => toNodeApiValue(param)) as DuckDBValue[])
      : undefined;

  const result = await client.stream(query, values);
  // Prefer the node-api's own deduplicated names when available.
  const rawColumns =
    typeof result.deduplicatedColumnNames === 'function'
      ? result.deduplicatedColumnNames()
      : result.columnNames();
  const columns =
    typeof result.deduplicatedColumnNames === 'function'
      ? rawColumns
      : deduplicateColumns(rawColumns);

  let buffer: RowData[] = [];

  // NOTE(review): relies on result.yieldRowsJs() yielding row chunks —
  // confirm the method name against the installed @duckdb/node-api version.
  for await (const chunk of result.yieldRowsJs()) {
    const objects = mapRowsToObjects(columns, chunk);
    for (const row of objects) {
      buffer.push(row);
      if (buffer.length >= rowsPerChunk) {
        yield buffer;
        buffer = [];
      }
    }
  }

  // Flush the final partial batch.
  if (buffer.length > 0) {
    yield buffer;
  }
}
|
|
438
|
+
|
|
439
|
+
/**
 * Raw-variant of executeInBatches: yields {columns, rows} batches of
 * row-major values without per-row object creation. Pool clients are
 * unwrapped (acquire → recurse → release); batches hold at most
 * `rowsPerChunk` rows (default 100_000).
 */
export async function* executeInBatchesRaw(
  client: DuckDBClientLike,
  query: string,
  params: unknown[],
  options: ExecuteInBatchesOptions = {}
): AsyncGenerator<ExecuteBatchesRawChunk, void, void> {
  if (isPool(client)) {
    const connection = await client.acquire();
    try {
      yield* executeInBatchesRaw(connection, query, params, options);
      return;
    } finally {
      await client.release(connection);
    }
  }

  const rowsPerChunk =
    options.rowsPerChunk && options.rowsPerChunk > 0
      ? options.rowsPerChunk
      : 100_000;

  const values =
    params.length > 0
      ? (params.map((param) => toNodeApiValue(param)) as DuckDBValue[])
      : undefined;

  const result = await client.stream(query, values);
  // Prefer the node-api's own deduplicated names when available.
  const rawColumns =
    typeof result.deduplicatedColumnNames === 'function'
      ? result.deduplicatedColumnNames()
      : result.columnNames();
  const columns =
    typeof result.deduplicatedColumnNames === 'function'
      ? rawColumns
      : deduplicateColumns(rawColumns);

  let buffer: unknown[][] = [];

  // NOTE(review): relies on result.yieldRowsJs() yielding row chunks —
  // confirm the method name against the installed @duckdb/node-api version.
  for await (const chunk of result.yieldRowsJs()) {
    for (const row of chunk) {
      buffer.push(row as unknown[]);
      if (buffer.length >= rowsPerChunk) {
        yield { columns, rows: buffer };
        buffer = [];
      }
    }
  }

  // Flush the final partial batch.
  if (buffer.length > 0) {
    yield { columns, rows: buffer };
  }
}
|
|
491
|
+
|
|
492
|
+
/**
 * Return columnar results when the underlying node-api exposes an Arrow/columnar API.
 * Falls back to column-major JS arrays when Arrow is unavailable.
 */
export async function executeArrowOnClient(
  client: DuckDBClientLike,
  query: string,
  params: unknown[]
): Promise<unknown> {
  if (isPool(client)) {
    const connection = await client.acquire();
    try {
      return await executeArrowOnClient(connection, query, params);
    } finally {
      await client.release(connection);
    }
  }

  const values =
    params.length > 0
      ? (params.map((param) => toNodeApiValue(param)) as DuckDBValue[])
      : undefined;
  const result = await client.run(query, values);

  // Runtime detection for Arrow API support (optional method, not in base type)
  const maybeArrow =
    (result as unknown as { toArrow?: () => Promise<unknown> }).toArrow ??
    (result as unknown as { getArrowTable?: () => Promise<unknown> })
      .getArrowTable;

  if (typeof maybeArrow === 'function') {
    // `call(result)` restores the receiver since the method was detached above.
    return await maybeArrow.call(result);
  }

  // Fallback: return column-major JS arrays to avoid per-row object creation.
  return result.getColumnsObjectJS();
}
|