@leonardovida-md/drizzle-neo-duckdb 1.1.1 → 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -85
- package/dist/client.d.ts +15 -1
- package/dist/columns.d.ts +3 -2
- package/dist/driver.d.ts +7 -3
- package/dist/duckdb-introspect.mjs +595 -50
- package/dist/helpers.mjs +31 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.mjs +621 -50
- package/dist/options.d.ts +10 -0
- package/dist/session.d.ts +11 -5
- package/dist/sql/query-rewriters.d.ts +12 -0
- package/package.json +1 -1
- package/src/client.ts +300 -40
- package/src/columns.ts +51 -4
- package/src/driver.ts +30 -5
- package/src/index.ts +1 -0
- package/src/options.ts +40 -0
- package/src/session.ts +128 -27
- package/src/sql/query-rewriters.ts +503 -0
package/dist/options.d.ts
ADDED

@@ -0,0 +1,10 @@
+export type RewriteArraysMode = 'auto' | 'always' | 'never';
+export type RewriteArraysOption = boolean | RewriteArraysMode;
+export declare function resolveRewriteArraysOption(value?: RewriteArraysOption): RewriteArraysMode;
+export type PrepareCacheOption = boolean | number | {
+    size?: number;
+};
+export interface PreparedStatementCacheConfig {
+    size: number;
+}
+export declare function resolvePrepareCacheOption(option?: PrepareCacheOption): PreparedStatementCacheConfig | undefined;
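These declarations fix the option shapes but not the normalization rules, which live in the implementation. A minimal sketch of how the two resolvers could be exercised; the mappings noted in comments are assumptions, not confirmed by this diff, and the root import path assumes the package re-exports them (index.d.ts gains one export line in this release):

import {
  resolvePrepareCacheOption,
  resolveRewriteArraysOption,
} from '@leonardovida-md/drizzle-neo-duckdb';

// RewriteArraysOption widens boolean into the three-mode union;
// presumably true/false map to 'always'/'never' and undefined to 'auto'.
const mode: 'auto' | 'always' | 'never' = resolveRewriteArraysOption(true);

// PrepareCacheOption accepts boolean | number | { size?: number } and is
// normalized to { size: number }, or undefined when caching is disabled.
const disabled = resolvePrepareCacheOption(false);         // presumably undefined
const sized = resolvePrepareCacheOption(64);               // presumably { size: 64 }
const explicit = resolvePrepareCacheOption({ size: 128 }); // presumably { size: 128 }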
package/dist/session.d.ts
CHANGED

@@ -9,7 +9,8 @@ import { type Query, SQL } from 'drizzle-orm/sql/sql';
 import type { Assume } from 'drizzle-orm/utils';
 import type { DuckDBDialect } from './dialect.ts';
 import type { DuckDBClientLike, RowData } from './client.ts';
-import { type ExecuteInBatchesOptions } from './client.ts';
+import { type ExecuteBatchesRawChunk, type ExecuteInBatchesOptions } from './client.ts';
+import type { PreparedStatementCacheConfig, RewriteArraysMode } from './options.ts';
 export type { DuckDBClientLike, RowData } from './client.ts';
 export declare class DuckDBPreparedQuery<T extends PreparedQueryConfig> extends PgPreparedQuery<T> {
     private client;
@@ -20,19 +21,21 @@ export declare class DuckDBPreparedQuery<T extends PreparedQueryConfig> extends
     private fields;
     private _isResponseInArrayMode;
     private customResultMapper;
-    private
+    private rewriteArraysMode;
     private rejectStringArrayLiterals;
+    private prepareCache;
     private warnOnStringArrayLiteral?;
     static readonly [entityKind]: string;
-    constructor(client: DuckDBClientLike, dialect: DuckDBDialect, queryString: string, params: unknown[], logger: Logger, fields: SelectedFieldsOrdered | undefined, _isResponseInArrayMode: boolean, customResultMapper: ((rows: unknown[][]) => T['execute']) | undefined,
+    constructor(client: DuckDBClientLike, dialect: DuckDBDialect, queryString: string, params: unknown[], logger: Logger, fields: SelectedFieldsOrdered | undefined, _isResponseInArrayMode: boolean, customResultMapper: ((rows: unknown[][]) => T['execute']) | undefined, rewriteArraysMode: RewriteArraysMode, rejectStringArrayLiterals: boolean, prepareCache: PreparedStatementCacheConfig | undefined, warnOnStringArrayLiteral?: ((sql: string) => void) | undefined);
     execute(placeholderValues?: Record<string, unknown> | undefined): Promise<T['execute']>;
     all(placeholderValues?: Record<string, unknown> | undefined): Promise<T['all']>;
     isResponseInArrayMode(): boolean;
 }
 export interface DuckDBSessionOptions {
     logger?: Logger;
-    rewriteArrays?:
+    rewriteArrays?: RewriteArraysMode;
     rejectStringArrayLiterals?: boolean;
+    prepareCache?: PreparedStatementCacheConfig;
 }
 export declare class DuckDBSession<TFullSchema extends Record<string, unknown> = Record<string, never>, TSchema extends TablesRelationalConfig = Record<string, never>> extends PgSession<DuckDBQueryResultHKT, TFullSchema, TSchema> {
     private client;
@@ -41,8 +44,9 @@ export declare class DuckDBSession<TFullSchema extends Record<string, unknown> =
     static readonly [entityKind]: string;
     protected dialect: DuckDBDialect;
     private logger;
-    private
+    private rewriteArraysMode;
     private rejectStringArrayLiterals;
+    private prepareCache;
     private hasWarnedArrayLiteral;
     private rollbackOnly;
     constructor(client: DuckDBClientLike, dialect: DuckDBDialect, schema: RelationalSchemaConfig<TSchema> | undefined, options?: DuckDBSessionOptions);
@@ -52,6 +56,7 @@ export declare class DuckDBSession<TFullSchema extends Record<string, unknown> =
     transaction<T>(transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>, config?: PgTransactionConfig): Promise<T>;
     private warnOnStringArrayLiteral;
     executeBatches<T extends RowData = RowData>(query: SQL, options?: ExecuteInBatchesOptions): AsyncGenerator<GenericRowData<T>[], void, void>;
+    executeBatchesRaw(query: SQL, options?: ExecuteInBatchesOptions): AsyncGenerator<ExecuteBatchesRawChunk, void, void>;
     executeArrow(query: SQL): Promise<unknown>;
     markRollbackOnly(): void;
     isRollbackOnly(): boolean;
@@ -62,6 +67,7 @@ export declare class DuckDBTransaction<TFullSchema extends Record<string, unknow
     getTransactionConfigSQL(config: PgTransactionConfig): SQL;
     setTransaction(config: PgTransactionConfig): Promise<void>;
     executeBatches<T extends RowData = RowData>(query: SQL, options?: ExecuteInBatchesOptions): AsyncGenerator<GenericRowData<T>[], void, void>;
+    executeBatchesRaw(query: SQL, options?: ExecuteInBatchesOptions): AsyncGenerator<ExecuteBatchesRawChunk, void, void>;
     executeArrow(query: SQL): Promise<unknown>;
     transaction<T>(transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>): Promise<T>;
     private runNestedWithoutSavepoint;
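executeBatchesRaw mirrors executeBatches but yields { columns, rows } chunks of positional arrays instead of per-row objects, skipping the object-mapping cost on wide results. A sketch of consuming it, assuming you already hold a DuckDBSession — how you reach the session depends on your driver wiring, so `session` here is illustrative:

import { sql } from 'drizzle-orm';

for await (const chunk of session.executeBatchesRaw(
  sql`select * from events`,
  { rowsPerChunk: 50_000 }
)) {
  // chunk: { columns: string[]; rows: unknown[][] }
  console.log(chunk.columns.length, chunk.rows.length);
}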
package/dist/sql/query-rewriters.d.ts
CHANGED

@@ -1,2 +1,14 @@
 export declare function scrubForRewrite(query: string): string;
 export declare function adaptArrayOperators(query: string): string;
+/**
+ * Qualifies unqualified column references in JOIN ON clauses.
+ *
+ * Transforms patterns like:
+ * `left join "b" on "col" = "col"`
+ * To:
+ * `left join "b" on "a"."col" = "b"."col"`
+ *
+ * This fixes the issue where drizzle-orm generates unqualified column
+ * references when joining CTEs with eq().
+ */
+export declare function qualifyJoinColumns(query: string): string;
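The doc comment above gives the whole contract; as a worked example — the root re-export of qualifyJoinColumns is an assumption (the declaration shown lives in dist/sql/query-rewriters.d.ts), and the rewriter presumably infers the left-hand table from the surrounding FROM clause:

import { qualifyJoinColumns } from '@leonardovida-md/drizzle-neo-duckdb';

const query = 'select * from "a" left join "b" on "col" = "col"';
console.log(qualifyJoinColumns(query));
// expected: select * from "a" left join "b" on "a"."col" = "b"."col"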
package/package.json
CHANGED

@@ -3,7 +3,7 @@
   "module": "./dist/index.mjs",
   "main": "./dist/index.mjs",
   "types": "./dist/index.d.ts",
-  "version": "1.1.1",
+  "version": "1.1.3",
   "description": "A drizzle ORM client for use with DuckDB. Based on drizzle's Postgres client.",
   "type": "module",
   "scripts": {
package/src/client.ts
CHANGED

@@ -2,6 +2,7 @@ import {
   listValue,
   timestampValue,
   type DuckDBConnection,
+  type DuckDBPreparedStatement,
   type DuckDBValue,
 } from '@duckdb/node-api';
 import {
@@ -9,6 +10,7 @@ import {
   wrapperToNodeApiValue,
   type AnyDuckDBValueWrapper,
 } from './value-wrappers.ts';
+import type { PreparedStatementCacheConfig } from './options.ts';

 export type DuckDBClientLike = DuckDBConnection | DuckDBConnectionPool;
 export type RowData = Record<string, unknown>;
@@ -25,6 +27,25 @@ export function isPool(
   return typeof (client as DuckDBConnectionPool).acquire === 'function';
 }

+export interface ExecuteClientOptions {
+  prepareCache?: PreparedStatementCacheConfig;
+}
+
+export type ExecuteArraysResult = { columns: string[]; rows: unknown[][] };
+
+type MaterializedRows = ExecuteArraysResult;
+
+type PreparedCacheEntry = {
+  statement: DuckDBPreparedStatement;
+};
+
+type PreparedStatementCache = {
+  size: number;
+  entries: Map<string, PreparedCacheEntry>;
+};
+
+const PREPARED_CACHE = Symbol.for('drizzle-duckdb:prepared-cache');
+
 export interface PrepareParamsOptions {
   rejectStringArrayLiterals?: boolean;
   warnOnStringArrayLiteral?: () => void;
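PREPARED_CACHE is created with Symbol.for, so the cache can be stashed on the connection object itself: each connection carries its own cache for its lifetime, and duplicate copies of the module resolve to the same registry key instead of each minting a private symbol. The technique in isolation, with hypothetical names:

// Hypothetical standalone demo of a symbol-keyed stash on a host object.
const STASH_KEY = Symbol.for('demo:stash');

function setStash<T>(host: object, value: T): void {
  (host as Record<symbol, T>)[STASH_KEY] = value;
}

function getStash<T>(host: object): T | undefined {
  return (host as Record<symbol, T | undefined>)[STASH_KEY];
}

const conn = {}; // stand-in for a DuckDBConnection
setStash(conn, { size: 32, entries: new Map<string, unknown>() });
console.log(getStash(conn)); // { size: 32, entries: Map(0) {} }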
@@ -49,19 +70,30 @@ export function prepareParams(
   options: PrepareParamsOptions = {}
 ): unknown[] {
   return params.map((param) => {
-    if (typeof param === 'string') {
-      const
-
-
-
-
-
-
-
-
-
+    if (typeof param === 'string' && param.length > 0) {
+      const firstChar = param[0];
+      const maybeArrayLiteral =
+        firstChar === '{' ||
+        firstChar === '[' ||
+        firstChar === ' ' ||
+        firstChar === '\t';
+
+      if (maybeArrayLiteral) {
+        const trimmed =
+          firstChar === '{' || firstChar === '[' ? param : param.trim();
+
+        if (trimmed && isPgArrayLiteral(trimmed)) {
+          if (options.rejectStringArrayLiterals) {
+            throw new Error(
+              'Stringified array literals are not supported. Use duckDbList()/duckDbArray() or pass native arrays.'
+            );
+          }
+
+          if (options.warnOnStringArrayLiteral) {
+            options.warnOnStringArrayLiteral();
+          }
+          return parsePgArrayLiteral(trimmed);
+        }
       }
-      return parsePgArrayLiteral(trimmed);
     }
     return param;
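The rewrite keeps the common case cheap: a first-character test ('{', '[', space, tab) screens out most strings before paying for a trim and a full array-literal parse. Observable behavior in a sketch — the call shape is inferred from the visible signature tail, and the element types produced by the internal parsePgArrayLiteral are not visible in this diff, so the parsed values below are assumptions:

import { prepareParams } from '@leonardovida-md/drizzle-neo-duckdb';

// An ordinary string is returned untouched; the leading-character check
// rules it out without trimming or parsing.
prepareParams(['hello'], {});   // ['hello']

// A PG-style array literal is parsed into a real array
// (element typing depends on parsePgArrayLiteral).
prepareParams(['{1,2,3}'], {}); // e.g. [[1, 2, 3]]

// Opting into strict mode turns such literals into an error instead.
prepareParams(['{1,2,3}'], { rejectStringArrayLiterals: true });
// throws: Stringified array literals are not supported. …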
@@ -106,14 +138,177 @@ function toNodeApiValue(value: unknown): DuckDBValue {
 }

 function deduplicateColumns(columns: string[]): string[] {
-  const
-
-
-
-
+  const counts = new Map<string, number>();
+  let hasDuplicates = false;
+
+  for (const column of columns) {
+    const next = (counts.get(column) ?? 0) + 1;
+    counts.set(column, next);
+    if (next > 1) {
+      hasDuplicates = true;
+      break;
+    }
+  }
+
+  if (!hasDuplicates) {
+    return columns;
+  }
+
+  counts.clear();
+  return columns.map((column) => {
+    const count = counts.get(column) ?? 0;
+    counts.set(column, count + 1);
+    return count === 0 ? column : `${column}_${count}`;
   });
 }

+function destroyPreparedStatement(entry: PreparedCacheEntry | undefined): void {
+  if (!entry) return;
+  try {
+    entry.statement.destroySync();
+  } catch {
+    // Ignore cleanup errors
+  }
+}
+
+function getPreparedCache(
+  connection: DuckDBConnection,
+  size: number
+): PreparedStatementCache {
+  const store = connection as unknown as Record<
+    symbol,
+    PreparedStatementCache | undefined
+  >;
+  const existing = store[PREPARED_CACHE];
+  if (existing) {
+    existing.size = size;
+    return existing;
+  }
+
+  const cache: PreparedStatementCache = { size, entries: new Map() };
+  store[PREPARED_CACHE] = cache;
+  return cache;
+}
+
+function evictOldest(cache: PreparedStatementCache): void {
+  const oldest = cache.entries.keys().next();
+  if (!oldest.done) {
+    const key = oldest.value as string;
+    const entry = cache.entries.get(key);
+    cache.entries.delete(key);
+    destroyPreparedStatement(entry);
+  }
+}
+
+function evictCacheEntry(cache: PreparedStatementCache, key: string): void {
+  const entry = cache.entries.get(key);
+  cache.entries.delete(key);
+  destroyPreparedStatement(entry);
+}
+
+async function getOrPrepareStatement(
+  connection: DuckDBConnection,
+  query: string,
+  cacheConfig: PreparedStatementCacheConfig
+): Promise<DuckDBPreparedStatement> {
+  const cache = getPreparedCache(connection, cacheConfig.size);
+  const cached = cache.entries.get(query);
+  if (cached) {
+    cache.entries.delete(query);
+    cache.entries.set(query, cached);
+    return cached.statement;
+  }
+
+  const statement = await connection.prepare(query);
+  cache.entries.set(query, { statement });
+
+  while (cache.entries.size > cache.size) {
+    evictOldest(cache);
+  }
+
+  return statement;
+}
+
+async function materializeResultRows(result: {
+  getRowsJS: () => Promise<unknown[][] | undefined>;
+  columnNames: () => string[];
+  deduplicatedColumnNames?: () => string[];
+}): Promise<MaterializedRows> {
+  const rows = (await result.getRowsJS()) ?? [];
+  const baseColumns =
+    typeof result.deduplicatedColumnNames === 'function'
+      ? result.deduplicatedColumnNames()
+      : result.columnNames();
+  const columns =
+    typeof result.deduplicatedColumnNames === 'function'
+      ? baseColumns
+      : deduplicateColumns(baseColumns);
+
+  return { columns, rows };
+}
+
+async function materializeRows(
+  client: DuckDBClientLike,
+  query: string,
+  params: unknown[],
+  options: ExecuteClientOptions = {}
+): Promise<MaterializedRows> {
+  if (isPool(client)) {
+    const connection = await client.acquire();
+    try {
+      return await materializeRows(connection, query, params, options);
+    } finally {
+      await client.release(connection);
+    }
+  }
+
+  const values =
+    params.length > 0
+      ? (params.map((param) => toNodeApiValue(param)) as DuckDBValue[])
+      : undefined;
+
+  const connection = client as DuckDBConnection;
+
+  if (options.prepareCache && typeof connection.prepare === 'function') {
+    const cache = getPreparedCache(connection, options.prepareCache.size);
+    try {
+      const statement = await getOrPrepareStatement(
+        connection,
+        query,
+        options.prepareCache
+      );
+      if (values) {
+        statement.bind(values as DuckDBValue[]);
+      } else {
+        statement.clearBindings?.();
+      }
+      const result = await statement.run();
+      cache.entries.delete(query);
+      cache.entries.set(query, { statement });
+      return await materializeResultRows(result);
+    } catch (error) {
+      evictCacheEntry(cache, query);
+      throw error;
+    }
+  }
+
+  const result = await connection.run(query, values);
+  return await materializeResultRows(result);
+}
+
+function clearPreparedCache(connection: DuckDBConnection): void {
+  const store = connection as unknown as Record<
+    symbol,
+    PreparedStatementCache | undefined
+  >;
+  const cache = store[PREPARED_CACHE];
+  if (!cache) return;
+  for (const entry of cache.entries.values()) {
+    destroyPreparedStatement(entry);
+  }
+  cache.entries.clear();
+}
+
 function mapRowsToObjects(columns: string[], rows: unknown[][]): RowData[] {
   return rows.map((vals) => {
     const obj: Record<string, unknown> = {};
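The cache above leans on a JavaScript Map's insertion-order guarantee to get LRU behavior with no extra bookkeeping: a hit is refreshed by delete-then-set, which moves the key to the back of the iteration order, and eviction pops keys().next() — the front, i.e. the least recently used entry. The same idea reduced to a runnable few lines:

// Minimal LRU over an insertion-ordered Map, mirroring the cache logic above.
const lru = new Map<string, number>();
const MAX_ENTRIES = 2;

function touch(key: string, value: number): void {
  if (lru.has(key)) lru.delete(key); // re-insertion moves the key to the back
  lru.set(key, value);
  while (lru.size > MAX_ENTRIES) {
    const oldest = lru.keys().next(); // front of the Map = least recently used
    if (!oldest.done) lru.delete(oldest.value);
  }
}

touch('a', 1);
touch('b', 2);
touch('a', 1); // refresh 'a' so 'b' is now the oldest
touch('c', 3); // evicts 'b'
console.log([...lru.keys()]); // ['a', 'c']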
@@ -127,6 +322,8 @@ function mapRowsToObjects(columns: string[], rows: unknown[][]): RowData[] {
 export async function closeClientConnection(
   connection: DuckDBConnection
 ): Promise<void> {
+  clearPreparedCache(connection);
+
   if ('close' in connection && typeof connection.close === 'function') {
     await connection.close();
     return;
@@ -148,35 +345,41 @@
 export async function executeOnClient(
   client: DuckDBClientLike,
   query: string,
-  params: unknown[]
+  params: unknown[],
+  options: ExecuteClientOptions = {}
 ): Promise<RowData[]> {
-
-
-
-
-
-
-
+  const { columns, rows } = await materializeRows(
+    client,
+    query,
+    params,
+    options
+  );
+
+  if (!rows || rows.length === 0) {
+    return [];
   }

-
-
-    ? (params.map((param) => toNodeApiValue(param)) as DuckDBValue[])
-    : undefined;
-  const result = await client.run(query, values);
-  const rows = await result.getRowsJS();
-  const columns =
-    // prefer deduplicated names when available (Node API >=1.4.2)
-    result.deduplicatedColumnNames?.() ?? result.columnNames();
-  const uniqueColumns = deduplicateColumns(columns);
+  return mapRowsToObjects(columns, rows);
+}

-
+export async function executeArraysOnClient(
+  client: DuckDBClientLike,
+  query: string,
+  params: unknown[],
+  options: ExecuteClientOptions = {}
+): Promise<ExecuteArraysResult> {
+  return await materializeRows(client, query, params, options);
 }

 export interface ExecuteInBatchesOptions {
   rowsPerChunk?: number;
 }

+export interface ExecuteBatchesRawChunk {
+  columns: string[];
+  rows: unknown[][];
+}
+
 /**
  * Stream results from DuckDB in batches to avoid fully materializing rows in JS.
  */
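With the added options parameter, repeated executions of the same query text can reuse one prepared statement per connection. A sketch against a raw @duckdb/node-api connection; whether executeOnClient is re-exported from the package root is an assumption here:

import { DuckDBInstance } from '@duckdb/node-api';
import { executeOnClient } from '@leonardovida-md/drizzle-neo-duckdb';

const instance = await DuckDBInstance.create(':memory:');
const connection = await instance.connect();

for (let i = 0; i < 3; i++) {
  // Same query text each time, so after the first call the cached prepared
  // statement is rebound and rerun instead of being re-prepared.
  const rows = await executeOnClient(
    connection,
    'select ? + 1 as answer',
    [i],
    { prepareCache: { size: 32 } }
  );
  console.log(rows); // [{ answer: i + 1 }]
}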
@@ -206,15 +409,19 @@ export async function* executeInBatches(
       : undefined;

   const result = await client.stream(query, values);
+  const rawColumns =
+    typeof result.deduplicatedColumnNames === 'function'
+      ? result.deduplicatedColumnNames()
+      : result.columnNames();
   const columns =
-
-
-
+    typeof result.deduplicatedColumnNames === 'function'
+      ? rawColumns
+      : deduplicateColumns(rawColumns);

   let buffer: RowData[] = [];

   for await (const chunk of result.yieldRowsJs()) {
-    const objects = mapRowsToObjects(
+    const objects = mapRowsToObjects(columns, chunk);
     for (const row of objects) {
       buffer.push(row);
       if (buffer.length >= rowsPerChunk) {
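executeInBatches keeps its object-row shape; what changed is the column handling, which now prefers the Node API's deduplicatedColumnNames() and only falls back to the local deduplicateColumns() renaming when that method is absent. Consumption is unchanged — a sketch, reusing the connection from the previous example and assuming executeInBatches is re-exported from the package root (its parameter shape matches executeInBatchesRaw below):

import { executeInBatches } from '@leonardovida-md/drizzle-neo-duckdb';

for await (const batch of executeInBatches(
  connection,
  'select range as n from range(1000000)',
  [],
  { rowsPerChunk: 100_000 }
)) {
  // batch: RowData[] with at most rowsPerChunk rows per iteration
  console.log(batch.length, batch[0]);
}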
@@ -229,6 +436,59 @@
     }
   }
 }

+export async function* executeInBatchesRaw(
+  client: DuckDBClientLike,
+  query: string,
+  params: unknown[],
+  options: ExecuteInBatchesOptions = {}
+): AsyncGenerator<ExecuteBatchesRawChunk, void, void> {
+  if (isPool(client)) {
+    const connection = await client.acquire();
+    try {
+      yield* executeInBatchesRaw(connection, query, params, options);
+      return;
+    } finally {
+      await client.release(connection);
+    }
+  }
+
+  const rowsPerChunk =
+    options.rowsPerChunk && options.rowsPerChunk > 0
+      ? options.rowsPerChunk
+      : 100_000;
+
+  const values =
+    params.length > 0
+      ? (params.map((param) => toNodeApiValue(param)) as DuckDBValue[])
+      : undefined;
+
+  const result = await client.stream(query, values);
+  const rawColumns =
+    typeof result.deduplicatedColumnNames === 'function'
+      ? result.deduplicatedColumnNames()
+      : result.columnNames();
+  const columns =
+    typeof result.deduplicatedColumnNames === 'function'
+      ? rawColumns
+      : deduplicateColumns(rawColumns);
+
+  let buffer: unknown[][] = [];
+
+  for await (const chunk of result.yieldRowsJs()) {
+    for (const row of chunk) {
+      buffer.push(row as unknown[]);
+      if (buffer.length >= rowsPerChunk) {
+        yield { columns, rows: buffer };
+        buffer = [];
+      }
+    }
+  }
+
+  if (buffer.length > 0) {
+    yield { columns, rows: buffer };
+  }
+}
+
 /**
  * Return columnar results when the underlying node-api exposes an Arrow/columnar API.
  * Falls back to column-major JS arrays when Arrow is unavailable.
|
package/src/columns.ts
CHANGED
|
@@ -7,11 +7,13 @@ import {
|
|
|
7
7
|
wrapMap,
|
|
8
8
|
wrapBlob,
|
|
9
9
|
wrapJson,
|
|
10
|
+
wrapTimestamp,
|
|
10
11
|
type ListValueWrapper,
|
|
11
12
|
type ArrayValueWrapper,
|
|
12
13
|
type MapValueWrapper,
|
|
13
14
|
type BlobValueWrapper,
|
|
14
15
|
type JsonValueWrapper,
|
|
16
|
+
type TimestampValueWrapper,
|
|
15
17
|
} from './value-wrappers-core.ts';
|
|
16
18
|
|
|
17
19
|
type IntColType =
|
|
@@ -355,12 +357,35 @@ interface TimestampOptions {
|
|
|
355
357
|
withTimezone?: boolean;
|
|
356
358
|
mode?: TimestampMode;
|
|
357
359
|
precision?: number;
|
|
360
|
+
bindMode?: 'auto' | 'bind' | 'literal';
|
|
361
|
+
}
|
|
362
|
+
|
|
363
|
+
function shouldBindTimestamp(options: TimestampOptions): boolean {
|
|
364
|
+
const bindMode = options.bindMode ?? 'auto';
|
|
365
|
+
if (bindMode === 'bind') return true;
|
|
366
|
+
if (bindMode === 'literal') return false;
|
|
367
|
+
|
|
368
|
+
const isBun =
|
|
369
|
+
typeof process !== 'undefined' &&
|
|
370
|
+
typeof process.versions?.bun !== 'undefined';
|
|
371
|
+
if (isBun) return false;
|
|
372
|
+
|
|
373
|
+
const forceLiteral =
|
|
374
|
+
typeof process !== 'undefined'
|
|
375
|
+
? process.env.DRIZZLE_DUCKDB_FORCE_LITERAL_TIMESTAMPS
|
|
376
|
+
: undefined;
|
|
377
|
+
|
|
378
|
+
if (forceLiteral && forceLiteral !== '0') {
|
|
379
|
+
return false;
|
|
380
|
+
}
|
|
381
|
+
|
|
382
|
+
return true;
|
|
358
383
|
}
|
|
359
384
|
|
|
360
385
|
export const duckDbTimestamp = (name: string, options: TimestampOptions = {}) =>
|
|
361
386
|
customType<{
|
|
362
387
|
data: Date | string;
|
|
363
|
-
driverData: SQL | string | Date;
|
|
388
|
+
driverData: SQL | string | Date | TimestampValueWrapper;
|
|
364
389
|
}>({
|
|
365
390
|
dataType() {
|
|
366
391
|
if (options.withTimezone) {
|
|
@@ -369,14 +394,36 @@ export const duckDbTimestamp = (name: string, options: TimestampOptions = {}) =>
|
|
|
369
394
|
const precision = options.precision ? `(${options.precision})` : '';
|
|
370
395
|
return `TIMESTAMP${precision}`;
|
|
371
396
|
},
|
|
372
|
-
toDriver(
|
|
373
|
-
|
|
397
|
+
toDriver(
|
|
398
|
+
value: Date | string
|
|
399
|
+
): SQL | string | Date | TimestampValueWrapper {
|
|
400
|
+
if (shouldBindTimestamp(options)) {
|
|
401
|
+
return wrapTimestamp(
|
|
402
|
+
value,
|
|
403
|
+
options.withTimezone ?? false,
|
|
404
|
+
options.precision
|
|
405
|
+
);
|
|
406
|
+
}
|
|
407
|
+
|
|
374
408
|
const iso = value instanceof Date ? value.toISOString() : value;
|
|
375
409
|
const normalized = iso.replace('T', ' ').replace('Z', '+00');
|
|
376
410
|
const typeKeyword = options.withTimezone ? 'TIMESTAMPTZ' : 'TIMESTAMP';
|
|
377
411
|
return sql.raw(`${typeKeyword} '${normalized}'`);
|
|
378
412
|
},
|
|
379
|
-
fromDriver(value: Date | string | SQL) {
|
|
413
|
+
fromDriver(value: Date | string | SQL | TimestampValueWrapper) {
|
|
414
|
+
if (
|
|
415
|
+
value &&
|
|
416
|
+
typeof value === 'object' &&
|
|
417
|
+
'kind' in value &&
|
|
418
|
+
(value as TimestampValueWrapper).kind === 'timestamp'
|
|
419
|
+
) {
|
|
420
|
+
const wrapped = value as TimestampValueWrapper;
|
|
421
|
+
return wrapped.data instanceof Date
|
|
422
|
+
? wrapped.data
|
|
423
|
+
: typeof wrapped.data === 'number' || typeof wrapped.data === 'bigint'
|
|
424
|
+
? new Date(Number(wrapped.data) / 1000)
|
|
425
|
+
: wrapped.data;
|
|
426
|
+
}
|
|
380
427
|
if (options.mode === 'string') {
|
|
381
428
|
if (value instanceof Date) {
|
|
382
429
|
return value.toISOString().replace('T', ' ').replace('Z', '+00');
|