@leonardovida-md/drizzle-neo-duckdb 1.0.2 → 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +36 -18
- package/dist/client.d.ts +12 -0
- package/dist/columns.d.ts +18 -10
- package/dist/driver.d.ts +4 -0
- package/dist/duckdb-introspect.mjs +171 -23
- package/dist/index.d.ts +3 -0
- package/dist/index.mjs +387 -37
- package/dist/olap.d.ts +46 -0
- package/dist/session.d.ts +5 -0
- package/dist/value-wrappers.d.ts +104 -0
- package/package.json +7 -3
- package/src/bin/duckdb-introspect.ts +12 -3
- package/src/client.ts +135 -18
- package/src/columns.ts +65 -36
- package/src/dialect.ts +2 -2
- package/src/driver.ts +20 -6
- package/src/index.ts +3 -0
- package/src/introspect.ts +15 -10
- package/src/migrator.ts +1 -3
- package/src/olap.ts +189 -0
- package/src/select-builder.ts +3 -7
- package/src/session.ts +87 -18
- package/src/sql/query-rewriters.ts +5 -8
- package/src/sql/result-mapper.ts +6 -6
- package/src/sql/selection.ts +2 -9
- package/src/value-wrappers.ts +324 -0
package/README.md
CHANGED

@@ -7,7 +7,7 @@
 [](https://www.npmjs.com/package/@leonardovida-md/drizzle-neo-duckdb)
 [](https://opensource.org/licenses/Apache-2.0)

-[Documentation](
+[Documentation](https://leonardovida-md.github.io/drizzle-neo-duckdb/) • [Examples](./example) • [Contributing](#contributing)

 </div>

@@ -119,7 +119,14 @@ Drizzle DuckDB uses `drizzle-orm/pg-core` for schema definitions since DuckDB's
 
 ```typescript
 import { sql } from 'drizzle-orm';
-import {
+import {
+  integer,
+  text,
+  boolean,
+  timestamp,
+  pgTable,
+  pgSchema,
+} from 'drizzle-orm/pg-core';

 // Tables in default schema
 const posts = pgTable('posts', {
@@ -181,7 +188,7 @@ const products = pgTable('products', {
 });
 ```

-See [Column Types Documentation](
+See [Column Types Documentation](https://leonardovida-md.github.io/drizzle-neo-duckdb/api/columns) for complete reference.

 ## Querying

@@ -189,10 +196,15 @@ All standard Drizzle query methods work:

 ```typescript
 // Select
-const users = await db
+const users = await db
+  .select()
+  .from(usersTable)
+  .where(eq(usersTable.active, true));

 // Insert
-await db
+await db
+  .insert(usersTable)
+  .values({ name: 'Alice', email: 'alice@example.com' });

 // Insert with returning
 const inserted = await db
@@ -201,7 +213,10 @@
   .returning({ id: usersTable.id });

 // Update
-await db
+await db
+  .update(usersTable)
+  .set({ name: 'Updated' })
+  .where(eq(usersTable.id, 1));

 // Delete
 await db.delete(usersTable).where(eq(usersTable.id, 1));
@@ -228,7 +243,9 @@ const results = await db
 const results = await db
   .select()
   .from(products)
-  .where(
+  .where(
+    duckDbArrayContained(products.tags, ['electronics', 'sale', 'featured'])
+  );

 // Check if arrays overlap
 const results = await db
@@ -258,7 +275,7 @@ import { migrate } from '@leonardovida-md/drizzle-neo-duckdb';
 await migrate(db, { migrationsFolder: './drizzle' });
 ```

-Migration metadata is stored in `drizzle.__drizzle_migrations` by default. See [Migrations Documentation](
+Migration metadata is stored in `drizzle.__drizzle_migrations` by default. See [Migrations Documentation](https://leonardovida-md.github.io/drizzle-neo-duckdb/guide/migrations) for configuration options.

 ## Schema Introspection

@@ -283,7 +300,7 @@ const result = await introspect(db, {
 console.log(result.files.schemaTs);
 ```

-See [Introspection Documentation](
+See [Introspection Documentation](https://leonardovida-md.github.io/drizzle-neo-duckdb/guide/introspection) for all options.

 ## Configuration Options

@@ -307,22 +324,23 @@ const db = drizzle(connection, {

 This connector aims for compatibility with Drizzle's Postgres driver but has some differences:

-| Feature
-
-| Basic CRUD operations | Full support
-| Joins and subqueries
-| Transactions
-| JSON/JSONB columns
-| Prepared statements
-| Streaming results
+| Feature | Status |
+| --------------------- | ---------------------------------------------------------- |
+| Basic CRUD operations | Full support |
+| Joins and subqueries | Full support |
+| Transactions | No savepoints (nested transactions reuse outer) |
+| JSON/JSONB columns | Use `duckDbJson()` instead |
+| Prepared statements | No statement caching |
+| Streaming results | Materialized by default; use `executeBatches()` for chunks |

-See [Limitations Documentation](
+See [Limitations Documentation](https://leonardovida-md.github.io/drizzle-neo-duckdb/guide/limitations) for details.

 ## Examples

 - **[MotherDuck NYC Taxi](./example/motherduck-nyc.ts)** — Query the built-in NYC taxi dataset from MotherDuck cloud

 Run examples:
+
 ```bash
 MOTHERDUCK_TOKEN=your_token bun example/motherduck-nyc.ts
 ```
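
The "Streaming results" row above points at the new `executeBatches()` API. A minimal usage sketch, assuming a `db` instance created with this package's `drizzle()` and an illustrative `events` table (neither is part of this diff); the method signature comes from `dist/driver.d.ts` below:

```typescript
import { sql } from 'drizzle-orm';

// Iterate over the result set in chunks instead of materializing all rows at once.
// rowsPerChunk is optional; the driver falls back to its own default when omitted.
for await (const batch of db.executeBatches(sql`select * from events`, { rowsPerChunk: 10_000 })) {
  console.log(`processing ${batch.length} rows`);
}
```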
package/dist/client.d.ts
CHANGED

@@ -8,3 +8,15 @@ export interface PrepareParamsOptions {
 export declare function prepareParams(params: unknown[], options?: PrepareParamsOptions): unknown[];
 export declare function closeClientConnection(connection: DuckDBConnection): Promise<void>;
 export declare function executeOnClient(client: DuckDBClientLike, query: string, params: unknown[]): Promise<RowData[]>;
+export interface ExecuteInBatchesOptions {
+    rowsPerChunk?: number;
+}
+/**
+ * Stream results from DuckDB in batches to avoid fully materializing rows in JS.
+ */
+export declare function executeInBatches(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteInBatchesOptions): AsyncGenerator<RowData[], void, void>;
+/**
+ * Return columnar results when the underlying node-api exposes an Arrow/columnar API.
+ * Falls back to column-major JS arrays when Arrow is unavailable.
+ */
+export declare function executeArrowOnClient(client: DuckDBClientLike, query: string, params: unknown[]): Promise<unknown>;
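
The two new helpers above can also be called directly against a connection. A hedged sketch: the import path assumes these functions are re-exported from the package root (the diff only shows their declarations in `dist/client.d.ts`), and `connection` is assumed to be a `@duckdb/node-api` connection satisfying `DuckDBClientLike`:

```typescript
import { executeInBatches, executeArrowOnClient } from '@leonardovida-md/drizzle-neo-duckdb';

// Chunked iteration over a raw SQL string; each yielded value is an array of row objects.
for await (const rows of executeInBatches(connection, 'select * from range(1000000)', [], { rowsPerChunk: 50_000 })) {
  console.log(rows.length);
}

// Columnar/Arrow-shaped result; falls back to column-major JS arrays when Arrow is unavailable.
const columnar = await executeArrowOnClient(connection, 'select 42 as answer', []);
```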
package/dist/columns.d.ts
CHANGED

@@ -1,5 +1,6 @@
 import { type SQL } from 'drizzle-orm';
 import type { SQLWrapper } from 'drizzle-orm/sql/sql';
+import { type ListValueWrapper, type ArrayValueWrapper, type MapValueWrapper, type BlobValueWrapper, type JsonValueWrapper } from './value-wrappers.ts';
 type IntColType = 'SMALLINT' | 'INTEGER' | 'BIGINT' | 'HUGEINT' | 'USMALLINT' | 'UINTEGER' | 'UBIGINT' | 'UHUGEINT' | 'INT' | 'INT16' | 'INT32' | 'INT64' | 'INT128' | 'LONG' | 'VARINT';
 type FloatColType = 'FLOAT' | 'DOUBLE';
 type StringColType = 'STRING' | 'VARCHAR' | 'TEXT';
@@ -16,7 +17,7 @@ export declare const duckDbList: <TData = unknown>(name: string, elementType: An
     dataType: "custom";
     columnType: "PgCustomColumn";
     data: TData[];
-    driverParam: string |
+    driverParam: string | ListValueWrapper | unknown[];
     enumValues: undefined;
 }>;
 export declare const duckDbArray: <TData = unknown>(name: string, elementType: AnyColType, fixedLength?: number) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
@@ -24,7 +25,7 @@ export declare const duckDbArray: <TData = unknown>(name: string, elementType: A
     dataType: "custom";
     columnType: "PgCustomColumn";
     data: TData[];
-    driverParam: string | unknown[] |
+    driverParam: string | unknown[] | ArrayValueWrapper;
     enumValues: undefined;
 }>;
 export declare const duckDbMap: <TData extends Record<string, any>>(name: string, valueType: AnyColType | ListColType | ArrayColType) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
@@ -32,7 +33,7 @@ export declare const duckDbMap: <TData extends Record<string, any>>(name: string
     dataType: "custom";
     columnType: "PgCustomColumn";
     data: TData;
-    driverParam: TData;
+    driverParam: MapValueWrapper | TData;
     enumValues: undefined;
 }>;
 export declare const duckDbStruct: <TData extends Record<string, any>>(name: string, schema: Record<string, Primitive>) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
@@ -43,12 +44,19 @@ export declare const duckDbStruct: <TData extends Record<string, any>>(name: str
     driverParam: TData;
     enumValues: undefined;
 }>;
+/**
+ * JSON column type that wraps values and delays JSON.stringify() to binding time.
+ * This ensures consistent handling with other wrapped types.
+ *
+ * Note: DuckDB stores JSON as VARCHAR internally, so the final binding
+ * is always a stringified JSON value.
+ */
 export declare const duckDbJson: <TData = unknown>(name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
     name: string;
     dataType: "custom";
     columnType: "PgCustomColumn";
     data: TData;
-    driverParam: string | SQL<unknown>;
+    driverParam: string | JsonValueWrapper | SQL<unknown>;
     enumValues: undefined;
 }>;
 export declare const duckDbBlob: {
@@ -56,24 +64,24 @@ export declare const duckDbBlob: {
         name: "";
         dataType: "custom";
         columnType: "PgCustomColumn";
-        data: Buffer
-        driverParam:
+        data: Buffer<ArrayBufferLike>;
+        driverParam: BlobValueWrapper;
         enumValues: undefined;
     }>;
     <TConfig extends Record<string, any>>(fieldConfig?: TConfig | undefined): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
         name: "";
         dataType: "custom";
         columnType: "PgCustomColumn";
-        data: Buffer
-        driverParam:
+        data: Buffer<ArrayBufferLike>;
+        driverParam: BlobValueWrapper;
         enumValues: undefined;
     }>;
     <TName extends string>(dbName: TName, fieldConfig?: unknown): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
         name: TName;
         dataType: "custom";
         columnType: "PgCustomColumn";
-        data: Buffer
-        driverParam:
+        data: Buffer<ArrayBufferLike>;
+        driverParam: BlobValueWrapper;
         enumValues: undefined;
     }>;
 };
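
The widened `driverParam` types above come from the new value-wrapper layer; at the schema level the column helpers are used the same way as before. A small sketch: table and field names are illustrative, and it assumes `duckDbJson`/`duckDbList` are exported from the package entry point:

```typescript
import { integer, pgTable } from 'drizzle-orm/pg-core';
import { duckDbJson, duckDbList } from '@leonardovida-md/drizzle-neo-duckdb';

// Values written to these columns are carried as wrappers (JsonValueWrapper / ListValueWrapper)
// and only converted to DuckDB node-api values at bind time.
const events = pgTable('events', {
  id: integer('id').primaryKey(),
  payload: duckDbJson<{ kind: string; ts: string }>('payload'),
  tags: duckDbList<string>('tags', 'VARCHAR'),
});
```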
package/dist/driver.d.ts
CHANGED

@@ -4,10 +4,12 @@ import { PgDatabase } from 'drizzle-orm/pg-core/db';
 import type { SelectedFields } from 'drizzle-orm/pg-core/query-builders';
 import { type ExtractTablesWithRelations, type RelationalSchemaConfig, type TablesRelationalConfig } from 'drizzle-orm/relations';
 import { type DrizzleConfig } from 'drizzle-orm/utils';
+import type { SQL } from 'drizzle-orm/sql/sql';
 import type { DuckDBClientLike, DuckDBQueryResultHKT, DuckDBTransaction } from './session.ts';
 import { DuckDBSession } from './session.ts';
 import { DuckDBDialect } from './dialect.ts';
 import { DuckDBSelectBuilder } from './select-builder.ts';
+import type { ExecuteInBatchesOptions, RowData } from './client.ts';
 export interface PgDriverOptions {
     logger?: Logger;
     rewriteArrays?: boolean;
@@ -33,5 +35,7 @@ export declare class DuckDBDatabase<TFullSchema extends Record<string, unknown>
     constructor(dialect: DuckDBDialect, session: DuckDBSession<TFullSchema, TSchema>, schema: RelationalSchemaConfig<TSchema> | undefined);
     select(): DuckDBSelectBuilder<undefined>;
     select<TSelection extends SelectedFields>(fields: TSelection): DuckDBSelectBuilder<TSelection>;
+    executeBatches<T extends RowData = RowData>(query: SQL, options?: ExecuteInBatchesOptions): AsyncGenerator<T[], void, void>;
+    executeArrow(query: SQL): Promise<unknown>;
     transaction<T>(transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>): Promise<T>;
 }
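
`executeArrow()` is declared to return `Promise<unknown>` because the concrete shape depends on the installed `@duckdb/node-api`: an Arrow table when one is exposed, otherwise a column-major object of plain JS arrays (see `executeArrowOnClient` in `dist/index.mjs` below). A minimal consumption sketch, with `db` and a `users` table assumed:

```typescript
import { sql } from 'drizzle-orm';

// Columnar result; inspect it before use since the exact shape is driver-dependent.
const columnar = await db.executeArrow(sql`select id, name from users`);
console.log(columnar);
```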

package/dist/index.mjs
CHANGED

@@ -311,8 +311,78 @@ import { TransactionRollbackError } from "drizzle-orm/errors";

 // src/client.ts
 import {
-  listValue
+  listValue as listValue2,
+  timestampValue as timestampValue2
 } from "@duckdb/node-api";
+
+// src/value-wrappers.ts
+import {
+  listValue,
+  arrayValue,
+  structValue,
+  mapValue,
+  blobValue,
+  timestampValue,
+  timestampTZValue
+} from "@duckdb/node-api";
+var DUCKDB_VALUE_MARKER = Symbol.for("drizzle-duckdb:value");
+function dateToMicros(value) {
+  if (value instanceof Date) {
+    return BigInt(value.getTime()) * 1000n;
+  }
+  let normalized = value;
+  if (!value.includes("T") && value.includes(" ")) {
+    normalized = value.replace(" ", "T");
+  }
+  if (!normalized.endsWith("Z") && !/[+-]\d{2}:?\d{2}$/.test(normalized)) {
+    normalized += "Z";
+  }
+  const date = new Date(normalized);
+  if (isNaN(date.getTime())) {
+    throw new Error(`Invalid timestamp string: ${value}`);
+  }
+  return BigInt(date.getTime()) * 1000n;
+}
+function toUint8Array(data) {
+  return data instanceof Uint8Array && !(data instanceof Buffer) ? data : new Uint8Array(data);
+}
+function convertStructEntries(data, toValue) {
+  const entries = {};
+  for (const [key, val] of Object.entries(data)) {
+    entries[key] = toValue(val);
+  }
+  return entries;
+}
+function convertMapEntries(data, toValue) {
+  return Object.entries(data).map(([key, val]) => ({
+    key,
+    value: toValue(val)
+  }));
+}
+function wrapperToNodeApiValue(wrapper, toValue) {
+  switch (wrapper.kind) {
+    case "list":
+      return listValue(wrapper.data.map(toValue));
+    case "array":
+      return arrayValue(wrapper.data.map(toValue));
+    case "struct":
+      return structValue(convertStructEntries(wrapper.data, toValue));
+    case "map":
+      return mapValue(convertMapEntries(wrapper.data, toValue));
+    case "timestamp":
+      return wrapper.withTimezone ? timestampTZValue(dateToMicros(wrapper.data)) : timestampValue(dateToMicros(wrapper.data));
+    case "blob":
+      return blobValue(toUint8Array(wrapper.data));
+    case "json":
+      return JSON.stringify(wrapper.data);
+    default: {
+      const _exhaustive = wrapper;
+      throw new Error(`Unknown wrapper kind: ${_exhaustive.kind}`);
+    }
+  }
+}
+
+// src/client.ts
 function isPgArrayLiteral(value) {
   return value.startsWith("{") && value.endsWith("}");
 }
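
The `dateToMicros()` helper in the hunk above normalizes timestamp strings and converts everything to microseconds since the Unix epoch before binding. A standalone worked sketch of the same conversion rule (not a package export):

```typescript
// Date -> microseconds, mirroring dateToMicros() above: milliseconds * 1000.
const d = new Date('2024-05-01T12:00:00Z');
const micros: bigint = BigInt(d.getTime()) * 1000n; // 1714564800000000n

// A space-separated timestamp string without a zone is normalized to ISO form
// plus 'Z' before parsing, so '2024-05-01 12:00:00' is treated as UTC.
const normalized = '2024-05-01 12:00:00'.replace(' ', 'T') + 'Z';
console.log(BigInt(new Date(normalized).getTime()) * 1000n === micros); // true
```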
@@ -341,30 +411,78 @@ function prepareParams(params, options = {}) {
   });
 }
 function toNodeApiValue(value) {
+  if (value == null)
+    return null;
+  const t = typeof value;
+  if (t === "string" || t === "number" || t === "bigint" || t === "boolean") {
+    return value;
+  }
+  if (t === "object" && DUCKDB_VALUE_MARKER in value) {
+    return wrapperToNodeApiValue(value, toNodeApiValue);
+  }
   if (Array.isArray(value)) {
-    return
+    return listValue2(value.map((inner) => toNodeApiValue(inner)));
+  }
+  if (value instanceof Date) {
+    return timestampValue2(BigInt(value.getTime()) * 1000n);
   }
   return value;
 }
-
-  const values = params.length > 0 ? params.map((param) => toNodeApiValue(param)) : undefined;
-  const result = await client.run(query, values);
-  const rows = await result.getRowsJS();
-  const columns = result.columnNames();
+function deduplicateColumns(columns) {
   const seen = {};
-
+  return columns.map((col) => {
     const count = seen[col] ?? 0;
     seen[col] = count + 1;
     return count === 0 ? col : `${col}_${count}`;
   });
-
+}
+function mapRowsToObjects(columns, rows) {
+  return rows.map((vals) => {
     const obj = {};
-
+    columns.forEach((col, idx) => {
      obj[col] = vals[idx];
    });
    return obj;
  });
 }
+async function executeOnClient(client, query, params) {
+  const values = params.length > 0 ? params.map((param) => toNodeApiValue(param)) : undefined;
+  const result = await client.run(query, values);
+  const rows = await result.getRowsJS();
+  const columns = result.deduplicatedColumnNames?.() ?? result.columnNames();
+  const uniqueColumns = deduplicateColumns(columns);
+  return rows ? mapRowsToObjects(uniqueColumns, rows) : [];
+}
+async function* executeInBatches(client, query, params, options = {}) {
+  const rowsPerChunk = options.rowsPerChunk && options.rowsPerChunk > 0 ? options.rowsPerChunk : 1e5;
+  const values = params.length > 0 ? params.map((param) => toNodeApiValue(param)) : undefined;
+  const result = await client.stream(query, values);
+  const columns = result.deduplicatedColumnNames?.() ?? result.columnNames();
+  const uniqueColumns = deduplicateColumns(columns);
+  let buffer = [];
+  for await (const chunk of result.yieldRowsJs()) {
+    const objects = mapRowsToObjects(uniqueColumns, chunk);
+    for (const row of objects) {
+      buffer.push(row);
+      if (buffer.length >= rowsPerChunk) {
+        yield buffer;
+        buffer = [];
+      }
+    }
+  }
+  if (buffer.length > 0) {
+    yield buffer;
+  }
+}
+async function executeArrowOnClient(client, query, params) {
+  const values = params.length > 0 ? params.map((param) => toNodeApiValue(param)) : undefined;
+  const result = await client.run(query, values);
+  const maybeArrow = result.toArrow ?? result.getArrowTable;
+  if (typeof maybeArrow === "function") {
+    return await maybeArrow.call(result);
+  }
+  return result.getColumnsObjectJS();
+}

 // src/session.ts
 class DuckDBPreparedQuery extends PgPreparedQuery {
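
The new `deduplicateColumns()` helper above keeps result keys unique when a query returns the same column name more than once (common with joins): the first occurrence keeps its name, later duplicates get a numeric suffix. A standalone illustration of that rule:

```typescript
// Mirrors deduplicateColumns() from the hunk above.
const dedupe = (columns: string[]): string[] => {
  const seen: Record<string, number> = {};
  return columns.map((col) => {
    const count = seen[col] ?? 0;
    seen[col] = count + 1;
    return count === 0 ? col : `${col}_${count}`;
  });
};

console.log(dedupe(['id', 'name', 'id', 'id'])); // ['id', 'name', 'id_1', 'id_2']
```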
@@ -405,11 +523,7 @@ class DuckDBPreparedQuery extends PgPreparedQuery {
       this.logger.logQuery(`[duckdb] original query before array rewrite: ${this.queryString}`, params);
     }
     this.logger.logQuery(rewrittenQuery, params);
-    const {
-      fields,
-      joinsNotNullableMap,
-      customResultMapper
-    } = this;
+    const { fields, joinsNotNullableMap, customResultMapper } = this;
     const rows = await executeOnClient(this.client, rewrittenQuery, params);
     if (rows.length === 0 || !fields) {
       return rows;
@@ -469,6 +583,32 @@ class DuckDBSession extends PgSession {
     this.logger.logQuery(`[duckdb] ${arrayLiteralWarning}
 query: ${query}`, []);
   };
+  executeBatches(query, options = {}) {
+    const builtQuery = this.dialect.sqlToQuery(query);
+    const params = prepareParams(builtQuery.params, {
+      rejectStringArrayLiterals: this.rejectStringArrayLiterals,
+      warnOnStringArrayLiteral: this.rejectStringArrayLiterals ? undefined : () => this.warnOnStringArrayLiteral(builtQuery.sql)
+    });
+    const rewrittenQuery = this.rewriteArrays ? adaptArrayOperators(builtQuery.sql) : builtQuery.sql;
+    if (this.rewriteArrays && rewrittenQuery !== builtQuery.sql) {
+      this.logger.logQuery(`[duckdb] original query before array rewrite: ${builtQuery.sql}`, params);
+    }
+    this.logger.logQuery(rewrittenQuery, params);
+    return executeInBatches(this.client, rewrittenQuery, params, options);
+  }
+  async executeArrow(query) {
+    const builtQuery = this.dialect.sqlToQuery(query);
+    const params = prepareParams(builtQuery.params, {
+      rejectStringArrayLiterals: this.rejectStringArrayLiterals,
+      warnOnStringArrayLiteral: this.rejectStringArrayLiterals ? undefined : () => this.warnOnStringArrayLiteral(builtQuery.sql)
+    });
+    const rewrittenQuery = this.rewriteArrays ? adaptArrayOperators(builtQuery.sql) : builtQuery.sql;
+    if (this.rewriteArrays && rewrittenQuery !== builtQuery.sql) {
+      this.logger.logQuery(`[duckdb] original query before array rewrite: ${builtQuery.sql}`, params);
+    }
+    this.logger.logQuery(rewrittenQuery, params);
+    return executeArrowOnClient(this.client, rewrittenQuery, params);
+  }
 }

 class DuckDBTransaction extends PgTransaction {
@@ -492,6 +632,12 @@ class DuckDBTransaction extends PgTransaction {
   setTransaction(config) {
     return this.session.execute(sql`set transaction ${this.getTransactionConfigSQL(config)}`);
   }
+  executeBatches(query, options = {}) {
+    return this.session.executeBatches(query, options);
+  }
+  executeArrow(query) {
+    return this.session.executeArrow(query);
+  }
   async transaction(transaction) {
     const nestedTx = new DuckDBTransaction(this.dialect, this.session, this.schema, this.nestedIndex + 1);
     return transaction(nestedTx);
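
As the hunk above shows, a nested `transaction()` call constructs a new `DuckDBTransaction` over the same session (`nestedIndex + 1`) and runs the callback directly, so no SAVEPOINT is issued. That matches the README's "No savepoints" note. A sketch, assuming a `db` instance plus illustrative `accounts` and `auditLog` tables:

```typescript
import { eq } from 'drizzle-orm';

await db.transaction(async (tx) => {
  await tx.update(accounts).set({ balance: 0 }).where(eq(accounts.id, 1));

  // The inner callback reuses the outer transaction: no SAVEPOINT is created,
  // so a rollback here also rolls back the outer work.
  await tx.transaction(async (inner) => {
    await inner.insert(auditLog).values({ event: 'balance reset' });
  });
});
```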
@@ -590,13 +736,7 @@ import { PgViewBase } from "drizzle-orm/pg-core/view-base";
 import { SQL as SQL4 } from "drizzle-orm/sql/sql";

 // src/sql/selection.ts
-import {
-  Column as Column2,
-  SQL as SQL3,
-  getTableName as getTableName2,
-  is as is3,
-  sql as sql3
-} from "drizzle-orm";
+import { Column as Column2, SQL as SQL3, getTableName as getTableName2, is as is3, sql as sql3 } from "drizzle-orm";
 function mapEntries(obj, prefix, fullJoin = false) {
   return Object.fromEntries(Object.entries(obj).filter(([key]) => key !== "enableRLS").map(([key, value]) => {
     const qualified = prefix ? `${prefix}.${key}` : key;
@@ -737,6 +877,12 @@ class DuckDBDatabase extends PgDatabase {
       dialect: this.dialect
     });
   }
+  executeBatches(query, options = {}) {
+    return this.session.executeBatches(query, options);
+  }
+  executeArrow(query) {
+    return this.session.executeArrow(query);
+  }
   async transaction(transaction) {
     return await this.session.transaction(transaction);
   }
@@ -1046,7 +1192,9 @@ function mapDuckDbType(column, imports, options) {
   }
   if (upper === "BIGINT" || upper === "INT8" || upper === "UBIGINT") {
     imports.pgCore.add("bigint");
-    return {
+    return {
+      builder: `bigint(${columnName(column.name)}, { mode: 'number' })`
+    };
   }
   const decimalMatch = /^DECIMAL\((\d+),(\d+)\)/i.exec(upper);
   const numericMatch = /^NUMERIC\((\d+),(\d+)\)/i.exec(upper);
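
With the last hunk, introspection now emits a `{ mode: 'number' }` bigint builder for BIGINT/INT8/UBIGINT columns. The generated schema line would look roughly like this (illustrative table and column names; note that `mode: 'number'` yields JS numbers, which are only exact up to 2^53 - 1):

```typescript
import { bigint, pgTable } from 'drizzle-orm/pg-core';

// Shape of what the introspected schema.ts now emits for a BIGINT column.
export const stats = pgTable('stats', {
  viewCount: bigint('view_count', { mode: 'number' }),
});
```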