@duckdbfan/drizzle-duckdb 0.0.7 → 1.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. package/README.md +344 -62
  2. package/dist/bin/duckdb-introspect.d.ts +2 -0
  3. package/dist/client.d.ts +42 -0
  4. package/dist/columns.d.ts +100 -9
  5. package/dist/dialect.d.ts +27 -2
  6. package/dist/driver.d.ts +53 -37
  7. package/dist/duckdb-introspect.mjs +2890 -0
  8. package/dist/helpers.d.ts +1 -0
  9. package/dist/helpers.mjs +360 -0
  10. package/dist/index.d.ts +7 -0
  11. package/dist/index.mjs +3015 -228
  12. package/dist/introspect.d.ts +74 -0
  13. package/dist/migrator.d.ts +3 -2
  14. package/dist/olap.d.ts +46 -0
  15. package/dist/operators.d.ts +8 -0
  16. package/dist/options.d.ts +7 -0
  17. package/dist/pool.d.ts +30 -0
  18. package/dist/select-builder.d.ts +31 -0
  19. package/dist/session.d.ts +33 -8
  20. package/dist/sql/ast-transformer.d.ts +33 -0
  21. package/dist/sql/result-mapper.d.ts +9 -0
  22. package/dist/sql/selection.d.ts +2 -0
  23. package/dist/sql/visitors/array-operators.d.ts +5 -0
  24. package/dist/sql/visitors/column-qualifier.d.ts +10 -0
  25. package/dist/sql/visitors/generate-series-alias.d.ts +13 -0
  26. package/dist/sql/visitors/union-with-hoister.d.ts +11 -0
  27. package/dist/utils.d.ts +2 -5
  28. package/dist/value-wrappers-core.d.ts +42 -0
  29. package/dist/value-wrappers.d.ts +8 -0
  30. package/package.json +53 -16
  31. package/src/bin/duckdb-introspect.ts +181 -0
  32. package/src/client.ts +528 -0
  33. package/src/columns.ts +420 -65
  34. package/src/dialect.ts +111 -15
  35. package/src/driver.ts +266 -180
  36. package/src/helpers.ts +18 -0
  37. package/src/index.ts +8 -1
  38. package/src/introspect.ts +935 -0
  39. package/src/migrator.ts +10 -5
  40. package/src/olap.ts +190 -0
  41. package/src/operators.ts +27 -0
  42. package/src/options.ts +25 -0
  43. package/src/pool.ts +274 -0
  44. package/src/select-builder.ts +110 -0
  45. package/src/session.ts +306 -66
  46. package/src/sql/ast-transformer.ts +170 -0
  47. package/src/sql/result-mapper.ts +303 -0
  48. package/src/sql/selection.ts +60 -0
  49. package/src/sql/visitors/array-operators.ts +214 -0
  50. package/src/sql/visitors/column-qualifier.ts +586 -0
  51. package/src/sql/visitors/generate-series-alias.ts +291 -0
  52. package/src/sql/visitors/union-with-hoister.ts +106 -0
  53. package/src/utils.ts +2 -222
  54. package/src/value-wrappers-core.ts +168 -0
  55. package/src/value-wrappers.ts +165 -0
package/README.md CHANGED
@@ -1,66 +1,348 @@
1
- # drizzle-duckdb
2
-
3
- ## Description
4
- A drizzle ORM client for use with DuckDB. Based on drizzle's Postgres client. As of writing this, certain things will work, and others won't. Notably, DuckDB-specific column types such as `struct`, `list`, `array` are not implemented, but this could be done using [drizzle custom types](https://orm.drizzle.team/docs/custom-types). (This is planned to be implemented in the package later on)
5
-
6
- ## Disclaimers
7
- - **Experimental**: This project is in an experimental stage. Certain features may be broken or not function as expected.
8
- - **Use at Your Own Risk**: Users should proceed with caution and use this project at their own risk.
9
- - **Maintenance**: This project may not be actively maintained. Updates and bug fixes are not guaranteed.
10
-
11
- ## Getting Started
12
- 1. Install dependencies:
13
- ```sh
14
- bun add @duckdbfan/drizzle-duckdb
15
- ```
16
- 2. Figure it out! (sorry, might flesh this out later- see tests for some examples)
17
- ```typescript
18
- import { Database } from 'duckdb-async';
19
- import { drizzle } from '@duckdbfan/drizzle-duckdb';
20
- import { DefaultLogger, sql } from 'drizzle-orm';
21
- import { char, integer, pgSchema, text } from 'drizzle-orm/pg-core';
22
-
23
- const client = await Database.create(':memory:');
24
- const db = drizzle(client, { logger: new DefaultLogger() });
25
-
26
- const customSchema = pgSchema('custom');
27
-
28
- await db.execute(sql`CREATE SCHEMA IF NOT EXISTS ${customSchema}`);
29
-
30
- const citiesTable = customSchema.table('cities', {
31
- id: integer('id')
32
- .primaryKey()
33
- .default(sql`nextval('serial_cities')`),
34
- name: text('name').notNull(),
35
- state: char('state', { length: 2 }),
36
- });
37
-
38
- await db.execute(sql`CREATE SEQUENCE IF NOT EXISTS serial_cities;`);
39
-
40
- await db.execute(
41
- sql`
42
- create table if not exists ${citiesTable} (
43
- id integer primary key default nextval('serial_cities'),
44
- name text not null,
45
- state char(2)
46
- )
47
- `
48
- );
49
-
50
- const insertedIds = await db
51
- .insert(citiesTable)
52
- .values([
53
- { name: 'Paris', state: 'FR' },
54
- { name: 'London', state: 'UK' },
55
- ])
56
- .returning({ id: citiesTable.id });
57
-
58
- console.log(insertedIds);
59
-
60
- ```
1
+ <div align="center">
2
+
3
+ # Drizzle DuckDB
4
+
5
+ ### DuckDB dialect for [Drizzle ORM](https://orm.drizzle.team/)
6
+
7
+ [![npm version](https://img.shields.io/npm/v/@leonardovida-md/drizzle-neo-duckdb)](https://www.npmjs.com/package/@leonardovida-md/drizzle-neo-duckdb)
8
+ [![License](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://opensource.org/licenses/Apache-2.0)
9
+
10
+ [Documentation](https://leonardovida.github.io/drizzle-neo-duckdb/) • [LLM Context](https://leonardovida.github.io/drizzle-neo-duckdb/llms.txt) • [Examples](./example) • [Contributing](#contributing)
11
+
12
+ </div>
13
+
14
+ <br>
15
+
16
+ **Drizzle DuckDB** brings [Drizzle ORM](https://orm.drizzle.team/) to [DuckDB](https://duckdb.org/), an in-process analytical database. You get Drizzle's type-safe query builder, automatic migrations, and full TypeScript inference while working with DuckDB's analytics engine.
17
+
18
+ Works with local DuckDB files, in-memory databases, and [MotherDuck](https://motherduck.com/) cloud.
19
+
20
+ > **Status:** Experimental. Core query building, migrations, and type inference work well. Some DuckDB-specific types and edge cases are still being refined.
21
+
22
+ > **Note:** The NPM package is `@leonardovida-md/drizzle-neo-duckdb` while the repository is `drizzle-duckdb`; the existing NPM package name was kept across the repository migration.
23
+
24
+ Docs tip: every docs page has a **Markdown (raw)** button for LLM-friendly source.
25
+
26
+ ## Installation
27
+
28
+ ```bash
29
+ bun add @leonardovida-md/drizzle-neo-duckdb @duckdb/node-api
30
+ ```
31
+
32
+ ```bash
33
+ npm install @leonardovida-md/drizzle-neo-duckdb @duckdb/node-api
34
+ ```
35
+
36
+ ```bash
37
+ pnpm add @leonardovida-md/drizzle-neo-duckdb @duckdb/node-api
38
+ ```
39
+
40
+ ## Quick Start
41
+
42
+ ```typescript
43
+ import { DuckDBInstance } from '@duckdb/node-api';
44
+ import { drizzle } from '@leonardovida-md/drizzle-neo-duckdb';
45
+ import { sql } from 'drizzle-orm';
46
+ import { integer, text, pgTable } from 'drizzle-orm/pg-core';
47
+
48
+ // Connect to DuckDB
49
+ const instance = await DuckDBInstance.create(':memory:');
50
+ const connection = await instance.connect();
51
+ const db = drizzle(connection);
52
+
53
+ // Define your schema
54
+ const users = pgTable('users', {
55
+ id: integer('id').primaryKey(),
56
+ name: text('name').notNull(),
57
+ email: text('email').notNull(),
58
+ });
59
+
60
+ // Create table
61
+ await db.execute(sql`
62
+ CREATE TABLE IF NOT EXISTS users (
63
+ id INTEGER PRIMARY KEY,
64
+ name TEXT NOT NULL,
65
+ email TEXT NOT NULL
66
+ )
67
+ `);
68
+
69
+ // Insert data
70
+ await db.insert(users).values([
71
+ { id: 1, name: 'Alice', email: 'alice@example.com' },
72
+ { id: 2, name: 'Bob', email: 'bob@example.com' },
73
+ ]);
74
+
75
+ // Query with full type safety
76
+ const allUsers = await db.select().from(users);
77
+ // ^? { id: number; name: string; email: string }[]
78
+
79
+ // Clean up
80
+ connection.closeSync();
81
+ ```
82
+
83
+ ## Connecting to DuckDB
84
+
85
+ ### In-Memory Database
86
+
87
+ ```typescript
88
+ const instance = await DuckDBInstance.create(':memory:');
89
+ const connection = await instance.connect();
90
+ const db = drizzle(connection);
91
+ ```
92
+
93
+ ### Local File
94
+
95
+ ```typescript
96
+ const instance = await DuckDBInstance.create('./my-database.duckdb');
97
+ const connection = await instance.connect();
98
+ const db = drizzle(connection);
99
+ ```
100
+
101
+ ### MotherDuck Cloud
102
+
103
+ ```typescript
104
+ const instance = await DuckDBInstance.create('md:', {
105
+ motherduck_token: process.env.MOTHERDUCK_TOKEN,
106
+ });
107
+ const connection = await instance.connect();
108
+ const db = drizzle(connection);
109
+ ```
110
+
111
+ ### With Logging
112
+
113
+ ```typescript
114
+ import { DefaultLogger } from 'drizzle-orm';
115
+
116
+ const db = drizzle(connection, {
117
+ logger: new DefaultLogger(),
118
+ });
119
+ ```
120
+
121
+ > Tip: With connection strings (recommended), just pass the path: `const db = await drizzle(':memory:')`. Pooling is automatic.
122
+
123
+ ## Connection Pooling
124
+
125
+ DuckDB executes one query at a time per connection. The async `drizzle()` entrypoints create a pool automatically (default size: 4). Options:
126
+
127
+ - Set pool size or MotherDuck preset: `drizzle('md:', { pool: { size: 8 } })` or `pool: 'jumbo'` / `pool: 'giga'`.
128
+ - Disable pooling for single-connection workloads: `pool: false`.
129
+ - Transactions pin one pooled connection for their entire lifetime; non-transactional queries still use the pool.
130
+ - For tuning (acquire timeout, queue limits, idle/lifetime recycling), create the pool manually:
131
+
132
+ ```typescript
133
+ import { DuckDBInstance } from '@duckdb/node-api';
134
+ import {
135
+ createDuckDBConnectionPool,
136
+ drizzle,
137
+ } from '@leonardovida-md/drizzle-neo-duckdb';
138
+
139
+ const instance = await DuckDBInstance.create('md:', {
140
+ motherduck_token: process.env.MOTHERDUCK_TOKEN,
141
+ });
142
+ const pool = createDuckDBConnectionPool(instance, {
143
+ size: 8,
144
+ acquireTimeout: 20_000,
145
+ maxWaitingRequests: 200,
146
+ maxLifetimeMs: 10 * 60_000,
147
+ idleTimeoutMs: 60_000,
148
+ });
149
+ const db = drizzle(pool);
150
+ ```
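For the common case, a minimal sketch of the connection-string forms described in the bullets above; the local file path shown here is illustrative, and the option values are examples rather than recommendations:

```typescript
import { drizzle } from '@leonardovida-md/drizzle-neo-duckdb';

// Connection-string form: a pool is created automatically (default size 4).
const pooledDb = await drizzle('md:', { pool: { size: 8 } }); // or pool: 'jumbo' / 'giga'

// Pooling disabled for single-connection workloads.
const singleDb = await drizzle('./local.duckdb', { pool: false });
```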
151
+
152
+ ## Schema & Types
153
+
154
+ - Use `drizzle-orm/pg-core` for schemas; DuckDB SQL is largely Postgres-compatible.
155
+ - DuckDB-specific helpers: `duckDbList`, `duckDbArray`, `duckDbStruct`, `duckDbMap`, `duckDbJson`, `duckDbBlob`, `duckDbInet`, `duckDbInterval`, `duckDbTimestamp`, `duckDbDate`, `duckDbTime` (see the sketch below).
156
+ - Browser-safe imports live under `@leonardovida-md/drizzle-neo-duckdb/helpers` (introspection emits this path).
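A minimal sketch of mixing these helpers with `pg-core` columns, based on the signatures in `dist/columns.d.ts`; the `events` table and its fields are hypothetical:

```typescript
import { integer, pgTable, text } from 'drizzle-orm/pg-core';
import {
  duckDbJson,
  duckDbList,
  duckDbStruct,
  duckDbTimestamp,
} from '@leonardovida-md/drizzle-neo-duckdb';

// Hypothetical table combining pg-core columns with DuckDB-specific types.
const events = pgTable('events', {
  id: integer('id').primaryKey(),
  name: text('name').notNull(),
  // LIST(VARCHAR) column
  tags: duckDbList<string>('tags', 'VARCHAR'),
  // STRUCT column with an explicit field schema
  location: duckDbStruct<{ lat: number; lon: number }>('location', {
    lat: 'DOUBLE',
    lon: 'DOUBLE',
  }),
  // JSON column (stored as VARCHAR internally)
  payload: duckDbJson<Record<string, unknown>>('payload'),
  // TIMESTAMP column returned as a Date
  createdAt: duckDbTimestamp('created_at', { mode: 'date' }),
});
```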
157
+
158
+ See the [column types](https://leonardovida.github.io/drizzle-neo-duckdb/api/columns) docs for full API.
159
+
160
+ ## Postgres Schema Compatibility
161
+
162
+ Use `pgTable`, `pgSchema`, and other `drizzle-orm/pg-core` builders as you do with Postgres. The dialect keeps table definitions and relations intact while adapting queries to DuckDB.
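For example, a schema-qualified table works the same way it does with the Postgres driver. A sketch assuming the `db` instance from the Quick Start; the `analytics` schema and `page_views` table are illustrative:

```typescript
import { sql } from 'drizzle-orm';
import { integer, pgSchema, text } from 'drizzle-orm/pg-core';

const analytics = pgSchema('analytics');

const pageViews = analytics.table('page_views', {
  id: integer('id').primaryKey(),
  path: text('path').notNull(),
});

// DDL is plain DuckDB SQL; the schema object interpolates as its name.
await db.execute(sql`CREATE SCHEMA IF NOT EXISTS ${analytics}`);

// Queries against the schema-qualified table work unchanged.
const views = await db.select().from(pageViews);
```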
163
+
164
+ ## Querying
165
+
166
+ All standard Drizzle query methods work:
167
+
168
+ ```typescript
169
+ // Select
170
+ const users = await db
171
+ .select()
172
+ .from(usersTable)
173
+ .where(eq(usersTable.active, true));
174
+
175
+ // Insert
176
+ await db
177
+ .insert(usersTable)
178
+ .values({ name: 'Alice', email: 'alice@example.com' });
179
+
180
+ // Insert with returning
181
+ const inserted = await db
182
+ .insert(usersTable)
183
+ .values({ name: 'Bob' })
184
+ .returning({ id: usersTable.id });
185
+
186
+ // Update
187
+ await db
188
+ .update(usersTable)
189
+ .set({ name: 'Updated' })
190
+ .where(eq(usersTable.id, 1));
191
+
192
+ // Delete
193
+ await db.delete(usersTable).where(eq(usersTable.id, 1));
194
+ ```
195
+
196
+ ### Array Operations
197
+
198
+ For DuckDB array operations, use the custom helpers instead of Postgres operators:
199
+
200
+ ```typescript
201
+ import {
202
+ duckDbArrayContains,
203
+ duckDbArrayContained,
204
+ duckDbArrayOverlaps,
205
+ } from '@leonardovida-md/drizzle-neo-duckdb';
206
+
207
+ // Check if array contains all values
208
+ const withAllTags = await db
209
+ .select()
210
+ .from(products)
211
+ .where(duckDbArrayContains(products.tags, ['electronics', 'sale']));
212
+
213
+ // Check if array is contained by values
214
+ const containedInAllowed = await db
215
+ .select()
216
+ .from(products)
217
+ .where(
218
+ duckDbArrayContained(products.tags, ['electronics', 'sale', 'featured'])
219
+ );
220
+
221
+ // Check if arrays overlap
222
+ const overlapping = await db
223
+ .select()
224
+ .from(products)
225
+ .where(duckDbArrayOverlaps(products.tags, ['electronics', 'books']));
226
+ ```
227
+
228
+ ## Transactions
229
+
230
+ ```typescript
231
+ await db.transaction(async (tx) => {
232
+ await tx.insert(accounts).values({ balance: 100 });
233
+ await tx.update(accounts).set({ balance: 50 }).where(eq(accounts.id, 1));
234
+ });
235
+ ```
236
+
237
+ > **Note:** DuckDB doesn't support `SAVEPOINT`, so nested transactions reuse the outer transaction context. Inner rollbacks will abort the entire transaction.
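To make the implication concrete, here is a sketch reusing the `accounts` table from the example above (hypothetical logic): because there is no savepoint, a failure in the inner callback rolls back everything.

```typescript
await db.transaction(async (tx) => {
  await tx.insert(accounts).values({ balance: 100 });

  // Nested transactions reuse the outer context (no SAVEPOINT).
  await tx.transaction(async (inner) => {
    // If this throws (or inner.rollback() is called), the outer
    // insert above is rolled back as well.
    await inner.update(accounts).set({ balance: 50 });
  });
});
```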
238
+
239
+ ## Migrations
240
+
241
+ Apply SQL migration files using the `migrate` function:
242
+
243
+ ```typescript
244
+ import { migrate } from '@leonardovida-md/drizzle-neo-duckdb';
245
+
246
+ await migrate(db, { migrationsFolder: './drizzle' });
247
+ ```
248
+
249
+ Migration metadata is stored in `drizzle.__drizzle_migrations` by default. See [Migrations Documentation](https://leonardovida.github.io/drizzle-neo-duckdb/guide/migrations) for configuration options.
250
+
251
+ ## Schema Introspection
252
+
253
+ Generate Drizzle schema from an existing DuckDB database:
254
+
255
+ ### CLI
256
+
257
+ ```bash
258
+ bunx duckdb-introspect --url ./my-database.duckdb --out ./drizzle/schema.ts
259
+ ```
260
+
261
+ ### Programmatic
262
+
263
+ ```typescript
264
+ import { introspect } from '@leonardovida-md/drizzle-neo-duckdb';
265
+
266
+ const result = await introspect(db, {
267
+ schemas: ['public', 'analytics'],
268
+ includeViews: true,
269
+ });
270
+
271
+ console.log(result.files.schemaTs);
272
+ ```
273
+
274
+ See [Introspection Documentation](https://leonardovida.github.io/drizzle-neo-duckdb/guide/introspection) for all options.
275
+
276
+ ## Configuration Options
277
+
278
+ ```typescript
279
+ const db = drizzle(connection, {
280
+ // Enable query logging
281
+ logger: new DefaultLogger(),
282
+
283
+ // Pool size/preset when using connection strings (default: 4). Set false to disable.
284
+ pool: { size: 8 },
285
+
286
+ // Throw on Postgres-style array literals like '{1,2,3}' (default: false)
287
+ rejectStringArrayLiterals: false,
288
+
289
+ // Pass your schema for relational queries
290
+ schema: mySchema,
291
+ });
292
+ ```
293
+
294
+ Postgres array operators (`@>`, `<@`, `&&`) are automatically rewritten to DuckDB's `array_has_*` functions via AST transformation.
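So the standard `drizzle-orm` array operators should also work against DuckDB. A sketch reusing the `products` table assumed in the Array Operations section:

```typescript
import { arrayContains, arrayOverlaps } from 'drizzle-orm';

// Emits Postgres `@>` / `&&`; the dialect rewrites these to
// DuckDB array_has_* functions before execution.
const saleItems = await db
  .select()
  .from(products)
  .where(arrayContains(products.tags, ['electronics', 'sale']));

const related = await db
  .select()
  .from(products)
  .where(arrayOverlaps(products.tags, ['electronics', 'books']));
```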
295
+
296
+ ## Known Limitations
297
+
298
+ This connector aims for compatibility with Drizzle's Postgres driver but has some differences:
299
+
300
+ | Feature | Status |
301
+ | --------------------- | ---------------------------------------------------------------------------- |
302
+ | Basic CRUD operations | Full support |
303
+ | Joins and subqueries | Full support |
304
+ | Transactions | No savepoints (nested transactions reuse outer) |
305
+ | JSON/JSONB columns | Use `duckDbJson()` instead |
306
+ | Prepared statements | No statement caching |
307
+ | Streaming results | Chunked reads via `executeBatches()` / `executeArrow()`; no cursor streaming |
308
+ | Concurrent queries | One query per connection; use pooling for parallelism |
309
+
310
+ See [Limitations Documentation](https://leonardovida.github.io/drizzle-neo-duckdb/reference/limitations) for details.
311
+
312
+ ## Examples
313
+
314
+ - **[MotherDuck NYC Taxi](./example/motherduck-nyc.ts)**: Query the built-in NYC taxi dataset from MotherDuck cloud
315
+ - **[Analytics Dashboard](./example/analytics-dashboard.ts)**: Local in-memory analytics with DuckDB types and Parquet loading
316
+
317
+ Run examples:
318
+
319
+ ```bash
320
+ MOTHERDUCK_TOKEN=your_token bun example/motherduck-nyc.ts
321
+ bun example/analytics-dashboard.ts
322
+ ```
61
323
 
62
324
  ## Contributing
63
- Contributions are welcome, although I may not be very responsive.
325
+
326
+ Contributions are welcome! Please:
327
+
328
+ 1. Include tests for new features (`test/<feature>.test.ts`)
329
+ 2. Note any DuckDB-specific quirks you encounter
330
+ 3. Use a clear, imperative commit message
331
+
332
+ ```bash
333
+ # Install dependencies
334
+ bun install
335
+
336
+ # Run tests
337
+ bun test
338
+
339
+ # Run tests with UI
340
+ bun t
341
+
342
+ # Build
343
+ bun run build
344
+ ```
64
345
 
65
346
  ## License
66
- This project is licensed under the Apache License.
347
+
348
+ [Apache-2.0](./LICENSE)
package/dist/bin/duckdb-introspect.d.ts ADDED
@@ -0,0 +1,2 @@
1
+ #!/usr/bin/env node
2
+ export {};
package/dist/client.d.ts ADDED
@@ -0,0 +1,42 @@
1
+ import { type DuckDBConnection } from '@duckdb/node-api';
2
+ import type { PreparedStatementCacheConfig } from './options.ts';
3
+ export type DuckDBClientLike = DuckDBConnection | DuckDBConnectionPool;
4
+ export type RowData = Record<string, unknown>;
5
+ export interface DuckDBConnectionPool {
6
+ acquire(): Promise<DuckDBConnection>;
7
+ release(connection: DuckDBConnection): void | Promise<void>;
8
+ close?(): Promise<void> | void;
9
+ }
10
+ export declare function isPool(client: DuckDBClientLike): client is DuckDBConnectionPool;
11
+ export interface ExecuteClientOptions {
12
+ prepareCache?: PreparedStatementCacheConfig;
13
+ }
14
+ export type ExecuteArraysResult = {
15
+ columns: string[];
16
+ rows: unknown[][];
17
+ };
18
+ export interface PrepareParamsOptions {
19
+ rejectStringArrayLiterals?: boolean;
20
+ warnOnStringArrayLiteral?: () => void;
21
+ }
22
+ export declare function prepareParams(params: unknown[], options?: PrepareParamsOptions): unknown[];
23
+ export declare function closeClientConnection(connection: DuckDBConnection): Promise<void>;
24
+ export declare function executeOnClient(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteClientOptions): Promise<RowData[]>;
25
+ export declare function executeArraysOnClient(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteClientOptions): Promise<ExecuteArraysResult>;
26
+ export interface ExecuteInBatchesOptions {
27
+ rowsPerChunk?: number;
28
+ }
29
+ export interface ExecuteBatchesRawChunk {
30
+ columns: string[];
31
+ rows: unknown[][];
32
+ }
33
+ /**
34
+ * Stream results from DuckDB in batches to avoid fully materializing rows in JS.
35
+ */
36
+ export declare function executeInBatches(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteInBatchesOptions): AsyncGenerator<RowData[], void, void>;
37
+ export declare function executeInBatchesRaw(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteInBatchesOptions): AsyncGenerator<ExecuteBatchesRawChunk, void, void>;
38
+ /**
39
+ * Return columnar results when the underlying node-api exposes an Arrow/columnar API.
40
+ * Falls back to column-major JS arrays when Arrow is unavailable.
41
+ */
42
+ export declare function executeArrowOnClient(client: DuckDBClientLike, query: string, params: unknown[]): Promise<unknown>;
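A hedged usage sketch for the batch generator declared above. The import path, the `connection` value, and the table name are assumptions (the declarations live in `dist/client.d.ts`, and the README refers to `executeBatches()`/`executeArrow()` at the database level), so treat this as an illustration of the declared signature only:

```typescript
// Assumed export path; executeInBatches is declared in dist/client.d.ts.
import { executeInBatches } from '@leonardovida-md/drizzle-neo-duckdb';

// Stream rows in chunks instead of materializing the whole result set.
// `connection` is a DuckDBConnection (or pool) per DuckDBClientLike,
// e.g. obtained via DuckDBInstance.create(...).connect().
for await (const rows of executeInBatches(
  connection,
  'SELECT * FROM trips WHERE fare_amount > ?',
  [20],
  { rowsPerChunk: 2048 }, // optional, from ExecuteInBatchesOptions
)) {
  console.log(`received ${rows.length} rows`);
}
```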
package/dist/columns.d.ts CHANGED
@@ -1,3 +1,6 @@
1
+ import { type SQL } from 'drizzle-orm';
2
+ import type { SQLWrapper } from 'drizzle-orm/sql/sql';
3
+ import { type ListValueWrapper, type ArrayValueWrapper, type MapValueWrapper, type BlobValueWrapper, type JsonValueWrapper, type TimestampValueWrapper } from './value-wrappers-core.ts';
1
4
  type IntColType = 'SMALLINT' | 'INTEGER' | 'BIGINT' | 'HUGEINT' | 'USMALLINT' | 'UINTEGER' | 'UBIGINT' | 'UHUGEINT' | 'INT' | 'INT16' | 'INT32' | 'INT64' | 'INT128' | 'LONG' | 'VARINT';
2
5
  type FloatColType = 'FLOAT' | 'DOUBLE';
3
6
  type StringColType = 'STRING' | 'VARCHAR' | 'TEXT';
@@ -7,20 +10,58 @@ type DateColType = 'DATE' | 'TIME' | 'TIMETZ' | 'TIMESTAMP' | 'DATETIME' | 'TIME
7
10
  type AnyColType = IntColType | FloatColType | StringColType | BoolColType | DateColType | BlobColType;
8
11
  type ListColType = `${AnyColType}[]`;
9
12
  type ArrayColType = `${AnyColType}[${number}]`;
13
+ type StructColType = `STRUCT (${string})`;
14
+ type Primitive = AnyColType | ListColType | ArrayColType | StructColType;
15
+ export declare function coerceArrayString(value: string): unknown[] | undefined;
16
+ export declare function formatLiteral(value: unknown, typeHint?: string): string;
17
+ export declare function buildListLiteral(values: unknown[], elementType?: string): SQL;
18
+ export declare function buildStructLiteral(value: Record<string, unknown>, schema?: Record<string, Primitive>): SQL;
19
+ export declare function buildMapLiteral(value: Record<string, unknown>, valueType?: string): SQL;
20
+ export declare const duckDbList: <TData = unknown>(name: string, elementType: AnyColType) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
21
+ name: string;
22
+ dataType: "custom";
23
+ columnType: "PgCustomColumn";
24
+ data: TData[];
25
+ driverParam: string | ListValueWrapper | unknown[];
26
+ enumValues: undefined;
27
+ }>;
28
+ export declare const duckDbArray: <TData = unknown>(name: string, elementType: AnyColType, fixedLength?: number) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
29
+ name: string;
30
+ dataType: "custom";
31
+ columnType: "PgCustomColumn";
32
+ data: TData[];
33
+ driverParam: string | unknown[] | ArrayValueWrapper;
34
+ enumValues: undefined;
35
+ }>;
10
36
  export declare const duckDbMap: <TData extends Record<string, any>>(name: string, valueType: AnyColType | ListColType | ArrayColType) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
11
37
  name: string;
12
38
  dataType: "custom";
13
39
  columnType: "PgCustomColumn";
14
40
  data: TData;
15
- driverParam: string;
41
+ driverParam: MapValueWrapper | TData;
16
42
  enumValues: undefined;
17
43
  }>;
18
- export declare const duckDbStruct: <TData extends Record<string, any>>(name: string, schema: Record<string, AnyColType | ListColType | ArrayColType>) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
44
+ export declare const duckDbStruct: <TData extends Record<string, any>>(name: string, schema: Record<string, Primitive>) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
19
45
  name: string;
20
46
  dataType: "custom";
21
47
  columnType: "PgCustomColumn";
22
48
  data: TData;
23
- driverParam: string;
49
+ driverParam: TData;
50
+ enumValues: undefined;
51
+ }>;
52
+ /**
53
+ * JSON column type that wraps values and delays JSON.stringify() to binding time.
54
+ * This ensures consistent handling with other wrapped types.
55
+ *
56
+ * Note: DuckDB stores JSON as VARCHAR internally, so the final binding
57
+ * is always a stringified JSON value.
58
+ */
59
+ export declare const duckDbJson: <TData = unknown>(name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
60
+ name: string;
61
+ dataType: "custom";
62
+ columnType: "PgCustomColumn";
63
+ data: TData;
64
+ driverParam: string | JsonValueWrapper | SQL<unknown>;
24
65
  enumValues: undefined;
25
66
  }>;
26
67
  export declare const duckDbBlob: {
@@ -28,25 +69,75 @@ export declare const duckDbBlob: {
28
69
  name: "";
29
70
  dataType: "custom";
30
71
  columnType: "PgCustomColumn";
31
- data: Buffer;
32
- driverParam: unknown;
72
+ data: Buffer<ArrayBufferLike>;
73
+ driverParam: BlobValueWrapper;
33
74
  enumValues: undefined;
34
75
  }>;
35
76
  <TConfig extends Record<string, any>>(fieldConfig?: TConfig | undefined): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
36
77
  name: "";
37
78
  dataType: "custom";
38
79
  columnType: "PgCustomColumn";
39
- data: Buffer;
40
- driverParam: unknown;
80
+ data: Buffer<ArrayBufferLike>;
81
+ driverParam: BlobValueWrapper;
41
82
  enumValues: undefined;
42
83
  }>;
43
84
  <TName extends string>(dbName: TName, fieldConfig?: unknown): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
44
85
  name: TName;
45
86
  dataType: "custom";
46
87
  columnType: "PgCustomColumn";
47
- data: Buffer;
48
- driverParam: unknown;
88
+ data: Buffer<ArrayBufferLike>;
89
+ driverParam: BlobValueWrapper;
49
90
  enumValues: undefined;
50
91
  }>;
51
92
  };
93
+ export declare const duckDbInet: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
94
+ name: string;
95
+ dataType: "custom";
96
+ columnType: "PgCustomColumn";
97
+ data: string;
98
+ driverParam: string;
99
+ enumValues: undefined;
100
+ }>;
101
+ export declare const duckDbInterval: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
102
+ name: string;
103
+ dataType: "custom";
104
+ columnType: "PgCustomColumn";
105
+ data: string;
106
+ driverParam: string;
107
+ enumValues: undefined;
108
+ }>;
109
+ type TimestampMode = 'date' | 'string';
110
+ interface TimestampOptions {
111
+ withTimezone?: boolean;
112
+ mode?: TimestampMode;
113
+ precision?: number;
114
+ bindMode?: 'auto' | 'bind' | 'literal';
115
+ }
116
+ export declare const duckDbTimestamp: (name: string, options?: TimestampOptions) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
117
+ name: string;
118
+ dataType: "custom";
119
+ columnType: "PgCustomColumn";
120
+ data: string | Date;
121
+ driverParam: string | TimestampValueWrapper | Date | SQL<unknown>;
122
+ enumValues: undefined;
123
+ }>;
124
+ export declare const duckDbDate: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
125
+ name: string;
126
+ dataType: "custom";
127
+ columnType: "PgCustomColumn";
128
+ data: string | Date;
129
+ driverParam: string | Date;
130
+ enumValues: undefined;
131
+ }>;
132
+ export declare const duckDbTime: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
133
+ name: string;
134
+ dataType: "custom";
135
+ columnType: "PgCustomColumn";
136
+ data: string;
137
+ driverParam: string | bigint;
138
+ enumValues: undefined;
139
+ }>;
140
+ export declare function duckDbArrayContains(column: SQLWrapper, values: unknown[] | SQLWrapper): SQL;
141
+ export declare function duckDbArrayContained(column: SQLWrapper, values: unknown[] | SQLWrapper): SQL;
142
+ export declare function duckDbArrayOverlaps(column: SQLWrapper, values: unknown[] | SQLWrapper): SQL;
52
143
  export {};
package/dist/dialect.d.ts CHANGED
@@ -1,9 +1,34 @@
1
1
  import { entityKind } from 'drizzle-orm/entity';
2
2
  import type { MigrationConfig, MigrationMeta } from 'drizzle-orm/migrator';
3
3
  import { PgDialect, PgSession } from 'drizzle-orm/pg-core';
4
- import { type DriverValueEncoder, type QueryTypingsValue } from 'drizzle-orm';
4
+ import { SQL, type DriverValueEncoder, type QueryTypingsValue } from 'drizzle-orm';
5
+ import type { QueryWithTypings } from 'drizzle-orm/sql/sql';
5
6
  export declare class DuckDBDialect extends PgDialect {
6
7
  static readonly [entityKind]: string;
7
- migrate(migrations: MigrationMeta[], session: PgSession, config: MigrationConfig): Promise<void>;
8
+ private hasPgJsonColumn;
9
+ private savepointsSupported;
10
+ /**
11
+ * Reset the PG JSON detection flag. Should be called before preparing a new query.
12
+ */
13
+ resetPgJsonFlag(): void;
14
+ /**
15
+ * Mark that a PG JSON/JSONB column was detected during query preparation.
16
+ */
17
+ markPgJsonDetected(): void;
18
+ assertNoPgJsonColumns(): void;
19
+ /**
20
+ * Check if savepoints are known to be unsupported for this dialect instance.
21
+ */
22
+ areSavepointsUnsupported(): boolean;
23
+ /**
24
+ * Mark that savepoints are supported for this dialect instance.
25
+ */
26
+ markSavepointsSupported(): void;
27
+ /**
28
+ * Mark that savepoints are not supported for this dialect instance.
29
+ */
30
+ markSavepointsUnsupported(): void;
31
+ migrate(migrations: MigrationMeta[], session: PgSession, config: MigrationConfig | string): Promise<void>;
8
32
  prepareTyping(encoder: DriverValueEncoder<unknown, unknown>): QueryTypingsValue;
33
+ sqlToQuery(sqlObj: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings;
9
34
  }