@duckdbfan/drizzle-duckdb 0.0.7 → 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +349 -62
- package/dist/bin/duckdb-introspect.d.ts +2 -0
- package/dist/client.d.ts +42 -0
- package/dist/columns.d.ts +100 -9
- package/dist/dialect.d.ts +27 -2
- package/dist/driver.d.ts +53 -37
- package/dist/duckdb-introspect.mjs +2890 -0
- package/dist/helpers.d.ts +1 -0
- package/dist/helpers.mjs +360 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.mjs +3015 -228
- package/dist/introspect.d.ts +74 -0
- package/dist/migrator.d.ts +3 -2
- package/dist/olap.d.ts +46 -0
- package/dist/operators.d.ts +8 -0
- package/dist/options.d.ts +7 -0
- package/dist/pool.d.ts +30 -0
- package/dist/select-builder.d.ts +31 -0
- package/dist/session.d.ts +33 -8
- package/dist/sql/ast-transformer.d.ts +33 -0
- package/dist/sql/result-mapper.d.ts +9 -0
- package/dist/sql/selection.d.ts +2 -0
- package/dist/sql/visitors/array-operators.d.ts +5 -0
- package/dist/sql/visitors/column-qualifier.d.ts +10 -0
- package/dist/sql/visitors/generate-series-alias.d.ts +13 -0
- package/dist/sql/visitors/union-with-hoister.d.ts +11 -0
- package/dist/utils.d.ts +2 -5
- package/dist/value-wrappers-core.d.ts +42 -0
- package/dist/value-wrappers.d.ts +8 -0
- package/package.json +53 -16
- package/src/bin/duckdb-introspect.ts +181 -0
- package/src/client.ts +528 -0
- package/src/columns.ts +420 -65
- package/src/dialect.ts +111 -15
- package/src/driver.ts +266 -180
- package/src/helpers.ts +18 -0
- package/src/index.ts +8 -1
- package/src/introspect.ts +935 -0
- package/src/migrator.ts +10 -5
- package/src/olap.ts +190 -0
- package/src/operators.ts +27 -0
- package/src/options.ts +25 -0
- package/src/pool.ts +274 -0
- package/src/select-builder.ts +110 -0
- package/src/session.ts +306 -66
- package/src/sql/ast-transformer.ts +170 -0
- package/src/sql/result-mapper.ts +303 -0
- package/src/sql/selection.ts +60 -0
- package/src/sql/visitors/array-operators.ts +214 -0
- package/src/sql/visitors/column-qualifier.ts +586 -0
- package/src/sql/visitors/generate-series-alias.ts +291 -0
- package/src/sql/visitors/union-with-hoister.ts +106 -0
- package/src/utils.ts +2 -222
- package/src/value-wrappers-core.ts +168 -0
- package/src/value-wrappers.ts +165 -0
package/README.md
CHANGED
@@ -1,66 +1,353 @@

<div align="center">

# Drizzle DuckDB

### DuckDB dialect for [Drizzle ORM](https://orm.drizzle.team/)

[](https://www.npmjs.com/package/@duckdbfan/drizzle-duckdb)
[](https://opensource.org/licenses/Apache-2.0)

[Documentation](https://leonardovida.github.io/drizzle-duckdb/) • [LLM Context](https://leonardovida.github.io/drizzle-duckdb/llms.txt) • [Examples](./example) • [Contributing](#contributing)

</div>

<br>

**Drizzle DuckDB** brings [Drizzle ORM](https://orm.drizzle.team/) to [DuckDB](https://duckdb.org/), an in-process analytical database. You get Drizzle's type-safe query builder, automatic migrations, and full TypeScript inference while working with DuckDB's analytics engine.

Works with local DuckDB files, in-memory databases, and [MotherDuck](https://motherduck.com/) cloud.

> **Status:** Experimental. Core query building, migrations, and type inference work well. Some DuckDB-specific types and edge cases are still being refined.

> **Note:** The main NPM package is now `@duckdbfan/drizzle-duckdb`.
> The previous package `@leonardovida-md/drizzle-neo-duckdb` remains published but will be deprecated.
> Updates will land in both packages through May 2, 2026.
> After that date, only `@duckdbfan/drizzle-duckdb` will receive updates.

Docs tip: every docs page has a **Markdown (raw)** button for LLM-friendly source.

## Installation

```bash
bun add @duckdbfan/drizzle-duckdb @duckdb/node-api
```

```bash
npm install @duckdbfan/drizzle-duckdb @duckdb/node-api
```

```bash
pnpm add @duckdbfan/drizzle-duckdb @duckdb/node-api
```

Recommended client version is `@duckdb/node-api@1.4.4-r.1`, which bundles DuckDB 1.4.4.

## Quick Start

```typescript
import { DuckDBInstance } from '@duckdb/node-api';
import { drizzle } from '@duckdbfan/drizzle-duckdb';
import { sql } from 'drizzle-orm';
import { integer, text, pgTable } from 'drizzle-orm/pg-core';

// Connect to DuckDB
const instance = await DuckDBInstance.create(':memory:');
const connection = await instance.connect();
const db = drizzle(connection);

// Define your schema
const users = pgTable('users', {
  id: integer('id').primaryKey(),
  name: text('name').notNull(),
  email: text('email').notNull(),
});

// Create table
await db.execute(sql`
  CREATE TABLE IF NOT EXISTS users (
    id INTEGER PRIMARY KEY,
    name TEXT NOT NULL,
    email TEXT NOT NULL
  )
`);

// Insert data
await db.insert(users).values([
  { id: 1, name: 'Alice', email: 'alice@example.com' },
  { id: 2, name: 'Bob', email: 'bob@example.com' },
]);

// Query with full type safety
const allUsers = await db.select().from(users);
//    ^? { id: number; name: string; email: string }[]

// Clean up
connection.closeSync();
```

## Connecting to DuckDB

### In-Memory Database

```typescript
const instance = await DuckDBInstance.create(':memory:');
const connection = await instance.connect();
const db = drizzle(connection);
```

### Local File

```typescript
const instance = await DuckDBInstance.create('./my-database.duckdb');
const connection = await instance.connect();
const db = drizzle(connection);
```

### MotherDuck Cloud

```typescript
const instance = await DuckDBInstance.create('md:', {
  motherduck_token: process.env.MOTHERDUCK_TOKEN,
});
const connection = await instance.connect();
const db = drizzle(connection);
```

### With Logging

```typescript
import { DefaultLogger } from 'drizzle-orm';

const db = drizzle(connection, {
  logger: new DefaultLogger(),
});
```

> Tip: With connection strings (recommended), just pass the path: `const db = await drizzle(':memory:')`. Pooling is automatic.

## Connection Pooling

DuckDB executes one query per connection. The async `drizzle()` entrypoints create a pool automatically (default size: 4). Options:

- Set pool size or MotherDuck preset: `drizzle('md:', { pool: { size: 8 } })` or `pool: 'jumbo'` / `pool: 'giga'`.
- Disable pooling for single-connection workloads: `pool: false`.
- Transactions pin one pooled connection for their entire lifetime; non-transactional queries still use the pool.
- For tuning (acquire timeout, queue limits, idle/lifetime recycling), create the pool manually:

```typescript
import { DuckDBInstance } from '@duckdb/node-api';
import {
  createDuckDBConnectionPool,
  drizzle,
} from '@duckdbfan/drizzle-duckdb';

const instance = await DuckDBInstance.create('md:', {
  motherduck_token: process.env.MOTHERDUCK_TOKEN,
});
const pool = createDuckDBConnectionPool(instance, {
  size: 8,
  acquireTimeout: 20_000,
  maxWaitingRequests: 200,
  maxLifetimeMs: 10 * 60_000,
  idleTimeoutMs: 60_000,
});
const db = drizzle(pool);
```

## Schema & Types

- Use `drizzle-orm/pg-core` for schemas; DuckDB SQL is largely Postgres-compatible.
- DuckDB-specific helpers: `duckDbList`, `duckDbArray`, `duckDbStruct`, `duckDbMap`, `duckDbJson`, `duckDbBlob`, `duckDbInet`, `duckDbInterval`, `duckDbTimestamp`, `duckDbDate`, `duckDbTime` (see the sketch below).
- Browser-safe imports live under `@duckdbfan/drizzle-duckdb/helpers` (introspection emits this path).

See the [column types](https://leonardovida.github.io/drizzle-duckdb/api/columns) docs for full API.
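As a quick sketch, here is a table that mixes plain `pg-core` columns with a few of the DuckDB-specific helpers. The table and column names are made up for illustration; the helper signatures follow the package's `dist/columns.d.ts` declarations shown later in this diff.

```typescript
import { integer, pgTable, text } from 'drizzle-orm/pg-core';
import { duckDbJson, duckDbList, duckDbStruct } from '@duckdbfan/drizzle-duckdb';

// Hypothetical table: standard pg-core columns plus DuckDB LIST, STRUCT and JSON columns.
const events = pgTable('events', {
  id: integer('id').primaryKey(),
  name: text('name').notNull(),
  // VARCHAR[] column; the generic parameter types the element values.
  tags: duckDbList<string>('tags', 'VARCHAR'),
  // STRUCT(lat DOUBLE, lon DOUBLE) column.
  location: duckDbStruct<{ lat: number; lon: number }>('location', {
    lat: 'DOUBLE',
    lon: 'DOUBLE',
  }),
  // JSON column (DuckDB stores JSON as VARCHAR internally).
  payload: duckDbJson<{ source: string }>('payload'),
});
```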

## Postgres Schema Compatibility

Use `pgTable`, `pgSchema`, and other `drizzle-orm/pg-core` builders as you do with Postgres. The dialect keeps table definitions and relations intact while adapting queries to DuckDB.
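For example, a schema-qualified table can be declared with `pgSchema` exactly as in the Postgres driver (the `analytics` schema and `page_views` table below are illustrative):

```typescript
import { integer, pgSchema, text } from 'drizzle-orm/pg-core';

// Declare a named schema and a table inside it with the standard pg-core builder.
const analytics = pgSchema('analytics');

const pageViews = analytics.table('page_views', {
  id: integer('id').primaryKey(),
  path: text('path').notNull(),
});
```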

## Querying

All standard Drizzle query methods work:

```typescript
// Select
const users = await db
  .select()
  .from(usersTable)
  .where(eq(usersTable.active, true));

// Insert
await db
  .insert(usersTable)
  .values({ name: 'Alice', email: 'alice@example.com' });

// Insert with returning
const inserted = await db
  .insert(usersTable)
  .values({ name: 'Bob' })
  .returning({ id: usersTable.id });

// Update
await db
  .update(usersTable)
  .set({ name: 'Updated' })
  .where(eq(usersTable.id, 1));

// Delete
await db.delete(usersTable).where(eq(usersTable.id, 1));
```

### Array Operations

For DuckDB array operations, use the custom helpers instead of Postgres operators:

```typescript
import {
  duckDbArrayContains,
  duckDbArrayContained,
  duckDbArrayOverlaps,
} from '@duckdbfan/drizzle-duckdb';

// Check if array contains all values
const containsAll = await db
  .select()
  .from(products)
  .where(duckDbArrayContains(products.tags, ['electronics', 'sale']));

// Check if array is contained by values
const containedBy = await db
  .select()
  .from(products)
  .where(
    duckDbArrayContained(products.tags, ['electronics', 'sale', 'featured'])
  );

// Check if arrays overlap
const overlapping = await db
  .select()
  .from(products)
  .where(duckDbArrayOverlaps(products.tags, ['electronics', 'books']));
```

## Transactions

```typescript
await db.transaction(async (tx) => {
  await tx.insert(accounts).values({ balance: 100 });
  await tx.update(accounts).set({ balance: 50 }).where(eq(accounts.id, 1));
});
```

> **Note:** DuckDB doesn't support `SAVEPOINT`, so nested transactions reuse the outer transaction context. Inner rollbacks will abort the entire transaction.

## Migrations

Apply SQL migration files using the `migrate` function:

```typescript
import { migrate } from '@duckdbfan/drizzle-duckdb';

await migrate(db, { migrationsFolder: './drizzle' });
```

Migration metadata is stored in `drizzle.__drizzle_migrations` by default. See [Migrations Documentation](https://leonardovida.github.io/drizzle-duckdb/guide/migrations) for configuration options.

## Schema Introspection

Generate a Drizzle schema from an existing DuckDB database:

### CLI

```bash
bunx duckdb-introspect --url ./my-database.duckdb --out ./drizzle/schema.ts
```

### Programmatic

```typescript
import { introspect } from '@duckdbfan/drizzle-duckdb';

const result = await introspect(db, {
  schemas: ['public', 'analytics'],
  includeViews: true,
});

console.log(result.files.schemaTs);
```

See [Introspection Documentation](https://leonardovida.github.io/drizzle-duckdb/guide/introspection) for all options.

## Configuration Options

```typescript
const db = drizzle(connection, {
  // Enable query logging
  logger: new DefaultLogger(),

  // Pool size/preset when using connection strings (default: 4). Set false to disable.
  pool: { size: 8 },

  // Throw on Postgres-style array literals like '{1,2,3}' (default: false)
  rejectStringArrayLiterals: false,

  // Pass your schema for relational queries
  schema: mySchema,
});
```

Postgres array operators (`@>`, `<@`, `&&`) are automatically rewritten to DuckDB's `array_has_*` functions via AST transformation.
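Given that rewrite, drizzle-orm's built-in array operators should also translate without switching to the `duckDbArray*` helpers. A minimal sketch, reusing the `products` table from the Array Operations example above:

```typescript
import { arrayContains } from 'drizzle-orm';

// The query builder emits the Postgres `@>` operator here; per the note above,
// the dialect rewrites it to DuckDB's array_has_all() before execution.
const onSale = await db
  .select()
  .from(products)
  .where(arrayContains(products.tags, ['electronics', 'sale']));
```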

## Known Limitations

This connector aims for compatibility with Drizzle's Postgres driver but has some differences:

| Feature               | Status                                                                       |
| --------------------- | ---------------------------------------------------------------------------- |
| Basic CRUD operations | Full support                                                                 |
| Joins and subqueries  | Full support                                                                 |
| Transactions          | No savepoints (nested transactions reuse outer)                              |
| JSON/JSONB columns    | Use `duckDbJson()` instead                                                   |
| Prepared statements   | No statement caching                                                         |
| Streaming results     | Chunked reads via `executeBatches()` / `executeArrow()`; no cursor streaming |
| Concurrent queries    | One query per connection; use pooling for parallelism                        |

See [Limitations Documentation](https://leonardovida.github.io/drizzle-duckdb/reference/limitations) for details.

## Examples

- **[MotherDuck NYC Taxi](./example/motherduck-nyc.ts)**: Query the built-in NYC taxi dataset from MotherDuck cloud
- **[Analytics Dashboard](./example/analytics-dashboard.ts)**: Local in-memory analytics with DuckDB types and Parquet loading

Run examples:

```bash
MOTHERDUCK_TOKEN=your_token bun example/motherduck-nyc.ts
bun example/analytics-dashboard.ts
```

## Contributing

Contributions are welcome! Please:

1. Include tests for new features (`test/<feature>.test.ts`)
2. Note any DuckDB-specific quirks you encounter
3. Use a clear, imperative commit message

```bash
# Install dependencies
bun install

# Run tests
bun test

# Run tests with UI
bun t

# Build
bun run build
```

## License

[Apache-2.0](./LICENSE)
package/dist/client.d.ts
ADDED
@@ -0,0 +1,42 @@
import { type DuckDBConnection } from '@duckdb/node-api';
import type { PreparedStatementCacheConfig } from './options.ts';
export type DuckDBClientLike = DuckDBConnection | DuckDBConnectionPool;
export type RowData = Record<string, unknown>;
export interface DuckDBConnectionPool {
    acquire(): Promise<DuckDBConnection>;
    release(connection: DuckDBConnection): void | Promise<void>;
    close?(): Promise<void> | void;
}
export declare function isPool(client: DuckDBClientLike): client is DuckDBConnectionPool;
export interface ExecuteClientOptions {
    prepareCache?: PreparedStatementCacheConfig;
}
export type ExecuteArraysResult = {
    columns: string[];
    rows: unknown[][];
};
export interface PrepareParamsOptions {
    rejectStringArrayLiterals?: boolean;
    warnOnStringArrayLiteral?: () => void;
}
export declare function prepareParams(params: unknown[], options?: PrepareParamsOptions): unknown[];
export declare function closeClientConnection(connection: DuckDBConnection): Promise<void>;
export declare function executeOnClient(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteClientOptions): Promise<RowData[]>;
export declare function executeArraysOnClient(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteClientOptions): Promise<ExecuteArraysResult>;
export interface ExecuteInBatchesOptions {
    rowsPerChunk?: number;
}
export interface ExecuteBatchesRawChunk {
    columns: string[];
    rows: unknown[][];
}
/**
 * Stream results from DuckDB in batches to avoid fully materializing rows in JS.
 */
export declare function executeInBatches(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteInBatchesOptions): AsyncGenerator<RowData[], void, void>;
export declare function executeInBatchesRaw(client: DuckDBClientLike, query: string, params: unknown[], options?: ExecuteInBatchesOptions): AsyncGenerator<ExecuteBatchesRawChunk, void, void>;
/**
 * Return columnar results when the underlying node-api exposes an Arrow/columnar API.
 * Falls back to column-major JS arrays when Arrow is unavailable.
 */
export declare function executeArrowOnClient(client: DuckDBClientLike, query: string, params: unknown[]): Promise<unknown>;
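The batch helpers declared above are async generators, so they can be consumed with `for await`. A minimal sketch against these signatures; importing them from the package root (rather than an internal module) is an assumption, as is the example query.

```typescript
import { DuckDBInstance } from '@duckdb/node-api';
// Assumption: the client helpers are re-exported from the package root.
import { executeInBatches } from '@duckdbfan/drizzle-duckdb';

const instance = await DuckDBInstance.create(':memory:');
const connection = await instance.connect();

// Each yielded chunk is a RowData[] of up to rowsPerChunk rows, so large
// results never have to be fully materialized in JS at once.
for await (const rows of executeInBatches(
  connection,
  'SELECT * FROM range(1000000)',
  [],
  { rowsPerChunk: 10_000 },
)) {
  console.log('chunk size:', rows.length);
}

connection.closeSync();
```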
package/dist/columns.d.ts
CHANGED
@@ -1,3 +1,6 @@
+import { type SQL } from 'drizzle-orm';
+import type { SQLWrapper } from 'drizzle-orm/sql/sql';
+import { type ListValueWrapper, type ArrayValueWrapper, type MapValueWrapper, type BlobValueWrapper, type JsonValueWrapper, type TimestampValueWrapper } from './value-wrappers-core.ts';
 type IntColType = 'SMALLINT' | 'INTEGER' | 'BIGINT' | 'HUGEINT' | 'USMALLINT' | 'UINTEGER' | 'UBIGINT' | 'UHUGEINT' | 'INT' | 'INT16' | 'INT32' | 'INT64' | 'INT128' | 'LONG' | 'VARINT';
 type FloatColType = 'FLOAT' | 'DOUBLE';
 type StringColType = 'STRING' | 'VARCHAR' | 'TEXT';
@@ -7,20 +10,58 @@ type DateColType = 'DATE' | 'TIME' | 'TIMETZ' | 'TIMESTAMP' | 'DATETIME' | 'TIME
 type AnyColType = IntColType | FloatColType | StringColType | BoolColType | DateColType | BlobColType;
 type ListColType = `${AnyColType}[]`;
 type ArrayColType = `${AnyColType}[${number}]`;
+type StructColType = `STRUCT (${string})`;
+type Primitive = AnyColType | ListColType | ArrayColType | StructColType;
+export declare function coerceArrayString(value: string): unknown[] | undefined;
+export declare function formatLiteral(value: unknown, typeHint?: string): string;
+export declare function buildListLiteral(values: unknown[], elementType?: string): SQL;
+export declare function buildStructLiteral(value: Record<string, unknown>, schema?: Record<string, Primitive>): SQL;
+export declare function buildMapLiteral(value: Record<string, unknown>, valueType?: string): SQL;
+export declare const duckDbList: <TData = unknown>(name: string, elementType: AnyColType) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: TData[];
+    driverParam: string | ListValueWrapper | unknown[];
+    enumValues: undefined;
+}>;
+export declare const duckDbArray: <TData = unknown>(name: string, elementType: AnyColType, fixedLength?: number) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: TData[];
+    driverParam: string | unknown[] | ArrayValueWrapper;
+    enumValues: undefined;
+}>;
 export declare const duckDbMap: <TData extends Record<string, any>>(name: string, valueType: AnyColType | ListColType | ArrayColType) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
     name: string;
     dataType: "custom";
     columnType: "PgCustomColumn";
     data: TData;
-    driverParam:
+    driverParam: MapValueWrapper | TData;
     enumValues: undefined;
 }>;
-export declare const duckDbStruct: <TData extends Record<string, any>>(name: string, schema: Record<string,
+export declare const duckDbStruct: <TData extends Record<string, any>>(name: string, schema: Record<string, Primitive>) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
     name: string;
     dataType: "custom";
     columnType: "PgCustomColumn";
     data: TData;
-    driverParam:
+    driverParam: TData;
+    enumValues: undefined;
+}>;
+/**
+ * JSON column type that wraps values and delays JSON.stringify() to binding time.
+ * This ensures consistent handling with other wrapped types.
+ *
+ * Note: DuckDB stores JSON as VARCHAR internally, so the final binding
+ * is always a stringified JSON value.
+ */
+export declare const duckDbJson: <TData = unknown>(name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: TData;
+    driverParam: string | JsonValueWrapper | SQL<unknown>;
     enumValues: undefined;
 }>;
 export declare const duckDbBlob: {
@@ -28,25 +69,75 @@ export declare const duckDbBlob: {
     name: "";
     dataType: "custom";
     columnType: "PgCustomColumn";
-    data: Buffer
-    driverParam:
+    data: Buffer<ArrayBufferLike>;
+    driverParam: BlobValueWrapper;
     enumValues: undefined;
 }>;
 <TConfig extends Record<string, any>>(fieldConfig?: TConfig | undefined): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
     name: "";
     dataType: "custom";
     columnType: "PgCustomColumn";
-    data: Buffer
-    driverParam:
+    data: Buffer<ArrayBufferLike>;
+    driverParam: BlobValueWrapper;
     enumValues: undefined;
 }>;
 <TName extends string>(dbName: TName, fieldConfig?: unknown): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
     name: TName;
     dataType: "custom";
     columnType: "PgCustomColumn";
-    data: Buffer
-    driverParam:
+    data: Buffer<ArrayBufferLike>;
+    driverParam: BlobValueWrapper;
     enumValues: undefined;
 }>;
 };
+export declare const duckDbInet: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: string;
+    driverParam: string;
+    enumValues: undefined;
+}>;
+export declare const duckDbInterval: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: string;
+    driverParam: string;
+    enumValues: undefined;
+}>;
+type TimestampMode = 'date' | 'string';
+interface TimestampOptions {
+    withTimezone?: boolean;
+    mode?: TimestampMode;
+    precision?: number;
+    bindMode?: 'auto' | 'bind' | 'literal';
+}
+export declare const duckDbTimestamp: (name: string, options?: TimestampOptions) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: string | Date;
+    driverParam: string | TimestampValueWrapper | Date | SQL<unknown>;
+    enumValues: undefined;
+}>;
+export declare const duckDbDate: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: string | Date;
+    driverParam: string | Date;
+    enumValues: undefined;
+}>;
+export declare const duckDbTime: (name: string) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: string;
+    driverParam: string | bigint;
+    enumValues: undefined;
+}>;
+export declare function duckDbArrayContains(column: SQLWrapper, values: unknown[] | SQLWrapper): SQL;
+export declare function duckDbArrayContained(column: SQLWrapper, values: unknown[] | SQLWrapper): SQL;
+export declare function duckDbArrayOverlaps(column: SQLWrapper, values: unknown[] | SQLWrapper): SQL;
 export {};
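As a rough illustration of the new timestamp, date, and time builders and the `TimestampOptions` shape declared above (the table name and option values are made up for the example):

```typescript
import { integer, pgTable } from 'drizzle-orm/pg-core';
import { duckDbDate, duckDbTime, duckDbTimestamp } from '@duckdbfan/drizzle-duckdb';

// Illustrative table using the timestamp/date/time column builders declared above.
const measurements = pgTable('measurements', {
  id: integer('id').primaryKey(),
  takenAt: duckDbTimestamp('taken_at', {
    withTimezone: true, // timestamp with time zone, per the withTimezone flag
    mode: 'date',       // return JS Date values instead of strings
    precision: 6,
  }),
  day: duckDbDate('day'),
  localTime: duckDbTime('local_time'),
});
```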
package/dist/dialect.d.ts
CHANGED
@@ -1,9 +1,34 @@
 import { entityKind } from 'drizzle-orm/entity';
 import type { MigrationConfig, MigrationMeta } from 'drizzle-orm/migrator';
 import { PgDialect, PgSession } from 'drizzle-orm/pg-core';
-import { type DriverValueEncoder, type QueryTypingsValue } from 'drizzle-orm';
+import { SQL, type DriverValueEncoder, type QueryTypingsValue } from 'drizzle-orm';
+import type { QueryWithTypings } from 'drizzle-orm/sql/sql';
 export declare class DuckDBDialect extends PgDialect {
     static readonly [entityKind]: string;
-
+    private hasPgJsonColumn;
+    private savepointsSupported;
+    /**
+     * Reset the PG JSON detection flag. Should be called before preparing a new query.
+     */
+    resetPgJsonFlag(): void;
+    /**
+     * Mark that a PG JSON/JSONB column was detected during query preparation.
+     */
+    markPgJsonDetected(): void;
+    assertNoPgJsonColumns(): void;
+    /**
+     * Check if savepoints are known to be unsupported for this dialect instance.
+     */
+    areSavepointsUnsupported(): boolean;
+    /**
+     * Mark that savepoints are supported for this dialect instance.
+     */
+    markSavepointsSupported(): void;
+    /**
+     * Mark that savepoints are not supported for this dialect instance.
+     */
+    markSavepointsUnsupported(): void;
+    migrate(migrations: MigrationMeta[], session: PgSession, config: MigrationConfig | string): Promise<void>;
     prepareTyping(encoder: DriverValueEncoder<unknown, unknown>): QueryTypingsValue;
+    sqlToQuery(sqlObj: SQL, invokeSource?: 'indexes' | undefined): QueryWithTypings;
 }