@leonardovida-md/drizzle-neo-duckdb 1.0.3 → 1.1.1
This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only and reflects the changes between the two versions.
- package/README.md +20 -5
- package/dist/client.d.ts +7 -1
- package/dist/columns.d.ts +6 -1
- package/dist/dialect.d.ts +21 -0
- package/dist/driver.d.ts +33 -1
- package/dist/duckdb-introspect.mjs +610 -114
- package/dist/helpers.d.ts +1 -0
- package/dist/helpers.mjs +319 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.mjs +603 -117
- package/dist/introspect.d.ts +9 -0
- package/dist/pool.d.ts +30 -0
- package/dist/session.d.ts +7 -1
- package/dist/sql/query-rewriters.d.ts +1 -1
- package/dist/sql/result-mapper.d.ts +7 -0
- package/dist/utils.d.ts +1 -1
- package/dist/value-wrappers-core.d.ts +42 -0
- package/dist/value-wrappers.d.ts +2 -98
- package/package.json +6 -2
- package/src/bin/duckdb-introspect.ts +27 -0
- package/src/client.ts +54 -13
- package/src/columns.ts +10 -10
- package/src/dialect.ts +51 -3
- package/src/driver.ts +204 -7
- package/src/helpers.ts +18 -0
- package/src/index.ts +1 -0
- package/src/introspect.ts +47 -29
- package/src/migrator.ts +1 -1
- package/src/olap.ts +1 -0
- package/src/pool.ts +274 -0
- package/src/session.ts +134 -15
- package/src/sql/query-rewriters.ts +177 -116
- package/src/sql/result-mapper.ts +7 -7
- package/src/utils.ts +1 -1
- package/src/value-wrappers-core.ts +156 -0
- package/src/value-wrappers.ts +60 -219
package/src/pool.ts
ADDED
@@ -0,0 +1,274 @@
+import { DuckDBConnection, DuckDBInstance } from '@duckdb/node-api';
+import { closeClientConnection, type DuckDBConnectionPool } from './client.ts';
+
+/** Pool size presets for different MotherDuck instance types */
+export type PoolPreset =
+  | 'pulse'
+  | 'standard'
+  | 'jumbo'
+  | 'mega'
+  | 'giga'
+  | 'local'
+  | 'memory';
+
+/** Pool sizes optimized for each MotherDuck instance type */
+export const POOL_PRESETS: Record<PoolPreset, number> = {
+  pulse: 4, // Auto-scaling, ad-hoc analytics
+  standard: 6, // Balanced ETL/ELT workloads
+  jumbo: 8, // Complex queries, high-volume
+  mega: 12, // Large-scale transformations
+  giga: 16, // Maximum parallelism
+  local: 8, // Local DuckDB file
+  memory: 4, // In-memory testing
+};
+
+export interface DuckDBPoolConfig {
+  /** Maximum concurrent connections. Defaults to 4. */
+  size?: number;
+}
+
+/**
+ * Resolve pool configuration to a concrete size.
+ * Returns false if pooling is disabled.
+ */
+export function resolvePoolSize(
+  pool: DuckDBPoolConfig | PoolPreset | false | undefined
+): number | false {
+  if (pool === false) return false;
+  if (pool === undefined) return 4;
+  if (typeof pool === 'string') return POOL_PRESETS[pool];
+  return pool.size ?? 4;
+}
+
+export interface DuckDBConnectionPoolOptions {
+  /** Maximum concurrent connections. Defaults to 4. */
+  size?: number;
+  /** Timeout in milliseconds to wait for a connection. Defaults to 30000 (30s). */
+  acquireTimeout?: number;
+  /** Maximum number of requests waiting for a connection. Defaults to 100. */
+  maxWaitingRequests?: number;
+  /** Max time (ms) a connection may live before being recycled. */
+  maxLifetimeMs?: number;
+  /** Max idle time (ms) before an idle connection is discarded. */
+  idleTimeoutMs?: number;
+}
+
+export function createDuckDBConnectionPool(
+  instance: DuckDBInstance,
+  options: DuckDBConnectionPoolOptions = {}
+): DuckDBConnectionPool & { size: number } {
+  const size = options.size && options.size > 0 ? options.size : 4;
+  const acquireTimeout = options.acquireTimeout ?? 30_000;
+  const maxWaitingRequests = options.maxWaitingRequests ?? 100;
+  const maxLifetimeMs = options.maxLifetimeMs;
+  const idleTimeoutMs = options.idleTimeoutMs;
+  const metadata = new WeakMap<
+    DuckDBConnection,
+    { createdAt: number; lastUsedAt: number }
+  >();
+
+  type PooledConnection = {
+    connection: DuckDBConnection;
+    createdAt: number;
+    lastUsedAt: number;
+  };
+
+  const idle: PooledConnection[] = [];
+  const waiting: Array<{
+    resolve: (conn: DuckDBConnection) => void;
+    reject: (error: Error) => void;
+    timeoutId: ReturnType<typeof setTimeout>;
+  }> = [];
+  let total = 0;
+  let closed = false;
+  // Track pending acquires to handle race conditions during close
+  let pendingAcquires = 0;
+
+  const shouldRecycle = (conn: PooledConnection, now: number): boolean => {
+    if (maxLifetimeMs !== undefined && now - conn.createdAt >= maxLifetimeMs) {
+      return true;
+    }
+    if (idleTimeoutMs !== undefined && now - conn.lastUsedAt >= idleTimeoutMs) {
+      return true;
+    }
+    return false;
+  };
+
+  const acquire = async (): Promise<DuckDBConnection> => {
+    if (closed) {
+      throw new Error('DuckDB connection pool is closed');
+    }
+
+    while (idle.length > 0) {
+      const pooled = idle.pop() as PooledConnection;
+      const now = Date.now();
+      if (shouldRecycle(pooled, now)) {
+        await closeClientConnection(pooled.connection);
+        total = Math.max(0, total - 1);
+        metadata.delete(pooled.connection);
+        continue;
+      }
+      pooled.lastUsedAt = now;
+      metadata.set(pooled.connection, {
+        createdAt: pooled.createdAt,
+        lastUsedAt: pooled.lastUsedAt,
+      });
+      return pooled.connection;
+    }
+
+    if (total < size) {
+      pendingAcquires += 1;
+      total += 1;
+      try {
+        const connection = await DuckDBConnection.create(instance);
+        // Check if pool was closed during async connection creation
+        if (closed) {
+          await closeClientConnection(connection);
+          total -= 1;
+          throw new Error('DuckDB connection pool is closed');
+        }
+        const now = Date.now();
+        metadata.set(connection, { createdAt: now, lastUsedAt: now });
+        return connection;
+      } catch (error) {
+        total -= 1;
+        throw error;
+      } finally {
+        pendingAcquires -= 1;
+      }
+    }
+
+    // Check queue limit before waiting
+    if (waiting.length >= maxWaitingRequests) {
+      throw new Error(
+        `DuckDB connection pool queue is full (max ${maxWaitingRequests} waiting requests)`
+      );
+    }
+
+    return await new Promise((resolve, reject) => {
+      const timeoutId = setTimeout(() => {
+        // Remove this waiter from the queue
+        const idx = waiting.findIndex((w) => w.timeoutId === timeoutId);
+        if (idx !== -1) {
+          waiting.splice(idx, 1);
+        }
+        reject(
+          new Error(
+            `DuckDB connection pool acquire timeout after ${acquireTimeout}ms`
+          )
+        );
+      }, acquireTimeout);
+
+      waiting.push({ resolve, reject, timeoutId });
+    });
+  };
+
+  const release = async (connection: DuckDBConnection): Promise<void> => {
+    const waiter = waiting.shift();
+    if (waiter) {
+      clearTimeout(waiter.timeoutId);
+      const now = Date.now();
+      const meta =
+        metadata.get(connection) ??
+        ({ createdAt: now, lastUsedAt: now } as {
+          createdAt: number;
+          lastUsedAt: number;
+        });
+
+      const expired =
+        maxLifetimeMs !== undefined && now - meta.createdAt >= maxLifetimeMs;
+
+      if (closed) {
+        await closeClientConnection(connection);
+        total = Math.max(0, total - 1);
+        metadata.delete(connection);
+        waiter.reject(new Error('DuckDB connection pool is closed'));
+        return;
+      }
+
+      if (expired) {
+        await closeClientConnection(connection);
+        total = Math.max(0, total - 1);
+        metadata.delete(connection);
+        try {
+          const replacement = await acquire();
+          waiter.resolve(replacement);
+        } catch (error) {
+          waiter.reject(error as Error);
+        }
+        return;
+      }
+
+      meta.lastUsedAt = now;
+      metadata.set(connection, meta);
+      waiter.resolve(connection);
+      return;
+    }
+
+    if (closed) {
+      await closeClientConnection(connection);
+      metadata.delete(connection);
+      total = Math.max(0, total - 1);
+      return;
+    }
+
+    const now = Date.now();
+    const existingMeta =
+      metadata.get(connection) ??
+      ({ createdAt: now, lastUsedAt: now } as {
+        createdAt: number;
+        lastUsedAt: number;
+      });
+    existingMeta.lastUsedAt = now;
+    metadata.set(connection, existingMeta);
+
+    if (
+      maxLifetimeMs !== undefined &&
+      now - existingMeta.createdAt >= maxLifetimeMs
+    ) {
+      await closeClientConnection(connection);
+      total -= 1;
+      metadata.delete(connection);
+      return;
+    }
+
+    idle.push({
+      connection,
+      createdAt: existingMeta.createdAt,
+      lastUsedAt: existingMeta.lastUsedAt,
+    });
+  };
+
+  const close = async (): Promise<void> => {
+    closed = true;
+
+    // Clear all waiting requests with their timeouts
+    const waiters = waiting.splice(0, waiting.length);
+    for (const waiter of waiters) {
+      clearTimeout(waiter.timeoutId);
+      waiter.reject(new Error('DuckDB connection pool is closed'));
+    }
+
+    // Close all idle connections (use allSettled to ensure all are attempted)
+    const toClose = idle.splice(0, idle.length);
+    await Promise.allSettled(
+      toClose.map((item) => closeClientConnection(item.connection))
+    );
+    total = Math.max(0, total - toClose.length);
+    toClose.forEach((item) => metadata.delete(item.connection));
+
+    // Wait for pending acquires to complete (with a reasonable timeout)
+    const maxWait = 5000;
+    const start = Date.now();
+    while (pendingAcquires > 0 && Date.now() - start < maxWait) {
+      await new Promise((r) => setTimeout(r, 10));
+    }
+  };
+
+  return {
+    acquire,
+    release,
+    close,
+    size,
+  };
+}
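Note: for orientation, here is a minimal usage sketch of the new pool API. It is not taken from the package's documentation; the root re-export of `createDuckDBConnectionPool`/`POOL_PRESETS` (the diff only shows `package/src/index.ts +1`) and the `connection.run()` call are assumptions based on this diff and the `@duckdb/node-api` client.

```ts
import { DuckDBInstance } from '@duckdb/node-api';
// Assumed import path; the pool helpers may instead need to be imported from
// the package's pool module if they are not re-exported at the root.
import {
  createDuckDBConnectionPool,
  POOL_PRESETS,
} from '@leonardovida-md/drizzle-neo-duckdb';

const instance = await DuckDBInstance.create(':memory:');

// 'standard' preset resolves to 6 concurrent connections (see POOL_PRESETS above).
const pool = createDuckDBConnectionPool(instance, {
  size: POOL_PRESETS.standard,
  acquireTimeout: 10_000, // fail acquire() after 10s instead of the 30s default
  idleTimeoutMs: 60_000, // recycle connections idle for more than a minute
});

const connection = await pool.acquire();
try {
  await connection.run('select 42'); // plain @duckdb/node-api connection
} finally {
  await pool.release(connection); // back to the idle list, or handed to a waiter
}

await pool.close(); // rejects waiters and closes idle connections
```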
package/src/session.ts
CHANGED
@@ -17,9 +17,13 @@ import { fillPlaceholders, type Query, SQL, sql } from 'drizzle-orm/sql/sql';
 import type { Assume } from 'drizzle-orm/utils';
 import { adaptArrayOperators } from './sql/query-rewriters.ts';
 import { mapResultRow } from './sql/result-mapper.ts';
-import type { DuckDBDialect } from './dialect.ts';
 import { TransactionRollbackError } from 'drizzle-orm/errors';
-import type {
+import type { DuckDBDialect } from './dialect.ts';
+import type {
+  DuckDBClientLike,
+  DuckDBConnectionPool,
+  RowData,
+} from './client.ts';
 import {
   executeArrowOnClient,
   executeInBatches,
@@ -27,9 +31,21 @@ import {
   prepareParams,
   type ExecuteInBatchesOptions,
 } from './client.ts';
+import { isPool } from './client.ts';
+import type { DuckDBConnection } from '@duckdb/node-api';
 
 export type { DuckDBClientLike, RowData } from './client.ts';
 
+function isSavepointSyntaxError(error: unknown): boolean {
+  if (!(error instanceof Error) || !error.message) {
+    return false;
+  }
+  return (
+    error.message.toLowerCase().includes('savepoint') &&
+    error.message.toLowerCase().includes('syntax error')
+  );
+}
+
 export class DuckDBPreparedQuery<
   T extends PreparedQueryConfig,
 > extends PgPreparedQuery<T> {
@@ -125,6 +141,7 @@ export class DuckDBSession<
   private rewriteArrays: boolean;
   private rejectStringArrayLiterals: boolean;
   private hasWarnedArrayLiteral = false;
+  private rollbackOnly = false;
 
   constructor(
     private client: DuckDBClientLike,
@@ -162,11 +179,32 @@ export class DuckDBSession<
     );
   }
 
+  override execute<T>(query: SQL): Promise<T> {
+    this.dialect.resetPgJsonFlag();
+    return super.execute(query);
+  }
+
+  override all<T = unknown>(query: SQL): Promise<T[]> {
+    this.dialect.resetPgJsonFlag();
+    return super.all(query);
+  }
+
   override async transaction<T>(
-    transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>
+    transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>,
+    config?: PgTransactionConfig
   ): Promise<T> {
+    let pinnedConnection: DuckDBConnection | undefined;
+    let pool: DuckDBConnectionPool | undefined;
+
+    let clientForTx: DuckDBClientLike = this.client;
+    if (isPool(this.client)) {
+      pool = this.client;
+      pinnedConnection = await pool.acquire();
+      clientForTx = pinnedConnection;
+    }
+
     const session = new DuckDBSession(
-
+      clientForTx,
       this.dialect,
       this.schema,
       this.options
@@ -178,15 +216,29 @@ export class DuckDBSession<
       this.schema
     );
 
-    await tx.execute(sql`BEGIN TRANSACTION;`);
-
     try {
-
-
-
-
-
-
+      await tx.execute(sql`BEGIN TRANSACTION;`);
+
+      if (config) {
+        await tx.setTransaction(config);
+      }
+
+      try {
+        const result = await transaction(tx);
+        if (session.isRollbackOnly()) {
+          await tx.execute(sql`rollback`);
+          throw new TransactionRollbackError();
+        }
+        await tx.execute(sql`commit`);
+        return result;
+      } catch (error) {
+        await tx.execute(sql`rollback`);
+        throw error;
+      }
+    } finally {
+      if (pinnedConnection && pool) {
+        await pool.release(pinnedConnection);
+      }
     }
   }
 
@@ -205,7 +257,9 @@ export class DuckDBSession<
     query: SQL,
     options: ExecuteInBatchesOptions = {}
  ): AsyncGenerator<GenericRowData<T>[], void, void> {
+    this.dialect.resetPgJsonFlag();
     const builtQuery = this.dialect.sqlToQuery(query);
+    this.dialect.assertNoPgJsonColumns();
     const params = prepareParams(builtQuery.params, {
       rejectStringArrayLiterals: this.rejectStringArrayLiterals,
       warnOnStringArrayLiteral: this.rejectStringArrayLiterals
@@ -234,7 +288,9 @@
   }
 
   async executeArrow(query: SQL): Promise<unknown> {
+    this.dialect.resetPgJsonFlag();
     const builtQuery = this.dialect.sqlToQuery(query);
+    this.dialect.assertNoPgJsonColumns();
     const params = prepareParams(builtQuery.params, {
       rejectStringArrayLiterals: this.rejectStringArrayLiterals,
       warnOnStringArrayLiteral: this.rejectStringArrayLiterals
@@ -256,6 +312,14 @@
 
     return executeArrowOnClient(this.client, rewrittenQuery, params);
   }
+
+  markRollbackOnly(): void {
+    this.rollbackOnly = true;
+  }
+
+  isRollbackOnly(): boolean {
+    return this.rollbackOnly;
+  }
 }
 
 type PgTransactionInternals<
@@ -297,6 +361,7 @@ export class DuckDBTransaction<
   }
 
   setTransaction(config: PgTransactionConfig): Promise<void> {
+    // Cast needed: PgTransaction doesn't expose dialect/session properties in public API
     type Tx = DuckDBTransactionWithInternals<TFullSchema, TSchema>;
     return (this as unknown as Tx).session.execute(
       sql`set transaction ${this.getTransactionConfigSQL(config)}`
@@ -307,11 +372,13 @@
     query: SQL,
     options: ExecuteInBatchesOptions = {}
   ): AsyncGenerator<GenericRowData<T>[], void, void> {
+    // Cast needed: PgTransaction doesn't expose session property in public API
     type Tx = DuckDBTransactionWithInternals<TFullSchema, TSchema>;
     return (this as unknown as Tx).session.executeBatches<T>(query, options);
   }
 
   executeArrow(query: SQL): Promise<unknown> {
+    // Cast needed: PgTransaction doesn't expose session property in public API
     type Tx = DuckDBTransactionWithInternals<TFullSchema, TSchema>;
     return (this as unknown as Tx).session.executeArrow(query);
   }
@@ -319,15 +386,67 @@
   override async transaction<T>(
     transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>
   ): Promise<T> {
+    // Cast needed: PgTransaction doesn't expose dialect/session properties in public API
     type Tx = DuckDBTransactionWithInternals<TFullSchema, TSchema>;
+    const internals = this as unknown as Tx;
+    const savepoint = `drizzle_savepoint_${this.nestedIndex + 1}`;
+    const savepointSql = sql.raw(`savepoint ${savepoint}`);
+    const releaseSql = sql.raw(`release savepoint ${savepoint}`);
+    const rollbackSql = sql.raw(`rollback to savepoint ${savepoint}`);
+
     const nestedTx = new DuckDBTransaction<TFullSchema, TSchema>(
-
-
+      internals.dialect,
+      internals.session,
       this.schema,
       this.nestedIndex + 1
     );
 
-
+    // Check dialect-level savepoint support (per-instance, not global)
+    if (internals.dialect.areSavepointsUnsupported()) {
+      return this.runNestedWithoutSavepoint(transaction, nestedTx, internals);
+    }
+
+    let createdSavepoint = false;
+    try {
+      await internals.session.execute(savepointSql);
+      internals.dialect.markSavepointsSupported();
+      createdSavepoint = true;
+    } catch (error) {
+      if (!isSavepointSyntaxError(error)) {
+        throw error;
+      }
+      internals.dialect.markSavepointsUnsupported();
+      return this.runNestedWithoutSavepoint(transaction, nestedTx, internals);
+    }
+
+    try {
+      const result = await transaction(nestedTx);
+      if (createdSavepoint) {
+        await internals.session.execute(releaseSql);
+      }
+      return result;
+    } catch (error) {
+      if (createdSavepoint) {
+        await internals.session.execute(rollbackSql);
+      }
+      (
+        internals.session as DuckDBSession<TFullSchema, TSchema>
+      ).markRollbackOnly();
+      throw error;
+    }
+  }
+
+  private runNestedWithoutSavepoint<T>(
+    transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>,
+    nestedTx: DuckDBTransaction<TFullSchema, TSchema>,
+    internals: DuckDBTransactionWithInternals<TFullSchema, TSchema>
+  ): Promise<T> {
+    return transaction(nestedTx).catch((error) => {
+      (
+        internals.session as DuckDBSession<TFullSchema, TSchema>
+      ).markRollbackOnly();
+      throw error;
+    });
   }
 }
 
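Note: a rough caller-side sketch of what the new transaction path does. The `db` instance, `users` table, and insert calls are hypothetical; the pinned-connection, savepoint-fallback, and rollback-only behaviour is inferred from the diff above.

```ts
// Hypothetical drizzle instance and table; only the transaction flow matters here.
await db.transaction(async (tx) => {
  // When the driver was created with a pool, the whole callback runs on a single
  // pinned connection that is released in the session's `finally` block.
  await tx.insert(users).values({ id: 1, name: 'Ada' });

  // Nested transaction: the session first issues `savepoint drizzle_savepoint_1`.
  // If DuckDB reports a savepoint syntax error, the dialect is marked as not
  // supporting savepoints and the nested callback simply runs inline.
  await tx.transaction(async (nested) => {
    await nested.insert(users).values({ id: 2, name: 'Grace' });
    // An error thrown here rolls back to the savepoint (when one was created)
    // and marks the session rollback-only, so the outer transaction rolls back
    // and surfaces a TransactionRollbackError even if the error is caught.
  });
});
```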