@duckdbfan/drizzle-duckdb 0.0.7 → 1.3.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +349 -62
- package/dist/bin/duckdb-introspect.d.ts +2 -0
- package/dist/client.d.ts +42 -0
- package/dist/columns.d.ts +100 -9
- package/dist/dialect.d.ts +27 -2
- package/dist/driver.d.ts +53 -37
- package/dist/duckdb-introspect.mjs +2890 -0
- package/dist/helpers.d.ts +1 -0
- package/dist/helpers.mjs +360 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.mjs +3015 -228
- package/dist/introspect.d.ts +74 -0
- package/dist/migrator.d.ts +3 -2
- package/dist/olap.d.ts +46 -0
- package/dist/operators.d.ts +8 -0
- package/dist/options.d.ts +7 -0
- package/dist/pool.d.ts +30 -0
- package/dist/select-builder.d.ts +31 -0
- package/dist/session.d.ts +33 -8
- package/dist/sql/ast-transformer.d.ts +33 -0
- package/dist/sql/result-mapper.d.ts +9 -0
- package/dist/sql/selection.d.ts +2 -0
- package/dist/sql/visitors/array-operators.d.ts +5 -0
- package/dist/sql/visitors/column-qualifier.d.ts +10 -0
- package/dist/sql/visitors/generate-series-alias.d.ts +13 -0
- package/dist/sql/visitors/union-with-hoister.d.ts +11 -0
- package/dist/utils.d.ts +2 -5
- package/dist/value-wrappers-core.d.ts +42 -0
- package/dist/value-wrappers.d.ts +8 -0
- package/package.json +53 -16
- package/src/bin/duckdb-introspect.ts +181 -0
- package/src/client.ts +528 -0
- package/src/columns.ts +420 -65
- package/src/dialect.ts +111 -15
- package/src/driver.ts +266 -180
- package/src/helpers.ts +18 -0
- package/src/index.ts +8 -1
- package/src/introspect.ts +935 -0
- package/src/migrator.ts +10 -5
- package/src/olap.ts +190 -0
- package/src/operators.ts +27 -0
- package/src/options.ts +25 -0
- package/src/pool.ts +274 -0
- package/src/select-builder.ts +110 -0
- package/src/session.ts +306 -66
- package/src/sql/ast-transformer.ts +170 -0
- package/src/sql/result-mapper.ts +303 -0
- package/src/sql/selection.ts +60 -0
- package/src/sql/visitors/array-operators.ts +214 -0
- package/src/sql/visitors/column-qualifier.ts +586 -0
- package/src/sql/visitors/generate-series-alias.ts +291 -0
- package/src/sql/visitors/union-with-hoister.ts +106 -0
- package/src/utils.ts +2 -222
- package/src/value-wrappers-core.ts +168 -0
- package/src/value-wrappers.ts +165 -0
package/src/migrator.ts
CHANGED
|
@@ -1,18 +1,23 @@
|
|
|
1
1
|
import type { MigrationConfig } from 'drizzle-orm/migrator';
import { readMigrationFiles } from 'drizzle-orm/migrator';
import type { DuckDBDatabase } from './driver.ts';
import type { PgSession } from 'drizzle-orm/pg-core/session';

/** Either a full drizzle MigrationConfig or a shorthand migrations-folder path. */
export type DuckDbMigrationConfig = MigrationConfig | string;

/**
 * Apply pending migrations to a DuckDB database.
 *
 * @param db - the drizzle DuckDB database instance
 * @param config - a MigrationConfig, or a string used as `migrationsFolder`
 */
export async function migrate<TSchema extends Record<string, unknown>>(
  db: DuckDBDatabase<TSchema>,
  config: DuckDbMigrationConfig
) {
  // A bare string is shorthand for { migrationsFolder: string }.
  const migrationConfig: MigrationConfig =
    typeof config === 'string' ? { migrationsFolder: config } : config;

  const migrations = readMigrationFiles(migrationConfig);

  // Cast needed: Drizzle's internal PgSession type differs from exported type
  await db.dialect.migrate(
    migrations,
    db.session as unknown as PgSession,
    migrationConfig
  );
}
|
package/src/olap.ts
ADDED
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
import { is } from 'drizzle-orm/entity';
|
|
2
|
+
import { sql, Subquery, type SQLWrapper } from 'drizzle-orm';
|
|
3
|
+
import type { AnyPgColumn, PgTable } from 'drizzle-orm/pg-core';
|
|
4
|
+
import type { PgViewBase } from 'drizzle-orm/pg-core/view-base';
|
|
5
|
+
import type { SelectedFields } from 'drizzle-orm/pg-core/query-builders';
|
|
6
|
+
import { SQL } from 'drizzle-orm/sql/sql';
|
|
7
|
+
import { Column, getTableName } from 'drizzle-orm';
|
|
8
|
+
import type { DuckDBDatabase } from './driver.ts';
|
|
9
|
+
|
|
10
|
+
/** `count(expr)` aggregate; defaults to `count(*)`. Result mapped to a JS number. */
export const countN = (expr: SQLWrapper = sql`*`) =>
  sql<number>`count(${expr})`.mapWith(Number);

/** `sum(expr)` aggregate, mapped to a JS number. */
export const sumN = (expr: SQLWrapper) =>
  sql<number>`sum(${expr})`.mapWith(Number);

/** `avg(expr)` aggregate, mapped to a JS number. */
export const avgN = (expr: SQLWrapper) =>
  sql<number>`avg(${expr})`.mapWith(Number);

/** `sum(distinct expr)` aggregate, mapped to a JS number. */
export const sumDistinctN = (expr: SQLWrapper) =>
  sql<number>`sum(distinct ${expr})`.mapWith(Number);

/**
 * Continuous percentile: `percentile_cont(p) within group (order by expr)`.
 * `p` is interpolated via the `sql` tag.
 */
export const percentileCont = (p: number, expr: SQLWrapper) =>
  sql<number>`percentile_cont(${p}) within group (order by ${expr})`.mapWith(
    Number
  );

/** Median = 50th continuous percentile of `expr`. */
export const median = (expr: SQLWrapper) => percentileCont(0.5, expr);

/** `any_value(expr)`: an arbitrary value from the group; result type chosen by caller. */
export const anyValue = <T = unknown>(expr: SQLWrapper) =>
  sql<T>`any_value(${expr})`;
|
32
|
+
type PartitionOrder =
|
|
33
|
+
| {
|
|
34
|
+
partitionBy?: SQLWrapper | SQLWrapper[];
|
|
35
|
+
orderBy?: SQLWrapper | SQLWrapper[];
|
|
36
|
+
}
|
|
37
|
+
| undefined;
|
|
38
|
+
|
|
39
|
+
function normalizeArray<T>(value?: T | T[]): T[] {
|
|
40
|
+
if (!value) return [];
|
|
41
|
+
return Array.isArray(value) ? value : [value];
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
function overClause(options?: PartitionOrder) {
|
|
45
|
+
const partitions = normalizeArray(options?.partitionBy);
|
|
46
|
+
const orders = normalizeArray(options?.orderBy);
|
|
47
|
+
|
|
48
|
+
const chunks: SQLWrapper[] = [];
|
|
49
|
+
|
|
50
|
+
if (partitions.length > 0) {
|
|
51
|
+
chunks.push(sql`partition by ${sql.join(partitions, sql`, `)}`);
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
if (orders.length > 0) {
|
|
55
|
+
chunks.push(sql`order by ${sql.join(orders, sql`, `)}`);
|
|
56
|
+
}
|
|
57
|
+
|
|
58
|
+
if (chunks.length === 0) {
|
|
59
|
+
return sql``;
|
|
60
|
+
}
|
|
61
|
+
|
|
62
|
+
return sql`over (${sql.join(chunks, sql` `)})`;
|
|
63
|
+
}
|
|
64
|
+
|
|
65
|
+
export const rowNumber = (options?: PartitionOrder) =>
|
|
66
|
+
sql<number>`row_number() ${overClause(options)}`.mapWith(Number);
|
|
67
|
+
|
|
68
|
+
export const rank = (options?: PartitionOrder) =>
|
|
69
|
+
sql<number>`rank() ${overClause(options)}`.mapWith(Number);
|
|
70
|
+
|
|
71
|
+
export const denseRank = (options?: PartitionOrder) =>
|
|
72
|
+
sql<number>`dense_rank() ${overClause(options)}`.mapWith(Number);
|
|
73
|
+
|
|
74
|
+
export const lag = <T = unknown>(
|
|
75
|
+
expr: SQLWrapper,
|
|
76
|
+
offset = 1,
|
|
77
|
+
defaultValue?: SQLWrapper,
|
|
78
|
+
options?: PartitionOrder
|
|
79
|
+
) =>
|
|
80
|
+
defaultValue
|
|
81
|
+
? sql<T>`lag(${expr}, ${offset}, ${defaultValue}) ${overClause(options)}`
|
|
82
|
+
: sql<T>`lag(${expr}, ${offset}) ${overClause(options)}`;
|
|
83
|
+
|
|
84
|
+
export const lead = <T = unknown>(
|
|
85
|
+
expr: SQLWrapper,
|
|
86
|
+
offset = 1,
|
|
87
|
+
defaultValue?: SQLWrapper,
|
|
88
|
+
options?: PartitionOrder
|
|
89
|
+
) =>
|
|
90
|
+
defaultValue
|
|
91
|
+
? sql<T>`lead(${expr}, ${offset}, ${defaultValue}) ${overClause(options)}`
|
|
92
|
+
: sql<T>`lead(${expr}, ${offset}) ${overClause(options)}`;
|
|
93
|
+
|
|
94
|
+
type ValueExpr = SQL | SQL.Aliased | AnyPgColumn;
|
|
95
|
+
type GroupKey = ValueExpr;
|
|
96
|
+
type MeasureMap = Record<string, ValueExpr>;
|
|
97
|
+
type NonAggMap = Record<string, ValueExpr>;
|
|
98
|
+
|
|
99
|
+
function keyAlias(key: SQLWrapper, fallback: string): string {
|
|
100
|
+
if (is(key, SQL.Aliased)) {
|
|
101
|
+
return key.fieldAlias ?? fallback;
|
|
102
|
+
}
|
|
103
|
+
if (is(key, Column)) {
|
|
104
|
+
return `${getTableName(key.table)}.${key.name}`;
|
|
105
|
+
}
|
|
106
|
+
return fallback;
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
/**
 * Fluent builder for OLAP-style GROUP BY queries on a DuckDBDatabase.
 * `.from()`, `.groupBy()` and `.measures()` are required before `build()`.
 */
export class OlapBuilder {
  // Relation to select from (table, subquery, view, or raw SQL). Required.
  private source?: PgTable | Subquery | PgViewBase | SQL;
  // Grouping keys; also emitted in the selection under keyAlias()-derived names.
  private keys: GroupKey[] = [];
  // alias -> aggregate expression; merged into the selection last.
  private measureMap: MeasureMap = {};
  // alias -> non-aggregated expression, optionally wrapped in any_value().
  private nonAggregates: NonAggMap = {};
  private wrapNonAggWithAnyValue = false;
  private orderByClauses: ValueExpr[] = [];

  constructor(private db: DuckDBDatabase) {}

  /** Set the source relation. Required. */
  from(source: PgTable | Subquery | PgViewBase | SQL): this {
    this.source = source;
    return this;
  }

  /** Set the GROUP BY keys (replaces any previous keys). Required. */
  groupBy(keys: GroupKey[]): this {
    this.keys = keys;
    return this;
  }

  /** Set the aggregate measures keyed by output alias. Required. */
  measures(measures: MeasureMap): this {
    this.measureMap = measures;
    return this;
  }

  /**
   * Add non-aggregated output fields. With `anyValue: true` each field is
   * wrapped in any_value() when the selection is built.
   */
  selectNonAggregates(
    fields: NonAggMap,
    options: { anyValue?: boolean } = {}
  ): this {
    this.nonAggregates = fields;
    this.wrapNonAggWithAnyValue = options.anyValue ?? false;
    return this;
  }

  /** Set ORDER BY clauses (replaces any previous ordering). */
  orderBy(...clauses: ValueExpr[]): this {
    this.orderByClauses = clauses;
    return this;
  }

  /**
   * Assemble the drizzle select query. Throws when from/groupBy/measures
   * are missing. Selection is built in order: keys, non-aggregates,
   * measures — later entries with the same alias overwrite earlier ones.
   */
  build() {
    if (!this.source) {
      throw new Error('olap: .from() is required');
    }
    if (this.keys.length === 0) {
      throw new Error('olap: .groupBy() is required');
    }
    if (Object.keys(this.measureMap).length === 0) {
      throw new Error('olap: .measures() is required');
    }

    const selection: Record<string, ValueExpr> = {};

    // Group keys first, under aliases derived by keyAlias() (fallback key_<idx>).
    this.keys.forEach((key, idx) => {
      const alias = keyAlias(key, `key_${idx}`);
      selection[alias] = key;
    });

    Object.entries(this.nonAggregates).forEach(([alias, expr]) => {
      selection[alias] = this.wrapNonAggWithAnyValue ? anyValue(expr) : expr;
    });

    Object.assign(selection, this.measureMap);

    // eslint-disable-next-line @typescript-eslint/no-explicit-any -- Drizzle's query builder types don't allow reassignment after groupBy
    let query: any = this.db
      .select(selection as SelectedFields)
      .from(this.source!)
      .groupBy(...this.keys);

    if (this.orderByClauses.length > 0) {
      query = query.orderBy(...this.orderByClauses);
    }

    return query;
  }

  /** Alias for build(). */
  run() {
    return this.build();
  }
}
|
|
189
|
+
|
|
190
|
+
/** Entry point: create an OlapBuilder bound to the given database. */
export const olap = (db: DuckDBDatabase): OlapBuilder => new OlapBuilder(db);
|
package/src/operators.ts
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* DuckDB-native array operators. Generate DuckDB-compatible SQL directly
|
|
3
|
+
* without query rewriting.
|
|
4
|
+
*/
|
|
5
|
+
|
|
6
|
+
import { sql, type SQL, type SQLWrapper } from 'drizzle-orm';
|
|
7
|
+
|
|
8
|
+
export function arrayHasAll<T>(
|
|
9
|
+
column: SQLWrapper,
|
|
10
|
+
values: T[] | SQLWrapper
|
|
11
|
+
): SQL {
|
|
12
|
+
return sql`array_has_all(${column}, ${values})`;
|
|
13
|
+
}
|
|
14
|
+
|
|
15
|
+
export function arrayHasAny<T>(
|
|
16
|
+
column: SQLWrapper,
|
|
17
|
+
values: T[] | SQLWrapper
|
|
18
|
+
): SQL {
|
|
19
|
+
return sql`array_has_any(${column}, ${values})`;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
export function arrayContainedBy<T>(
|
|
23
|
+
column: SQLWrapper,
|
|
24
|
+
values: T[] | SQLWrapper
|
|
25
|
+
): SQL {
|
|
26
|
+
return sql`array_has_all(${values}, ${column})`;
|
|
27
|
+
}
|
package/src/options.ts
ADDED
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
export type PrepareCacheOption = boolean | number | { size?: number };
|
|
2
|
+
|
|
3
|
+
export interface PreparedStatementCacheConfig {
|
|
4
|
+
size: number;
|
|
5
|
+
}
|
|
6
|
+
|
|
7
|
+
const DEFAULT_PREPARED_CACHE_SIZE = 32;
|
|
8
|
+
|
|
9
|
+
export function resolvePrepareCacheOption(
|
|
10
|
+
option?: PrepareCacheOption
|
|
11
|
+
): PreparedStatementCacheConfig | undefined {
|
|
12
|
+
if (!option) return undefined;
|
|
13
|
+
|
|
14
|
+
if (option === true) {
|
|
15
|
+
return { size: DEFAULT_PREPARED_CACHE_SIZE };
|
|
16
|
+
}
|
|
17
|
+
|
|
18
|
+
if (typeof option === 'number') {
|
|
19
|
+
const size = Math.max(1, Math.floor(option));
|
|
20
|
+
return { size };
|
|
21
|
+
}
|
|
22
|
+
|
|
23
|
+
const size = option.size ?? DEFAULT_PREPARED_CACHE_SIZE;
|
|
24
|
+
return { size: Math.max(1, Math.floor(size)) };
|
|
25
|
+
}
|
package/src/pool.ts
ADDED
|
@@ -0,0 +1,274 @@
|
|
|
1
|
+
import { DuckDBConnection, DuckDBInstance } from '@duckdb/node-api';
|
|
2
|
+
import { closeClientConnection, type DuckDBConnectionPool } from './client.ts';
|
|
3
|
+
|
|
4
|
+
/** Pool size presets for different MotherDuck instance types */
|
|
5
|
+
export type PoolPreset =
|
|
6
|
+
| 'pulse'
|
|
7
|
+
| 'standard'
|
|
8
|
+
| 'jumbo'
|
|
9
|
+
| 'mega'
|
|
10
|
+
| 'giga'
|
|
11
|
+
| 'local'
|
|
12
|
+
| 'memory';
|
|
13
|
+
|
|
14
|
+
/** Pool sizes optimized for each MotherDuck instance type */
|
|
15
|
+
export const POOL_PRESETS: Record<PoolPreset, number> = {
|
|
16
|
+
pulse: 4, // Auto-scaling, ad-hoc analytics
|
|
17
|
+
standard: 6, // Balanced ETL/ELT workloads
|
|
18
|
+
jumbo: 8, // Complex queries, high-volume
|
|
19
|
+
mega: 12, // Large-scale transformations
|
|
20
|
+
giga: 16, // Maximum parallelism
|
|
21
|
+
local: 8, // Local DuckDB file
|
|
22
|
+
memory: 4, // In-memory testing
|
|
23
|
+
};
|
|
24
|
+
|
|
25
|
+
export interface DuckDBPoolConfig {
|
|
26
|
+
/** Maximum concurrent connections. Defaults to 4. */
|
|
27
|
+
size?: number;
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
/**
|
|
31
|
+
* Resolve pool configuration to a concrete size.
|
|
32
|
+
* Returns false if pooling is disabled.
|
|
33
|
+
*/
|
|
34
|
+
export function resolvePoolSize(
|
|
35
|
+
pool: DuckDBPoolConfig | PoolPreset | false | undefined
|
|
36
|
+
): number | false {
|
|
37
|
+
if (pool === false) return false;
|
|
38
|
+
if (pool === undefined) return 4;
|
|
39
|
+
if (typeof pool === 'string') return POOL_PRESETS[pool];
|
|
40
|
+
return pool.size ?? 4;
|
|
41
|
+
}
|
|
42
|
+
|
|
43
|
+
export interface DuckDBConnectionPoolOptions {
|
|
44
|
+
/** Maximum concurrent connections. Defaults to 4. */
|
|
45
|
+
size?: number;
|
|
46
|
+
/** Timeout in milliseconds to wait for a connection. Defaults to 30000 (30s). */
|
|
47
|
+
acquireTimeout?: number;
|
|
48
|
+
/** Maximum number of requests waiting for a connection. Defaults to 100. */
|
|
49
|
+
maxWaitingRequests?: number;
|
|
50
|
+
/** Max time (ms) a connection may live before being recycled. */
|
|
51
|
+
maxLifetimeMs?: number;
|
|
52
|
+
/** Max idle time (ms) before an idle connection is discarded. */
|
|
53
|
+
idleTimeoutMs?: number;
|
|
54
|
+
}
|
|
55
|
+
|
|
56
|
+
export function createDuckDBConnectionPool(
|
|
57
|
+
instance: DuckDBInstance,
|
|
58
|
+
options: DuckDBConnectionPoolOptions = {}
|
|
59
|
+
): DuckDBConnectionPool & { size: number } {
|
|
60
|
+
const size = options.size && options.size > 0 ? options.size : 4;
|
|
61
|
+
const acquireTimeout = options.acquireTimeout ?? 30_000;
|
|
62
|
+
const maxWaitingRequests = options.maxWaitingRequests ?? 100;
|
|
63
|
+
const maxLifetimeMs = options.maxLifetimeMs;
|
|
64
|
+
const idleTimeoutMs = options.idleTimeoutMs;
|
|
65
|
+
const metadata = new WeakMap<
|
|
66
|
+
DuckDBConnection,
|
|
67
|
+
{ createdAt: number; lastUsedAt: number }
|
|
68
|
+
>();
|
|
69
|
+
|
|
70
|
+
type PooledConnection = {
|
|
71
|
+
connection: DuckDBConnection;
|
|
72
|
+
createdAt: number;
|
|
73
|
+
lastUsedAt: number;
|
|
74
|
+
};
|
|
75
|
+
|
|
76
|
+
const idle: PooledConnection[] = [];
|
|
77
|
+
const waiting: Array<{
|
|
78
|
+
resolve: (conn: DuckDBConnection) => void;
|
|
79
|
+
reject: (error: Error) => void;
|
|
80
|
+
timeoutId: ReturnType<typeof setTimeout>;
|
|
81
|
+
}> = [];
|
|
82
|
+
let total = 0;
|
|
83
|
+
let closed = false;
|
|
84
|
+
// Track pending acquires to handle race conditions during close
|
|
85
|
+
let pendingAcquires = 0;
|
|
86
|
+
|
|
87
|
+
const shouldRecycle = (conn: PooledConnection, now: number): boolean => {
|
|
88
|
+
if (maxLifetimeMs !== undefined && now - conn.createdAt >= maxLifetimeMs) {
|
|
89
|
+
return true;
|
|
90
|
+
}
|
|
91
|
+
if (idleTimeoutMs !== undefined && now - conn.lastUsedAt >= idleTimeoutMs) {
|
|
92
|
+
return true;
|
|
93
|
+
}
|
|
94
|
+
return false;
|
|
95
|
+
};
|
|
96
|
+
|
|
97
|
+
const acquire = async (): Promise<DuckDBConnection> => {
|
|
98
|
+
if (closed) {
|
|
99
|
+
throw new Error('DuckDB connection pool is closed');
|
|
100
|
+
}
|
|
101
|
+
|
|
102
|
+
while (idle.length > 0) {
|
|
103
|
+
const pooled = idle.pop() as PooledConnection;
|
|
104
|
+
const now = Date.now();
|
|
105
|
+
if (shouldRecycle(pooled, now)) {
|
|
106
|
+
await closeClientConnection(pooled.connection);
|
|
107
|
+
total = Math.max(0, total - 1);
|
|
108
|
+
metadata.delete(pooled.connection);
|
|
109
|
+
continue;
|
|
110
|
+
}
|
|
111
|
+
pooled.lastUsedAt = now;
|
|
112
|
+
metadata.set(pooled.connection, {
|
|
113
|
+
createdAt: pooled.createdAt,
|
|
114
|
+
lastUsedAt: pooled.lastUsedAt,
|
|
115
|
+
});
|
|
116
|
+
return pooled.connection;
|
|
117
|
+
}
|
|
118
|
+
|
|
119
|
+
if (total < size) {
|
|
120
|
+
pendingAcquires += 1;
|
|
121
|
+
total += 1;
|
|
122
|
+
try {
|
|
123
|
+
const connection = await DuckDBConnection.create(instance);
|
|
124
|
+
// Check if pool was closed during async connection creation
|
|
125
|
+
if (closed) {
|
|
126
|
+
await closeClientConnection(connection);
|
|
127
|
+
total -= 1;
|
|
128
|
+
throw new Error('DuckDB connection pool is closed');
|
|
129
|
+
}
|
|
130
|
+
const now = Date.now();
|
|
131
|
+
metadata.set(connection, { createdAt: now, lastUsedAt: now });
|
|
132
|
+
return connection;
|
|
133
|
+
} catch (error) {
|
|
134
|
+
total -= 1;
|
|
135
|
+
throw error;
|
|
136
|
+
} finally {
|
|
137
|
+
pendingAcquires -= 1;
|
|
138
|
+
}
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
// Check queue limit before waiting
|
|
142
|
+
if (waiting.length >= maxWaitingRequests) {
|
|
143
|
+
throw new Error(
|
|
144
|
+
`DuckDB connection pool queue is full (max ${maxWaitingRequests} waiting requests)`
|
|
145
|
+
);
|
|
146
|
+
}
|
|
147
|
+
|
|
148
|
+
return await new Promise((resolve, reject) => {
|
|
149
|
+
const timeoutId = setTimeout(() => {
|
|
150
|
+
// Remove this waiter from the queue
|
|
151
|
+
const idx = waiting.findIndex((w) => w.timeoutId === timeoutId);
|
|
152
|
+
if (idx !== -1) {
|
|
153
|
+
waiting.splice(idx, 1);
|
|
154
|
+
}
|
|
155
|
+
reject(
|
|
156
|
+
new Error(
|
|
157
|
+
`DuckDB connection pool acquire timeout after ${acquireTimeout}ms`
|
|
158
|
+
)
|
|
159
|
+
);
|
|
160
|
+
}, acquireTimeout);
|
|
161
|
+
|
|
162
|
+
waiting.push({ resolve, reject, timeoutId });
|
|
163
|
+
});
|
|
164
|
+
};
|
|
165
|
+
|
|
166
|
+
const release = async (connection: DuckDBConnection): Promise<void> => {
|
|
167
|
+
const waiter = waiting.shift();
|
|
168
|
+
if (waiter) {
|
|
169
|
+
clearTimeout(waiter.timeoutId);
|
|
170
|
+
const now = Date.now();
|
|
171
|
+
const meta =
|
|
172
|
+
metadata.get(connection) ??
|
|
173
|
+
({ createdAt: now, lastUsedAt: now } as {
|
|
174
|
+
createdAt: number;
|
|
175
|
+
lastUsedAt: number;
|
|
176
|
+
});
|
|
177
|
+
|
|
178
|
+
const expired =
|
|
179
|
+
maxLifetimeMs !== undefined && now - meta.createdAt >= maxLifetimeMs;
|
|
180
|
+
|
|
181
|
+
if (closed) {
|
|
182
|
+
await closeClientConnection(connection);
|
|
183
|
+
total = Math.max(0, total - 1);
|
|
184
|
+
metadata.delete(connection);
|
|
185
|
+
waiter.reject(new Error('DuckDB connection pool is closed'));
|
|
186
|
+
return;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
if (expired) {
|
|
190
|
+
await closeClientConnection(connection);
|
|
191
|
+
total = Math.max(0, total - 1);
|
|
192
|
+
metadata.delete(connection);
|
|
193
|
+
try {
|
|
194
|
+
const replacement = await acquire();
|
|
195
|
+
waiter.resolve(replacement);
|
|
196
|
+
} catch (error) {
|
|
197
|
+
waiter.reject(error as Error);
|
|
198
|
+
}
|
|
199
|
+
return;
|
|
200
|
+
}
|
|
201
|
+
|
|
202
|
+
meta.lastUsedAt = now;
|
|
203
|
+
metadata.set(connection, meta);
|
|
204
|
+
waiter.resolve(connection);
|
|
205
|
+
return;
|
|
206
|
+
}
|
|
207
|
+
|
|
208
|
+
if (closed) {
|
|
209
|
+
await closeClientConnection(connection);
|
|
210
|
+
metadata.delete(connection);
|
|
211
|
+
total = Math.max(0, total - 1);
|
|
212
|
+
return;
|
|
213
|
+
}
|
|
214
|
+
|
|
215
|
+
const now = Date.now();
|
|
216
|
+
const existingMeta =
|
|
217
|
+
metadata.get(connection) ??
|
|
218
|
+
({ createdAt: now, lastUsedAt: now } as {
|
|
219
|
+
createdAt: number;
|
|
220
|
+
lastUsedAt: number;
|
|
221
|
+
});
|
|
222
|
+
existingMeta.lastUsedAt = now;
|
|
223
|
+
metadata.set(connection, existingMeta);
|
|
224
|
+
|
|
225
|
+
if (
|
|
226
|
+
maxLifetimeMs !== undefined &&
|
|
227
|
+
now - existingMeta.createdAt >= maxLifetimeMs
|
|
228
|
+
) {
|
|
229
|
+
await closeClientConnection(connection);
|
|
230
|
+
total -= 1;
|
|
231
|
+
metadata.delete(connection);
|
|
232
|
+
return;
|
|
233
|
+
}
|
|
234
|
+
|
|
235
|
+
idle.push({
|
|
236
|
+
connection,
|
|
237
|
+
createdAt: existingMeta.createdAt,
|
|
238
|
+
lastUsedAt: existingMeta.lastUsedAt,
|
|
239
|
+
});
|
|
240
|
+
};
|
|
241
|
+
|
|
242
|
+
const close = async (): Promise<void> => {
|
|
243
|
+
closed = true;
|
|
244
|
+
|
|
245
|
+
// Clear all waiting requests with their timeouts
|
|
246
|
+
const waiters = waiting.splice(0, waiting.length);
|
|
247
|
+
for (const waiter of waiters) {
|
|
248
|
+
clearTimeout(waiter.timeoutId);
|
|
249
|
+
waiter.reject(new Error('DuckDB connection pool is closed'));
|
|
250
|
+
}
|
|
251
|
+
|
|
252
|
+
// Close all idle connections (use allSettled to ensure all are attempted)
|
|
253
|
+
const toClose = idle.splice(0, idle.length);
|
|
254
|
+
await Promise.allSettled(
|
|
255
|
+
toClose.map((item) => closeClientConnection(item.connection))
|
|
256
|
+
);
|
|
257
|
+
total = Math.max(0, total - toClose.length);
|
|
258
|
+
toClose.forEach((item) => metadata.delete(item.connection));
|
|
259
|
+
|
|
260
|
+
// Wait for pending acquires to complete (with a reasonable timeout)
|
|
261
|
+
const maxWait = 5000;
|
|
262
|
+
const start = Date.now();
|
|
263
|
+
while (pendingAcquires > 0 && Date.now() - start < maxWait) {
|
|
264
|
+
await new Promise((r) => setTimeout(r, 10));
|
|
265
|
+
}
|
|
266
|
+
};
|
|
267
|
+
|
|
268
|
+
return {
|
|
269
|
+
acquire,
|
|
270
|
+
release,
|
|
271
|
+
close,
|
|
272
|
+
size,
|
|
273
|
+
};
|
|
274
|
+
}
|
|
@@ -0,0 +1,110 @@
|
|
|
1
|
+
import { is } from 'drizzle-orm/entity';
|
|
2
|
+
import {
|
|
3
|
+
PgSelectBase,
|
|
4
|
+
PgSelectBuilder,
|
|
5
|
+
type CreatePgSelectFromBuilderMode,
|
|
6
|
+
type SelectedFields,
|
|
7
|
+
type TableLikeHasEmptySelection,
|
|
8
|
+
} from 'drizzle-orm/pg-core/query-builders';
|
|
9
|
+
import { PgColumn, PgTable, type PgSession } from 'drizzle-orm/pg-core';
|
|
10
|
+
import { Subquery, ViewBaseConfig, type SQLWrapper } from 'drizzle-orm';
|
|
11
|
+
import { PgViewBase } from 'drizzle-orm/pg-core/view-base';
|
|
12
|
+
import type {
|
|
13
|
+
GetSelectTableName,
|
|
14
|
+
GetSelectTableSelection,
|
|
15
|
+
} from 'drizzle-orm/query-builders/select.types';
|
|
16
|
+
import { SQL, type ColumnsSelection } from 'drizzle-orm/sql/sql';
|
|
17
|
+
import { aliasFields } from './sql/selection.ts';
|
|
18
|
+
import type { DuckDBDialect } from './dialect.ts';
|
|
19
|
+
import { getTableColumns, type DrizzleTypeError } from 'drizzle-orm/utils';
|
|
20
|
+
|
|
21
|
+
/**
 * PgViewBase widened with optional access to drizzle's internal
 * [ViewBaseConfig] symbol, so `from()` below can read a view's
 * selected fields without an unchecked cast.
 */
interface PgViewBaseInternal<
  TName extends string = string,
  TExisting extends boolean = boolean,
  TSelectedFields extends ColumnsSelection = ColumnsSelection,
> extends PgViewBase<TName, TExisting, TSelectedFields> {
  [ViewBaseConfig]?: {
    selectedFields: SelectedFields;
  };
}

/**
 * Select builder that mirrors drizzle's PgSelectBuilder but plugs in the
 * DuckDB dialect and applies aliasFields() to full-table selections.
 * Private state is duplicated here because the base class keeps its copies
 * inaccessible to subclasses.
 */
export class DuckDBSelectBuilder<
  TSelection extends SelectedFields | undefined,
  TBuilderMode extends 'db' | 'qb' = 'db',
> extends PgSelectBuilder<TSelection, TBuilderMode> {
  private _fields: TSelection;
  private _session: PgSession | undefined;
  private _dialect: DuckDBDialect;
  private _withList: Subquery[] = [];
  private _distinct:
    | boolean
    | {
        on: (PgColumn | SQLWrapper)[];
      }
    | undefined;

  constructor(config: {
    fields: TSelection;
    session: PgSession | undefined;
    dialect: DuckDBDialect;
    withList?: Subquery[];
    distinct?:
      | boolean
      | {
          on: (PgColumn | SQLWrapper)[];
        };
  }) {
    super(config);
    // Mirror the config locally; the base class fields are private to it.
    this._fields = config.fields;
    this._session = config.session;
    this._dialect = config.dialect;
    if (config.withList) {
      this._withList = config.withList;
    }
    this._distinct = config.distinct;
  }

  /**
   * Bind the selection to a source (table, subquery, view, or raw SQL)
   * and produce the concrete PgSelectBase query. When no explicit fields
   * were given, the selection is derived from the source itself.
   */
  from<TFrom extends PgTable | Subquery | PgViewBaseInternal | SQL>(
    source: TableLikeHasEmptySelection<TFrom> extends true
      ? DrizzleTypeError<"Cannot reference a data-modifying statement subquery if it doesn't contain a `returning` clause">
      : TFrom
  ): CreatePgSelectFromBuilderMode<
    TBuilderMode,
    GetSelectTableName<TFrom>,
    TSelection extends undefined ? GetSelectTableSelection<TFrom> : TSelection,
    TSelection extends undefined ? 'single' : 'partial'
  > {
    const isPartialSelect = !!this._fields;
    const src = source as TFrom;

    let fields: SelectedFields;
    if (this._fields) {
      // Explicit selection provided by the caller.
      fields = this._fields;
    } else if (is(src, Subquery)) {
      // Re-project the subquery's selected fields by key.
      fields = Object.fromEntries(
        Object.keys(src._.selectedFields).map((key) => [
          key,
          src[
            key as unknown as keyof typeof src
          ] as unknown as SelectedFields[string],
        ])
      );
    } else if (is(src, PgViewBase)) {
      // Read the view's selection via the internal config symbol.
      fields = src[ViewBaseConfig]?.selectedFields as SelectedFields;
    } else if (is(src, SQL)) {
      // Raw SQL sources carry no discoverable selection.
      fields = {};
    } else {
      // Plain table: select all columns, aliased for the DuckDB dialect.
      fields = aliasFields(getTableColumns<PgTable>(src), !isPartialSelect);
    }

    // Cast: the conditional return type above cannot be proven by TS here.
    return new PgSelectBase({
      table: src,
      fields,
      isPartialSelect,
      session: this._session,
      dialect: this._dialect,
      withList: this._withList,
      distinct: this._distinct,
    }) as any;
  }
}
|