@duckdbfan/drizzle-duckdb 0.0.6 → 1.3.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +344 -62
- package/dist/bin/duckdb-introspect.d.ts +2 -0
- package/dist/client.d.ts +42 -0
- package/dist/columns.d.ts +142 -0
- package/dist/dialect.d.ts +27 -2
- package/dist/driver.d.ts +53 -37
- package/dist/duckdb-introspect.mjs +2890 -0
- package/dist/helpers.d.ts +1 -0
- package/dist/helpers.mjs +360 -0
- package/dist/index.d.ts +7 -0
- package/dist/index.mjs +3071 -209
- package/dist/introspect.d.ts +74 -0
- package/dist/migrator.d.ts +3 -2
- package/dist/olap.d.ts +46 -0
- package/dist/operators.d.ts +8 -0
- package/dist/options.d.ts +7 -0
- package/dist/pool.d.ts +30 -0
- package/dist/select-builder.d.ts +31 -0
- package/dist/session.d.ts +33 -8
- package/dist/sql/ast-transformer.d.ts +33 -0
- package/dist/sql/result-mapper.d.ts +9 -0
- package/dist/sql/selection.d.ts +2 -0
- package/dist/sql/visitors/array-operators.d.ts +5 -0
- package/dist/sql/visitors/column-qualifier.d.ts +10 -0
- package/dist/sql/visitors/generate-series-alias.d.ts +13 -0
- package/dist/sql/visitors/union-with-hoister.d.ts +11 -0
- package/dist/utils.d.ts +2 -5
- package/dist/value-wrappers-core.d.ts +42 -0
- package/dist/value-wrappers.d.ts +8 -0
- package/package.json +53 -16
- package/src/bin/duckdb-introspect.ts +181 -0
- package/src/client.ts +528 -0
- package/src/columns.ts +510 -1
- package/src/dialect.ts +111 -15
- package/src/driver.ts +266 -180
- package/src/helpers.ts +18 -0
- package/src/index.ts +8 -1
- package/src/introspect.ts +935 -0
- package/src/migrator.ts +10 -5
- package/src/olap.ts +190 -0
- package/src/operators.ts +27 -0
- package/src/options.ts +25 -0
- package/src/pool.ts +274 -0
- package/src/select-builder.ts +110 -0
- package/src/session.ts +306 -66
- package/src/sql/ast-transformer.ts +170 -0
- package/src/sql/result-mapper.ts +303 -0
- package/src/sql/selection.ts +60 -0
- package/src/sql/visitors/array-operators.ts +214 -0
- package/src/sql/visitors/column-qualifier.ts +586 -0
- package/src/sql/visitors/generate-series-alias.ts +291 -0
- package/src/sql/visitors/union-with-hoister.ts +106 -0
- package/src/utils.ts +2 -216
- package/src/value-wrappers-core.ts +168 -0
- package/src/value-wrappers.ts +165 -0
package/src/columns.ts
CHANGED
|
@@ -1 +1,510 @@
|
|
|
1
|
-
|
|
1
|
+
import { sql, type SQL } from 'drizzle-orm';
|
|
2
|
+
import type { SQLWrapper } from 'drizzle-orm/sql/sql';
|
|
3
|
+
import { customType } from 'drizzle-orm/pg-core';
|
|
4
|
+
import {
|
|
5
|
+
wrapList,
|
|
6
|
+
wrapArray,
|
|
7
|
+
wrapMap,
|
|
8
|
+
wrapBlob,
|
|
9
|
+
wrapJson,
|
|
10
|
+
wrapTimestamp,
|
|
11
|
+
type ListValueWrapper,
|
|
12
|
+
type ArrayValueWrapper,
|
|
13
|
+
type MapValueWrapper,
|
|
14
|
+
type BlobValueWrapper,
|
|
15
|
+
type JsonValueWrapper,
|
|
16
|
+
type TimestampValueWrapper,
|
|
17
|
+
} from './value-wrappers-core.ts';
|
|
18
|
+
|
|
19
|
+
// Integer column type names accepted by these helpers, including unsigned
// variants and common aliases (INT, LONG, fixed-width INTn) plus VARINT.
type IntColType =
  | 'SMALLINT'
  | 'INTEGER'
  | 'BIGINT'
  | 'HUGEINT'
  | 'USMALLINT'
  | 'UINTEGER'
  | 'UBIGINT'
  | 'UHUGEINT'
  | 'INT'
  | 'INT16'
  | 'INT32'
  | 'INT64'
  | 'INT128'
  | 'LONG'
  | 'VARINT';

// Floating-point column type names.
type FloatColType = 'FLOAT' | 'DOUBLE';

// Textual column type names.
type StringColType = 'STRING' | 'VARCHAR' | 'TEXT';

// Boolean column type names.
type BoolColType = 'BOOLEAN' | 'BOOL';

// Binary column type names.
type BlobColType = 'BLOB' | 'BYTEA' | 'VARBINARY';

// Date/time column type names, including reduced-precision timestamp variants.
type DateColType =
  | 'DATE'
  | 'TIME'
  | 'TIMETZ'
  | 'TIMESTAMP'
  | 'DATETIME'
  | 'TIMESTAMPTZ'
  | 'TIMESTAMP_MS'
  | 'TIMESTAMP_S';

// Any scalar column type name.
type AnyColType =
  | IntColType
  | FloatColType
  | StringColType
  | BoolColType
  | DateColType
  | BlobColType;

// Variable-length list (`T[]`), fixed-length array (`T[n]`), and struct types.
type ListColType = `${AnyColType}[]`;
type ArrayColType = `${AnyColType}[${number}]`;
type StructColType = `STRUCT (${string})`;

// Any type name accepted where a DuckDB type hint is expected.
type Primitive = AnyColType | ListColType | ArrayColType | StructColType;
|
|
67
|
+
|
|
68
|
+
export function coerceArrayString(value: string): unknown[] | undefined {
|
|
69
|
+
const trimmed = value.trim();
|
|
70
|
+
if (!trimmed) {
|
|
71
|
+
return [];
|
|
72
|
+
}
|
|
73
|
+
if (trimmed.startsWith('[')) {
|
|
74
|
+
try {
|
|
75
|
+
return JSON.parse(trimmed) as unknown[];
|
|
76
|
+
} catch {
|
|
77
|
+
return undefined;
|
|
78
|
+
}
|
|
79
|
+
}
|
|
80
|
+
if (trimmed.startsWith('{') && trimmed.endsWith('}')) {
|
|
81
|
+
try {
|
|
82
|
+
const json = trimmed.replace(/{/g, '[').replace(/}/g, ']');
|
|
83
|
+
return JSON.parse(json) as unknown[];
|
|
84
|
+
} catch {
|
|
85
|
+
return undefined;
|
|
86
|
+
}
|
|
87
|
+
}
|
|
88
|
+
return undefined;
|
|
89
|
+
}
|
|
90
|
+
|
|
91
|
+
export function formatLiteral(value: unknown, typeHint?: string): string {
|
|
92
|
+
if (value === null || value === undefined) {
|
|
93
|
+
return 'NULL';
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
const upperType = typeHint?.toUpperCase() ?? '';
|
|
97
|
+
if (value instanceof Date) {
|
|
98
|
+
return `'${value.toISOString()}'`;
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
if (typeof value === 'number' || typeof value === 'bigint') {
|
|
102
|
+
return value.toString();
|
|
103
|
+
}
|
|
104
|
+
|
|
105
|
+
if (typeof value === 'boolean') {
|
|
106
|
+
return value ? 'TRUE' : 'FALSE';
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
const str =
|
|
110
|
+
typeof value === 'string'
|
|
111
|
+
? value
|
|
112
|
+
: (JSON.stringify(value) ?? String(value));
|
|
113
|
+
|
|
114
|
+
const escaped = str.replace(/'/g, "''");
|
|
115
|
+
// Simple quoting based on hint.
|
|
116
|
+
if (
|
|
117
|
+
upperType.includes('CHAR') ||
|
|
118
|
+
upperType.includes('TEXT') ||
|
|
119
|
+
upperType.includes('STRING') ||
|
|
120
|
+
upperType.includes('VARCHAR')
|
|
121
|
+
) {
|
|
122
|
+
return `'${escaped}'`;
|
|
123
|
+
}
|
|
124
|
+
|
|
125
|
+
return `'${escaped}'`;
|
|
126
|
+
}
|
|
127
|
+
|
|
128
|
+
export function buildListLiteral(values: unknown[], elementType?: string): SQL {
|
|
129
|
+
if (values.length === 0) {
|
|
130
|
+
return sql`[]`;
|
|
131
|
+
}
|
|
132
|
+
const chunks = values.map((v) =>
|
|
133
|
+
typeof v === 'object' && !Array.isArray(v)
|
|
134
|
+
? sql`${v as SQLWrapper}`
|
|
135
|
+
: sql.raw(formatLiteral(v, elementType))
|
|
136
|
+
);
|
|
137
|
+
return sql`list_value(${sql.join(chunks, sql.raw(', '))})`;
|
|
138
|
+
}
|
|
139
|
+
|
|
140
|
+
export function buildStructLiteral(
|
|
141
|
+
value: Record<string, unknown>,
|
|
142
|
+
schema?: Record<string, Primitive>
|
|
143
|
+
): SQL {
|
|
144
|
+
const parts = Object.entries(value).map(([key, val]) => {
|
|
145
|
+
const typeHint = schema?.[key];
|
|
146
|
+
if (Array.isArray(val)) {
|
|
147
|
+
const inner =
|
|
148
|
+
typeof typeHint === 'string' && typeHint.endsWith('[]')
|
|
149
|
+
? typeHint.slice(0, -2)
|
|
150
|
+
: undefined;
|
|
151
|
+
|
|
152
|
+
return sql`${sql.identifier(key)} := ${buildListLiteral(val, inner)}`;
|
|
153
|
+
}
|
|
154
|
+
return sql`${sql.identifier(key)} := ${val}`;
|
|
155
|
+
});
|
|
156
|
+
return sql`struct_pack(${sql.join(parts, sql.raw(', '))})`;
|
|
157
|
+
}
|
|
158
|
+
|
|
159
|
+
export function buildMapLiteral(
|
|
160
|
+
value: Record<string, unknown>,
|
|
161
|
+
valueType?: string
|
|
162
|
+
): SQL {
|
|
163
|
+
const keys = Object.keys(value);
|
|
164
|
+
const vals = Object.values(value);
|
|
165
|
+
const keyList = buildListLiteral(keys, 'TEXT');
|
|
166
|
+
const valList = buildListLiteral(
|
|
167
|
+
vals,
|
|
168
|
+
valueType?.endsWith('[]') ? valueType.slice(0, -2) : valueType
|
|
169
|
+
);
|
|
170
|
+
return sql`map(${keyList}, ${valList})`;
|
|
171
|
+
}
|
|
172
|
+
|
|
173
|
+
export const duckDbList = <TData = unknown>(
|
|
174
|
+
name: string,
|
|
175
|
+
elementType: AnyColType
|
|
176
|
+
) =>
|
|
177
|
+
customType<{
|
|
178
|
+
data: TData[];
|
|
179
|
+
driverData: ListValueWrapper | unknown[] | string;
|
|
180
|
+
}>({
|
|
181
|
+
dataType() {
|
|
182
|
+
return `${elementType}[]`;
|
|
183
|
+
},
|
|
184
|
+
toDriver(value: TData[]): ListValueWrapper {
|
|
185
|
+
return wrapList(value, elementType);
|
|
186
|
+
},
|
|
187
|
+
fromDriver(value: unknown[] | string | ListValueWrapper): TData[] {
|
|
188
|
+
if (Array.isArray(value)) {
|
|
189
|
+
return value as TData[];
|
|
190
|
+
}
|
|
191
|
+
if (typeof value === 'string') {
|
|
192
|
+
const parsed = coerceArrayString(value);
|
|
193
|
+
if (parsed !== undefined) {
|
|
194
|
+
return parsed as TData[];
|
|
195
|
+
}
|
|
196
|
+
}
|
|
197
|
+
return value as unknown as TData[];
|
|
198
|
+
},
|
|
199
|
+
})(name);
|
|
200
|
+
|
|
201
|
+
export const duckDbArray = <TData = unknown>(
|
|
202
|
+
name: string,
|
|
203
|
+
elementType: AnyColType,
|
|
204
|
+
fixedLength?: number
|
|
205
|
+
) =>
|
|
206
|
+
customType<{
|
|
207
|
+
data: TData[];
|
|
208
|
+
driverData: ArrayValueWrapper | unknown[] | string;
|
|
209
|
+
}>({
|
|
210
|
+
dataType() {
|
|
211
|
+
return fixedLength
|
|
212
|
+
? `${elementType}[${fixedLength}]`
|
|
213
|
+
: `${elementType}[]`;
|
|
214
|
+
},
|
|
215
|
+
toDriver(value: TData[]): ArrayValueWrapper {
|
|
216
|
+
return wrapArray(value, elementType, fixedLength);
|
|
217
|
+
},
|
|
218
|
+
fromDriver(value: unknown[] | string | ArrayValueWrapper): TData[] {
|
|
219
|
+
if (Array.isArray(value)) {
|
|
220
|
+
return value as TData[];
|
|
221
|
+
}
|
|
222
|
+
if (typeof value === 'string') {
|
|
223
|
+
const parsed = coerceArrayString(value);
|
|
224
|
+
if (parsed !== undefined) {
|
|
225
|
+
return parsed as TData[];
|
|
226
|
+
}
|
|
227
|
+
}
|
|
228
|
+
return value as unknown as TData[];
|
|
229
|
+
},
|
|
230
|
+
})(name);
|
|
231
|
+
|
|
232
|
+
export const duckDbMap = <TData extends Record<string, any>>(
|
|
233
|
+
name: string,
|
|
234
|
+
valueType: AnyColType | ListColType | ArrayColType
|
|
235
|
+
) =>
|
|
236
|
+
customType<{ data: TData; driverData: MapValueWrapper | TData }>({
|
|
237
|
+
dataType() {
|
|
238
|
+
return `MAP (STRING, ${valueType})`;
|
|
239
|
+
},
|
|
240
|
+
toDriver(value: TData) {
|
|
241
|
+
// Use SQL literals for empty maps due to DuckDB type inference issues
|
|
242
|
+
// with mapValue() when there are no entries to infer types from
|
|
243
|
+
if (Object.keys(value).length === 0) {
|
|
244
|
+
return buildMapLiteral(value, valueType);
|
|
245
|
+
}
|
|
246
|
+
return wrapMap(value, valueType);
|
|
247
|
+
},
|
|
248
|
+
fromDriver(value: TData | MapValueWrapper): TData {
|
|
249
|
+
return value as TData;
|
|
250
|
+
},
|
|
251
|
+
})(name);
|
|
252
|
+
|
|
253
|
+
export const duckDbStruct = <TData extends Record<string, any>>(
|
|
254
|
+
name: string,
|
|
255
|
+
schema: Record<string, Primitive>
|
|
256
|
+
) =>
|
|
257
|
+
customType<{ data: TData; driverData: TData }>({
|
|
258
|
+
dataType() {
|
|
259
|
+
const fields = Object.entries(schema).map(
|
|
260
|
+
([key, type]) => `${key} ${type}`
|
|
261
|
+
);
|
|
262
|
+
|
|
263
|
+
return `STRUCT (${fields.join(', ')})`;
|
|
264
|
+
},
|
|
265
|
+
toDriver(value: TData) {
|
|
266
|
+
// Use SQL literals for structs due to DuckDB type inference issues
|
|
267
|
+
// with nested empty lists
|
|
268
|
+
return buildStructLiteral(value, schema);
|
|
269
|
+
},
|
|
270
|
+
fromDriver(value: TData | string): TData {
|
|
271
|
+
if (typeof value === 'string') {
|
|
272
|
+
try {
|
|
273
|
+
return JSON.parse(value) as TData;
|
|
274
|
+
} catch {
|
|
275
|
+
return value as unknown as TData;
|
|
276
|
+
}
|
|
277
|
+
}
|
|
278
|
+
return value;
|
|
279
|
+
},
|
|
280
|
+
})(name);
|
|
281
|
+
|
|
282
|
+
/**
|
|
283
|
+
* JSON column type that wraps values and delays JSON.stringify() to binding time.
|
|
284
|
+
* This ensures consistent handling with other wrapped types.
|
|
285
|
+
*
|
|
286
|
+
* Note: DuckDB stores JSON as VARCHAR internally, so the final binding
|
|
287
|
+
* is always a stringified JSON value.
|
|
288
|
+
*/
|
|
289
|
+
export const duckDbJson = <TData = unknown>(name: string) =>
|
|
290
|
+
customType<{ data: TData; driverData: JsonValueWrapper | SQL | string }>({
|
|
291
|
+
dataType() {
|
|
292
|
+
return 'JSON';
|
|
293
|
+
},
|
|
294
|
+
toDriver(value: TData): JsonValueWrapper | SQL | string {
|
|
295
|
+
// Pass through strings directly
|
|
296
|
+
if (typeof value === 'string') {
|
|
297
|
+
return value;
|
|
298
|
+
}
|
|
299
|
+
// Pass through SQL objects (for raw SQL expressions)
|
|
300
|
+
if (
|
|
301
|
+
value !== null &&
|
|
302
|
+
typeof value === 'object' &&
|
|
303
|
+
'queryChunks' in (value as Record<string, unknown>)
|
|
304
|
+
) {
|
|
305
|
+
return value as unknown as SQL;
|
|
306
|
+
}
|
|
307
|
+
// Wrap non-string values for delayed stringify at binding time
|
|
308
|
+
return wrapJson(value);
|
|
309
|
+
},
|
|
310
|
+
fromDriver(value: SQL | string | JsonValueWrapper) {
|
|
311
|
+
if (typeof value !== 'string') {
|
|
312
|
+
return value as unknown as TData;
|
|
313
|
+
}
|
|
314
|
+
const trimmed = value.trim();
|
|
315
|
+
if (!trimmed) {
|
|
316
|
+
return value as unknown as TData;
|
|
317
|
+
}
|
|
318
|
+
try {
|
|
319
|
+
return JSON.parse(trimmed) as TData;
|
|
320
|
+
} catch {
|
|
321
|
+
return value as unknown as TData;
|
|
322
|
+
}
|
|
323
|
+
},
|
|
324
|
+
})(name);
|
|
325
|
+
|
|
326
|
+
export const duckDbBlob = customType<{
|
|
327
|
+
data: Buffer;
|
|
328
|
+
driverData: BlobValueWrapper;
|
|
329
|
+
default: false;
|
|
330
|
+
}>({
|
|
331
|
+
dataType() {
|
|
332
|
+
return 'BLOB';
|
|
333
|
+
},
|
|
334
|
+
toDriver(value: Buffer): BlobValueWrapper {
|
|
335
|
+
return wrapBlob(value);
|
|
336
|
+
},
|
|
337
|
+
});
|
|
338
|
+
|
|
339
|
+
export const duckDbInet = (name: string) =>
|
|
340
|
+
customType<{ data: string; driverData: string }>({
|
|
341
|
+
dataType() {
|
|
342
|
+
return 'INET';
|
|
343
|
+
},
|
|
344
|
+
toDriver(value: string) {
|
|
345
|
+
return value;
|
|
346
|
+
},
|
|
347
|
+
})(name);
|
|
348
|
+
|
|
349
|
+
export const duckDbInterval = (name: string) =>
|
|
350
|
+
customType<{ data: string; driverData: string }>({
|
|
351
|
+
dataType() {
|
|
352
|
+
return 'INTERVAL';
|
|
353
|
+
},
|
|
354
|
+
toDriver(value: string) {
|
|
355
|
+
return value;
|
|
356
|
+
},
|
|
357
|
+
})(name);
|
|
358
|
+
|
|
359
|
+
// Value shape returned by fromDriver: JS Date objects ('date') or strings.
type TimestampMode = 'date' | 'string';

// Options for duckDbTimestamp().
interface TimestampOptions {
  // Emit TIMESTAMPTZ instead of TIMESTAMP.
  withTimezone?: boolean;
  // Controls the value shape returned by fromDriver (default: Date).
  mode?: TimestampMode;
  // Fractional-second precision, rendered as TIMESTAMP(p).
  precision?: number;
  // 'bind' forces parameter binding, 'literal' forces inline SQL literals,
  // 'auto' (default) decides from the runtime/environment — see
  // shouldBindTimestamp().
  bindMode?: 'auto' | 'bind' | 'literal';
}
|
|
367
|
+
|
|
368
|
+
function shouldBindTimestamp(options: TimestampOptions): boolean {
|
|
369
|
+
const bindMode = options.bindMode ?? 'auto';
|
|
370
|
+
if (bindMode === 'bind') return true;
|
|
371
|
+
if (bindMode === 'literal') return false;
|
|
372
|
+
|
|
373
|
+
const isBun =
|
|
374
|
+
typeof process !== 'undefined' &&
|
|
375
|
+
typeof process.versions?.bun !== 'undefined';
|
|
376
|
+
if (isBun) return false;
|
|
377
|
+
|
|
378
|
+
const forceLiteral =
|
|
379
|
+
typeof process !== 'undefined'
|
|
380
|
+
? process.env.DRIZZLE_DUCKDB_FORCE_LITERAL_TIMESTAMPS
|
|
381
|
+
: undefined;
|
|
382
|
+
|
|
383
|
+
if (forceLiteral && forceLiteral !== '0') {
|
|
384
|
+
return false;
|
|
385
|
+
}
|
|
386
|
+
|
|
387
|
+
return true;
|
|
388
|
+
}
|
|
389
|
+
|
|
390
|
+
export const duckDbTimestamp = (name: string, options: TimestampOptions = {}) =>
|
|
391
|
+
customType<{
|
|
392
|
+
data: Date | string;
|
|
393
|
+
driverData: SQL | string | Date | TimestampValueWrapper;
|
|
394
|
+
}>({
|
|
395
|
+
dataType() {
|
|
396
|
+
if (options.withTimezone) {
|
|
397
|
+
return 'TIMESTAMPTZ';
|
|
398
|
+
}
|
|
399
|
+
const precision = options.precision ? `(${options.precision})` : '';
|
|
400
|
+
return `TIMESTAMP${precision}`;
|
|
401
|
+
},
|
|
402
|
+
toDriver(
|
|
403
|
+
value: Date | string
|
|
404
|
+
): SQL | string | Date | TimestampValueWrapper {
|
|
405
|
+
if (shouldBindTimestamp(options)) {
|
|
406
|
+
return wrapTimestamp(
|
|
407
|
+
value,
|
|
408
|
+
options.withTimezone ?? false,
|
|
409
|
+
options.precision
|
|
410
|
+
);
|
|
411
|
+
}
|
|
412
|
+
|
|
413
|
+
const iso = value instanceof Date ? value.toISOString() : value;
|
|
414
|
+
const normalized = iso.replace('T', ' ').replace('Z', '+00');
|
|
415
|
+
const typeKeyword = options.withTimezone ? 'TIMESTAMPTZ' : 'TIMESTAMP';
|
|
416
|
+
return sql.raw(`${typeKeyword} '${normalized}'`);
|
|
417
|
+
},
|
|
418
|
+
fromDriver(value: Date | string | SQL | TimestampValueWrapper) {
|
|
419
|
+
if (
|
|
420
|
+
value &&
|
|
421
|
+
typeof value === 'object' &&
|
|
422
|
+
'kind' in value &&
|
|
423
|
+
(value as TimestampValueWrapper).kind === 'timestamp'
|
|
424
|
+
) {
|
|
425
|
+
const wrapped = value as TimestampValueWrapper;
|
|
426
|
+
return wrapped.data instanceof Date
|
|
427
|
+
? wrapped.data
|
|
428
|
+
: typeof wrapped.data === 'number' || typeof wrapped.data === 'bigint'
|
|
429
|
+
? new Date(Number(wrapped.data) / 1000)
|
|
430
|
+
: wrapped.data;
|
|
431
|
+
}
|
|
432
|
+
if (options.mode === 'string') {
|
|
433
|
+
if (value instanceof Date) {
|
|
434
|
+
return value.toISOString().replace('T', ' ').replace('Z', '+00');
|
|
435
|
+
}
|
|
436
|
+
return typeof value === 'string' ? value : value.toString();
|
|
437
|
+
}
|
|
438
|
+
if (value instanceof Date) {
|
|
439
|
+
return value;
|
|
440
|
+
}
|
|
441
|
+
const stringValue = typeof value === 'string' ? value : value.toString();
|
|
442
|
+
const hasOffset =
|
|
443
|
+
stringValue.endsWith('Z') || /[+-]\d{2}:?\d{2}$/.test(stringValue);
|
|
444
|
+
const normalized = hasOffset
|
|
445
|
+
? stringValue.replace(' ', 'T')
|
|
446
|
+
: `${stringValue.replace(' ', 'T')}Z`;
|
|
447
|
+
return new Date(normalized);
|
|
448
|
+
},
|
|
449
|
+
})(name);
|
|
450
|
+
|
|
451
|
+
export const duckDbDate = (name: string) =>
|
|
452
|
+
customType<{ data: string | Date; driverData: string | Date }>({
|
|
453
|
+
dataType() {
|
|
454
|
+
return 'DATE';
|
|
455
|
+
},
|
|
456
|
+
toDriver(value: string | Date) {
|
|
457
|
+
return value;
|
|
458
|
+
},
|
|
459
|
+
fromDriver(value: string | Date) {
|
|
460
|
+
const str =
|
|
461
|
+
value instanceof Date ? value.toISOString().slice(0, 10) : value;
|
|
462
|
+
return str;
|
|
463
|
+
},
|
|
464
|
+
})(name);
|
|
465
|
+
|
|
466
|
+
export const duckDbTime = (name: string) =>
|
|
467
|
+
customType<{ data: string; driverData: string | bigint }>({
|
|
468
|
+
dataType() {
|
|
469
|
+
return 'TIME';
|
|
470
|
+
},
|
|
471
|
+
toDriver(value: string) {
|
|
472
|
+
return value;
|
|
473
|
+
},
|
|
474
|
+
fromDriver(value: string | bigint) {
|
|
475
|
+
if (typeof value === 'bigint') {
|
|
476
|
+
const totalMillis = Number(value) / 1000;
|
|
477
|
+
const date = new Date(totalMillis);
|
|
478
|
+
return date.toISOString().split('T')[1]!.replace('Z', '');
|
|
479
|
+
}
|
|
480
|
+
return value;
|
|
481
|
+
},
|
|
482
|
+
})(name);
|
|
483
|
+
|
|
484
|
+
function toListValue(values: (unknown | SQLWrapper)[]): SQL {
|
|
485
|
+
return buildListLiteral(values);
|
|
486
|
+
}
|
|
487
|
+
|
|
488
|
+
export function duckDbArrayContains(
|
|
489
|
+
column: SQLWrapper,
|
|
490
|
+
values: unknown[] | SQLWrapper
|
|
491
|
+
): SQL {
|
|
492
|
+
const rhs = Array.isArray(values) ? toListValue(values) : values;
|
|
493
|
+
return sql`array_has_all(${column}, ${rhs})`;
|
|
494
|
+
}
|
|
495
|
+
|
|
496
|
+
export function duckDbArrayContained(
|
|
497
|
+
column: SQLWrapper,
|
|
498
|
+
values: unknown[] | SQLWrapper
|
|
499
|
+
): SQL {
|
|
500
|
+
const rhs = Array.isArray(values) ? toListValue(values) : values;
|
|
501
|
+
return sql`array_has_all(${rhs}, ${column})`;
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
export function duckDbArrayOverlaps(
|
|
505
|
+
column: SQLWrapper,
|
|
506
|
+
values: unknown[] | SQLWrapper
|
|
507
|
+
): SQL {
|
|
508
|
+
const rhs = Array.isArray(values) ? toListValue(values) : values;
|
|
509
|
+
return sql`array_has_any(${column}, ${rhs})`;
|
|
510
|
+
}
|
package/src/dialect.ts
CHANGED
|
@@ -13,41 +13,118 @@ import {
|
|
|
13
13
|
PgTimestampString,
|
|
14
14
|
PgUUID,
|
|
15
15
|
} from 'drizzle-orm/pg-core';
|
|
16
|
-
import { DuckDBSession } from './session';
|
|
17
16
|
import {
|
|
18
17
|
sql,
|
|
18
|
+
SQL,
|
|
19
19
|
type DriverValueEncoder,
|
|
20
20
|
type QueryTypingsValue,
|
|
21
21
|
} from 'drizzle-orm';
|
|
22
|
+
import type { QueryWithTypings } from 'drizzle-orm/sql/sql';
|
|
23
|
+
|
|
24
|
+
import { transformSQL } from './sql/ast-transformer.ts';
|
|
25
|
+
|
|
26
|
+
const enum SavepointSupport {
|
|
27
|
+
Unknown = 0,
|
|
28
|
+
Yes = 1,
|
|
29
|
+
No = 2,
|
|
30
|
+
}
|
|
22
31
|
|
|
23
32
|
export class DuckDBDialect extends PgDialect {
|
|
24
33
|
static readonly [entityKind]: string = 'DuckDBPgDialect';
|
|
34
|
+
// Track if PG JSON columns were detected during the current query preparation.
|
|
35
|
+
// Reset before each query via DuckDBSession to keep detection per-query.
|
|
36
|
+
private hasPgJsonColumn = false;
|
|
37
|
+
// Track savepoint support per-dialect instance to avoid cross-contamination
|
|
38
|
+
// when multiple database connections with different capabilities exist.
|
|
39
|
+
private savepointsSupported: SavepointSupport = SavepointSupport.Unknown;
|
|
40
|
+
|
|
41
|
+
/**
|
|
42
|
+
* Reset the PG JSON detection flag. Should be called before preparing a new query.
|
|
43
|
+
*/
|
|
44
|
+
resetPgJsonFlag(): void {
|
|
45
|
+
this.hasPgJsonColumn = false;
|
|
46
|
+
}
|
|
47
|
+
|
|
48
|
+
/**
|
|
49
|
+
* Mark that a PG JSON/JSONB column was detected during query preparation.
|
|
50
|
+
*/
|
|
51
|
+
markPgJsonDetected(): void {
|
|
52
|
+
this.hasPgJsonColumn = true;
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
assertNoPgJsonColumns(): void {
|
|
56
|
+
if (this.hasPgJsonColumn) {
|
|
57
|
+
throw new Error(
|
|
58
|
+
"Pg JSON/JSONB columns are not supported in DuckDB. Replace them with duckDbJson() to use DuckDB's native JSON type."
|
|
59
|
+
);
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
/**
|
|
64
|
+
* Check if savepoints are known to be unsupported for this dialect instance.
|
|
65
|
+
*/
|
|
66
|
+
areSavepointsUnsupported(): boolean {
|
|
67
|
+
return this.savepointsSupported === SavepointSupport.No;
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
/**
|
|
71
|
+
* Mark that savepoints are supported for this dialect instance.
|
|
72
|
+
*/
|
|
73
|
+
markSavepointsSupported(): void {
|
|
74
|
+
this.savepointsSupported = SavepointSupport.Yes;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
/**
|
|
78
|
+
* Mark that savepoints are not supported for this dialect instance.
|
|
79
|
+
*/
|
|
80
|
+
markSavepointsUnsupported(): void {
|
|
81
|
+
this.savepointsSupported = SavepointSupport.No;
|
|
82
|
+
}
|
|
25
83
|
|
|
26
84
|
override async migrate(
|
|
27
85
|
migrations: MigrationMeta[],
|
|
28
86
|
session: PgSession,
|
|
29
|
-
config: MigrationConfig
|
|
87
|
+
config: MigrationConfig | string
|
|
30
88
|
): Promise<void> {
|
|
31
|
-
const
|
|
89
|
+
const migrationConfig: MigrationConfig =
|
|
90
|
+
typeof config === 'string' ? { migrationsFolder: config } : config;
|
|
32
91
|
|
|
33
|
-
const
|
|
92
|
+
const migrationsSchema = migrationConfig.migrationsSchema ?? 'drizzle';
|
|
93
|
+
const migrationsTable =
|
|
94
|
+
migrationConfig.migrationsTable ?? '__drizzle_migrations';
|
|
95
|
+
const migrationsSequence = `${migrationsTable}_id_seq`;
|
|
96
|
+
const legacySequence = 'migrations_pk_seq';
|
|
97
|
+
|
|
98
|
+
const escapeIdentifier = (value: string) => value.replace(/"/g, '""');
|
|
99
|
+
const sequenceLiteral = `"${escapeIdentifier(
|
|
100
|
+
migrationsSchema
|
|
101
|
+
)}"."${escapeIdentifier(migrationsSequence)}"`;
|
|
34
102
|
|
|
35
103
|
const migrationTableCreate = sql`
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
104
|
+
CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsSchema)}.${sql.identifier(
|
|
105
|
+
migrationsTable
|
|
106
|
+
)} (
|
|
107
|
+
id integer PRIMARY KEY default nextval('${sql.raw(sequenceLiteral)}'),
|
|
108
|
+
hash text NOT NULL,
|
|
109
|
+
created_at bigint
|
|
110
|
+
)
|
|
111
|
+
`;
|
|
44
112
|
|
|
45
113
|
await session.execute(
|
|
46
|
-
sql
|
|
114
|
+
sql`CREATE SCHEMA IF NOT EXISTS ${sql.identifier(migrationsSchema)}`
|
|
47
115
|
);
|
|
48
116
|
await session.execute(
|
|
49
|
-
sql`CREATE
|
|
117
|
+
sql`CREATE SEQUENCE IF NOT EXISTS ${sql.identifier(
|
|
118
|
+
migrationsSchema
|
|
119
|
+
)}.${sql.identifier(migrationsSequence)}`
|
|
50
120
|
);
|
|
121
|
+
if (legacySequence !== migrationsSequence) {
|
|
122
|
+
await session.execute(
|
|
123
|
+
sql`CREATE SEQUENCE IF NOT EXISTS ${sql.identifier(
|
|
124
|
+
migrationsSchema
|
|
125
|
+
)}.${sql.identifier(legacySequence)}`
|
|
126
|
+
);
|
|
127
|
+
}
|
|
51
128
|
await session.execute(migrationTableCreate);
|
|
52
129
|
|
|
53
130
|
const dbMigrations = await session.all<{
|
|
@@ -90,7 +167,10 @@ export class DuckDBDialect extends PgDialect {
|
|
|
90
167
|
encoder: DriverValueEncoder<unknown, unknown>
|
|
91
168
|
): QueryTypingsValue {
|
|
92
169
|
if (is(encoder, PgJsonb) || is(encoder, PgJson)) {
|
|
93
|
-
|
|
170
|
+
this.markPgJsonDetected();
|
|
171
|
+
throw new Error(
|
|
172
|
+
"Pg JSON/JSONB columns are not supported in DuckDB. Replace them with duckDbJson() to use DuckDB's native JSON type."
|
|
173
|
+
);
|
|
94
174
|
} else if (is(encoder, PgNumeric)) {
|
|
95
175
|
return 'decimal';
|
|
96
176
|
} else if (is(encoder, PgTime)) {
|
|
@@ -105,4 +185,20 @@ export class DuckDBDialect extends PgDialect {
|
|
|
105
185
|
return 'none';
|
|
106
186
|
}
|
|
107
187
|
}
|
|
188
|
+
|
|
189
|
+
override sqlToQuery(
|
|
190
|
+
sqlObj: SQL,
|
|
191
|
+
invokeSource?: 'indexes' | undefined
|
|
192
|
+
): QueryWithTypings {
|
|
193
|
+
// First, let the parent generate the SQL string
|
|
194
|
+
const result = super.sqlToQuery(sqlObj, invokeSource);
|
|
195
|
+
|
|
196
|
+
// Apply AST-based transformations for DuckDB compatibility
|
|
197
|
+
const transformed = transformSQL(result.sql);
|
|
198
|
+
|
|
199
|
+
return {
|
|
200
|
+
...result,
|
|
201
|
+
sql: transformed.sql,
|
|
202
|
+
};
|
|
203
|
+
}
|
|
108
204
|
}
|