@leonardovida-md/drizzle-neo-duckdb 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/src/columns.ts ADDED
@@ -0,0 +1,429 @@
1
+ import { sql, type SQL } from 'drizzle-orm';
2
+ import type { SQLWrapper } from 'drizzle-orm/sql/sql';
3
+ import { customType } from 'drizzle-orm/pg-core';
4
+
5
// DuckDB column-type name unions used to build `dataType()` strings.

// Signed/unsigned integer type names accepted by DuckDB, including aliases
// (INT/INT32/LONG/...) and the arbitrary-precision VARINT.
type IntColType =
  | 'SMALLINT'
  | 'INTEGER'
  | 'BIGINT'
  | 'HUGEINT'
  | 'USMALLINT'
  | 'UINTEGER'
  | 'UBIGINT'
  | 'UHUGEINT'
  | 'INT'
  | 'INT16'
  | 'INT32'
  | 'INT64'
  | 'INT128'
  | 'LONG'
  | 'VARINT';

// Floating-point type names.
type FloatColType = 'FLOAT' | 'DOUBLE';

// Text type names (all aliases of VARCHAR in DuckDB).
type StringColType = 'STRING' | 'VARCHAR' | 'TEXT';

// Boolean type names.
type BoolColType = 'BOOLEAN' | 'BOOL';

// Binary type names.
type BlobColType = 'BLOB' | 'BYTEA' | 'VARBINARY';

// Date/time type names, including the reduced-precision timestamp variants.
type DateColType =
  | 'DATE'
  | 'TIME'
  | 'TIMETZ'
  | 'TIMESTAMP'
  | 'DATETIME'
  | 'TIMESTAMPTZ'
  | 'TIMESTAMP_MS'
  | 'TIMESTAMP_S';

// Any scalar DuckDB type name.
type AnyColType =
  | IntColType
  | FloatColType
  | StringColType
  | BoolColType
  | DateColType
  | BlobColType;

// Variable-length list (`T[]`) and fixed-length array (`T[n]`) type strings.
type ListColType = `${AnyColType}[]`;
type ArrayColType = `${AnyColType}[${number}]`;
// Struct type string, e.g. `STRUCT (a INTEGER, b TEXT)`.
type StructColType = `STRUCT (${string})`;

// Any type string accepted as a struct-field / value type by the helpers below.
type Primitive = AnyColType | ListColType | ArrayColType | StructColType;
53
+
54
+ function coerceArrayString(value: string): unknown[] | undefined {
55
+ const trimmed = value.trim();
56
+ if (!trimmed) {
57
+ return [];
58
+ }
59
+ if (trimmed.startsWith('[')) {
60
+ try {
61
+ return JSON.parse(trimmed) as unknown[];
62
+ } catch {
63
+ return undefined;
64
+ }
65
+ }
66
+ if (trimmed.startsWith('{') && trimmed.endsWith('}')) {
67
+ try {
68
+ const json = trimmed.replace(/{/g, '[').replace(/}/g, ']');
69
+ return JSON.parse(json) as unknown[];
70
+ } catch {
71
+ return undefined;
72
+ }
73
+ }
74
+ return undefined;
75
+ }
76
+
77
+ function formatLiteral(value: unknown, typeHint?: string): string {
78
+ if (value === null || value === undefined) {
79
+ return 'NULL';
80
+ }
81
+
82
+ const upperType = typeHint?.toUpperCase() ?? '';
83
+ if (value instanceof Date) {
84
+ return `'${value.toISOString()}'`;
85
+ }
86
+
87
+ if (typeof value === 'number' || typeof value === 'bigint') {
88
+ return value.toString();
89
+ }
90
+
91
+ if (typeof value === 'boolean') {
92
+ return value ? 'TRUE' : 'FALSE';
93
+ }
94
+
95
+ const str =
96
+ typeof value === 'string' ? value : JSON.stringify(value) ?? String(value);
97
+
98
+ const escaped = str.replace(/'/g, "''");
99
+ // Simple quoting based on hint.
100
+ if (
101
+ upperType.includes('CHAR') ||
102
+ upperType.includes('TEXT') ||
103
+ upperType.includes('STRING') ||
104
+ upperType.includes('VARCHAR')
105
+ ) {
106
+ return `'${escaped}'`;
107
+ }
108
+
109
+ return `'${escaped}'`;
110
+ }
111
+
112
+ function buildListLiteral(values: unknown[], elementType?: string): SQL {
113
+ if (values.length === 0) {
114
+ return sql`[]`;
115
+ }
116
+ const chunks = values.map((v) =>
117
+ typeof v === 'object' && !Array.isArray(v)
118
+ ? sql`${v as SQLWrapper}`
119
+ : sql.raw(formatLiteral(v, elementType))
120
+ );
121
+ return sql`list_value(${sql.join(chunks, sql.raw(', '))})`;
122
+ }
123
+
124
+ function buildStructLiteral(
125
+ value: Record<string, unknown>,
126
+ schema?: Record<string, Primitive>
127
+ ): SQL {
128
+ const parts = Object.entries(value).map(([key, val]) => {
129
+ const typeHint = schema?.[key];
130
+ if (Array.isArray(val)) {
131
+ const inner =
132
+ typeof typeHint === 'string' && typeHint.endsWith('[]')
133
+ ? typeHint.slice(0, -2)
134
+ : undefined;
135
+
136
+ return sql`${sql.identifier(key)} := ${buildListLiteral(val, inner)}`;
137
+ }
138
+ return sql`${sql.identifier(key)} := ${val}`;
139
+ });
140
+ return sql`struct_pack(${sql.join(parts, sql.raw(', '))})`;
141
+ }
142
+
143
+ function buildMapLiteral(value: Record<string, unknown>, valueType?: string): SQL {
144
+ const keys = Object.keys(value);
145
+ const vals = Object.values(value);
146
+ const keyList = buildListLiteral(keys, 'TEXT');
147
+ const valList = buildListLiteral(
148
+ vals,
149
+ valueType?.endsWith('[]') ? valueType.slice(0, -2) : valueType
150
+ );
151
+ return sql`map(${keyList}, ${valList})`;
152
+ }
153
+
154
+ export const duckDbList = <TData = unknown>(
155
+ name: string,
156
+ elementType: AnyColType
157
+ ) =>
158
+ customType<{ data: TData[]; driverData: SQL | unknown[] | string }>({
159
+ dataType() {
160
+ return `${elementType}[]`;
161
+ },
162
+ toDriver(value: TData[]) {
163
+ return buildListLiteral(value, elementType);
164
+ },
165
+ fromDriver(value: unknown[] | string | SQL): TData[] {
166
+ if (Array.isArray(value)) {
167
+ return value as TData[];
168
+ }
169
+ if (typeof value === 'string') {
170
+ const parsed = coerceArrayString(value);
171
+ if (parsed) {
172
+ return parsed as TData[];
173
+ }
174
+ }
175
+ return [] as TData[];
176
+ },
177
+ })(name);
178
+
179
+ export const duckDbArray = <TData = unknown>(
180
+ name: string,
181
+ elementType: AnyColType,
182
+ fixedLength?: number
183
+ ) =>
184
+ customType<{ data: TData[]; driverData: SQL | unknown[] | string }>({
185
+ dataType() {
186
+ return fixedLength
187
+ ? `${elementType}[${fixedLength}]`
188
+ : `${elementType}[]`;
189
+ },
190
+ toDriver(value: TData[]) {
191
+ return buildListLiteral(value, elementType);
192
+ },
193
+ fromDriver(value: unknown[] | string | SQL): TData[] {
194
+ if (Array.isArray(value)) {
195
+ return value as TData[];
196
+ }
197
+ if (typeof value === 'string') {
198
+ const parsed = coerceArrayString(value);
199
+ if (parsed) {
200
+ return parsed as TData[];
201
+ }
202
+ }
203
+ return [] as TData[];
204
+ },
205
+ })(name);
206
+
207
+ export const duckDbMap = <TData extends Record<string, any>>(
208
+ name: string,
209
+ valueType: AnyColType | ListColType | ArrayColType
210
+ ) =>
211
+ customType<{ data: TData; driverData: TData }>({
212
+ dataType() {
213
+ return `MAP (STRING, ${valueType})`;
214
+ },
215
+ toDriver(value: TData) {
216
+ return buildMapLiteral(value, valueType);
217
+ },
218
+ fromDriver(value: TData): TData {
219
+ return value;
220
+ },
221
+ })(name);
222
+
223
+ export const duckDbStruct = <TData extends Record<string, any>>(
224
+ name: string,
225
+ schema: Record<string, Primitive>
226
+ ) =>
227
+ customType<{ data: TData; driverData: TData }>({
228
+ dataType() {
229
+ const fields = Object.entries(schema).map(
230
+ ([key, type]) => `${key} ${type}`
231
+ );
232
+
233
+ return `STRUCT (${fields.join(', ')})`;
234
+ },
235
+ toDriver(value: TData) {
236
+ return buildStructLiteral(value, schema);
237
+ },
238
+ fromDriver(value: TData | string): TData {
239
+ if (typeof value === 'string') {
240
+ try {
241
+ return JSON.parse(value) as TData;
242
+ } catch {
243
+ return value as unknown as TData;
244
+ }
245
+ }
246
+ return value;
247
+ },
248
+ })(name);
249
+
250
+ export const duckDbJson = <TData = unknown>(name: string) =>
251
+ customType<{ data: TData; driverData: SQL | string }>({
252
+ dataType() {
253
+ return 'JSON';
254
+ },
255
+ toDriver(value: TData) {
256
+ if (typeof value === 'string') {
257
+ return value;
258
+ }
259
+ if (
260
+ value !== null &&
261
+ typeof value === 'object' &&
262
+ 'queryChunks' in (value as Record<string, unknown>)
263
+ ) {
264
+ return value as unknown as SQL;
265
+ }
266
+ return JSON.stringify(value ?? null);
267
+ },
268
+ fromDriver(value: SQL | string) {
269
+ if (typeof value !== 'string') {
270
+ return value as unknown as TData;
271
+ }
272
+ const trimmed = value.trim();
273
+ if (!trimmed) {
274
+ return value as unknown as TData;
275
+ }
276
+ try {
277
+ return JSON.parse(trimmed) as TData;
278
+ } catch {
279
+ return value as unknown as TData;
280
+ }
281
+ },
282
+ })(name);
283
+
284
// A DuckDB BLOB column backed by Node Buffers.
// Values are written as `from_hex('…')` so binary data survives the SQL
// text path. No fromDriver is defined, so reads rely on the driver's
// native BLOB decoding — presumably a Buffer; verify against the client.
export const duckDbBlob = customType<{
  data: Buffer;
  default: false;
}>({
  dataType() {
    return 'BLOB';
  },
  toDriver(value: Buffer) {
    // Hex-encode client-side and let DuckDB decode with from_hex().
    const hexString = value.toString('hex');
    return sql`from_hex(${hexString})`;
  },
});
296
+
297
+ export const duckDbInet = (name: string) =>
298
+ customType<{ data: string; driverData: string }>({
299
+ dataType() {
300
+ return 'INET';
301
+ },
302
+ toDriver(value: string) {
303
+ return value;
304
+ },
305
+ })(name);
306
+
307
+ export const duckDbInterval = (name: string) =>
308
+ customType<{ data: string; driverData: string }>({
309
+ dataType() {
310
+ return 'INTERVAL';
311
+ },
312
+ toDriver(value: string) {
313
+ return value;
314
+ },
315
+ })(name);
316
+
317
// How duckDbTimestamp surfaces values to application code.
type TimestampMode = 'date' | 'string';

// Options for duckDbTimestamp.
interface TimestampOptions {
  withTimezone?: boolean; // TIMESTAMPTZ instead of TIMESTAMP
  mode?: TimestampMode;   // 'string' returns text; default returns a Date
  precision?: number;     // fractional-second precision, e.g. TIMESTAMP(3)
}
324
+
325
+ export const duckDbTimestamp = (
326
+ name: string,
327
+ options: TimestampOptions = {}
328
+ ) =>
329
+ customType<{
330
+ data: Date | string;
331
+ driverData: SQL | string | Date;
332
+ }>({
333
+ dataType() {
334
+ if (options.withTimezone) {
335
+ return 'TIMESTAMPTZ';
336
+ }
337
+ const precision = options.precision ? `(${options.precision})` : '';
338
+ return `TIMESTAMP${precision}`;
339
+ },
340
+ toDriver(value: Date | string) {
341
+ const iso = value instanceof Date ? value.toISOString() : value;
342
+ const normalized = iso.replace('T', ' ').replace('Z', '+00');
343
+ const typeKeyword = options.withTimezone ? 'TIMESTAMPTZ' : 'TIMESTAMP';
344
+ return sql.raw(`${typeKeyword} '${normalized}'`);
345
+ },
346
+ fromDriver(value: Date | string | SQL) {
347
+ if (options.mode === 'string') {
348
+ if (value instanceof Date) {
349
+ return value.toISOString().replace('T', ' ').replace('Z', '+00');
350
+ }
351
+ return typeof value === 'string' ? value : value.toString();
352
+ }
353
+ if (value instanceof Date) {
354
+ return value;
355
+ }
356
+ const stringValue =
357
+ typeof value === 'string' ? value : value.toString();
358
+ const hasOffset =
359
+ stringValue.endsWith('Z') ||
360
+ /[+-]\d{2}:?\d{2}$/.test(stringValue);
361
+ const normalized = hasOffset
362
+ ? stringValue.replace(' ', 'T')
363
+ : `${stringValue.replace(' ', 'T')}Z`;
364
+ return new Date(normalized);
365
+ },
366
+ })(name);
367
+
368
+ export const duckDbDate = (name: string) =>
369
+ customType<{ data: string | Date; driverData: string | Date }>({
370
+ dataType() {
371
+ return 'DATE';
372
+ },
373
+ toDriver(value: string | Date) {
374
+ return value;
375
+ },
376
+ fromDriver(value: string | Date) {
377
+ const str =
378
+ value instanceof Date
379
+ ? value.toISOString().slice(0, 10)
380
+ : value;
381
+ return str;
382
+ },
383
+ })(name);
384
+
385
+ export const duckDbTime = (name: string) =>
386
+ customType<{ data: string; driverData: string | bigint }>({
387
+ dataType() {
388
+ return 'TIME';
389
+ },
390
+ toDriver(value: string) {
391
+ return value;
392
+ },
393
+ fromDriver(value: string | bigint) {
394
+ if (typeof value === 'bigint') {
395
+ const totalMillis = Number(value) / 1000;
396
+ const date = new Date(totalMillis);
397
+ return date.toISOString().split('T')[1]!.replace('Z', '');
398
+ }
399
+ return value;
400
+ },
401
+ })(name);
402
+
403
// Convenience wrapper: build a list_value(...) expression with no element
// type hint (used by the array predicate helpers below).
function toListValue(values: (unknown | SQLWrapper)[]): SQL {
  return buildListLiteral(values);
}
406
+
407
+ export function duckDbArrayContains(
408
+ column: SQLWrapper,
409
+ values: unknown[] | SQLWrapper
410
+ ): SQL {
411
+ const rhs = Array.isArray(values) ? toListValue(values) : values;
412
+ return sql`array_has_all(${column}, ${rhs})`;
413
+ }
414
+
415
+ export function duckDbArrayContained(
416
+ column: SQLWrapper,
417
+ values: unknown[] | SQLWrapper
418
+ ): SQL {
419
+ const rhs = Array.isArray(values) ? toListValue(values) : values;
420
+ return sql`array_has_all(${rhs}, ${column})`;
421
+ }
422
+
423
+ export function duckDbArrayOverlaps(
424
+ column: SQLWrapper,
425
+ values: unknown[] | SQLWrapper
426
+ ): SQL {
427
+ const rhs = Array.isArray(values) ? toListValue(values) : values;
428
+ return sql`array_has_any(${column}, ${rhs})`;
429
+ }
package/src/dialect.ts ADDED
@@ -0,0 +1,136 @@
1
+ import { entityKind, is } from 'drizzle-orm/entity';
2
+ import type { MigrationConfig, MigrationMeta } from 'drizzle-orm/migrator';
3
+ import {
4
+ PgDate,
5
+ PgDateString,
6
+ PgDialect,
7
+ PgJson,
8
+ PgJsonb,
9
+ PgNumeric,
10
+ PgSession,
11
+ PgTime,
12
+ PgTimestamp,
13
+ PgTimestampString,
14
+ PgUUID,
15
+ } from 'drizzle-orm/pg-core';
16
+ import {
17
+ sql,
18
+ type DriverValueEncoder,
19
+ type QueryTypingsValue,
20
+ } from 'drizzle-orm';
21
+
22
+ export class DuckDBDialect extends PgDialect {
23
+ static readonly [entityKind]: string = 'DuckDBPgDialect';
24
+ private hasPgJsonColumn = false;
25
+
26
+ assertNoPgJsonColumns(): void {
27
+ if (this.hasPgJsonColumn) {
28
+ throw new Error(
29
+ 'Pg JSON/JSONB columns are not supported in DuckDB. Replace them with duckDbJson() to use DuckDB’s native JSON type.'
30
+ );
31
+ }
32
+ }
33
+
34
+ override async migrate(
35
+ migrations: MigrationMeta[],
36
+ session: PgSession,
37
+ config: MigrationConfig | string
38
+ ): Promise<void> {
39
+ const migrationConfig: MigrationConfig =
40
+ typeof config === 'string' ? { migrationsFolder: config } : config;
41
+
42
+ const migrationsSchema = migrationConfig.migrationsSchema ?? 'drizzle';
43
+ const migrationsTable =
44
+ migrationConfig.migrationsTable ?? '__drizzle_migrations';
45
+ const migrationsSequence = `${migrationsTable}_id_seq`;
46
+ const legacySequence = 'migrations_pk_seq';
47
+
48
+ const escapeIdentifier = (value: string) => value.replace(/"/g, '""');
49
+ const sequenceLiteral = `"${escapeIdentifier(
50
+ migrationsSchema
51
+ )}"."${escapeIdentifier(migrationsSequence)}"`;
52
+
53
+ const migrationTableCreate = sql`
54
+ CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsSchema)}.${sql.identifier(
55
+ migrationsTable
56
+ )} (
57
+ id integer PRIMARY KEY default nextval('${sql.raw(sequenceLiteral)}'),
58
+ hash text NOT NULL,
59
+ created_at bigint
60
+ )
61
+ `;
62
+
63
+ await session.execute(
64
+ sql`CREATE SCHEMA IF NOT EXISTS ${sql.identifier(migrationsSchema)}`
65
+ );
66
+ await session.execute(
67
+ sql`CREATE SEQUENCE IF NOT EXISTS ${sql.identifier(
68
+ migrationsSchema
69
+ )}.${sql.identifier(migrationsSequence)}`
70
+ );
71
+ if (legacySequence !== migrationsSequence) {
72
+ await session.execute(
73
+ sql`CREATE SEQUENCE IF NOT EXISTS ${sql.identifier(
74
+ migrationsSchema
75
+ )}.${sql.identifier(legacySequence)}`
76
+ );
77
+ }
78
+ await session.execute(migrationTableCreate);
79
+
80
+ const dbMigrations = await session.all<{
81
+ id: number;
82
+ hash: string;
83
+ created_at: string;
84
+ }>(
85
+ sql`select id, hash, created_at from ${sql.identifier(
86
+ migrationsSchema
87
+ )}.${sql.identifier(migrationsTable)} order by created_at desc limit 1`
88
+ );
89
+
90
+ const lastDbMigration = dbMigrations[0];
91
+
92
+ await session.transaction(async (tx) => {
93
+ for await (const migration of migrations) {
94
+ if (
95
+ !lastDbMigration ||
96
+ Number(lastDbMigration.created_at) < migration.folderMillis
97
+ ) {
98
+ for (const stmt of migration.sql) {
99
+ await tx.execute(sql.raw(stmt));
100
+ }
101
+
102
+ await tx.execute(
103
+ sql`insert into ${sql.identifier(
104
+ migrationsSchema
105
+ )}.${sql.identifier(
106
+ migrationsTable
107
+ )} ("hash", "created_at") values(${migration.hash}, ${
108
+ migration.folderMillis
109
+ })`
110
+ );
111
+ }
112
+ }
113
+ });
114
+ }
115
+
116
+ override prepareTyping(
117
+ encoder: DriverValueEncoder<unknown, unknown>
118
+ ): QueryTypingsValue {
119
+ if (is(encoder, PgJsonb) || is(encoder, PgJson)) {
120
+ this.hasPgJsonColumn = true;
121
+ return 'none';
122
+ } else if (is(encoder, PgNumeric)) {
123
+ return 'decimal';
124
+ } else if (is(encoder, PgTime)) {
125
+ return 'time';
126
+ } else if (is(encoder, PgTimestamp) || is(encoder, PgTimestampString)) {
127
+ return 'timestamp';
128
+ } else if (is(encoder, PgDate) || is(encoder, PgDateString)) {
129
+ return 'date';
130
+ } else if (is(encoder, PgUUID)) {
131
+ return 'uuid';
132
+ } else {
133
+ return 'none';
134
+ }
135
+ }
136
+ }
package/src/driver.ts ADDED
@@ -0,0 +1,131 @@
1
+ import { entityKind } from 'drizzle-orm/entity';
2
+ import type { Logger } from 'drizzle-orm/logger';
3
+ import { DefaultLogger } from 'drizzle-orm/logger';
4
+ import { PgDatabase } from 'drizzle-orm/pg-core/db';
5
+ import type { SelectedFields } from 'drizzle-orm/pg-core/query-builders';
6
+ import type { PgSession } from 'drizzle-orm/pg-core';
7
+ import {
8
+ createTableRelationsHelpers,
9
+ extractTablesRelationalConfig,
10
+ type ExtractTablesWithRelations,
11
+ type RelationalSchemaConfig,
12
+ type TablesRelationalConfig,
13
+ } from 'drizzle-orm/relations';
14
+ import { type DrizzleConfig } from 'drizzle-orm/utils';
15
+ import type {
16
+ DuckDBClientLike,
17
+ DuckDBQueryResultHKT,
18
+ DuckDBTransaction,
19
+ } from './session.ts';
20
+ import { DuckDBSession } from './session.ts';
21
+ import { DuckDBDialect } from './dialect.ts';
22
+ import { DuckDBSelectBuilder } from './select-builder.ts';
23
+ import { aliasFields } from './sql/selection.ts';
24
+
25
// Options threaded from drizzle() through the driver into each session.
export interface PgDriverOptions {
  logger?: Logger; // query logger; DefaultLogger when drizzle({ logger: true })
  rewriteArrays?: boolean; // forwarded to DuckDBSession — semantics defined in session.ts; confirm there
  rejectStringArrayLiterals?: boolean; // forwarded to DuckDBSession — see session.ts
}
30
+
31
/**
 * Thin factory binding a DuckDB client and dialect together; produces
 * sessions configured with the driver-level options.
 */
export class DuckDBDriver {
  static readonly [entityKind]: string = 'DuckDBDriver';

  constructor(
    private client: DuckDBClientLike,
    private dialect: DuckDBDialect,
    private options: PgDriverOptions = {}
  ) {}

  /** Create a session over this driver's client, dialect, and options. */
  createSession(
    schema: RelationalSchemaConfig<TablesRelationalConfig> | undefined
  ): DuckDBSession<Record<string, unknown>, TablesRelationalConfig> {
    return new DuckDBSession(this.client, this.dialect, schema, {
      logger: this.options.logger,
      rewriteArrays: this.options.rewriteArrays,
      rejectStringArrayLiterals: this.options.rejectStringArrayLiterals,
    });
  }
}
50
+
51
// drizzle() config extended with DuckDB-specific array-handling switches.
export interface DuckDBDrizzleConfig<
  TSchema extends Record<string, unknown> = Record<string, never>
> extends DrizzleConfig<TSchema> {
  rewriteArrays?: boolean; // forwarded to the session; see session.ts for semantics
  rejectStringArrayLiterals?: boolean; // forwarded to the session; see session.ts
}
57
+
58
+ export function drizzle<
59
+ TSchema extends Record<string, unknown> = Record<string, never>
60
+ >(
61
+ client: DuckDBClientLike,
62
+ config: DuckDBDrizzleConfig<TSchema> = {}
63
+ ): DuckDBDatabase<TSchema, ExtractTablesWithRelations<TSchema>> {
64
+ const dialect = new DuckDBDialect();
65
+
66
+ const logger =
67
+ config.logger === true ? new DefaultLogger() : config.logger || undefined;
68
+
69
+ let schema: RelationalSchemaConfig<TablesRelationalConfig> | undefined;
70
+
71
+ if (config.schema) {
72
+ const tablesConfig = extractTablesRelationalConfig(
73
+ config.schema,
74
+ createTableRelationsHelpers
75
+ );
76
+ schema = {
77
+ fullSchema: config.schema,
78
+ schema: tablesConfig.tables,
79
+ tableNamesMap: tablesConfig.tableNamesMap,
80
+ };
81
+ }
82
+
83
+ const driver = new DuckDBDriver(client, dialect, {
84
+ logger,
85
+ rewriteArrays: config.rewriteArrays,
86
+ rejectStringArrayLiterals: config.rejectStringArrayLiterals,
87
+ });
88
+ const session = driver.createSession(schema);
89
+
90
+ return new DuckDBDatabase(dialect, session, schema) as DuckDBDatabase<
91
+ TSchema,
92
+ ExtractTablesWithRelations<TSchema>
93
+ >;
94
+ }
95
+
96
/**
 * PgDatabase subclass for DuckDB.
 *
 * Overrides select() to alias selected fields (via aliasFields) before
 * handing them to the DuckDB-aware select builder, and routes
 * transaction() through the DuckDB session.
 */
export class DuckDBDatabase<
  TFullSchema extends Record<string, unknown> = Record<string, never>,
  TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>
> extends PgDatabase<DuckDBQueryResultHKT, TFullSchema, TSchema> {
  static readonly [entityKind]: string = 'DuckDBDatabase';

  constructor(
    readonly dialect: DuckDBDialect,
    readonly session: DuckDBSession<TFullSchema, TSchema>,
    schema: RelationalSchemaConfig<TSchema> | undefined
  ) {
    super(dialect, session, schema);
  }

  // Overloads: bare select() selects everything; select(fields) projects.
  select(): DuckDBSelectBuilder<undefined>;
  select<TSelection extends SelectedFields>(
    fields: TSelection
  ): DuckDBSelectBuilder<TSelection>;
  select(fields?: SelectedFields): DuckDBSelectBuilder<
    SelectedFields | undefined
  > {
    // aliasFields rewrites the selection before it reaches the builder —
    // NOTE(review): exact aliasing behavior lives in ./sql/selection.ts;
    // confirm there.
    const selectedFields = fields ? aliasFields(fields) : undefined;

    return new DuckDBSelectBuilder({
      fields: selectedFields ?? undefined,
      // Cast needed because the builder is typed against PgSession.
      session: this.session as unknown as PgSession<DuckDBQueryResultHKT>,
      dialect: this.dialect,
    });
  }

  /** Run `transaction` inside a DuckDB transaction via the session. */
  override async transaction<T>(
    transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>
  ): Promise<T> {
    return await this.session.transaction<T>(transaction);
  }
}
package/src/index.ts ADDED
@@ -0,0 +1,5 @@
1
// Public package surface: re-export the driver, session, column helpers,
// migrator, and introspection utilities from a single entry point.
export * from './driver.ts';
export * from './session.ts';
export * from './columns.ts';
export * from './migrator.ts';
export * from './introspect.ts';