@leonardovida-md/drizzle-neo-duckdb 1.0.1 → 1.0.3

This diff shows the content changes between publicly available package versions released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the differences between package versions as they appear in their respective public registries.
package/src/columns.ts CHANGED
@@ -1,6 +1,18 @@
1
1
  import { sql, type SQL } from 'drizzle-orm';
2
2
  import type { SQLWrapper } from 'drizzle-orm/sql/sql';
3
3
  import { customType } from 'drizzle-orm/pg-core';
4
+ import {
5
+ wrapList,
6
+ wrapArray,
7
+ wrapMap,
8
+ wrapBlob,
9
+ wrapJson,
10
+ type ListValueWrapper,
11
+ type ArrayValueWrapper,
12
+ type MapValueWrapper,
13
+ type BlobValueWrapper,
14
+ type JsonValueWrapper,
15
+ } from './value-wrappers.ts';
4
16
 
5
17
  type IntColType =
6
18
  | 'SMALLINT'
@@ -93,7 +105,9 @@ function formatLiteral(value: unknown, typeHint?: string): string {
93
105
  }
94
106
 
95
107
  const str =
96
- typeof value === 'string' ? value : JSON.stringify(value) ?? String(value);
108
+ typeof value === 'string'
109
+ ? value
110
+ : (JSON.stringify(value) ?? String(value));
97
111
 
98
112
  const escaped = str.replace(/'/g, "''");
99
113
  // Simple quoting based on hint.
@@ -140,7 +154,10 @@ function buildStructLiteral(
140
154
  return sql`struct_pack(${sql.join(parts, sql.raw(', '))})`;
141
155
  }
142
156
 
143
- function buildMapLiteral(value: Record<string, unknown>, valueType?: string): SQL {
157
+ function buildMapLiteral(
158
+ value: Record<string, unknown>,
159
+ valueType?: string
160
+ ): SQL {
144
161
  const keys = Object.keys(value);
145
162
  const vals = Object.values(value);
146
163
  const keyList = buildListLiteral(keys, 'TEXT');
@@ -155,14 +172,17 @@ export const duckDbList = <TData = unknown>(
155
172
  name: string,
156
173
  elementType: AnyColType
157
174
  ) =>
158
- customType<{ data: TData[]; driverData: SQL | unknown[] | string }>({
175
+ customType<{
176
+ data: TData[];
177
+ driverData: ListValueWrapper | unknown[] | string;
178
+ }>({
159
179
  dataType() {
160
180
  return `${elementType}[]`;
161
181
  },
162
- toDriver(value: TData[]) {
163
- return buildListLiteral(value, elementType);
182
+ toDriver(value: TData[]): ListValueWrapper {
183
+ return wrapList(value, elementType);
164
184
  },
165
- fromDriver(value: unknown[] | string | SQL): TData[] {
185
+ fromDriver(value: unknown[] | string | ListValueWrapper): TData[] {
166
186
  if (Array.isArray(value)) {
167
187
  return value as TData[];
168
188
  }
@@ -181,16 +201,19 @@ export const duckDbArray = <TData = unknown>(
181
201
  elementType: AnyColType,
182
202
  fixedLength?: number
183
203
  ) =>
184
- customType<{ data: TData[]; driverData: SQL | unknown[] | string }>({
204
+ customType<{
205
+ data: TData[];
206
+ driverData: ArrayValueWrapper | unknown[] | string;
207
+ }>({
185
208
  dataType() {
186
209
  return fixedLength
187
210
  ? `${elementType}[${fixedLength}]`
188
211
  : `${elementType}[]`;
189
212
  },
190
- toDriver(value: TData[]) {
191
- return buildListLiteral(value, elementType);
213
+ toDriver(value: TData[]): ArrayValueWrapper {
214
+ return wrapArray(value, elementType, fixedLength);
192
215
  },
193
- fromDriver(value: unknown[] | string | SQL): TData[] {
216
+ fromDriver(value: unknown[] | string | ArrayValueWrapper): TData[] {
194
217
  if (Array.isArray(value)) {
195
218
  return value as TData[];
196
219
  }
@@ -208,15 +231,15 @@ export const duckDbMap = <TData extends Record<string, any>>(
208
231
  name: string,
209
232
  valueType: AnyColType | ListColType | ArrayColType
210
233
  ) =>
211
- customType<{ data: TData; driverData: TData }>({
212
- dataType() {
234
+ customType<{ data: TData; driverData: MapValueWrapper | TData }>({
235
+ dataType() {
213
236
  return `MAP (STRING, ${valueType})`;
214
237
  },
215
- toDriver(value: TData) {
216
- return buildMapLiteral(value, valueType);
238
+ toDriver(value: TData): MapValueWrapper {
239
+ return wrapMap(value, valueType);
217
240
  },
218
- fromDriver(value: TData): TData {
219
- return value;
241
+ fromDriver(value: TData | MapValueWrapper): TData {
242
+ return value as TData;
220
243
  },
221
244
  })(name);
222
245
 
@@ -233,6 +256,8 @@ export const duckDbStruct = <TData extends Record<string, any>>(
233
256
  return `STRUCT (${fields.join(', ')})`;
234
257
  },
235
258
  toDriver(value: TData) {
259
+ // Use SQL literals for structs due to DuckDB type inference issues
260
+ // with nested empty lists
236
261
  return buildStructLiteral(value, schema);
237
262
  },
238
263
  fromDriver(value: TData | string): TData {
@@ -247,15 +272,24 @@ export const duckDbStruct = <TData extends Record<string, any>>(
247
272
  },
248
273
  })(name);
249
274
 
275
+ /**
276
+ * JSON column type that wraps values and delays JSON.stringify() to binding time.
277
+ * This ensures consistent handling with other wrapped types.
278
+ *
279
+ * Note: DuckDB stores JSON as VARCHAR internally, so the final binding
280
+ * is always a stringified JSON value.
281
+ */
250
282
  export const duckDbJson = <TData = unknown>(name: string) =>
251
- customType<{ data: TData; driverData: SQL | string }>({
283
+ customType<{ data: TData; driverData: JsonValueWrapper | SQL | string }>({
252
284
  dataType() {
253
285
  return 'JSON';
254
286
  },
255
- toDriver(value: TData) {
287
+ toDriver(value: TData): JsonValueWrapper | SQL | string {
288
+ // Pass through strings directly
256
289
  if (typeof value === 'string') {
257
290
  return value;
258
291
  }
292
+ // Pass through SQL objects (for raw SQL expressions)
259
293
  if (
260
294
  value !== null &&
261
295
  typeof value === 'object' &&
@@ -263,9 +297,10 @@ export const duckDbJson = <TData = unknown>(name: string) =>
263
297
  ) {
264
298
  return value as unknown as SQL;
265
299
  }
266
- return JSON.stringify(value ?? null);
300
+ // Wrap non-string values for delayed stringify at binding time
301
+ return wrapJson(value);
267
302
  },
268
- fromDriver(value: SQL | string) {
303
+ fromDriver(value: SQL | string | JsonValueWrapper) {
269
304
  if (typeof value !== 'string') {
270
305
  return value as unknown as TData;
271
306
  }
@@ -283,14 +318,14 @@ export const duckDbJson = <TData = unknown>(name: string) =>
283
318
 
284
319
  export const duckDbBlob = customType<{
285
320
  data: Buffer;
321
+ driverData: BlobValueWrapper;
286
322
  default: false;
287
323
  }>({
288
324
  dataType() {
289
325
  return 'BLOB';
290
326
  },
291
- toDriver(value: Buffer) {
292
- const hexString = value.toString('hex');
293
- return sql`from_hex(${hexString})`;
327
+ toDriver(value: Buffer): BlobValueWrapper {
328
+ return wrapBlob(value);
294
329
  },
295
330
  });
296
331
 
@@ -322,10 +357,7 @@ interface TimestampOptions {
322
357
  precision?: number;
323
358
  }
324
359
 
325
- export const duckDbTimestamp = (
326
- name: string,
327
- options: TimestampOptions = {}
328
- ) =>
360
+ export const duckDbTimestamp = (name: string, options: TimestampOptions = {}) =>
329
361
  customType<{
330
362
  data: Date | string;
331
363
  driverData: SQL | string | Date;
@@ -338,6 +370,7 @@ export const duckDbTimestamp = (
338
370
  return `TIMESTAMP${precision}`;
339
371
  },
340
372
  toDriver(value: Date | string) {
373
+ // Use SQL literals for timestamps due to Bun/DuckDB bigint binding issues
341
374
  const iso = value instanceof Date ? value.toISOString() : value;
342
375
  const normalized = iso.replace('T', ' ').replace('Z', '+00');
343
376
  const typeKeyword = options.withTimezone ? 'TIMESTAMPTZ' : 'TIMESTAMP';
@@ -353,11 +386,9 @@ export const duckDbTimestamp = (
353
386
  if (value instanceof Date) {
354
387
  return value;
355
388
  }
356
- const stringValue =
357
- typeof value === 'string' ? value : value.toString();
389
+ const stringValue = typeof value === 'string' ? value : value.toString();
358
390
  const hasOffset =
359
- stringValue.endsWith('Z') ||
360
- /[+-]\d{2}:?\d{2}$/.test(stringValue);
391
+ stringValue.endsWith('Z') || /[+-]\d{2}:?\d{2}$/.test(stringValue);
361
392
  const normalized = hasOffset
362
393
  ? stringValue.replace(' ', 'T')
363
394
  : `${stringValue.replace(' ', 'T')}Z`;
@@ -374,11 +405,9 @@ export const duckDbDate = (name: string) =>
374
405
  return value;
375
406
  },
376
407
  fromDriver(value: string | Date) {
377
- const str =
378
- value instanceof Date
379
- ? value.toISOString().slice(0, 10)
380
- : value;
381
- return str;
408
+ const str =
409
+ value instanceof Date ? value.toISOString().slice(0, 10) : value;
410
+ return str;
382
411
  },
383
412
  })(name);
384
413
 
package/src/dialect.ts CHANGED
@@ -52,8 +52,8 @@ export class DuckDBDialect extends PgDialect {
52
52
 
53
53
  const migrationTableCreate = sql`
54
54
  CREATE TABLE IF NOT EXISTS ${sql.identifier(migrationsSchema)}.${sql.identifier(
55
- migrationsTable
56
- )} (
55
+ migrationsTable
56
+ )} (
57
57
  id integer PRIMARY KEY default nextval('${sql.raw(sequenceLiteral)}'),
58
58
  hash text NOT NULL,
59
59
  created_at bigint
package/src/driver.ts CHANGED
@@ -12,6 +12,7 @@ import {
12
12
  type TablesRelationalConfig,
13
13
  } from 'drizzle-orm/relations';
14
14
  import { type DrizzleConfig } from 'drizzle-orm/utils';
15
+ import type { SQL } from 'drizzle-orm/sql/sql';
15
16
  import type {
16
17
  DuckDBClientLike,
17
18
  DuckDBQueryResultHKT,
@@ -21,6 +22,7 @@ import { DuckDBSession } from './session.ts';
21
22
  import { DuckDBDialect } from './dialect.ts';
22
23
  import { DuckDBSelectBuilder } from './select-builder.ts';
23
24
  import { aliasFields } from './sql/selection.ts';
25
+ import type { ExecuteInBatchesOptions, RowData } from './client.ts';
24
26
 
25
27
  export interface PgDriverOptions {
26
28
  logger?: Logger;
@@ -49,14 +51,14 @@ export class DuckDBDriver {
49
51
  }
50
52
 
51
53
  export interface DuckDBDrizzleConfig<
52
- TSchema extends Record<string, unknown> = Record<string, never>
54
+ TSchema extends Record<string, unknown> = Record<string, never>,
53
55
  > extends DrizzleConfig<TSchema> {
54
56
  rewriteArrays?: boolean;
55
57
  rejectStringArrayLiterals?: boolean;
56
58
  }
57
59
 
58
60
  export function drizzle<
59
- TSchema extends Record<string, unknown> = Record<string, never>
61
+ TSchema extends Record<string, unknown> = Record<string, never>,
60
62
  >(
61
63
  client: DuckDBClientLike,
62
64
  config: DuckDBDrizzleConfig<TSchema> = {}
@@ -95,7 +97,8 @@ export function drizzle<
95
97
 
96
98
  export class DuckDBDatabase<
97
99
  TFullSchema extends Record<string, unknown> = Record<string, never>,
98
- TSchema extends TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>
100
+ TSchema extends
101
+ TablesRelationalConfig = ExtractTablesWithRelations<TFullSchema>,
99
102
  > extends PgDatabase<DuckDBQueryResultHKT, TFullSchema, TSchema> {
100
103
  static readonly [entityKind]: string = 'DuckDBDatabase';
101
104
 
@@ -111,9 +114,9 @@ export class DuckDBDatabase<
111
114
  select<TSelection extends SelectedFields>(
112
115
  fields: TSelection
113
116
  ): DuckDBSelectBuilder<TSelection>;
114
- select(fields?: SelectedFields): DuckDBSelectBuilder<
115
- SelectedFields | undefined
116
- > {
117
+ select(
118
+ fields?: SelectedFields
119
+ ): DuckDBSelectBuilder<SelectedFields | undefined> {
117
120
  const selectedFields = fields ? aliasFields(fields) : undefined;
118
121
 
119
122
  return new DuckDBSelectBuilder({
@@ -123,6 +126,17 @@ export class DuckDBDatabase<
123
126
  });
124
127
  }
125
128
 
129
+ executeBatches<T extends RowData = RowData>(
130
+ query: SQL,
131
+ options: ExecuteInBatchesOptions = {}
132
+ ): AsyncGenerator<T[], void, void> {
133
+ return this.session.executeBatches<T>(query, options);
134
+ }
135
+
136
+ executeArrow(query: SQL): Promise<unknown> {
137
+ return this.session.executeArrow(query);
138
+ }
139
+
126
140
  override async transaction<T>(
127
141
  transaction: (tx: DuckDBTransaction<TFullSchema, TSchema>) => Promise<T>
128
142
  ): Promise<T> {
package/src/index.ts CHANGED
@@ -3,3 +3,6 @@ export * from './session.ts';
3
3
  export * from './columns.ts';
4
4
  export * from './migrator.ts';
5
5
  export * from './introspect.ts';
6
+ export * from './client.ts';
7
+ export * from './olap.ts';
8
+ export * from './value-wrappers.ts';
package/src/introspect.ts CHANGED
@@ -5,6 +5,18 @@ import type { DuckDBDatabase } from './driver.ts';
5
5
  const SYSTEM_SCHEMAS = new Set(['information_schema', 'pg_catalog']);
6
6
 
7
7
  export interface IntrospectOptions {
8
+ /**
9
+ * Database/catalog to introspect. If not specified, uses the current database
10
+ * (via `SELECT current_database()`). This prevents returning tables from all
11
+ * attached databases in MotherDuck workspaces.
12
+ */
13
+ database?: string;
14
+ /**
15
+ * When true, introspects all attached databases instead of just the current one.
16
+ * Ignored if `database` is explicitly set.
17
+ * @default false
18
+ */
19
+ allDatabases?: boolean;
8
20
  schemas?: string[];
9
21
  includeViews?: boolean;
10
22
  useCustomTimeTypes?: boolean;
@@ -102,13 +114,14 @@ export async function introspect(
102
114
  db: DuckDBDatabase,
103
115
  opts: IntrospectOptions = {}
104
116
  ): Promise<IntrospectResult> {
105
- const schemas = await resolveSchemas(db, opts.schemas);
117
+ const database = await resolveDatabase(db, opts.database, opts.allDatabases);
118
+ const schemas = await resolveSchemas(db, database, opts.schemas);
106
119
  const includeViews = opts.includeViews ?? false;
107
120
 
108
- const tables = await loadTables(db, schemas, includeViews);
109
- const columns = await loadColumns(db, schemas);
110
- const constraints = await loadConstraints(db, schemas);
111
- const indexes = await loadIndexes(db, schemas);
121
+ const tables = await loadTables(db, database, schemas, includeViews);
122
+ const columns = await loadColumns(db, database, schemas);
123
+ const constraints = await loadConstraints(db, database, schemas);
124
+ const indexes = await loadIndexes(db, database, schemas);
112
125
 
113
126
  const grouped = buildTables(tables, columns, constraints, indexes);
114
127
 
@@ -126,16 +139,39 @@ export async function introspect(
126
139
  };
127
140
  }
128
141
 
142
+ async function resolveDatabase(
143
+ db: DuckDBDatabase,
144
+ targetDatabase?: string,
145
+ allDatabases?: boolean
146
+ ): Promise<string | null> {
147
+ if (allDatabases) {
148
+ return null;
149
+ }
150
+ if (targetDatabase) {
151
+ return targetDatabase;
152
+ }
153
+
154
+ const rows = await db.execute<{ current_database: string }>(
155
+ sql`SELECT current_database() as current_database`
156
+ );
157
+ return rows[0]?.current_database ?? null;
158
+ }
159
+
129
160
  async function resolveSchemas(
130
161
  db: DuckDBDatabase,
162
+ database: string | null,
131
163
  targetSchemas?: string[]
132
164
  ): Promise<string[]> {
133
165
  if (targetSchemas?.length) {
134
166
  return targetSchemas;
135
167
  }
136
168
 
169
+ const databaseFilter = database
170
+ ? sql`catalog_name = ${database}`
171
+ : sql`1 = 1`;
172
+
137
173
  const rows = await db.execute<{ schema_name: string }>(
138
- sql`select schema_name from information_schema.schemata`
174
+ sql`SELECT schema_name FROM information_schema.schemata WHERE ${databaseFilter}`
139
175
  );
140
176
 
141
177
  return rows
@@ -145,30 +181,40 @@ async function resolveSchemas(
145
181
 
146
182
  async function loadTables(
147
183
  db: DuckDBDatabase,
184
+ database: string | null,
148
185
  schemas: string[],
149
186
  includeViews: boolean
150
187
  ): Promise<DuckDbTableRow[]> {
151
188
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
189
+ const databaseFilter = database
190
+ ? sql`table_catalog = ${database}`
191
+ : sql`1 = 1`;
152
192
 
153
193
  return await db.execute<DuckDbTableRow>(
154
194
  sql`
155
- select table_schema as schema_name, table_name, table_type
156
- from information_schema.tables
157
- where table_schema in (${sql.join(schemaFragments, sql.raw(', '))})
158
- and ${includeViews ? sql`1 = 1` : sql`table_type = 'BASE TABLE'`}
159
- order by table_schema, table_name
195
+ SELECT table_schema as schema_name, table_name, table_type
196
+ FROM information_schema.tables
197
+ WHERE ${databaseFilter}
198
+ AND table_schema IN (${sql.join(schemaFragments, sql.raw(', '))})
199
+ AND ${includeViews ? sql`1 = 1` : sql`table_type = 'BASE TABLE'`}
200
+ ORDER BY table_schema, table_name
160
201
  `
161
202
  );
162
203
  }
163
204
 
164
205
  async function loadColumns(
165
206
  db: DuckDBDatabase,
207
+ database: string | null,
166
208
  schemas: string[]
167
209
  ): Promise<DuckDbColumnRow[]> {
168
210
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
211
+ const databaseFilter = database
212
+ ? sql`database_name = ${database}`
213
+ : sql`1 = 1`;
214
+
169
215
  return await db.execute<DuckDbColumnRow>(
170
216
  sql`
171
- select
217
+ SELECT
172
218
  schema_name,
173
219
  table_name,
174
220
  column_name,
@@ -180,21 +226,27 @@ async function loadColumns(
180
226
  numeric_precision,
181
227
  numeric_scale,
182
228
  internal
183
- from duckdb_columns()
184
- where schema_name in (${sql.join(schemaFragments, sql.raw(', '))})
185
- order by schema_name, table_name, column_index
229
+ FROM duckdb_columns()
230
+ WHERE ${databaseFilter}
231
+ AND schema_name IN (${sql.join(schemaFragments, sql.raw(', '))})
232
+ ORDER BY schema_name, table_name, column_index
186
233
  `
187
234
  );
188
235
  }
189
236
 
190
237
  async function loadConstraints(
191
238
  db: DuckDBDatabase,
239
+ database: string | null,
192
240
  schemas: string[]
193
241
  ): Promise<DuckDbConstraintRow[]> {
194
242
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
243
+ const databaseFilter = database
244
+ ? sql`database_name = ${database}`
245
+ : sql`1 = 1`;
246
+
195
247
  return await db.execute<DuckDbConstraintRow>(
196
248
  sql`
197
- select
249
+ SELECT
198
250
  schema_name,
199
251
  table_name,
200
252
  constraint_name,
@@ -203,29 +255,36 @@ async function loadConstraints(
203
255
  constraint_column_names,
204
256
  referenced_table,
205
257
  referenced_column_names
206
- from duckdb_constraints()
207
- where schema_name in (${sql.join(schemaFragments, sql.raw(', '))})
208
- order by schema_name, table_name, constraint_index
258
+ FROM duckdb_constraints()
259
+ WHERE ${databaseFilter}
260
+ AND schema_name IN (${sql.join(schemaFragments, sql.raw(', '))})
261
+ ORDER BY schema_name, table_name, constraint_index
209
262
  `
210
263
  );
211
264
  }
212
265
 
213
266
  async function loadIndexes(
214
267
  db: DuckDBDatabase,
268
+ database: string | null,
215
269
  schemas: string[]
216
270
  ): Promise<DuckDbIndexRow[]> {
217
271
  const schemaFragments = schemas.map((schema) => sql`${schema}`);
272
+ const databaseFilter = database
273
+ ? sql`database_name = ${database}`
274
+ : sql`1 = 1`;
275
+
218
276
  return await db.execute<DuckDbIndexRow>(
219
277
  sql`
220
- select
278
+ SELECT
221
279
  schema_name,
222
280
  table_name,
223
281
  index_name,
224
282
  is_unique,
225
283
  expressions
226
- from duckdb_indexes()
227
- where schema_name in (${sql.join(schemaFragments, sql.raw(', '))})
228
- order by schema_name, table_name, index_name
284
+ FROM duckdb_indexes()
285
+ WHERE ${databaseFilter}
286
+ AND schema_name IN (${sql.join(schemaFragments, sql.raw(', '))})
287
+ ORDER BY schema_name, table_name, index_name
229
288
  `
230
289
  );
231
290
  }
@@ -407,17 +466,17 @@ function emitConstraints(
407
466
  .map((col) => `t.${toIdentifier(col)}`)
408
467
  .join(', ')}], name: ${JSON.stringify(constraint.name)} })`
409
468
  );
410
- } else if (
411
- constraint.type === 'UNIQUE' &&
412
- constraint.columns.length > 1
413
- ) {
469
+ } else if (constraint.type === 'UNIQUE' && constraint.columns.length > 1) {
414
470
  imports.pgCore.add('unique');
415
471
  entries.push(
416
472
  `${key}: unique(${JSON.stringify(constraint.name)}).on(${constraint.columns
417
473
  .map((col) => `t.${toIdentifier(col)}`)
418
474
  .join(', ')})`
419
475
  );
420
- } else if (constraint.type === 'FOREIGN KEY' && constraint.referencedTable) {
476
+ } else if (
477
+ constraint.type === 'FOREIGN KEY' &&
478
+ constraint.referencedTable
479
+ ) {
421
480
  imports.pgCore.add('foreignKey');
422
481
  const targetTable = toIdentifier(constraint.referencedTable.name);
423
482
  entries.push(
@@ -487,7 +546,10 @@ function buildDefault(defaultValue: string | null): string {
487
546
  if (/^nextval\(/i.test(trimmed)) {
488
547
  return `.default(sql\`${trimmed}\`)`;
489
548
  }
490
- if (/^current_timestamp(?:\(\))?$/i.test(trimmed) || /^now\(\)$/i.test(trimmed)) {
549
+ if (
550
+ /^current_timestamp(?:\(\))?$/i.test(trimmed) ||
551
+ /^now\(\)$/i.test(trimmed)
552
+ ) {
491
553
  return `.defaultNow()`;
492
554
  }
493
555
  if (trimmed === 'true' || trimmed === 'false') {
@@ -545,7 +607,11 @@ function mapDuckDbType(
545
607
 
546
608
  if (upper === 'BIGINT' || upper === 'INT8' || upper === 'UBIGINT') {
547
609
  imports.pgCore.add('bigint');
548
- return { builder: `bigint(${columnName(column.name)})` };
610
+ // Drizzle's bigint helper requires an explicit mode. Default to 'number'
611
+ // to mirror DuckDB's typical 64-bit integer behavior in JS.
612
+ return {
613
+ builder: `bigint(${columnName(column.name)}, { mode: 'number' })`,
614
+ };
549
615
  }
550
616
 
551
617
  const decimalMatch = /^DECIMAL\((\d+),(\d+)\)/i.exec(upper);
@@ -835,9 +901,7 @@ function renderImports(imports: ImportBuckets, importBasePath: string): string {
835
901
  const pgCore = [...imports.pgCore];
836
902
  if (pgCore.length) {
837
903
  lines.push(
838
- `import { ${pgCore
839
- .sort()
840
- .join(', ')} } from 'drizzle-orm/pg-core';`
904
+ `import { ${pgCore.sort().join(', ')} } from 'drizzle-orm/pg-core';`
841
905
  );
842
906
  }
843
907
 
package/src/migrator.ts CHANGED
@@ -10,9 +10,7 @@ export async function migrate<TSchema extends Record<string, unknown>>(
10
10
  config: DuckDbMigrationConfig
11
11
  ) {
12
12
  const migrationConfig: MigrationConfig =
13
- typeof config === 'string'
14
- ? { migrationsFolder: config }
15
- : config;
13
+ typeof config === 'string' ? { migrationsFolder: config } : config;
16
14
 
17
15
  const migrations = readMigrationFiles(migrationConfig);
18
16