@duckdbfan/drizzle-duckdb 0.0.5 → 0.0.7
This diff shows the changes between package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/dist/columns.d.ts +51 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.mjs +83 -1
- package/package.json +1 -1
- package/src/columns.ts +155 -1
- package/src/index.ts +2 -1
- package/src/utils.ts +8 -2
package/dist/columns.d.ts
CHANGED
@@ -1 +1,52 @@
+type IntColType = 'SMALLINT' | 'INTEGER' | 'BIGINT' | 'HUGEINT' | 'USMALLINT' | 'UINTEGER' | 'UBIGINT' | 'UHUGEINT' | 'INT' | 'INT16' | 'INT32' | 'INT64' | 'INT128' | 'LONG' | 'VARINT';
+type FloatColType = 'FLOAT' | 'DOUBLE';
+type StringColType = 'STRING' | 'VARCHAR' | 'TEXT';
+type BoolColType = 'BOOLEAN' | 'BOOL';
+type BlobColType = 'BLOB' | 'BYTEA' | 'VARBINARY';
+type DateColType = 'DATE' | 'TIME' | 'TIMETZ' | 'TIMESTAMP' | 'DATETIME' | 'TIMESTAMPTZ' | 'TIMESTAMP_MS' | 'TIMESTAMP_S';
+type AnyColType = IntColType | FloatColType | StringColType | BoolColType | DateColType | BlobColType;
+type ListColType = `${AnyColType}[]`;
+type ArrayColType = `${AnyColType}[${number}]`;
+export declare const duckDbMap: <TData extends Record<string, any>>(name: string, valueType: AnyColType | ListColType | ArrayColType) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: TData;
+    driverParam: string;
+    enumValues: undefined;
+}>;
+export declare const duckDbStruct: <TData extends Record<string, any>>(name: string, schema: Record<string, AnyColType | ListColType | ArrayColType>) => import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+    name: string;
+    dataType: "custom";
+    columnType: "PgCustomColumn";
+    data: TData;
+    driverParam: string;
+    enumValues: undefined;
+}>;
+export declare const duckDbBlob: {
+    (): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+        name: "";
+        dataType: "custom";
+        columnType: "PgCustomColumn";
+        data: Buffer;
+        driverParam: unknown;
+        enumValues: undefined;
+    }>;
+    <TConfig extends Record<string, any>>(fieldConfig?: TConfig | undefined): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+        name: "";
+        dataType: "custom";
+        columnType: "PgCustomColumn";
+        data: Buffer;
+        driverParam: unknown;
+        enumValues: undefined;
+    }>;
+    <TName extends string>(dbName: TName, fieldConfig?: unknown): import("drizzle-orm/pg-core").PgCustomColumnBuilder<{
+        name: TName;
+        dataType: "custom";
+        columnType: "PgCustomColumn";
+        data: Buffer;
+        driverParam: unknown;
+        enumValues: undefined;
+    }>;
+};
 export {};
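The new declarations boil down to three column builders, duckDbMap, duckDbStruct, and duckDbBlob, all returning drizzle-orm PgCustomColumnBuilders. A minimal usage sketch (not taken from the package's docs; the table and column names are hypothetical, and pgTable is assumed to apply since these are pg-core builders):

import { integer, pgTable } from 'drizzle-orm/pg-core';
import { duckDbBlob, duckDbMap, duckDbStruct } from '@duckdbfan/drizzle-duckdb';

// Hypothetical table exercising the three new builders.
const events = pgTable('events', {
  id: integer('id').primaryKey(),
  // MAP (STRING, INTEGER) column, typed as Record<string, number>
  counts: duckDbMap<Record<string, number>>('counts', 'INTEGER'),
  // STRUCT (name STRING, age INTEGER) column
  author: duckDbStruct<{ name: string; age: number }>('author', {
    name: 'STRING',
    age: 'INTEGER',
  }),
  // BLOB column backed by a Node.js Buffer
  payload: duckDbBlob('payload'),
});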
package/dist/index.d.ts
CHANGED
package/dist/index.mjs
CHANGED
@@ -25,6 +25,9 @@ import {
   getTableName,
   sql
 } from "drizzle-orm";
+import {
+  PgCustomColumn
+} from "drizzle-orm/pg-core";
 function mapResultRow(columns, row, joinsNotNullableMap) {
   const nullifyMap = {};
   const result = columns.reduce((result2, { path, field }, columnIndex) => {
@@ -34,7 +37,12 @@ function mapResultRow(columns, row, joinsNotNullableMap) {
     } else if (is(field, SQL)) {
       decoder = field.decoder;
     } else {
-      decoder = field.sql.decoder;
+      const col = field.sql.queryChunks.find((chunk) => is(chunk, Column));
+      if (is(col, PgCustomColumn)) {
+        decoder = col;
+      } else {
+        decoder = field.sql.decoder;
+      }
     }
     let node = result2;
     for (const [pathChunkIndex, pathChunk] of path.entries()) {
@@ -418,7 +426,81 @@ class DuckDBSelectBuilder extends PgSelectBuilder {
     });
   }
 }
+// src/columns.ts
+import { sql as sql5 } from "drizzle-orm";
+import { customType } from "drizzle-orm/pg-core";
+var duckDbMap = (name, valueType) => customType({
+  dataType() {
+    console.log("dataType");
+    return `MAP (STRING, ${valueType})`;
+  },
+  toDriver(value) {
+    console.log("toDriver");
+    const valueFormatter = (value2) => {
+      if (["STRING", "TEXT", "VARCHAR"].includes(valueType)) {
+        return `'${value2}'`;
+      }
+      return JSON.stringify(value2);
+    };
+    const values = Object.entries(value).map(([key, value2]) => {
+      return sql5.raw(`'${key}': ${valueFormatter(value2)}`);
+    });
+    const sqlChunks = [];
+    for (const value2 of values) {
+      sqlChunks.push(value2);
+    }
+    return sql5`MAP {${sql5.join(sqlChunks, sql5.raw(", "))}}`;
+  },
+  fromDriver(value) {
+    console.log("fromDriver");
+    const replacedValue = value.replaceAll(/(?:^{)?([^=]+?)=(.+)(?:}$)?/g, '"$1":"$2"');
+    const formattedValue = `{${replacedValue}}`;
+    const valueObj = JSON.parse(formattedValue);
+    return Object.fromEntries(Object.entries(valueObj).map(([key, value2]) => {
+      return [key, JSON.parse(value2)];
+    }));
+  }
+})(name);
+var duckDbStruct = (name, schema) => customType({
+  dataType() {
+    const fields = Object.entries(schema).map(([key, type]) => `${key} ${type}`);
+    return `STRUCT (${fields.join(", ")})`;
+  },
+  toDriver(value) {
+    const valueFormatter = (value2) => JSON.stringify(value2).replaceAll(/(?<!\\)"/g, "'");
+    const values = Object.entries(value).map(([key, value2]) => {
+      return sql5.raw(`'${key}': ${valueFormatter(value2)}`);
+    });
+    const sqlChunks = [];
+    for (const value2 of values) {
+      sqlChunks.push(value2);
+    }
+    return sql5`(SELECT {${sql5.join(sqlChunks, sql5.raw(", "))}})`;
+  },
+  fromDriver(value) {
+    return value;
+  }
+})(name);
+var duckDbBlob = customType({
+  dataType() {
+    return "BLOB";
+  },
+  toDriver(value) {
+    const hexString = value.toString("hex");
+    return sql5`from_hex(${hexString})`;
+  }
+});
+// src/migrator.ts
+import { readMigrationFiles } from "drizzle-orm/migrator";
+async function migrate(db, config) {
+  const migrations = readMigrationFiles(config);
+  await db.dialect.migrate(migrations, db.session, config);
+}
 export {
+  migrate,
+  duckDbStruct,
+  duckDbMap,
+  duckDbBlob,
   drizzle,
   DuckDBTransaction,
   DuckDBSession,
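The mapResultRow change above is what routes reads through the new custom columns: when a selected field is an aliased SQL expression rather than a bare column, the code now scans the expression's queryChunks for a wrapped PgCustomColumn and uses it as the decoder, so the column's fromDriver mapping runs; otherwise it falls back to the expression's own decoder as before. A rough sketch of a query shape that exercises this path (db and events are assumed from an existing drizzle-duckdb setup, so this is illustrative rather than runnable standalone):

import { sql } from 'drizzle-orm';

declare const db: any; // drizzle-duckdb database instance (assumed)
declare const events: any; // table with a duckDbMap column `counts` (assumed)

async function readCounts() {
  // The aliased SQL expression wraps the custom column; with this change the
  // wrapped PgCustomColumn is found in queryChunks and decodes the raw value.
  const counts = sql`${events.counts}`.as('counts');
  return db.select({ counts }).from(events);
}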
package/package.json
CHANGED
package/src/columns.ts
CHANGED
@@ -1 +1,155 @@
-
+import { sql, type SQL } from 'drizzle-orm';
+import { customType } from 'drizzle-orm/pg-core';
+
+type IntColType =
+  | 'SMALLINT'
+  | 'INTEGER'
+  | 'BIGINT'
+  | 'HUGEINT'
+  | 'USMALLINT'
+  | 'UINTEGER'
+  | 'UBIGINT'
+  | 'UHUGEINT'
+  | 'INT'
+  | 'INT16'
+  | 'INT32'
+  | 'INT64'
+  | 'INT128'
+  | 'LONG'
+  | 'VARINT';
+
+type FloatColType = 'FLOAT' | 'DOUBLE';
+
+type StringColType = 'STRING' | 'VARCHAR' | 'TEXT';
+
+type BoolColType = 'BOOLEAN' | 'BOOL';
+
+type BlobColType = 'BLOB' | 'BYTEA' | 'VARBINARY';
+
+type DateColType =
+  | 'DATE'
+  | 'TIME'
+  | 'TIMETZ'
+  | 'TIMESTAMP'
+  | 'DATETIME'
+  | 'TIMESTAMPTZ'
+  | 'TIMESTAMP_MS'
+  | 'TIMESTAMP_S';
+
+type AnyColType =
+  | IntColType
+  | FloatColType
+  | StringColType
+  | BoolColType
+  | DateColType
+  | BlobColType;
+
+type ListColType = `${AnyColType}[]`;
+type ArrayColType = `${AnyColType}[${number}]`;
+/**
+ * @example
+ * const structColType: StructColType = 'STRUCT (name: STRING, age: INT)';
+ */
+type StructColType = `STRUCT (${string})`;
+
+export const duckDbMap = <TData extends Record<string, any>>(
+  name: string,
+  valueType: AnyColType | ListColType | ArrayColType
+) =>
+  customType<{ data: TData; driverData: string }>({
+    dataType() {
+      console.log('dataType');
+      return `MAP (STRING, ${valueType})`;
+    },
+    toDriver(value: TData) {
+      console.log('toDriver');
+      // todo: more sophisticated encoding based on data type
+      const valueFormatter = (value: any) => {
+        if (['STRING', 'TEXT', 'VARCHAR'].includes(valueType)) {
+          return `'${value}'`;
+        }
+
+        return JSON.stringify(value);
+      };
+
+      const values = Object.entries(value).map(([key, value]) => {
+        return sql.raw(`'${key}': ${valueFormatter(value)}`);
+      });
+
+      const sqlChunks: SQL[] = [];
+
+      for (const value of values) {
+        sqlChunks.push(value);
+      }
+
+      return sql`MAP {${sql.join(sqlChunks, sql.raw(', '))}}`;
+    },
+    // ! this won't actually ever work because of how map values are returned
+    fromDriver(value: string): TData {
+      console.log('fromDriver');
+      // todo: more sophisticated decoding based on data type
+
+      const replacedValue = value.replaceAll(
+        /(?:^{)?([^=]+?)=(.+)(?:}$)?/g,
+        '"$1":"$2"'
+      );
+      const formattedValue = `{${replacedValue}}`;
+
+      const valueObj = JSON.parse(formattedValue);
+
+      return Object.fromEntries(
+        Object.entries(valueObj).map(([key, value]) => {
+          return [key, JSON.parse(value as string)];
+        })
+      ) as TData;
+    },
+  })(name);
+
+export const duckDbStruct = <TData extends Record<string, any>>(
+  name: string,
+  schema: Record<string, AnyColType | ListColType | ArrayColType>
+) =>
+  customType<{ data: TData; driverData: string }>({
+    dataType() {
+      const fields = Object.entries(schema).map(
+        ([key, type]) => `${key} ${type}`
+      );
+
+      return `STRUCT (${fields.join(', ')})`;
+    },
+    toDriver(value: TData) {
+      // todo: more sophisticated encoding based on data type
+      const valueFormatter = (value: any) =>
+        JSON.stringify(value).replaceAll(/(?<!\\)"/g, "'");
+
+      const values = Object.entries(value).map(([key, value]) => {
+        return sql.raw(`'${key}': ${valueFormatter(value)}`);
+      });
+
+      const sqlChunks: SQL[] = [];
+
+      for (const value of values) {
+        sqlChunks.push(value);
+      }
+
+      return sql`(SELECT {${sql.join(sqlChunks, sql.raw(', '))}})`;
+    },
+    fromDriver(value: string): TData {
+      return value as unknown as TData;
+    },
+  })(name);
+
+export const duckDbBlob = customType<{
+  data: Buffer;
+  default: false;
+}>({
+  dataType() {
+    return 'BLOB';
+  },
+  toDriver(value: Buffer) {
+    const hexString = value.toString('hex');
+    return sql`from_hex(${hexString})`;
+  },
+});
+
+// todo: date/time types
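For a sense of what the toDriver hooks above emit, here are hand-traced results (values are illustrative; the map and struct entries are interpolated via sql.raw, while the blob's hex string is bound as a parameter to from_hex):

// duckDbMap('counts', 'INTEGER') given { a: 1, b: 2 } builds:
//   MAP {'a': 1, 'b': 2}
//
// duckDbStruct('author', { name: 'STRING', age: 'INTEGER' })
// given { name: 'x', age: 3 } builds:
//   (SELECT {'name': 'x', 'age': 3})
//
// duckDbBlob hex-encodes the Buffer and binds the string to from_hex():
const hex = Buffer.from('hi').toString('hex'); // '6869' -> from_hex('6869')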
package/src/index.ts
CHANGED
package/src/utils.ts
CHANGED
@@ -9,7 +9,7 @@ import {
   getTableName,
   sql,
 } from 'drizzle-orm';
-import { type SelectedFields } from 'drizzle-orm/pg-core';
+import { PgCustomColumn, type SelectedFields } from 'drizzle-orm/pg-core';
 
 // Need to get around "decoder" property being marked as internal
 type SQLInternal<T = unknown> = SQL<T> & {
|
|
|
32
32
|
} else if (is(field, SQL)) {
|
|
33
33
|
decoder = (field as SQLInternal).decoder;
|
|
34
34
|
} else {
|
|
35
|
-
|
|
35
|
+
const col = field.sql.queryChunks.find((chunk) => is(chunk, Column));
|
|
36
|
+
|
|
37
|
+
if (is(col, PgCustomColumn)) {
|
|
38
|
+
decoder = col;
|
|
39
|
+
} else {
|
|
40
|
+
decoder = (field.sql as SQLInternal).decoder;
|
|
41
|
+
}
|
|
36
42
|
}
|
|
37
43
|
let node = result;
|
|
38
44
|
for (const [pathChunkIndex, pathChunk] of path.entries()) {
|