drizzle-kit 1.0.0-beta.3-a8902bc → 1.0.0-beta.3-702eadc
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- package/api-mysql.js +129 -29
- package/api-mysql.mjs +129 -29
- package/api-postgres.js +177 -87
- package/api-postgres.mjs +177 -87
- package/api-sqlite.js +129 -29
- package/api-sqlite.mjs +129 -29
- package/bin.cjs +338 -288
- package/index.d.mts +6 -1
- package/index.d.ts +6 -1
- package/package.json +3 -1
package/api-postgres.js
CHANGED
@@ -5993,7 +5993,6 @@ var init_stringify = __esm({
 case `number`:
 return Number.isFinite(value) ? value.toString() : `null`;
 case `boolean`:
-return value.toString();
 case `bigint`:
 return n6 ? `${value.toString()}n` : value.toString();
 case `object`: {
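
Removing the `return` under `case `boolean`:` makes that label fall through to the `bigint` branch, so booleans and bigints now share one return expression. A minimal sketch of the fall-through pattern (the function name and the extra `typeof` guard are illustrative, not taken from the bundle):

```js
// Illustrative only: a case label with no `return`/`break` falls through
// to the next label, so "boolean" and "bigint" share the same return.
function stringifyScalar(value, bigintSuffix) {
  switch (typeof value) {
    case "boolean":
    case "bigint":
      return bigintSuffix && typeof value === "bigint"
        ? `${value.toString()}n`
        : value.toString();
    default:
      return String(value);
  }
}

console.log(stringifyScalar(true, true)); // "true"
console.log(stringifyScalar(42n, true));  // "42n"
```
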
@@ -6230,7 +6229,7 @@ function formatTime(date2) {
 const iso = instant.toString({ timeZone: "UTC" });
 return iso;
 }
-var import_polyfill, SmallInt, Int, BigInt2, Numeric, Real, Double, Boolean2, Char, Varchar, Text, toDefaultArray, Json, Jsonb, Time, TimeTz, DateType, Timestamp, TimestampTz, Uuid, Interval2, Inet, Cidr, MacAddr, MacAddr8, Vector, HalfVec, SparseVec, Bit, Point, Line, GeometryPoint, Enum, Serial, BigSerial, SmallSerial, Custom, typeFor, splitSqlType, vectorOps, indexName, isSerialExpression, parseOnType, systemNamespaceNames, isSystemNamespace, wrapRecord, parseViewDefinition, defaultNameForIdentitySequence, defaultNameForPK, defaultNameForFK, defaultNameForUnique, defaultNameForIndex, trimDefaultValueSuffix, defaultForColumn, defaultToSQL, isDefaultAction, isSerialType,
+var import_polyfill, SmallInt, Int, BigInt2, Numeric, Real, Double, Boolean2, Char, Varchar, Text, toDefaultArray, Json, Jsonb, Time, TimeTz, DateType, Timestamp, TimestampTz, Uuid, Interval2, Inet, Cidr, MacAddr, MacAddr8, Vector, HalfVec, SparseVec, Bit, Point, Line, GeometryPoint, Enum, Serial, BigSerial, SmallSerial, Custom, typeFor, splitSqlType, vectorOps, indexName, isSerialExpression, parseOnType, systemNamespaceNames, isSystemNamespace, wrapRecord, parseViewDefinition, defaultNameForIdentitySequence, defaultNameForPK, defaultNameForFK, defaultNameForUnique, defaultNameForIndex, trimDefaultValueSuffix, defaultForColumn, defaultToSQL, isDefaultAction, isSerialType, defaultsCommutative, defaults;
 var init_grammar = __esm({
 "src/dialects/postgres/grammar.ts"() {
 "use strict";
@@ -7820,18 +7819,6 @@ var init_grammar = __esm({
 isSerialType = (type) => {
 return /^(?:serial|bigserial|smallserial)$/i.test(type);
 };
-mapSerialToInt = (type) => {
-switch (type) {
-case "smallserial":
-return "smallint";
-case "serial":
-return "int";
-case "bigserial":
-return "bigint";
-default:
-throw new Error(`Unsupported type: ${type}`);
-}
-};
 defaultsCommutative = (diffDef, type, dimensions) => {
 if (!diffDef) return false;
 let from = diffDef.from;
@@ -15221,7 +15208,8 @@ var init_schemaValidator = __esm({
 "singlestore",
 "gel",
 "mssql",
-"cockroach"
+"cockroach",
+"duckdb"
 ];
 dialect = enumType(dialects);
 }
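
With `"duckdb"` added to the dialect enum, a drizzle-kit config can now select it. A minimal sketch, assuming the new dialect is wired through `defineConfig` like the existing ones and that the credentials are just a `url` handed to `DuckDBInstance.create()` (the exact credential shape is not visible in this diff):

```js
// drizzle.config.js — hypothetical config for the new "duckdb" dialect.
// `dialect: "duckdb"` comes from this diff; the dbCredentials shape is assumed.
import { defineConfig } from "drizzle-kit";

export default defineConfig({
  dialect: "duckdb",
  schema: "./src/schema.ts",
  dbCredentials: {
    url: "./local.duckdb",
  },
});
```
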
@@ -23011,7 +22999,7 @@ var init_convertor = __esm({
 return [drop, add];
 });
 alterColumnConvertor = convertor("alter_column", (st) => {
-const { diff: diff2, to: column7, isEnum, wasEnum, wasSerial
+const { diff: diff2, to: column7, isEnum, wasEnum, wasSerial } = st;
 const statements = [];
 const key = column7.schema !== "public" ? `"${column7.schema}"."${column7.table}"` : `"${column7.table}"`;
 const recreateDefault = diff2.type && (isEnum || wasEnum) && diff2.$left.default;
@@ -23021,21 +23009,17 @@ var init_convertor = __esm({
 if (diff2.type) {
 const typeSchema = column7.typeSchema && column7.typeSchema !== "public" ? `"${column7.typeSchema}".` : "";
 const textProxy = wasEnum && isEnum ? "text::" : "";
-const suffix = isEnum ? ` USING "${column7.name}"::${textProxy}${typeSchema}"${column7.type}"${"[]".repeat(column7.dimensions)}` : ` USING "${column7.name}"::${
-
-if (
-
-
-
+const suffix = isEnum ? ` USING "${column7.name}"::${textProxy}${typeSchema}"${column7.type}"${"[]".repeat(column7.dimensions)}` : ` USING "${column7.name}"::${column7.type}${"[]".repeat(column7.dimensions)}`;
+let type;
+if (diff2.type) {
+type = diff2.typeSchema?.to && diff2.typeSchema.to !== "public" ? `"${diff2.typeSchema.to}"."${diff2.type.to}"` : isEnum ? `"${diff2.type.to}"` : diff2.type.to;
+} else {
+type = `${typeSchema}${column7.typeSchema ? `"${column7.type}"` : column7.type}`;
 }
-if (
+if (wasSerial) {
+statements.push(`ALTER TABLE ${key} ALTER COLUMN "${column7.name}" DROP DEFAULT`);
 const sequenceKey = column7.schema !== "public" ? `"${column7.schema}"."${column7.table}_${column7.name}_seq"` : `"${column7.table}_${column7.name}_seq"`;
-
-statements.push(`CREATE SEQUENCE ${sequenceKey};`);
-statements.push(
-`ALTER TABLE ${key} ALTER COLUMN "${column7.name}" SET DEFAULT nextval('${sequenceName}')`
-);
-statements.push(`ALTER SEQUENCE ${sequenceKey} OWNED BY "${column7.schema}"."${column7.table}"."${column7.name}";`);
+statements.push(`DROP SEQUENCE ${sequenceKey}`);
 }
 statements.push(
 `ALTER TABLE ${key} ALTER COLUMN "${column7.name}" SET DATA TYPE ${type}${"[]".repeat(column7.dimensions)}${suffix};`
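
The `alter_column` convertor now computes the target `type` up front and, when the column `wasSerial`, tears the serial machinery down instead of recreating it: it drops the column default, drops the backing `<table>_<column>_seq` sequence, and only then changes the data type. A worked example of the statements these templates produce for a `public."users"."id"` column going from `serial` to `text` (table, column, and target type are illustrative):

```js
// Expected output of the templates above for users.id: serial -> text.
const statements = [
  `ALTER TABLE "users" ALTER COLUMN "id" DROP DEFAULT`,
  `DROP SEQUENCE "users_id_seq"`,
  `ALTER TABLE "users" ALTER COLUMN "id" SET DATA TYPE text USING "id"::text;`,
];
console.log(statements.join("\n"));
```
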
@@ -24337,7 +24321,6 @@ var init_diff = __esm({
 }).map((it) => {
 const column7 = it.$right;
 const wasSerial = isSerialType(it.$left.type);
-const toSerial = !isSerialType(it.$left.type) && isSerialType(it.$right.type);
 const isEnum = ddl22.enums.one({ schema: column7.typeSchema ?? "public", name: column7.type }) !== null;
 const wasEnum = (it.type && ddl1.enums.one({ schema: column7.typeSchema ?? "public", name: it.type.from }) !== null) ?? false;
 return prepareStatement("alter_column", {
@@ -24345,8 +24328,7 @@ var init_diff = __esm({
 to: column7,
 isEnum,
 wasEnum,
-wasSerial
-toSerial
+wasSerial
 });
 });
 const createSequences = createdSequences.map((it) => prepareStatement("create_sequence", { sequence: it }));
@@ -25033,7 +25015,7 @@ var init_introspect = __esm({
 });
 }
 for (const seq of sequencesList) {
-const depend = dependList.find((it) => it.oid === seq.oid);
+const depend = dependList.find((it) => Number(it.oid) === Number(seq.oid));
 if (depend && (depend.deptype === "a" || depend.deptype === "i")) {
 continue;
 }
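
Every introspection lookup that joins rows on `oid`/`tableId`/`indexId` now wraps both sides in `Number(...)` before comparing. Strict equality never coerces, so an OID that one query returns as a string (or bigint, depending on the driver's type parsers) would silently fail to match the same OID returned as a number, and the `find(...)` would come back `undefined`. A minimal illustration of why the coercion matters (values are made up):

```js
// Strict equality does not coerce, so mixed-type OIDs never match.
console.log("16384" === 16384);                 // false
console.log(16384n === 16384);                  // false
console.log(Number("16384") === Number(16384)); // true
console.log(Number(16384n) === Number(16384));  // true
```
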
@@ -25097,22 +25079,22 @@
 continue;
 }
 const expr = serialsList.find(
-(it) => it.tableId === column7.tableId && it.ordinality === column7.ordinality
+(it) => Number(it.tableId) === Number(column7.tableId) && it.ordinality === column7.ordinality
 );
 if (expr) {
-const table6 = tablesList.find((it) => it.oid === column7.tableId);
+const table6 = tablesList.find((it) => Number(it.oid) === Number(column7.tableId));
 const isSerial = isSerialExpression(expr.expression, table6.schema);
 column7.type = isSerial ? type === "bigint" ? "bigserial" : type === "integer" ? "serial" : "smallserial" : type;
 }
 }
 for (const column7 of columnsList.filter((x6) => x6.kind === "r" || x6.kind === "p")) {
-const table6 = tablesList.find((it) => it.oid === column7.tableId);
+const table6 = tablesList.find((it) => Number(it.oid) === Number(column7.tableId));
 const enumType2 = column7.typeId in groupedEnums ? groupedEnums[column7.typeId] : column7.typeId in groupedArrEnums ? groupedArrEnums[column7.typeId] : null;
 let columnTypeMapped = enumType2 ? enumType2.name : column7.type.replaceAll("[]", "");
 columnTypeMapped = columnTypeMapped.replace("character varying", "varchar").replace(" without time zone", "").replace("character", "char").replace("geometry(Point", "geometry(point");
 columnTypeMapped = trimChar(columnTypeMapped, '"');
 const columnDefault = defaultsList.find(
-(it) => it.tableId === column7.tableId && it.ordinality === column7.ordinality
+(it) => Number(it.tableId) === Number(column7.tableId) && it.ordinality === column7.ordinality
 );
 const defaultValue = defaultForColumn(
 columnTypeMapped,
@@ -25121,10 +25103,10 @@
 Boolean(enumType2)
 );
 const unique = constraintsList.find((it) => {
-return it.type === "u" && it.tableId === column7.tableId && it.columnsOrdinals.length === 1 && it.columnsOrdinals.includes(column7.ordinality);
+return it.type === "u" && Number(it.tableId) === Number(column7.tableId) && it.columnsOrdinals.length === 1 && it.columnsOrdinals.includes(column7.ordinality);
 }) ?? null;
 const pk = constraintsList.find((it) => {
-return it.type === "p" && it.tableId === column7.tableId && it.columnsOrdinals.length === 1 && it.columnsOrdinals.includes(column7.ordinality);
+return it.type === "p" && Number(it.tableId) === Number(column7.tableId) && it.columnsOrdinals.length === 1 && it.columnsOrdinals.includes(column7.ordinality);
 }) ?? null;
 const metadata = column7.metadata;
 if (column7.generatedType === "s" && (!metadata || !metadata.expression)) {
@@ -25139,7 +25121,7 @@ ${JSON.stringify(column7.metadata)}`
 ${JSON.stringify(column7.metadata)}`
 );
 }
-const sequence = metadata?.seqId ? sequencesList.find((it) => it.oid === Number(metadata.seqId)) ?? null : null;
+const sequence = metadata?.seqId ? sequencesList.find((it) => Number(it.oid) === Number(metadata.seqId)) ?? null : null;
 columns.push({
 entityType: "columns",
 schema: table6.schema,
@@ -25169,10 +25151,12 @@ ${JSON.stringify(column7.metadata)}`
 });
 }
 for (const unique of constraintsList.filter((it) => it.type === "u")) {
-const table6 = tablesList.find((it) => it.oid === unique.tableId);
-const schema5 = namespaces.find((it) => it.oid === unique.schemaId);
+const table6 = tablesList.find((it) => Number(it.oid) === Number(unique.tableId));
+const schema5 = namespaces.find((it) => Number(it.oid) === Number(unique.schemaId));
 const columns2 = unique.columnsOrdinals.map((it) => {
-const column7 = columnsList.find(
+const column7 = columnsList.find(
+(column8) => Number(column8.tableId) === Number(unique.tableId) && column8.ordinality === it
+);
 return column7.name;
 });
 uniques.push({
@@ -25186,10 +25170,12 @@ ${JSON.stringify(column7.metadata)}`
 });
 }
 for (const pk of constraintsList.filter((it) => it.type === "p")) {
-const table6 = tablesList.find((it) => it.oid === pk.tableId);
-const schema5 = namespaces.find((it) => it.oid === pk.schemaId);
+const table6 = tablesList.find((it) => Number(it.oid) === Number(pk.tableId));
+const schema5 = namespaces.find((it) => Number(it.oid) === Number(pk.schemaId));
 const columns2 = pk.columnsOrdinals.map((it) => {
-const column7 = columnsList.find(
+const column7 = columnsList.find(
+(column8) => Number(column8.tableId) === Number(pk.tableId) && column8.ordinality === it
+);
 return column7.name;
 });
 pks.push({
@@ -25202,15 +25188,19 @@ ${JSON.stringify(column7.metadata)}`
 });
 }
 for (const fk5 of constraintsList.filter((it) => it.type === "f")) {
-const table6 = tablesList.find((it) => it.oid === fk5.tableId);
-const schema5 = namespaces.find((it) => it.oid === fk5.schemaId);
-const tableTo = tablesList.find((it) => it.oid === fk5.tableToId);
+const table6 = tablesList.find((it) => Number(it.oid) === Number(fk5.tableId));
+const schema5 = namespaces.find((it) => Number(it.oid) === Number(fk5.schemaId));
+const tableTo = tablesList.find((it) => Number(it.oid) === Number(fk5.tableToId));
 const columns2 = fk5.columnsOrdinals.map((it) => {
-const column7 = columnsList.find(
+const column7 = columnsList.find(
+(column8) => Number(column8.tableId) === Number(fk5.tableId) && column8.ordinality === it
+);
 return column7.name;
 });
 const columnsTo = fk5.columnsToOrdinals.map((it) => {
-const column7 = columnsList.find(
+const column7 = columnsList.find(
+(column8) => Number(column8.tableId) === Number(fk5.tableToId) && column8.ordinality === it
+);
 return column7.name;
 });
 fks.push({
@@ -25228,8 +25218,8 @@ ${JSON.stringify(column7.metadata)}`
 });
 }
 for (const check of constraintsList.filter((it) => it.type === "c")) {
-const table6 = tablesList.find((it) => it.oid === check.tableId);
-const schema5 = namespaces.find((it) => it.oid === check.schemaId);
+const table6 = tablesList.find((it) => Number(it.oid) === Number(check.tableId));
+const schema5 = namespaces.find((it) => Number(it.oid) === Number(check.schemaId));
 checks.push({
 entityType: "checks",
 schema: schema5.name,
@@ -25290,10 +25280,10 @@ ${JSON.stringify(column7.metadata)}`
 });
 for (const idx of idxs) {
 const { metadata } = idx;
-const forUnique = metadata.isUnique && constraintsList.some((x6) => x6.type === "u" && x6.indexId === idx.oid);
-const forPK = metadata.isPrimary && constraintsList.some((x6) => x6.type === "p" && x6.indexId === idx.oid);
+const forUnique = metadata.isUnique && constraintsList.some((x6) => x6.type === "u" && Number(x6.indexId) === Number(idx.oid));
+const forPK = metadata.isPrimary && constraintsList.some((x6) => x6.type === "p" && Number(x6.indexId) === Number(idx.oid));
 const expr = splitExpressions(metadata.expression);
-const table6 = tablesList.find((it) => it.oid === idx.metadata.tableId);
+const table6 = tablesList.find((it) => Number(it.oid) === Number(idx.metadata.tableId));
 const nonColumnsCount = metadata.columnOrdinals.reduce((acc, it) => {
 if (it === 0) acc += 1;
 return acc;
@@ -25325,7 +25315,7 @@ ${JSON.stringify(column7.metadata)}`
 k6 += 1;
 } else {
 const column7 = columnsList.find((column8) => {
-return column8.tableId === metadata.tableId && column8.ordinality === ordinal;
+return Number(column8.tableId) === Number(metadata.tableId) && column8.ordinality === ordinal;
 });
 if (!column7) throw new Error(`missing column: ${metadata.tableId}:${ordinal}`);
 const options = opts[i7];
@@ -25370,7 +25360,7 @@ ${JSON.stringify(column7.metadata)}`
 });
 }
 for (const it of columnsList.filter((x6) => x6.kind === "m" || x6.kind === "v")) {
-const view5 = viewsList.find((x6) => x6.oid === it.tableId);
+const view5 = viewsList.find((x6) => Number(x6.oid) === Number(it.tableId));
 const typeDimensions = it.type.split("[]").length - 1;
 const enumType2 = it.typeId in groupedEnums ? groupedEnums[it.typeId] : it.typeId in groupedArrEnums ? groupedArrEnums[it.typeId] : null;
 let columnTypeMapped = enumType2 ? enumType2.name : it.type.replace("[]", "");
@@ -25392,8 +25382,8 @@ ${JSON.stringify(column7.metadata)}`
 });
 }
 for (const view5 of viewsList) {
-const accessMethod = view5.accessMethod === 0 ? null : ams.find((it) => it.oid === view5.accessMethod);
-const tablespace = view5.tablespaceid === 0 ? null : tablespaces.find((it) => it.oid === view5.tablespaceid).name;
+const accessMethod = Number(view5.accessMethod) === 0 ? null : ams.find((it) => Number(it.oid) === Number(view5.accessMethod));
+const tablespace = Number(view5.tablespaceid) === 0 ? null : tablespaces.find((it) => Number(it.oid) === Number(view5.tablespaceid)).name;
 const definition = parseViewDefinition(view5.definition);
 const withOpts = wrapRecord(
 view5.options?.reduce((acc, it) => {
@@ -145263,6 +145253,7 @@ __export(connections_exports, {
 connectToSQLite: () => connectToSQLite,
 connectToSingleStore: () => connectToSingleStore,
 prepareCockroach: () => prepareCockroach,
+prepareDuckDb: () => prepareDuckDb,
 prepareGelDB: () => prepareGelDB,
 preparePostgresDB: () => preparePostgresDB
 });
@@ -145279,7 +145270,7 @@ function parseMssqlUrl(url) {
 }
 };
 }
-var import_net, ms, normalisePGliteUrl, preparePostgresDB, prepareCockroach, prepareGelDB, parseSingleStoreCredentials, connectToSingleStore, parseMysqlCredentials, connectToMySQL, parseMssqlCredentials, connectToMsSQL, prepareSqliteParams, preparePGliteParams, connectToSQLite, connectToLibSQL;
+var import_net, ms, normalisePGliteUrl, preparePostgresDB, prepareDuckDb, prepareCockroach, prepareGelDB, parseSingleStoreCredentials, connectToSingleStore, parseMysqlCredentials, connectToMySQL, parseMssqlCredentials, connectToMsSQL, prepareSqliteParams, preparePGliteParams, connectToSQLite, connectToLibSQL;
 var init_connections = __esm({
 "src/cli/connections.ts"() {
 "use strict";
@@ -145415,7 +145406,13 @@ var init_connections = __esm({
 }
 return results;
 };
-return {
+return {
+packageName: "pglite",
+query,
+proxy,
+transactionProxy,
+migrate: migrateFn
+};
 }
 assertUnreachable(driver2);
 }
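
Each connection branch in `preparePostgresDB` now returns an explicit object that includes `packageName` next to `query`, `proxy`, `transactionProxy`, and `migrate` (the PGlite, postgres, @vercel/postgres, and Neon branches below all follow the same shape). A small consumer sketch based only on the fields visible in this diff; `prepareConnection` stands in for whichever prepare function is used:

```js
// Hypothetical consumer: log which driver package was picked, then query.
async function describeConnection(prepareConnection, credentials) {
  const db = await prepareConnection(credentials);
  console.log(`driver package: ${db.packageName}`);
  return await db.query("select 1 as one");
}
```
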
@@ -145656,7 +145653,13 @@ var init_connections = __esm({
 }
 return results;
 };
-return {
+return {
+packageName: "postgres",
+query,
+proxy,
+transactionProxy,
+migrate: migrateFn
+};
 }
 if (await checkPackage("@vercel/postgres")) {
 console.log(
@@ -145737,7 +145740,13 @@ var init_connections = __esm({
 }
 return results;
 };
-return {
+return {
+packageName: "@vercel/postgres",
+query,
+proxy,
+transactionProxy,
+migrate: migrateFn
+};
 }
 if (await checkPackage("@neondatabase/serverless")) {
 console.log(
@@ -145750,7 +145759,11 @@ var init_connections = __esm({
 "'@neondatabase/serverless' can only connect to remote Neon/Vercel Postgres/Supabase instances through a websocket"
 )
 );
-const {
+const {
+Pool,
+neonConfig,
+types: pgTypes
+} = require("@neondatabase/serverless");
 const { drizzle } = require("drizzle-orm/neon-serverless");
 const { migrate } = require("drizzle-orm/neon-serverless/migrator");
 const ssl = "ssl" in credentials ? credentials.ssl === "prefer" || credentials.ssl === "require" || credentials.ssl === "allow" ? { rejectUnauthorized: false } : credentials.ssl === "verify-full" ? {} : credentials.ssl : {};
@@ -145820,7 +145833,13 @@ var init_connections = __esm({
 }
 return results;
 };
-return {
+return {
+packageName: "@neondatabase/serverless",
+query,
+proxy,
+transactionProxy,
+migrate: migrateFn
+};
 }
 if (await checkPackage("bun")) {
 console.log(withStyle.info(`Using 'bun' driver for database querying`));
@@ -145871,6 +145890,54 @@ var init_connections = __esm({
 console.warn("For the 'bun' driver, run your script using: bun --bun");
 process.exit(1);
 };
+prepareDuckDb = async (credentials) => {
+if (await checkPackage("@duckdb/node-api")) {
+console.log(
+withStyle.info(`Using '@duckdb/node-api' driver for database querying`)
+);
+const { DuckDBInstance } = require("@duckdb/node-api");
+const instance = await DuckDBInstance.create(credentials.url);
+const client = await instance.connect();
+const query = async (sql, params = []) => {
+const result2 = await client.run(sql, params);
+const rows = await result2.getRowObjectsJson();
+return rows;
+};
+const proxy = async (params) => {
+const result2 = await client.run(params.sql, params.params);
+return params.mode === "array" ? await result2.getRowsJson() : await result2.getRowObjectsJson();
+};
+const transactionProxy = async (queries) => {
+const results = [];
+try {
+await client.run("BEGIN");
+for (const query2 of queries) {
+const result2 = await client.run(query2.sql);
+results.push(await result2.getRowObjectsJson());
+}
+await client.run("COMMIT");
+} catch (error3) {
+await client.run("ROLLBACK");
+results.push(error3);
+}
+return results;
+};
+return {
+packageName: "@duckdb/node-api",
+query,
+proxy,
+transactionProxy,
+migrate: () => {
+throw new Error("DuckDB does not support migrations");
+}
+};
+}
+console.error(
+// "To connect to DuckDb database - please install either of 'duckdb', '@duckdb/node-api' drivers",
+"To connect to DuckDb database - please install '@duckdb/node-api' driver"
+);
+process.exit(1);
+};
 prepareCockroach = async (credentials) => {
 if (await checkPackage("pg")) {
 const { default: pg } = require("pg");
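
The new `prepareDuckDb` wraps `@duckdb/node-api`: it creates an instance from the credential URL, opens a connection, and maps `run(...)` results through `getRowObjectsJson()`/`getRowsJson()` into the same query/proxy/transactionProxy interface the other drivers expose; `migrate` simply throws for now. A standalone sketch of the driver calls it relies on (the database path is illustrative):

```js
// Sketch of the @duckdb/node-api calls used by prepareDuckDb above.
const { DuckDBInstance } = require("@duckdb/node-api");

async function main() {
  const instance = await DuckDBInstance.create(":memory:"); // or a file path
  const connection = await instance.connect();
  const result = await connection.run("select 42 as answer");
  console.log(await result.getRowObjectsJson()); // JSON-friendly row objects
}

main();
```
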
@@ -145923,9 +145990,7 @@ var init_connections = __esm({
 };
 return { query, proxy, migrate: migrateFn };
 }
-console.error(
-"To connect to Cockroach - please install 'pg' package"
-);
+console.error("To connect to Cockroach - please install 'pg' package");
 process.exit(1);
 };
 prepareGelDB = async (credentials) => {
@@ -145984,9 +146049,7 @@ To link your project, please refer https://docs.geldata.com/reference/cli/gel_in
 };
 return { packageName: "gel", query, proxy, transactionProxy };
 }
-console.error(
-"To connect to gel database - please install 'edgedb' driver"
-);
+console.error("To connect to gel database - please install 'edgedb' driver");
 process.exit(1);
 };
 parseSingleStoreCredentials = (credentials) => {
@@ -146438,9 +146501,7 @@ To link your project, please refer https://docs.geldata.com/reference/cli/gel_in
 migrate: migrateFn
 };
 }
-console.error(
-"To connect to MsSQL database - please install 'mssql' driver"
-);
+console.error("To connect to MsSQL database - please install 'mssql' driver");
 process.exit(1);
 };
 prepareSqliteParams = (params, driver2) => {
@@ -146545,7 +146606,10 @@ To link your project, please refer https://docs.geldata.com/reference/cli/gel_in
 await remoteCallback(query2, [], "run");
 };
 const proxy = async (params) => {
-const preparedParams = prepareSqliteParams(
+const preparedParams = prepareSqliteParams(
+params.params || [],
+"d1-http"
+);
 const result2 = await remoteCallback(
 params.sql,
 preparedParams,
@@ -146770,17 +146834,19 @@ To link your project, please refer https://docs.geldata.com/reference/cli/gel_in
 };
 const transactionProxy = async (queries) => {
 const results = [];
-const tx = sqlite.transaction(
-
-
-
-
-
-
+const tx = sqlite.transaction(
+(queries2) => {
+for (const query of queries2) {
+let result2 = [];
+if (query.method === "values" || query.method === "get" || query.method === "all") {
+result2 = sqlite.prepare(query.sql).all();
+} else {
+sqlite.prepare(query.sql).run();
+}
+results.push(result2);
 }
-results.push(result2);
 }
-
+);
 try {
 tx(queries);
 } catch (error3) {
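
The rebuilt better-sqlite3 `transactionProxy` leans on the driver's synchronous `transaction()` wrapper: `sqlite.transaction(fn)` returns a function that opens a transaction, runs `fn`, commits on success, and rolls back if `fn` throws. A minimal standalone sketch of that pattern (database path and queries are illustrative):

```js
// Minimal better-sqlite3 transaction sketch mirroring the proxy above.
const Database = require("better-sqlite3");

const sqlite = new Database(":memory:");
sqlite.prepare("create table kv (k text primary key, v text)").run();

const queries = [
  { sql: "insert into kv values ('a', '1')", method: "run" },
  { sql: "select * from kv", method: "all" },
];

const results = [];
const tx = sqlite.transaction((qs) => {
  for (const q of qs) {
    // read-style statements collect rows; write-style statements just run
    if (q.method === "all" || q.method === "get" || q.method === "values") {
      results.push(sqlite.prepare(q.sql).all());
    } else {
      sqlite.prepare(q.sql).run();
      results.push([]);
    }
  }
});

tx(queries); // a throw inside rolls the whole batch back; otherwise it commits
console.log(results);
```
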
@@ -146788,7 +146854,13 @@ To link your project, please refer https://docs.geldata.com/reference/cli/gel_in
 }
 return results;
 };
-return {
+return {
+...db,
+packageName: "better-sqlite3",
+proxy,
+transactionProxy,
+migrate: migrateFn
+};
 }
 if (await checkPackage("bun")) {
 console.log(withStyle.info(`Using 'bun' driver for database querying`));
@@ -152328,6 +152400,7 @@ var init_cors = __esm({
 // src/cli/commands/studio.ts
 var studio_exports = {};
 __export(studio_exports, {
+drizzleForDuckDb: () => drizzleForDuckDb,
 drizzleForLibSQL: () => drizzleForLibSQL,
 drizzleForMySQL: () => drizzleForMySQL,
 drizzleForPostgres: () => drizzleForPostgres,
@@ -152341,7 +152414,7 @@ __export(studio_exports, {
 prepareServer: () => prepareServer,
 prepareSingleStoreSchema: () => prepareSingleStoreSchema
 });
-var import_crypto10, import_drizzle_orm3, import_relations3, import_mssql_core2, import_mysql_core2, import_pg_core3, import_singlestore_core, import_sqlite_core2, import_fs6, import_node_https2, preparePgSchema, prepareMySqlSchema, prepareMsSqlSchema, prepareSQLiteSchema, prepareSingleStoreSchema, getCustomDefaults, drizzleForPostgres, drizzleForMySQL, drizzleForSQLite, drizzleForLibSQL, drizzleForSingleStore, extractRelations, init2, proxySchema, transactionProxySchema, benchmarkProxySchema, defaultsSchema, schema4, jsonStringify, prepareServer;
+var import_crypto10, import_drizzle_orm3, import_relations3, import_mssql_core2, import_mysql_core2, import_pg_core3, import_singlestore_core, import_sqlite_core2, import_fs6, import_node_https2, preparePgSchema, prepareMySqlSchema, prepareMsSqlSchema, prepareSQLiteSchema, prepareSingleStoreSchema, getCustomDefaults, drizzleForPostgres, drizzleForDuckDb, drizzleForMySQL, drizzleForSQLite, drizzleForLibSQL, drizzleForSingleStore, extractRelations, init2, proxySchema, transactionProxySchema, benchmarkProxySchema, defaultsSchema, schema4, jsonStringify, prepareServer;
 var init_studio = __esm({
 "src/cli/commands/studio.ts"() {
 "use strict";
@@ -152573,6 +152646,23 @@ var init_studio = __esm({
 casing: casing2
 };
 };
+drizzleForDuckDb = async (credentials) => {
+const { prepareDuckDb: prepareDuckDb2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
+const db = await prepareDuckDb2(credentials);
+const dbUrl = `duckdb://${credentials.url}`;
+const dbHash = (0, import_crypto10.createHash)("sha256").update(dbUrl).digest("hex");
+return {
+dbHash,
+dialect: "duckdb",
+driver: void 0,
+packageName: db.packageName,
+proxy: db.proxy,
+transactionProxy: db.transactionProxy,
+customDefaults: [],
+schema: {},
+relations: {}
+};
+};
 drizzleForMySQL = async (credentials, mysqlSchema, relations2, schemaFiles, casing2) => {
 const { connectToMySQL: connectToMySQL2 } = await Promise.resolve().then(() => (init_connections(), connections_exports));
 const { proxy, transactionProxy, benchmarkProxy, database, packageName } = await connectToMySQL2(credentials);