zenstack-kit 0.1.1 → 0.1.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/cli/app.d.ts.map +1 -1
- package/dist/cli/app.js +9 -28
- package/dist/cli/commands.d.ts +3 -1
- package/dist/cli/commands.d.ts.map +1 -1
- package/dist/cli/commands.js +17 -1
- package/dist/cli/index.js +0 -0
- package/dist/cli/prompts.d.ts +9 -0
- package/dist/cli/prompts.d.ts.map +1 -1
- package/dist/cli/prompts.js +57 -1
- package/package.json +1 -5
- package/dist/cli.d.ts +0 -12
- package/dist/cli.d.ts.map +0 -1
- package/dist/cli.js +0 -240
- package/dist/config-loader.d.ts +0 -6
- package/dist/config-loader.d.ts.map +0 -1
- package/dist/config-loader.js +0 -36
- package/dist/config.d.ts +0 -62
- package/dist/config.d.ts.map +0 -1
- package/dist/config.js +0 -44
- package/dist/init-prompts.d.ts +0 -13
- package/dist/init-prompts.d.ts.map +0 -1
- package/dist/init-prompts.js +0 -64
- package/dist/introspect.d.ts +0 -54
- package/dist/introspect.d.ts.map +0 -1
- package/dist/introspect.js +0 -75
- package/dist/kysely-adapter.d.ts +0 -49
- package/dist/kysely-adapter.d.ts.map +0 -1
- package/dist/kysely-adapter.js +0 -74
- package/dist/migrate-apply.d.ts +0 -18
- package/dist/migrate-apply.d.ts.map +0 -1
- package/dist/migrate-apply.js +0 -61
- package/dist/migrations.d.ts +0 -161
- package/dist/migrations.d.ts.map +0 -1
- package/dist/migrations.js +0 -620
- package/dist/prisma-migrations.d.ts +0 -160
- package/dist/prisma-migrations.d.ts.map +0 -1
- package/dist/prisma-migrations.js +0 -789
- package/dist/prompts.d.ts +0 -10
- package/dist/prompts.d.ts.map +0 -1
- package/dist/prompts.js +0 -41
- package/dist/pull.d.ts +0 -23
- package/dist/pull.d.ts.map +0 -1
- package/dist/pull.js +0 -424
- package/dist/schema-snapshot.d.ts +0 -45
- package/dist/schema-snapshot.d.ts.map +0 -1
- package/dist/schema-snapshot.js +0 -265
- package/dist/sql-compiler.d.ts +0 -74
- package/dist/sql-compiler.d.ts.map +0 -1
- package/dist/sql-compiler.js +0 -243
package/dist/schema-snapshot.js
DELETED
@@ -1,265 +0,0 @@
/**
 * Schema snapshot utilities for ZenStack schemas
 *
 * Uses ZenStack's AST to create a stable, diffable schema snapshot.
 */
import { loadDocument } from "@zenstackhq/language";
import { isDataField, isDataModel, isEnum } from "@zenstackhq/language/ast";
function getAttribute(node, name) {
    return node.attributes.find((attr) => attr.decl.$refText === name);
}
function getAttributeStringArg(attr, names) {
    if (!attr)
        return undefined;
    for (const arg of attr.args) {
        const paramName = arg.$resolvedParam?.name;
        if (paramName && names.includes(paramName)) {
            if (arg.value?.$type === "StringLiteral") {
                return arg.value.value;
            }
        }
    }
    const firstArg = attr.args[0];
    if (firstArg?.value?.$type === "StringLiteral") {
        return firstArg.value.value;
    }
    return undefined;
}
function getAttributeArrayRefs(attr, name) {
    if (!attr)
        return undefined;
    const arg = attr.args.find((item) => item.$resolvedParam?.name === name) ?? attr.args[0];
    if (!arg?.value || arg.value.$type !== "ArrayExpr") {
        return undefined;
    }
    const refs = arg.value.items
        .filter((item) => item.$type === "ReferenceExpr")
        .map((item) => item.target.$refText);
    return refs.length > 0 ? refs : undefined;
}
function getDefaultValue(field) {
    const attr = getAttribute(field, "@default");
    if (!attr) {
        return { hasDefault: false };
    }
    const valueArg = attr.args.find((arg) => arg.$resolvedParam?.name === "value") ?? attr.args[0];
    const expr = valueArg?.value;
    if (!expr) {
        return { hasDefault: true };
    }
    if (expr.$type === "StringLiteral") {
        return { hasDefault: true, default: expr.value };
    }
    if (expr.$type === "NumberLiteral") {
        return { hasDefault: true, default: Number(expr.value) };
    }
    if (expr.$type === "BooleanLiteral") {
        return { hasDefault: true, default: expr.value };
    }
    return { hasDefault: true };
}
function getTableName(model) {
    const mapAttr = getAttribute(model, "@@map");
    const mapped = getAttributeStringArg(mapAttr, ["name", "map"]);
    return mapped ?? model.name.toLowerCase();
}
function getColumnName(field) {
    const mapAttr = getAttribute(field, "@map");
    return getAttributeStringArg(mapAttr, ["name", "map"]) ?? field.name;
}
function mapFieldTypeToSQL(fieldType) {
    const typeMap = {
        String: "text",
        Int: "integer",
        Float: "double precision",
        Boolean: "boolean",
        DateTime: "timestamp",
        BigInt: "bigint",
        Decimal: "decimal",
        Json: "json",
        Bytes: "blob",
    };
    return typeMap[fieldType] ?? "text";
}
/**
 * Prisma-compatible constraint naming conventions
 *
 * Prisma uses PostgreSQL-aligned naming:
 * - Primary Key: {Table}_pkey
 * - Unique: {Table}_{columns}_key
 * - Index: {Table}_{columns}_idx
 * - Foreign Key: {Table}_{columns}_fkey
 */
function buildPrimaryKeyName(tableName, explicitName) {
    return explicitName ?? `${tableName}_pkey`;
}
function buildUniqueName(tableName, columns, explicitName) {
    if (explicitName)
        return explicitName;
    return `${tableName}_${columns.join("_")}_key`;
}
function buildIndexName(tableName, columns, explicitName) {
    if (explicitName)
        return explicitName;
    return `${tableName}_${columns.join("_")}_idx`;
}
function buildForeignKeyName(tableName, columns, _referencedTable, _referencedColumns, explicitName) {
    if (explicitName)
        return explicitName;
    // Prisma uses {Table}_{columns}_fkey (doesn't include referenced table/columns in name)
    return `${tableName}_${columns.join("_")}_fkey`;
}
function getFieldType(field) {
    const ref = field.type.reference?.ref;
    if (ref && isDataModel(ref)) {
        return { type: ref.name, isRelation: true };
    }
    if (ref && isEnum(ref)) {
        return { type: ref.name, isRelation: false };
    }
    return { type: field.type.type ?? "String", isRelation: false };
}
function getRelationFieldNames(field) {
    const relationAttr = getAttribute(field, "@relation");
    if (!relationAttr)
        return null;
    const fields = getAttributeArrayRefs(relationAttr, "fields");
    const references = getAttributeArrayRefs(relationAttr, "references");
    if (!fields || !references)
        return null;
    const mapName = getAttributeStringArg(relationAttr, ["map", "name"]);
    return { fields, references, mapName };
}
function buildFieldNameMap(model) {
    const map = new Map();
    for (const field of model.fields) {
        if (!isDataField(field))
            continue;
        map.set(field.name, getColumnName(field));
    }
    return map;
}
function parseModel(model) {
    const tableName = getTableName(model);
    const columns = [];
    const fieldNameMap = buildFieldNameMap(model);
    for (const field of model.fields) {
        if (!isDataField(field))
            continue;
        const typeInfo = getFieldType(field);
        if (typeInfo.isRelation) {
            continue;
        }
        const defaultInfo = getDefaultValue(field);
        const columnName = getColumnName(field);
        const sqlType = mapFieldTypeToSQL(typeInfo.type);
        columns.push({
            name: columnName,
            type: sqlType,
            notNull: !field.type.optional,
            isArray: field.type.array ?? false,
            default: defaultInfo.default,
        });
    }
    const modelIdAttr = getAttribute(model, "@@id");
    const modelIdFields = getAttributeArrayRefs(modelIdAttr, "fields");
    const modelIdName = getAttributeStringArg(modelIdAttr, ["name", "map"]);
    const primaryKeyColumns = modelIdFields?.map((name) => fieldNameMap.get(name) ?? name) ?? [];
    const fieldIdColumns = model.fields
        .filter((field) => isDataField(field))
        .filter((field) => !!getAttribute(field, "@id"))
        .map((field) => getColumnName(field));
    const resolvedPrimaryKeyColumns = primaryKeyColumns.length > 0 ? primaryKeyColumns : fieldIdColumns;
    const primaryKey = resolvedPrimaryKeyColumns.length > 0
        ? {
            name: buildPrimaryKeyName(tableName, modelIdName),
            columns: resolvedPrimaryKeyColumns,
        }
        : undefined;
    const uniqueConstraints = [];
    const uniqueAttrs = model.attributes.filter((attr) => attr.decl.$refText === "@@unique");
    for (const attr of uniqueAttrs) {
        const columns = getAttributeArrayRefs(attr, "fields");
        if (!columns || columns.length === 0)
            continue;
        const resolvedColumns = columns.map((name) => fieldNameMap.get(name) ?? name);
        const explicitName = getAttributeStringArg(attr, ["name", "map"]);
        uniqueConstraints.push({
            name: buildUniqueName(tableName, resolvedColumns, explicitName),
            columns: resolvedColumns,
        });
    }
    for (const field of model.fields) {
        if (!isDataField(field))
            continue;
        if (!getAttribute(field, "@unique"))
            continue;
        const columnName = getColumnName(field);
        const constraintName = buildUniqueName(tableName, [columnName]);
        if (!uniqueConstraints.some((constraint) => constraint.name === constraintName)) {
            uniqueConstraints.push({ name: constraintName, columns: [columnName] });
        }
    }
    const indexes = [];
    const indexAttrs = model.attributes.filter((attr) => attr.decl.$refText === "@@index");
    for (const attr of indexAttrs) {
        const columns = getAttributeArrayRefs(attr, "fields");
        if (!columns || columns.length === 0)
            continue;
        const resolvedColumns = columns.map((name) => fieldNameMap.get(name) ?? name);
        const explicitName = getAttributeStringArg(attr, ["name", "map"]);
        indexes.push({
            name: buildIndexName(tableName, resolvedColumns, explicitName),
            columns: resolvedColumns,
        });
    }
    const foreignKeys = [];
    for (const field of model.fields) {
        if (!isDataField(field))
            continue;
        const relation = getRelationFieldNames(field);
        if (!relation)
            continue;
        const refModel = field.type.reference?.ref;
        if (!refModel || !isDataModel(refModel))
            continue;
        const referencedTable = getTableName(refModel);
        const referencedFieldMap = buildFieldNameMap(refModel);
        const referencedColumnNames = relation.references.map((name) => referencedFieldMap.get(name) ?? name);
        const columnNames = relation.fields.map((name) => fieldNameMap.get(name) ?? name);
        foreignKeys.push({
            name: buildForeignKeyName(tableName, columnNames, referencedTable, referencedColumnNames, relation.mapName),
            columns: columnNames,
            referencedTable,
            referencedColumns: referencedColumnNames,
        });
    }
    const sortedColumns = columns.sort((a, b) => a.name.localeCompare(b.name));
    return {
        name: tableName,
        columns: sortedColumns,
        primaryKey,
        uniqueConstraints: uniqueConstraints.sort((a, b) => a.name.localeCompare(b.name)),
        indexes: indexes.sort((a, b) => a.name.localeCompare(b.name)),
        foreignKeys: foreignKeys.sort((a, b) => a.name.localeCompare(b.name)),
    };
}
export async function generateSchemaSnapshot(schemaPath) {
    const loadResult = await loadDocument(schemaPath);
    if (!loadResult.success) {
        const messages = loadResult.errors.map((error) => String(error)).join("\n");
        throw new Error(`Failed to load schema:\n${messages}`);
    }
    const dataModels = loadResult.model.declarations.filter(isDataModel);
    const tables = dataModels
        .map((model) => parseModel(model))
        .sort((a, b) => a.name.localeCompare(b.name));
    return { tables };
}
export function createSnapshot(schema) {
    return {
        version: 2,
        createdAt: new Date().toISOString(),
        schema,
    };
}
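For reference, the removed snapshot module exposed two entry points: generateSchemaSnapshot, which parses a ZModel file into a table-level description (columns, primary key, unique constraints, indexes, foreign keys), and createSnapshot, which wraps that description in a versioned envelope. A minimal sketch of how the two were combined in 0.1.1 follows; the schema path, output file, and import specifier are illustrative assumptions, not taken from the package:

import { writeFile } from "node:fs/promises";
// Illustrative import path; inside the package this module lived at dist/schema-snapshot.js.
import { generateSchemaSnapshot, createSnapshot } from "zenstack-kit/dist/schema-snapshot.js";

// Parse the ZModel schema into { tables: [...] } (sorted, diff-friendly).
const schema = await generateSchemaSnapshot("./zenstack/schema.zmodel");

// Wrap it as { version: 2, createdAt, schema } and persist it for later diffing.
const snapshot = createSnapshot(schema);
await writeFile("./zenstack/snapshot.json", JSON.stringify(snapshot, null, 2));
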
package/dist/sql-compiler.d.ts
DELETED
@@ -1,74 +0,0 @@
/**
 * SQL Compiler - Generates raw SQL from schema operations using Kysely's compile()
 *
 * Uses Kysely with DummyDriver to compile schema operations to dialect-specific SQL
 * without requiring a database connection.
 */
import type { KyselyDialect } from "./kysely-adapter.js";
import type { SchemaTable, SchemaColumn } from "./schema-snapshot.js";
export interface SqlMigration {
    up: string[];
    down: string[];
}
export interface CompileSqlOptions {
    dialect: KyselyDialect;
}
/**
 * Compile a CREATE TABLE statement to SQL
 */
export declare function compileCreateTable(model: SchemaTable, options: CompileSqlOptions): string;
/**
 * Compile a DROP TABLE statement to SQL
 */
export declare function compileDropTable(tableName: string, options: CompileSqlOptions): string;
/**
 * Compile an ADD COLUMN statement to SQL
 */
export declare function compileAddColumn(tableName: string, column: SchemaColumn, options: CompileSqlOptions): string;
/**
 * Compile a DROP COLUMN statement to SQL
 */
export declare function compileDropColumn(tableName: string, columnName: string, options: CompileSqlOptions): string;
/**
 * Compile a RENAME TABLE statement to SQL
 */
export declare function compileRenameTable(fromName: string, toName: string, options: CompileSqlOptions): string;
/**
 * Compile a RENAME COLUMN statement to SQL
 */
export declare function compileRenameColumn(tableName: string, fromName: string, toName: string, options: CompileSqlOptions): string;
/**
 * Compile a CREATE INDEX statement to SQL
 */
export declare function compileCreateIndex(tableName: string, indexName: string, columns: string[], options: CompileSqlOptions): string;
/**
 * Compile a DROP INDEX statement to SQL
 */
export declare function compileDropIndex(indexName: string, options: CompileSqlOptions): string;
/**
 * Compile an ADD CONSTRAINT (unique) statement to SQL
 */
export declare function compileAddUniqueConstraint(tableName: string, constraintName: string, columns: string[], options: CompileSqlOptions): string;
/**
 * Compile a DROP CONSTRAINT statement to SQL
 */
export declare function compileDropConstraint(tableName: string, constraintName: string, options: CompileSqlOptions): string;
/**
 * Compile an ADD FOREIGN KEY CONSTRAINT statement to SQL
 */
export declare function compileAddForeignKeyConstraint(tableName: string, constraintName: string, columns: string[], referencedTable: string, referencedColumns: string[], options: CompileSqlOptions): string;
/**
 * Compile an ADD PRIMARY KEY CONSTRAINT statement to SQL
 */
export declare function compileAddPrimaryKeyConstraint(tableName: string, constraintName: string, columns: string[], options: CompileSqlOptions): string;
/**
 * Compile ALTER COLUMN statements for type/nullability/default changes
 */
export declare function compileAlterColumn(tableName: string, columnName: string, changes: {
    setType?: string;
    setNotNull?: boolean;
    dropNotNull?: boolean;
    setDefault?: string | number | boolean;
    dropDefault?: boolean;
}, options: CompileSqlOptions): string[];
//# sourceMappingURL=sql-compiler.d.ts.map
package/dist/sql-compiler.d.ts.map
DELETED
@@ -1 +0,0 @@
{"version":3,"file":"sql-compiler.d.ts","sourceRoot":"","sources":["../src/sql-compiler.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAgBH,OAAO,KAAK,EAAE,aAAa,EAAE,MAAM,qBAAqB,CAAC;AACzD,OAAO,KAAK,EAAE,WAAW,EAAE,YAAY,EAAE,MAAM,sBAAsB,CAAC;AAoCtE,MAAM,WAAW,YAAY;IAC3B,EAAE,EAAE,MAAM,EAAE,CAAC;IACb,IAAI,EAAE,MAAM,EAAE,CAAC;CAChB;AAED,MAAM,WAAW,iBAAiB;IAChC,OAAO,EAAE,aAAa,CAAC;CACxB;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAChC,KAAK,EAAE,WAAW,EAClB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAyCR;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAGR;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,MAAM,EACjB,MAAM,EAAE,YAAY,EACpB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAkBR;AAED;;GAEG;AACH,wBAAgB,iBAAiB,CAC/B,SAAS,EAAE,MAAM,EACjB,UAAU,EAAE,MAAM,EAClB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAGR;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAChC,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAGR;AAED;;GAEG;AACH,wBAAgB,mBAAmB,CACjC,SAAS,EAAE,MAAM,EACjB,QAAQ,EAAE,MAAM,EAChB,MAAM,EAAE,MAAM,EACd,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAKR;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAChC,SAAS,EAAE,MAAM,EACjB,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAOR;AAED;;GAEG;AACH,wBAAgB,gBAAgB,CAC9B,SAAS,EAAE,MAAM,EACjB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAGR;AAED;;GAEG;AACH,wBAAgB,0BAA0B,CACxC,SAAS,EAAE,MAAM,EACjB,cAAc,EAAE,MAAM,EACtB,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAQR;AAED;;GAEG;AACH,wBAAgB,qBAAqB,CACnC,SAAS,EAAE,MAAM,EACjB,cAAc,EAAE,MAAM,EACtB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAKR;AAED;;GAEG;AACH,wBAAgB,8BAA8B,CAC5C,SAAS,EAAE,MAAM,EACjB,cAAc,EAAE,MAAM,EACtB,OAAO,EAAE,MAAM,EAAE,EACjB,eAAe,EAAE,MAAM,EACvB,iBAAiB,EAAE,MAAM,EAAE,EAC3B,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAaR;AAED;;GAEG;AACH,wBAAgB,8BAA8B,CAC5C,SAAS,EAAE,MAAM,EACjB,cAAc,EAAE,MAAM,EACtB,OAAO,EAAE,MAAM,EAAE,EACjB,OAAO,EAAE,iBAAiB,GACzB,MAAM,CAQR;AAED;;GAEG;AACH,wBAAgB,kBAAkB,CAChC,SAAS,EAAE,MAAM,EACjB,UAAU,EAAE,MAAM,EAClB,OAAO,EAAE;IACP,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,UAAU,CAAC,EAAE,MAAM,GAAG,MAAM,GAAG,OAAO,CAAC;IACvC,WAAW,CAAC,EAAE,OAAO,CAAC;CACvB,EACD,OAAO,EAAE,iBAAiB,GACzB,MAAM,EAAE,CAqDV"}
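The declaration file above shows the shape of the removed compiler API: every operation takes a CompileSqlOptions with a dialect and returns SQL text, and compileAlterColumn returns one statement per requested change. A short sketch of a call matching the declared compileAlterColumn signature; the table and column names are invented for illustration:

// Hypothetical usage matching the declared signature; "user" and "email" are made up.
import { compileAlterColumn } from "./sql-compiler.js";

const statements = compileAlterColumn(
    "user",
    "email",
    { setType: "text", setNotNull: true, dropDefault: true },
    { dialect: "postgres" },
);
// Yields one ALTER TABLE statement per change; the exact SQL text comes from
// Kysely's PostgresQueryCompiler.
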
package/dist/sql-compiler.js
DELETED
@@ -1,243 +0,0 @@
/**
 * SQL Compiler - Generates raw SQL from schema operations using Kysely's compile()
 *
 * Uses Kysely with DummyDriver to compile schema operations to dialect-specific SQL
 * without requiring a database connection.
 */
import { Kysely, DummyDriver, SqliteAdapter, SqliteIntrospector, SqliteQueryCompiler, PostgresAdapter, PostgresIntrospector, PostgresQueryCompiler, MysqlAdapter, MysqlIntrospector, MysqlQueryCompiler, sql, } from "kysely";
/**
 * Create a Kysely instance configured for SQL compilation only (no actual DB connection)
 */
function createCompilerDb(dialect) {
    if (dialect === "sqlite") {
        return new Kysely({
            dialect: {
                createAdapter: () => new SqliteAdapter(),
                createDriver: () => new DummyDriver(),
                createIntrospector: (db) => new SqliteIntrospector(db),
                createQueryCompiler: () => new SqliteQueryCompiler(),
            },
        });
    }
    else if (dialect === "postgres") {
        return new Kysely({
            dialect: {
                createAdapter: () => new PostgresAdapter(),
                createDriver: () => new DummyDriver(),
                createIntrospector: (db) => new PostgresIntrospector(db),
                createQueryCompiler: () => new PostgresQueryCompiler(),
            },
        });
    }
    else {
        return new Kysely({
            dialect: {
                createAdapter: () => new MysqlAdapter(),
                createDriver: () => new DummyDriver(),
                createIntrospector: (db) => new MysqlIntrospector(db),
                createQueryCompiler: () => new MysqlQueryCompiler(),
            },
        });
    }
}
/**
 * Compile a CREATE TABLE statement to SQL
 */
export function compileCreateTable(model, options) {
    const db = createCompilerDb(options.dialect);
    let builder = db.schema.createTable(model.name);
    for (const column of model.columns) {
        const columnType = mapColumnType(column.type, options.dialect);
        builder = builder.addColumn(column.name, sql.raw(columnType), (cb) => {
            if (column.notNull) {
                cb = cb.notNull();
            }
            if (column.default !== undefined) {
                cb = cb.defaultTo(sql.raw(formatDefault(column.default, options.dialect)));
            }
            return cb;
        });
    }
    // Add primary key constraint
    if (model.primaryKey) {
        builder = builder.addPrimaryKeyConstraint(model.primaryKey.name, model.primaryKey.columns);
    }
    // Add unique constraints
    for (const unique of model.uniqueConstraints) {
        builder = builder.addUniqueConstraint(unique.name, unique.columns);
    }
    // Add foreign key constraints
    for (const fk of model.foreignKeys) {
        builder = builder.addForeignKeyConstraint(fk.name, fk.columns, fk.referencedTable, fk.referencedColumns);
    }
    return builder.compile().sql + ";";
}
/**
 * Compile a DROP TABLE statement to SQL
 */
export function compileDropTable(tableName, options) {
    const db = createCompilerDb(options.dialect);
    return db.schema.dropTable(tableName).ifExists().compile().sql + ";";
}
/**
 * Compile an ADD COLUMN statement to SQL
 */
export function compileAddColumn(tableName, column, options) {
    const db = createCompilerDb(options.dialect);
    const columnType = mapColumnType(column.type, options.dialect);
    return (db.schema
        .alterTable(tableName)
        .addColumn(column.name, sql.raw(columnType), (cb) => {
        if (column.notNull) {
            cb = cb.notNull();
        }
        if (column.default !== undefined) {
            cb = cb.defaultTo(sql.raw(formatDefault(column.default, options.dialect)));
        }
        return cb;
    })
        .compile().sql + ";");
}
/**
 * Compile a DROP COLUMN statement to SQL
 */
export function compileDropColumn(tableName, columnName, options) {
    const db = createCompilerDb(options.dialect);
    return db.schema.alterTable(tableName).dropColumn(columnName).compile().sql + ";";
}
/**
 * Compile a RENAME TABLE statement to SQL
 */
export function compileRenameTable(fromName, toName, options) {
    const db = createCompilerDb(options.dialect);
    return db.schema.alterTable(fromName).renameTo(toName).compile().sql + ";";
}
/**
 * Compile a RENAME COLUMN statement to SQL
 */
export function compileRenameColumn(tableName, fromName, toName, options) {
    const db = createCompilerDb(options.dialect);
    return (db.schema.alterTable(tableName).renameColumn(fromName, toName).compile().sql + ";");
}
/**
 * Compile a CREATE INDEX statement to SQL
 */
export function compileCreateIndex(tableName, indexName, columns, options) {
    const db = createCompilerDb(options.dialect);
    let builder = db.schema.createIndex(indexName).on(tableName);
    for (const col of columns) {
        builder = builder.column(col);
    }
    return builder.compile().sql + ";";
}
/**
 * Compile a DROP INDEX statement to SQL
 */
export function compileDropIndex(indexName, options) {
    const db = createCompilerDb(options.dialect);
    return db.schema.dropIndex(indexName).compile().sql + ";";
}
/**
 * Compile an ADD CONSTRAINT (unique) statement to SQL
 */
export function compileAddUniqueConstraint(tableName, constraintName, columns, options) {
    const db = createCompilerDb(options.dialect);
    return (db.schema
        .alterTable(tableName)
        .addUniqueConstraint(constraintName, columns)
        .compile().sql + ";");
}
/**
 * Compile a DROP CONSTRAINT statement to SQL
 */
export function compileDropConstraint(tableName, constraintName, options) {
    const db = createCompilerDb(options.dialect);
    return (db.schema.alterTable(tableName).dropConstraint(constraintName).compile().sql + ";");
}
/**
 * Compile an ADD FOREIGN KEY CONSTRAINT statement to SQL
 */
export function compileAddForeignKeyConstraint(tableName, constraintName, columns, referencedTable, referencedColumns, options) {
    const db = createCompilerDb(options.dialect);
    return (db.schema
        .alterTable(tableName)
        .addForeignKeyConstraint(constraintName, columns, referencedTable, referencedColumns)
        .compile().sql + ";");
}
/**
 * Compile an ADD PRIMARY KEY CONSTRAINT statement to SQL
 */
export function compileAddPrimaryKeyConstraint(tableName, constraintName, columns, options) {
    const db = createCompilerDb(options.dialect);
    return (db.schema
        .alterTable(tableName)
        .addPrimaryKeyConstraint(constraintName, columns)
        .compile().sql + ";");
}
/**
 * Compile ALTER COLUMN statements for type/nullability/default changes
 */
export function compileAlterColumn(tableName, columnName, changes, options) {
    const db = createCompilerDb(options.dialect);
    const statements = [];
    if (changes.setType) {
        const columnType = mapColumnType(changes.setType, options.dialect);
        statements.push(db.schema
            .alterTable(tableName)
            .alterColumn(columnName, (ac) => ac.setDataType(columnType))
            .compile().sql + ";");
    }
    if (changes.setNotNull) {
        statements.push(db.schema
            .alterTable(tableName)
            .alterColumn(columnName, (ac) => ac.setNotNull())
            .compile().sql + ";");
    }
    if (changes.dropNotNull) {
        statements.push(db.schema
            .alterTable(tableName)
            .alterColumn(columnName, (ac) => ac.dropNotNull())
            .compile().sql + ";");
    }
    if (changes.setDefault !== undefined) {
        statements.push(db.schema
            .alterTable(tableName)
            .alterColumn(columnName, (ac) => ac.setDefault(sql.raw(formatDefault(changes.setDefault, options.dialect))))
            .compile().sql + ";");
    }
    if (changes.dropDefault) {
        statements.push(db.schema
            .alterTable(tableName)
            .alterColumn(columnName, (ac) => ac.dropDefault())
            .compile().sql + ";");
    }
    return statements;
}
/**
 * Map our internal type names to dialect-specific SQL types
 */
function mapColumnType(type, dialect) {
    // Most types are already SQL types from our snapshot, just return as-is
    // The Kysely compiler will handle dialect-specific adjustments
    return type;
}
/**
 * Format a default value for SQL
 */
function formatDefault(value, dialect) {
    if (typeof value === "string") {
        // Check if it's a function call like now() or autoincrement()
        if (/^\w+\([^)]*\)$/.test(value)) {
            return value;
        }
        // Escape string values
        return `'${value.replace(/'/g, "''")}'`;
    }
    if (typeof value === "boolean") {
        if (dialect === "sqlite") {
            return value ? "1" : "0";
        }
        return value ? "true" : "false";
    }
    return String(value);
}
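Taken together with schema-snapshot.js above, the removed compiler turned a parsed table description into dialect-specific DDL without opening a database connection (the DummyDriver-backed Kysely instance only compiles, it never executes). A minimal sketch under those assumptions, with an invented table object shaped like the output of parseModel():

import { compileCreateTable, compileCreateIndex } from "./sql-compiler.js";

// Invented example table in the shape produced by parseModel() in schema-snapshot.js.
const userTable = {
    name: "user",
    columns: [
        { name: "id", type: "text", notNull: true, isArray: false, default: undefined },
        { name: "email", type: "text", notNull: true, isArray: false, default: undefined },
    ],
    primaryKey: { name: "user_pkey", columns: ["id"] },
    uniqueConstraints: [{ name: "user_email_key", columns: ["email"] }],
    indexes: [{ name: "user_email_idx", columns: ["email"] }],
    foreignKeys: [],
};

// No database is touched: only SQL text is returned.
const createSql = compileCreateTable(userTable, { dialect: "postgres" });
const indexSql = compileCreateIndex("user", "user_email_idx", ["email"], { dialect: "postgres" });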