zenstack-kit 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +313 -0
- package/dist/cli/app.d.ts +12 -0
- package/dist/cli/app.d.ts.map +1 -0
- package/dist/cli/app.js +253 -0
- package/dist/cli/commands.d.ts +70 -0
- package/dist/cli/commands.d.ts.map +1 -0
- package/dist/cli/commands.js +308 -0
- package/dist/cli/index.d.ts +12 -0
- package/dist/cli/index.d.ts.map +1 -0
- package/dist/cli/index.js +12 -0
- package/dist/cli/prompt-provider.d.ts +10 -0
- package/dist/cli/prompt-provider.d.ts.map +1 -0
- package/dist/cli/prompt-provider.js +41 -0
- package/dist/cli/prompts.d.ts +27 -0
- package/dist/cli/prompts.d.ts.map +1 -0
- package/dist/cli/prompts.js +133 -0
- package/dist/cli.d.ts +12 -0
- package/dist/cli.d.ts.map +1 -0
- package/dist/cli.js +240 -0
- package/dist/config/index.d.ts +96 -0
- package/dist/config/index.d.ts.map +1 -0
- package/dist/config/index.js +48 -0
- package/dist/config/loader.d.ts +11 -0
- package/dist/config/loader.d.ts.map +1 -0
- package/dist/config/loader.js +44 -0
- package/dist/config-loader.d.ts +6 -0
- package/dist/config-loader.d.ts.map +1 -0
- package/dist/config-loader.js +36 -0
- package/dist/config.d.ts +62 -0
- package/dist/config.d.ts.map +1 -0
- package/dist/config.js +44 -0
- package/dist/index.d.ts +19 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +23 -0
- package/dist/init-prompts.d.ts +13 -0
- package/dist/init-prompts.d.ts.map +1 -0
- package/dist/init-prompts.js +64 -0
- package/dist/introspect.d.ts +54 -0
- package/dist/introspect.d.ts.map +1 -0
- package/dist/introspect.js +75 -0
- package/dist/kysely-adapter.d.ts +49 -0
- package/dist/kysely-adapter.d.ts.map +1 -0
- package/dist/kysely-adapter.js +74 -0
- package/dist/migrate-apply.d.ts +18 -0
- package/dist/migrate-apply.d.ts.map +1 -0
- package/dist/migrate-apply.js +61 -0
- package/dist/migrate.d.ts +108 -0
- package/dist/migrate.d.ts.map +1 -0
- package/dist/migrate.js +127 -0
- package/dist/migrations/apply.d.ts +18 -0
- package/dist/migrations/apply.d.ts.map +1 -0
- package/dist/migrations/apply.js +61 -0
- package/dist/migrations/diff.d.ts +161 -0
- package/dist/migrations/diff.d.ts.map +1 -0
- package/dist/migrations/diff.js +620 -0
- package/dist/migrations/prisma.d.ts +193 -0
- package/dist/migrations/prisma.d.ts.map +1 -0
- package/dist/migrations/prisma.js +929 -0
- package/dist/migrations.d.ts +161 -0
- package/dist/migrations.d.ts.map +1 -0
- package/dist/migrations.js +620 -0
- package/dist/prisma-migrations.d.ts +160 -0
- package/dist/prisma-migrations.d.ts.map +1 -0
- package/dist/prisma-migrations.js +789 -0
- package/dist/prompts.d.ts +10 -0
- package/dist/prompts.d.ts.map +1 -0
- package/dist/prompts.js +41 -0
- package/dist/pull.d.ts +23 -0
- package/dist/pull.d.ts.map +1 -0
- package/dist/pull.js +424 -0
- package/dist/schema/introspect.d.ts +54 -0
- package/dist/schema/introspect.d.ts.map +1 -0
- package/dist/schema/introspect.js +75 -0
- package/dist/schema/pull.d.ts +23 -0
- package/dist/schema/pull.d.ts.map +1 -0
- package/dist/schema/pull.js +424 -0
- package/dist/schema/snapshot.d.ts +46 -0
- package/dist/schema/snapshot.d.ts.map +1 -0
- package/dist/schema/snapshot.js +278 -0
- package/dist/schema-snapshot.d.ts +45 -0
- package/dist/schema-snapshot.d.ts.map +1 -0
- package/dist/schema-snapshot.js +265 -0
- package/dist/sql/compiler.d.ts +74 -0
- package/dist/sql/compiler.d.ts.map +1 -0
- package/dist/sql/compiler.js +270 -0
- package/dist/sql/kysely-adapter.d.ts +49 -0
- package/dist/sql/kysely-adapter.d.ts.map +1 -0
- package/dist/sql/kysely-adapter.js +74 -0
- package/dist/sql-compiler.d.ts +74 -0
- package/dist/sql-compiler.d.ts.map +1 -0
- package/dist/sql-compiler.js +243 -0
- package/package.json +81 -0
package/dist/prompts.d.ts
ADDED
@@ -0,0 +1,10 @@
+/**
+ * Prompt utilities with injectable provider for tests.
+ */
+export interface PromptProvider {
+    question(message: string): Promise<string>;
+}
+export declare function createDefaultPromptProvider(): PromptProvider;
+export declare function setPromptProvider(provider: PromptProvider | null): void;
+export declare function getPromptProvider(): PromptProvider;
+//# sourceMappingURL=prompts.d.ts.map

package/dist/prompts.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"prompts.d.ts","sourceRoot":"","sources":["../src/prompts.ts"],"names":[],"mappings":"AAAA;;GAEG;AAKH,MAAM,WAAW,cAAc;IAC7B,QAAQ,CAAC,OAAO,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC,CAAC;CAC5C;AAED,wBAAgB,2BAA2B,IAAI,cAAc,CAS5D;AAID,wBAAgB,iBAAiB,CAAC,QAAQ,EAAE,cAAc,GAAG,IAAI,GAAG,IAAI,CAEvE;AAED,wBAAgB,iBAAiB,IAAI,cAAc,CAsBlD"}

package/dist/prompts.js
ADDED
@@ -0,0 +1,41 @@
+/**
+ * Prompt utilities with injectable provider for tests.
+ */
+import { createInterface } from "readline/promises";
+import { stdin as input, stdout as output } from "process";
+export function createDefaultPromptProvider() {
+    return {
+        async question(message) {
+            const rl = createInterface({ input, output });
+            const answer = await rl.question(message);
+            rl.close();
+            return answer;
+        },
+    };
+}
+let currentProvider = null;
+export function setPromptProvider(provider) {
+    currentProvider = provider;
+}
+export function getPromptProvider() {
+    if (currentProvider) {
+        return currentProvider;
+    }
+    const envAnswers = process.env.ZENSTACK_KIT_PROMPT_ANSWERS;
+    if (envAnswers) {
+        try {
+            const parsed = JSON.parse(envAnswers);
+            const queue = Array.isArray(parsed) ? [...parsed] : [];
+            currentProvider = {
+                async question() {
+                    return queue.shift() ?? "";
+                },
+            };
+            return currentProvider;
+        }
+        catch {
+            return createDefaultPromptProvider();
+        }
+    }
+    return createDefaultPromptProvider();
+}
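
A short usage sketch for the provider hooks above: in tests, a scripted provider can be injected with setPromptProvider, and getPromptProvider otherwise falls back to answers supplied via the ZENSTACK_KIT_PROMPT_ANSWERS environment variable (a JSON array consumed in order) before defaulting to the interactive readline provider. The import specifier below is an assumption, not a documented entry point.

```ts
// Sketch only: deep-import path is assumed, not confirmed by the package exports.
import { setPromptProvider, getPromptProvider, type PromptProvider } from "zenstack-kit/dist/prompts.js";

// Inject a scripted provider so CLI prompts resolve without a TTY.
const scripted: PromptProvider = {
  async question() {
    return "yes";
  },
};
setPromptProvider(scripted);

// Answers can also come from the environment, e.g.
// ZENSTACK_KIT_PROMPT_ANSWERS='["sqlite","yes"]' — consumed in order by getPromptProvider().
const provider = getPromptProvider();
const answer = await provider.question("Use defaults? ");

// Reset so the default readline-based provider is used again.
setPromptProvider(null);
```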

package/dist/pull.d.ts
ADDED
@@ -0,0 +1,23 @@
+/**
+ * Database pull utilities
+ *
+ * Uses Kysely introspection to generate a ZenStack schema from a live database.
+ */
+import { type KyselyDialect } from "./kysely-adapter.js";
+export interface PullOptions {
+    /** Database dialect */
+    dialect: KyselyDialect;
+    /** Database connection URL */
+    connectionUrl?: string;
+    /** SQLite database path (for SQLite dialect) */
+    databasePath?: string;
+    /** Output path for schema */
+    outputPath: string;
+}
+export interface PullResult {
+    outputPath: string;
+    schema: string;
+    tableCount: number;
+}
+export declare function pullSchema(options: PullOptions): Promise<PullResult>;
+//# sourceMappingURL=pull.d.ts.map
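
A rough usage sketch of the pullSchema API declared above. The import specifier and file paths are assumptions; the SQLite options are shown because the implementation (pull.js below) branches on "sqlite" | "postgres" | "mysql".

```ts
// Sketch only: import path, database location, and output file name are assumptions.
import { pullSchema } from "zenstack-kit/dist/pull.js";

const result = await pullSchema({
  dialect: "sqlite",            // implementation handles "sqlite" | "postgres" | "mysql"
  databasePath: "./dev.db",     // used by the SQLite dialect
  outputPath: "./schema.zmodel",
});

console.log(`Wrote ${result.tableCount} models to ${result.outputPath}`);
```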

package/dist/pull.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"pull.d.ts","sourceRoot":"","sources":["../src/pull.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,EAAuB,KAAK,aAAa,EAAE,MAAM,qBAAqB,CAAC;AAE9E,MAAM,WAAW,WAAW;IAC1B,uBAAuB;IACvB,OAAO,EAAE,aAAa,CAAC;IACvB,8BAA8B;IAC9B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gDAAgD;IAChD,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,6BAA6B;IAC7B,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,UAAU;IACzB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;CACpB;AAmfD,wBAAsB,UAAU,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,UAAU,CAAC,CAwC1E"}

package/dist/pull.js
ADDED
@@ -0,0 +1,424 @@
+/**
+ * Database pull utilities
+ *
+ * Uses Kysely introspection to generate a ZenStack schema from a live database.
+ */
+import * as fs from "fs/promises";
+import * as path from "path";
+import { sql } from "kysely";
+import { createKyselyAdapter } from "./kysely-adapter.js";
+const INTERNAL_TABLES = new Set([
+    "_kysely_migration",
+    "_kysely_migration_lock",
+    "__drizzle_migrations",
+    "sqlite_sequence",
+]);
+function isInternalTable(name) {
+    return INTERNAL_TABLES.has(name) || name.startsWith("sqlite_");
+}
+function pluralize(word) {
+    if (word.endsWith("s") || word.endsWith("x") || word.endsWith("ch") || word.endsWith("sh")) {
+        return word + "es";
+    }
+    if (word.endsWith("y") && !/[aeiou]y$/i.test(word)) {
+        return word.slice(0, -1) + "ies";
+    }
+    return word + "s";
+}
+function singularize(word) {
+    if (word.endsWith("ies")) {
+        return word.slice(0, -3) + "y";
+    }
+    if (word.endsWith("es") && (word.endsWith("ses") || word.endsWith("xes") || word.endsWith("ches") || word.endsWith("shes"))) {
+        return word.slice(0, -2);
+    }
+    if (word.endsWith("s") && !word.endsWith("ss")) {
+        return word.slice(0, -1);
+    }
+    return word;
+}
+function toPascalCase(value) {
+    const parts = value.split(/[^a-zA-Z0-9]+/).filter(Boolean);
+    return parts
+        .map((part) => part.charAt(0).toUpperCase() + part.slice(1).toLowerCase())
+        .join("");
+}
+function toCamelCase(value) {
+    const pascal = toPascalCase(value);
+    return pascal.length > 0 ? pascal.charAt(0).toLowerCase() + pascal.slice(1) : value;
+}
+function normalizeType(dataType) {
+    const lower = dataType.toLowerCase();
+    const isArray = lower.endsWith("[]");
+    const base = isArray ? lower.slice(0, -2) : lower;
+    const normalized = base.replace(/\(.+\)/, "").trim();
+    if (normalized.includes("bigint"))
+        return { type: "BigInt", isArray };
+    if (normalized.includes("int"))
+        return { type: "Int", isArray };
+    if (normalized.includes("bool"))
+        return { type: "Boolean", isArray };
+    if (normalized.includes("date") || normalized.includes("time"))
+        return { type: "DateTime", isArray };
+    if (normalized.includes("json"))
+        return { type: "Json", isArray };
+    if (normalized.includes("blob") || normalized.includes("bytea") || normalized.includes("binary"))
+        return { type: "Bytes", isArray };
+    if (normalized.includes("decimal") || normalized.includes("numeric"))
+        return { type: "Decimal", isArray };
+    if (normalized.includes("real") || normalized.includes("double") || normalized.includes("float"))
+        return { type: "Float", isArray };
+    if (normalized.includes("char") || normalized.includes("text") || normalized.includes("uuid"))
+        return { type: "String", isArray };
+    return { type: "String", isArray };
+}
+function buildDatasourceBlock(dialect) {
+    return [
+        "datasource db {",
+        ` provider = \"${dialect}\"`,
+        " url = env(\"DATABASE_URL\")",
+        "}",
+        "",
+        "generator client {",
+        " provider = \"prisma-client-js\"",
+        "}",
+        "",
+    ].join("\n");
+}
+function buildModelBlock(options) {
+    const { table, foreignKeys, indexes, primaryKeys, allTables } = options;
+    const modelName = toPascalCase(table.name) || "Model";
+    const fieldLines = [];
+    // Get primary key columns for this table
+    const tablePk = primaryKeys.find((pk) => pk.table === table.name);
+    const pkColumns = new Set(tablePk?.columns ?? []);
+    const isCompositePk = pkColumns.size > 1;
+    // Group foreign keys by fromColumn for this table
+    const fkByColumn = new Map();
+    const incomingFks = [];
+    for (const fk of foreignKeys) {
+        if (fk.fromTable === table.name) {
+            fkByColumn.set(fk.fromColumn, fk);
+        }
+        if (fk.toTable === table.name) {
+            incomingFks.push(fk);
+        }
+    }
+    // Get unique columns from indexes (excluding PK columns)
+    const uniqueColumns = new Set();
+    const compositeUniques = [];
+    for (const idx of indexes) {
+        if (idx.table === table.name && idx.isUnique) {
+            // Skip if this index matches the primary key
+            const isPkIndex = idx.columns.length === pkColumns.size &&
+                idx.columns.every((c) => pkColumns.has(c));
+            if (isPkIndex)
+                continue;
+            if (idx.columns.length === 1) {
+                uniqueColumns.add(idx.columns[0]);
+            }
+            else {
+                compositeUniques.push(idx.columns);
+            }
+        }
+    }
+    const sortedColumns = [...table.columns].sort((a, b) => a.name.localeCompare(b.name));
+    for (const column of sortedColumns) {
+        const fieldName = toCamelCase(column.name) || column.name;
+        const mapped = fieldName !== column.name;
+        const { type, isArray } = normalizeType(column.dataType);
+        const optional = column.isNullable ? "?" : "";
+        const modifiers = [];
+        const isPkColumn = pkColumns.has(column.name);
+        // For single-column PK, add @id to the field
+        if (isPkColumn && !isCompositePk) {
+            modifiers.push("@id");
+            if (column.isAutoIncrementing) {
+                modifiers.push("@default(autoincrement())");
+            }
+        }
+        // Add @unique for unique columns (but not PK columns)
+        if (uniqueColumns.has(column.name) && !isPkColumn) {
+            modifiers.push("@unique");
+        }
+        if (mapped) {
+            modifiers.push(`@map("${column.name}")`);
+        }
+        if (column.hasDefaultValue && !modifiers.some((m) => m.includes("@default"))) {
+            modifiers.push("@default(dbgenerated())");
+        }
+        const typeSuffix = isArray ? "[]" : "";
+        const modifierText = modifiers.length > 0 ? ` ${modifiers.join(" ")}` : "";
+        fieldLines.push(` ${fieldName} ${type}${typeSuffix}${optional}${modifierText}`);
+    }
+    // Add relation fields for outgoing foreign keys
+    for (const fk of fkByColumn.values()) {
+        if (!allTables.has(fk.toTable))
+            continue;
+        const relatedModel = toPascalCase(fk.toTable);
+        const relationFieldName = toCamelCase(singularize(fk.toTable));
+        const fkFieldName = toCamelCase(fk.fromColumn);
+        // Find if the FK column is nullable
+        const fkColumn = table.columns.find((c) => c.name === fk.fromColumn);
+        const optional = fkColumn?.isNullable ? "?" : "";
+        fieldLines.push(` ${relationFieldName} ${relatedModel}${optional} @relation(fields: [${fkFieldName}], references: [${toCamelCase(fk.toColumn)}])`);
+    }
+    // Add reverse relation fields for incoming foreign keys
+    const incomingByTable = new Map();
+    for (const fk of incomingFks) {
+        if (!allTables.has(fk.fromTable))
+            continue;
+        const existing = incomingByTable.get(fk.fromTable) ?? [];
+        existing.push(fk);
+        incomingByTable.set(fk.fromTable, existing);
+    }
+    for (const [fromTable, fks] of incomingByTable) {
+        const relatedModel = toPascalCase(fromTable);
+        const relationFieldName = toCamelCase(pluralize(fromTable));
+        // If there are multiple FKs from the same table, we need to name them
+        if (fks.length > 1) {
+            for (const fk of fks) {
+                const suffix = toPascalCase(fk.fromColumn.replace(/Id$/, ""));
+                fieldLines.push(` ${relationFieldName}By${suffix} ${relatedModel}[]`);
+            }
+        }
+        else {
+            fieldLines.push(` ${relationFieldName} ${relatedModel}[]`);
+        }
+    }
+    const lines = [`model ${modelName} {`, ...fieldLines];
+    // Add composite primary key
+    if (isCompositePk && tablePk) {
+        const fieldNames = tablePk.columns.map((c) => toCamelCase(c));
+        lines.push(` @@id([${fieldNames.join(", ")}])`);
+    }
+    // Add composite unique constraints
+    for (const columns of compositeUniques) {
+        const fieldNames = columns.map((c) => toCamelCase(c));
+        lines.push(` @@unique([${fieldNames.join(", ")}])`);
+    }
+    if (table.name !== modelName.toLowerCase()) {
+        lines.push(` @@map("${table.name}")`);
+    }
+    lines.push("}");
+    return lines.join("\n");
+}
+async function extractForeignKeys(db, dialect) {
+    const foreignKeys = [];
+    if (dialect === "sqlite") {
+        // SQLite: query each table's foreign keys via PRAGMA
+        const tables = await db.introspection.getTables({ withInternalKyselyTables: false });
+        for (const table of tables) {
+            if (table.isView || isInternalTable(table.name))
+                continue;
+            const result = await sql `PRAGMA foreign_key_list(${sql.raw(`"${table.name}"`)})`.execute(db);
+            for (const row of result.rows) {
+                foreignKeys.push({
+                    constraintName: `fk_${table.name}_${row.from}`,
+                    fromTable: table.name,
+                    fromColumn: row.from,
+                    toTable: row.table,
+                    toColumn: row.to,
+                });
+            }
+        }
+    }
+    else if (dialect === "postgres") {
+        const result = await sql `
+      SELECT
+        tc.constraint_name,
+        tc.table_name as from_table,
+        kcu.column_name as from_column,
+        ccu.table_name as to_table,
+        ccu.column_name as to_column
+      FROM information_schema.table_constraints tc
+      JOIN information_schema.key_column_usage kcu
+        ON tc.constraint_name = kcu.constraint_name
+        AND tc.table_schema = kcu.table_schema
+      JOIN information_schema.constraint_column_usage ccu
+        ON ccu.constraint_name = tc.constraint_name
+        AND ccu.table_schema = tc.table_schema
+      WHERE tc.constraint_type = 'FOREIGN KEY'
+        AND tc.table_schema = 'public'
+    `.execute(db);
+        for (const row of result.rows) {
+            foreignKeys.push({
+                constraintName: row.constraint_name,
+                fromTable: row.from_table,
+                fromColumn: row.from_column,
+                toTable: row.to_table,
+                toColumn: row.to_column,
+            });
+        }
+    }
+    else if (dialect === "mysql") {
+        const result = await sql `
+      SELECT
+        CONSTRAINT_NAME,
+        TABLE_NAME,
+        COLUMN_NAME,
+        REFERENCED_TABLE_NAME,
+        REFERENCED_COLUMN_NAME
+      FROM information_schema.KEY_COLUMN_USAGE
+      WHERE REFERENCED_TABLE_NAME IS NOT NULL
+        AND TABLE_SCHEMA = DATABASE()
+    `.execute(db);
+        for (const row of result.rows) {
+            foreignKeys.push({
+                constraintName: row.CONSTRAINT_NAME,
+                fromTable: row.TABLE_NAME,
+                fromColumn: row.COLUMN_NAME,
+                toTable: row.REFERENCED_TABLE_NAME,
+                toColumn: row.REFERENCED_COLUMN_NAME,
+            });
+        }
+    }
+    return foreignKeys;
+}
+async function extractIndexes(db, dialect, tableNames) {
+    const indexes = [];
+    if (dialect === "sqlite") {
+        for (const tableName of tableNames) {
+            const indexList = await sql `PRAGMA index_list(${sql.raw(`"${tableName}"`)})`.execute(db);
+            for (const idx of indexList.rows) {
+                // Skip internal sqlite indexes, but allow sqlite_autoindex_ which are auto-created for UNIQUE constraints
+                if (idx.name.startsWith("sqlite_") && !idx.name.startsWith("sqlite_autoindex_"))
+                    continue;
+                const indexInfo = await sql `PRAGMA index_info(${sql.raw(`"${idx.name}"`)})`.execute(db);
+                indexes.push({
+                    name: idx.name,
+                    table: tableName,
+                    columns: indexInfo.rows.map((r) => r.name),
+                    isUnique: idx.unique === 1,
+                });
+            }
+        }
+    }
+    else if (dialect === "postgres") {
+        const result = await sql `
+      SELECT indexname, tablename, indexdef
+      FROM pg_indexes
+      WHERE schemaname = 'public'
+    `.execute(db);
+        for (const row of result.rows) {
+            const isUnique = row.indexdef.toUpperCase().includes("UNIQUE");
+            const colMatch = row.indexdef.match(/\(([^)]+)\)/);
+            const columns = colMatch
+                ? colMatch[1].split(",").map((c) => c.trim().replace(/"/g, ""))
+                : [];
+            indexes.push({
+                name: row.indexname,
+                table: row.tablename,
+                columns,
+                isUnique,
+            });
+        }
+    }
+    else if (dialect === "mysql") {
+        for (const tableName of tableNames) {
+            const result = await sql `SHOW INDEX FROM ${sql.raw(`\`${tableName}\``)}`.execute(db);
+            const byName = new Map();
+            for (const row of result.rows) {
+                if (!byName.has(row.Key_name)) {
+                    byName.set(row.Key_name, { columns: [], isUnique: row.Non_unique === 0 });
+                }
+                byName.get(row.Key_name).columns.push(row.Column_name);
+            }
+            for (const [name, info] of byName) {
+                indexes.push({
+                    name,
+                    table: tableName,
+                    columns: info.columns,
+                    isUnique: info.isUnique,
+                });
+            }
+        }
+    }
+    return indexes;
+}
+async function extractPrimaryKeys(db, dialect, tableNames) {
+    const primaryKeys = [];
+    if (dialect === "sqlite") {
+        for (const tableName of tableNames) {
+            const tableInfo = await sql `PRAGMA table_info(${sql.raw(`"${tableName}"`)})`.execute(db);
+            const pkColumns = tableInfo.rows
+                .filter((row) => row.pk > 0)
+                .sort((a, b) => a.pk - b.pk)
+                .map((row) => row.name);
+            if (pkColumns.length > 0) {
+                primaryKeys.push({ table: tableName, columns: pkColumns });
+            }
+        }
+    }
+    else if (dialect === "postgres") {
+        const result = await sql `
+      SELECT
+        tc.table_name,
+        kcu.column_name,
+        kcu.ordinal_position
+      FROM information_schema.table_constraints tc
+      JOIN information_schema.key_column_usage kcu
+        ON tc.constraint_name = kcu.constraint_name
+        AND tc.table_schema = kcu.table_schema
+      WHERE tc.constraint_type = 'PRIMARY KEY'
+        AND tc.table_schema = 'public'
+      ORDER BY tc.table_name, kcu.ordinal_position
+    `.execute(db);
+        const byTable = new Map();
+        for (const row of result.rows) {
+            if (!byTable.has(row.table_name)) {
+                byTable.set(row.table_name, []);
+            }
+            byTable.get(row.table_name).push(row.column_name);
+        }
+        for (const [table, columns] of byTable) {
+            primaryKeys.push({ table, columns });
+        }
+    }
+    else if (dialect === "mysql") {
+        for (const tableName of tableNames) {
+            const result = await sql `SHOW INDEX FROM ${sql.raw(`\`${tableName}\``)} WHERE Key_name = 'PRIMARY'`.execute(db);
+            const columns = result.rows
+                .sort((a, b) => a.Seq_in_index - b.Seq_in_index)
+                .map((row) => row.Column_name);
+            if (columns.length > 0) {
+                primaryKeys.push({ table: tableName, columns });
+            }
+        }
+    }
+    return primaryKeys;
+}
+export async function pullSchema(options) {
+    const { db, destroy } = await createKyselyAdapter({
+        dialect: options.dialect,
+        connectionUrl: options.connectionUrl,
+        databasePath: options.databasePath,
+    });
+    try {
+        const tables = await db.introspection.getTables({ withInternalKyselyTables: false });
+        const filtered = tables.filter((table) => !table.isView && !isInternalTable(table.name));
+        const tableNames = filtered.map((t) => t.name);
+        const allTables = new Set(tableNames);
+        const foreignKeys = await extractForeignKeys(db, options.dialect);
+        const indexes = await extractIndexes(db, options.dialect, tableNames);
+        const primaryKeys = await extractPrimaryKeys(db, options.dialect, tableNames);
+        const blocks = filtered.map((table) => buildModelBlock({
+            table,
+            foreignKeys,
+            indexes,
+            primaryKeys,
+            allTables,
+        }));
+        const schema = [buildDatasourceBlock(options.dialect), ...blocks].join("\n\n");
+        await fs.mkdir(path.dirname(options.outputPath), { recursive: true });
+        await fs.writeFile(options.outputPath, schema.trimEnd() + "\n", "utf-8");
+        return {
+            outputPath: options.outputPath,
+            schema,
+            tableCount: filtered.length,
+        };
+    }
+    finally {
+        await destroy();
+    }
+}
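
For orientation, the sketch below shows roughly what buildModelBlock emits for a hypothetical "blog_posts" table (autoincrementing "id" primary key, unique "slug", NOT NULL "author_id" foreign key referencing "users"."id"). Per the code above, columns are listed alphabetically, names are camelCased with @map/@@map recording the original database names, and relation fields are derived from foreign keys. The table, columns, and exact spacing here are illustrative, not taken from the package.

```ts
// Illustrative only: approximate generator output for an assumed "blog_posts" table.
// The reverse relation list field added to the Users model is omitted here.
const approximateModelBlock = `
model BlogPosts {
  authorId Int @map("author_id")
  id Int @id @default(autoincrement())
  slug String @unique
  user Users @relation(fields: [authorId], references: [id])
  @@map("blog_posts")
}`;
```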

package/dist/schema/introspect.d.ts
ADDED
@@ -0,0 +1,54 @@
+/**
+ * Schema introspection utilities
+ *
+ * Provides functionality to introspect ZenStack schemas and databases,
+ * extracting model and field information for code generation.
+ */
+export interface FieldInfo {
+    /** Field name */
+    name: string;
+    /** Field type (String, Int, Boolean, etc.) */
+    type: string;
+    /** Whether the field is optional */
+    isOptional: boolean;
+    /** Whether the field is an array */
+    isArray: boolean;
+    /** Whether this is a relation field */
+    isRelation: boolean;
+    /** Whether this is the primary key */
+    isId: boolean;
+    /** Whether the field has a default value */
+    hasDefault: boolean;
+    /** Whether the field is unique */
+    isUnique: boolean;
+    /** Related model name (for relations) */
+    relationModel?: string;
+}
+export interface ModelInfo {
+    /** Model name */
+    name: string;
+    /** Table name in database */
+    tableName: string;
+    /** Model fields */
+    fields: FieldInfo[];
+}
+export interface SchemaInfo {
+    /** All models in the schema */
+    models: ModelInfo[];
+    /** Schema version or hash */
+    version: string;
+}
+interface IntrospectOptions {
+    /** Path to ZenStack schema file */
+    schemaPath?: string;
+    /** Database connection URL (for database introspection) */
+    databaseUrl?: string;
+    /** Output path for generated schema */
+    outputPath?: string;
+}
+/**
+ * Introspect schema from file or database
+ */
+export declare function introspectSchema(options: IntrospectOptions): Promise<SchemaInfo>;
+export {};
+//# sourceMappingURL=introspect.d.ts.map

package/dist/schema/introspect.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"introspect.d.ts","sourceRoot":"","sources":["../../src/schema/introspect.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAKH,MAAM,WAAW,SAAS;IACxB,iBAAiB;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,8CAA8C;IAC9C,IAAI,EAAE,MAAM,CAAC;IACb,oCAAoC;IACpC,UAAU,EAAE,OAAO,CAAC;IACpB,oCAAoC;IACpC,OAAO,EAAE,OAAO,CAAC;IACjB,uCAAuC;IACvC,UAAU,EAAE,OAAO,CAAC;IACpB,sCAAsC;IACtC,IAAI,EAAE,OAAO,CAAC;IACd,4CAA4C;IAC5C,UAAU,EAAE,OAAO,CAAC;IACpB,kCAAkC;IAClC,QAAQ,EAAE,OAAO,CAAC;IAClB,yCAAyC;IACzC,aAAa,CAAC,EAAE,MAAM,CAAC;CACxB;AAED,MAAM,WAAW,SAAS;IACxB,iBAAiB;IACjB,IAAI,EAAE,MAAM,CAAC;IACb,6BAA6B;IAC7B,SAAS,EAAE,MAAM,CAAC;IAClB,mBAAmB;IACnB,MAAM,EAAE,SAAS,EAAE,CAAC;CACrB;AAED,MAAM,WAAW,UAAU;IACzB,+BAA+B;IAC/B,MAAM,EAAE,SAAS,EAAE,CAAC;IACpB,6BAA6B;IAC7B,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,UAAU,iBAAiB;IACzB,mCAAmC;IACnC,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,2DAA2D;IAC3D,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,uCAAuC;IACvC,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAoED;;GAEG;AACH,wBAAsB,gBAAgB,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,UAAU,CAAC,CAUtF"}

package/dist/schema/introspect.js
ADDED
@@ -0,0 +1,75 @@
+/**
+ * Schema introspection utilities
+ *
+ * Provides functionality to introspect ZenStack schemas and databases,
+ * extracting model and field information for code generation.
+ */
+import * as fs from "fs/promises";
+/**
+ * Parse a .zmodel file and extract schema information
+ * This is a simplified parser - in production, you'd use ZenStack's AST
+ */
+async function parseZModelFile(schemaPath) {
+    const content = await fs.readFile(schemaPath, "utf-8");
+    const models = [];
+    // Simple regex-based parser for demonstration
+    // In production, integrate with ZenStack's parser
+    const modelRegex = /model\s+(\w+)\s*\{([^}]+)\}/g;
+    const fieldRegex = /^\s*(\w+)\s+(\w+)(\[\])?\s*(\?)?\s*(.*?)$/gm;
+    let modelMatch;
+    while ((modelMatch = modelRegex.exec(content)) !== null) {
+        const modelName = modelMatch[1];
+        const modelBody = modelMatch[2];
+        const fields = [];
+        let fieldMatch;
+        const fieldPattern = /^\s*(\w+)\s+(\w+)(\[\])?\s*(\?)?(.*)$/gm;
+        while ((fieldMatch = fieldPattern.exec(modelBody)) !== null) {
+            const [, name, type, isArray, isOptional, modifiers] = fieldMatch;
+            // Skip if it looks like a directive
+            if (name.startsWith("@@") || name.startsWith("//"))
+                continue;
+            const isId = modifiers?.includes("@id") || false;
+            const hasDefault = modifiers?.includes("@default") || false;
+            const isUnique = modifiers?.includes("@unique") || isId;
+            const isRelation = modifiers?.includes("@relation") || false;
+            fields.push({
+                name,
+                type,
+                isOptional: !!isOptional,
+                isArray: !!isArray,
+                isRelation,
+                isId,
+                hasDefault,
+                isUnique,
+                relationModel: isRelation ? type : undefined,
+            });
+        }
+        models.push({
+            name: modelName,
+            tableName: modelName.toLowerCase(),
+            fields,
+        });
+    }
+    // Generate a simple version hash
+    const version = Buffer.from(content).toString("base64").slice(0, 8);
+    return { models, version };
+}
+/**
+ * Introspect a database and generate schema information
+ */
+async function introspectDatabase(databaseUrl) {
+    void databaseUrl;
+    throw new Error("Database introspection is not supported.");
+}
+/**
+ * Introspect schema from file or database
+ */
+export async function introspectSchema(options) {
+    if (options.schemaPath) {
+        return parseZModelFile(options.schemaPath);
+    }
+    if (options.databaseUrl) {
+        return introspectDatabase(options.databaseUrl);
+    }
+    throw new Error("Either schemaPath or databaseUrl must be provided");
+}
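
A minimal sketch of consuming the introspection API above (import specifier assumed). Note that, per the implementation, only schemaPath-based introspection is functional; the databaseUrl branch throws.

```ts
// Sketch only: import path and schema file name are assumptions.
import { introspectSchema } from "zenstack-kit/dist/schema/introspect.js";

const schema = await introspectSchema({ schemaPath: "./schema.zmodel" });
for (const model of schema.models) {
  const idField = model.fields.find((f) => f.isId);
  console.log(`${model.name} (table "${model.tableName}") id field: ${idField?.name ?? "none"}`);
}
```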

package/dist/schema/pull.d.ts
ADDED
@@ -0,0 +1,23 @@
+/**
+ * Database pull utilities
+ *
+ * Uses Kysely introspection to generate a ZenStack schema from a live database.
+ */
+import { type KyselyDialect } from "../sql/kysely-adapter.js";
+export interface PullOptions {
+    /** Database dialect */
+    dialect: KyselyDialect;
+    /** Database connection URL */
+    connectionUrl?: string;
+    /** SQLite database path (for SQLite dialect) */
+    databasePath?: string;
+    /** Output path for schema */
+    outputPath: string;
+}
+export interface PullResult {
+    outputPath: string;
+    schema: string;
+    tableCount: number;
+}
+export declare function pullSchema(options: PullOptions): Promise<PullResult>;
+//# sourceMappingURL=pull.d.ts.map

package/dist/schema/pull.d.ts.map
ADDED
@@ -0,0 +1 @@
+{"version":3,"file":"pull.d.ts","sourceRoot":"","sources":["../../src/schema/pull.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAKH,OAAO,EAAuB,KAAK,aAAa,EAAE,MAAM,0BAA0B,CAAC;AAEnF,MAAM,WAAW,WAAW;IAC1B,uBAAuB;IACvB,OAAO,EAAE,aAAa,CAAC;IACvB,8BAA8B;IAC9B,aAAa,CAAC,EAAE,MAAM,CAAC;IACvB,gDAAgD;IAChD,YAAY,CAAC,EAAE,MAAM,CAAC;IACtB,6BAA6B;IAC7B,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,UAAU;IACzB,UAAU,EAAE,MAAM,CAAC;IACnB,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;CACpB;AAmfD,wBAAsB,UAAU,CAAC,OAAO,EAAE,WAAW,GAAG,OAAO,CAAC,UAAU,CAAC,CAwC1E"}