@sedrino/db-schema 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +69 -0
- package/dist/index.d.ts +1222 -0
- package/dist/index.js +1456 -0
- package/dist/index.js.map +1 -0
- package/docs/cli.md +70 -0
- package/docs/index.md +11 -0
- package/docs/migrations.md +65 -0
- package/docs/schema-document.md +48 -0
- package/package.json +41 -0
- package/src/apply.ts +234 -0
- package/src/cli.ts +209 -0
- package/src/drizzle.ts +178 -0
- package/src/index.ts +62 -0
- package/src/migration.ts +456 -0
- package/src/planner.ts +247 -0
- package/src/project.ts +79 -0
- package/src/schema.ts +53 -0
- package/src/sqlite.ts +172 -0
- package/src/types.ts +145 -0
- package/src/utils.ts +286 -0
package/src/apply.ts
ADDED
|
@@ -0,0 +1,234 @@
|
|
|
1
|
+
import { createClient, type Client } from "@libsql/client";
|
|
2
|
+
import type { DatabaseSchemaDocument } from "./types";
|
|
3
|
+
import type { MigrationDefinition } from "./migration";
|
|
4
|
+
import { materializeSchema, planMigration, type PlannedMigration } from "./planner";
|
|
5
|
+
import { schemaHash } from "./schema";
|
|
6
|
+
|
|
7
|
+
// Bookkeeping tables this package owns inside the target database.
// MIGRATIONS_TABLE records one row per applied migration; STATE_TABLE holds a
// single-row snapshot of the schema document the database should match.
const MIGRATIONS_TABLE = "_sedrino_schema_migrations";
const STATE_TABLE = "_sedrino_schema_state";

// Value types accepted as bound SQL parameters in this module.
type SqlValue = string | number | null;

// Shape of the singleton row read back from STATE_TABLE.
type SchemaStateRow = {
  schemaHash: string; // hash of the schema document stored in the database
  schemaJson: string; // JSON-serialized DatabaseSchemaDocument
};

// Result of applyMigrations(): what ran, what was skipped, and the schema the
// database is now expected to match.
export type ApplyMigrationsResult = {
  appliedPlans: PlannedMigration[]; // plans executed in this run, in order
  skippedMigrationIds: string[]; // migrations already recorded as applied
  currentSchema: DatabaseSchemaDocument; // schema after all applied migrations
  currentSchemaHash: string; // hash of currentSchema
};

// Minimal connection settings forwarded to @libsql/client's createClient.
export type LibsqlConnectionOptions = {
  url: string;
  authToken?: string;
};
|
|
28
|
+
|
|
29
|
+
/**
 * Creates a libsql client from the given connection options.
 *
 * NOTE(review): `concurrency: 0` is passed to @libsql/client — presumably this
 * disables the client-side request concurrency limit; confirm against the
 * @libsql/client documentation before changing.
 */
export function createLibsqlClient(options: LibsqlConnectionOptions) {
  return createClient({
    url: options.url,
    authToken: options.authToken,
    concurrency: 0,
  });
}
|
|
36
|
+
|
|
37
|
+
/**
 * Applies all pending migrations to a libsql database, enforcing consistency
 * between the local migration set and what the database records.
 *
 * Flow:
 *  1. Ensure the bookkeeping tables exist.
 *  2. Refuse to proceed if the database records an applied migration that is
 *     not present locally.
 *  3. Rebuild the schema expected from already-applied local migrations and
 *     compare its hash against the stored state (drift detection).
 *  4. Plan and execute each pending migration in the order given by
 *     `args.migrations`, aborting on any planner warning.
 *
 * @param args.client - An existing client; if omitted, one is created from
 *   `args.connection` (which must then provide a url).
 *   NOTE(review): a client created here is never closed — confirm whether
 *   callers are expected to own cleanup.
 * @param args.migrations - Full local migration history, in apply order.
 * @param args.baseSchema - Optional starting schema for materialization.
 * @returns ApplyMigrationsResult describing applied/skipped migrations and the
 *   resulting schema + hash.
 * @throws Error on missing local migrations, schema drift, missing state
 *   table, or unsafe migration plans (any planner warning).
 */
export async function applyMigrations(args: {
  client?: Client;
  connection?: LibsqlConnectionOptions;
  migrations: MigrationDefinition[];
  baseSchema?: DatabaseSchemaDocument;
}) {
  const client = args.client ?? createLibsqlClient(assertConnection(args.connection));

  await ensureMetadataTables(client);

  const appliedRows = await listAppliedMigrations(client);
  const appliedIds = new Set(appliedRows.map((row) => row.migrationId));
  const migrationMap = new Map(args.migrations.map((migration) => [migration.meta.id, migration]));

  // Every migration the database has applied must exist locally; otherwise the
  // local history has diverged (e.g. a migration was deleted or renamed).
  for (const applied of appliedRows) {
    if (!migrationMap.has(applied.migrationId)) {
      throw new Error(
        `Database contains applied migration ${applied.migrationId}, but it is not present locally`,
      );
    }
  }

  // Re-materialize the schema that should result from the already-applied
  // subset of local migrations, then compare hashes with the stored state.
  const appliedLocalMigrations = args.migrations.filter((migration) =>
    appliedIds.has(migration.meta.id),
  );
  const expectedCurrent = materializeSchema({
    baseSchema: args.baseSchema,
    migrations: appliedLocalMigrations,
  }).schema;
  const expectedHash = schemaHash(expectedCurrent);
  const currentState = await getSchemaState(client);

  if (currentState) {
    if (currentState.schemaHash !== expectedHash) {
      throw new Error(
        `Schema drift detected. Database hash ${currentState.schemaHash} does not match expected local hash ${expectedHash}`,
      );
    }
  } else if (appliedRows.length > 0) {
    // Migrations recorded but no state row: bookkeeping is inconsistent.
    throw new Error(
      `Database has applied migrations recorded in ${MIGRATIONS_TABLE} but is missing ${STATE_TABLE}`,
    );
  }

  // Pending migrations run in the order they appear in args.migrations.
  const pendingMigrations = args.migrations.filter(
    (migration) => !appliedIds.has(migration.meta.id),
  );
  const appliedPlans: PlannedMigration[] = [];
  let currentSchema = expectedCurrent;

  for (const migration of pendingMigrations) {
    const plan = planMigration({
      currentSchema,
      migration,
    });

    // Any planner warning is treated as a hard failure: nothing is executed
    // for this or later migrations.
    if (plan.sql.warnings.length > 0) {
      throw new Error(
        `Migration ${plan.migrationId} cannot be applied safely:\n${plan.sql.warnings
          .map((warning) => `- ${warning}`)
          .join("\n")}`,
      );
    }

    await executePlan(client, plan);
    currentSchema = plan.nextSchema;
    appliedPlans.push(plan);
  }

  return {
    appliedPlans,
    skippedMigrationIds: appliedLocalMigrations.map((migration) => migration.meta.id),
    currentSchema,
    currentSchemaHash: schemaHash(currentSchema),
  } satisfies ApplyMigrationsResult;
}
|
|
113
|
+
|
|
114
|
+
export async function listAppliedMigrations(client: Client) {
|
|
115
|
+
const result = await client.execute(
|
|
116
|
+
`SELECT migration_id, migration_name, schema_hash, applied_at
|
|
117
|
+
FROM ${MIGRATIONS_TABLE}
|
|
118
|
+
ORDER BY applied_at ASC, migration_id ASC`,
|
|
119
|
+
);
|
|
120
|
+
|
|
121
|
+
return (result.rows as Array<Record<string, unknown>>).map((row) => ({
|
|
122
|
+
migrationId: getString(row.migration_id) ?? "",
|
|
123
|
+
migrationName: getString(row.migration_name) ?? "",
|
|
124
|
+
schemaHash: getString(row.schema_hash) ?? "",
|
|
125
|
+
appliedAt: getNumber(row.applied_at) ?? 0,
|
|
126
|
+
}));
|
|
127
|
+
}
|
|
128
|
+
|
|
129
|
+
export async function getSchemaState(client: Client): Promise<SchemaStateRow | null> {
|
|
130
|
+
const result = await client.execute(
|
|
131
|
+
`SELECT schema_hash, schema_json
|
|
132
|
+
FROM ${STATE_TABLE}
|
|
133
|
+
WHERE singleton_id = 1`,
|
|
134
|
+
);
|
|
135
|
+
|
|
136
|
+
const row = (result.rows as Array<Record<string, unknown>>)[0];
|
|
137
|
+
if (!row) return null;
|
|
138
|
+
|
|
139
|
+
const schemaHashValue = getString(row.schema_hash);
|
|
140
|
+
const schemaJsonValue = getString(row.schema_json);
|
|
141
|
+
if (!schemaHashValue || !schemaJsonValue) return null;
|
|
142
|
+
|
|
143
|
+
return {
|
|
144
|
+
schemaHash: schemaHashValue,
|
|
145
|
+
schemaJson: schemaJsonValue,
|
|
146
|
+
};
|
|
147
|
+
}
|
|
148
|
+
|
|
149
|
+
/**
 * Creates the bookkeeping tables (idempotently) and enables foreign-key
 * enforcement for the connection. Runs as a single "write" batch.
 */
async function ensureMetadataTables(client: Client) {
  await client.batch(
    [
      "PRAGMA foreign_keys = ON",
      // History of applied migrations, one row each.
      `
      CREATE TABLE IF NOT EXISTS ${MIGRATIONS_TABLE} (
        migration_id TEXT PRIMARY KEY,
        migration_name TEXT NOT NULL,
        schema_hash TEXT NOT NULL,
        applied_at INTEGER NOT NULL,
        sql_statements_json TEXT NOT NULL
      )
      `,
      // Single-row table (singleton_id forced to 1 by CHECK) holding the
      // current schema snapshot and its hash.
      `
      CREATE TABLE IF NOT EXISTS ${STATE_TABLE} (
        singleton_id INTEGER PRIMARY KEY CHECK (singleton_id = 1),
        schema_hash TEXT NOT NULL,
        schema_json TEXT NOT NULL,
        updated_at INTEGER NOT NULL
      )
      `,
    ],
    "write",
  );
}
|
|
174
|
+
|
|
175
|
+
/**
 * Executes one planned migration: the plan's DDL/DML statements, then the
 * history insert, then the state upsert — all in a single "write" batch so the
 * bookkeeping rows land together with the schema change.
 * NOTE(review): presumably @libsql/client's batch("write") runs these in one
 * transaction — confirm against the client docs before relying on atomicity.
 */
async function executePlan(client: Client, plan: PlannedMigration) {
  const appliedAt = Date.now();
  const statements: Array<string | { sql: string; args: SqlValue[] }> = [
    // The migration's own statements run first, in plan order.
    ...plan.sql.statements,
    // Record the migration in the history table (statements kept as JSON for
    // later inspection).
    {
      sql: `INSERT INTO ${MIGRATIONS_TABLE} (
        migration_id,
        migration_name,
        schema_hash,
        applied_at,
        sql_statements_json
      ) VALUES (?, ?, ?, ?, ?)`,
      args: [
        plan.migrationId,
        plan.migrationName,
        plan.toSchemaHash,
        appliedAt,
        JSON.stringify(plan.sql.statements),
      ],
    },
    // Upsert the singleton state row with the post-migration schema.
    {
      sql: `INSERT INTO ${STATE_TABLE} (
        singleton_id,
        schema_hash,
        schema_json,
        updated_at
      ) VALUES (1, ?, ?, ?)
      ON CONFLICT(singleton_id) DO UPDATE SET
        schema_hash = excluded.schema_hash,
        schema_json = excluded.schema_json,
        updated_at = excluded.updated_at`,
      args: [plan.toSchemaHash, JSON.stringify(plan.nextSchema), appliedAt],
    },
  ];

  await client.batch(statements, "write");
}
|
|
212
|
+
|
|
213
|
+
function assertConnection(
|
|
214
|
+
connection: LibsqlConnectionOptions | undefined,
|
|
215
|
+
): LibsqlConnectionOptions {
|
|
216
|
+
if (!connection?.url) {
|
|
217
|
+
throw new Error("Missing database connection. Provide --url or pass a connection object.");
|
|
218
|
+
}
|
|
219
|
+
return connection;
|
|
220
|
+
}
|
|
221
|
+
|
|
222
|
+
function getString(value: unknown) {
|
|
223
|
+
return typeof value === "string" && value.trim().length > 0 ? value : null;
|
|
224
|
+
}
|
|
225
|
+
|
|
226
|
+
function getNumber(value: unknown) {
|
|
227
|
+
if (typeof value === "number" && Number.isFinite(value)) return value;
|
|
228
|
+
if (typeof value === "bigint") return Number(value);
|
|
229
|
+
if (typeof value === "string" && value.trim().length > 0) {
|
|
230
|
+
const parsed = Number(value);
|
|
231
|
+
return Number.isFinite(parsed) ? parsed : null;
|
|
232
|
+
}
|
|
233
|
+
return null;
|
|
234
|
+
}
|
package/src/cli.ts
ADDED
|
@@ -0,0 +1,209 @@
|
|
|
1
|
+
#!/usr/bin/env bun
|
|
2
|
+
|
|
3
|
+
import path from "node:path";
|
|
4
|
+
import { env, exit } from "node:process";
|
|
5
|
+
import { compileSchemaToDrizzle } from "./drizzle";
|
|
6
|
+
import { applyMigrations, createLibsqlClient } from "./apply";
|
|
7
|
+
import {
|
|
8
|
+
materializeProjectMigrations,
|
|
9
|
+
resolveDbProjectLayout,
|
|
10
|
+
writeDrizzleSchema,
|
|
11
|
+
writeSchemaSnapshot,
|
|
12
|
+
} from "./project";
|
|
13
|
+
|
|
14
|
+
// Result of parseArgs(): bare words in order, plus --options mapped to their
// string value or `true` when used as a flag.
type ParsedArgs = {
  positionals: string[];
  options: Map<string, string | boolean>;
};
|
|
18
|
+
|
|
19
|
+
async function main() {
|
|
20
|
+
const args = parseArgs(process.argv.slice(2));
|
|
21
|
+
const [group, command] = args.positionals;
|
|
22
|
+
|
|
23
|
+
if (!group || group === "help" || hasFlag(args, "help")) {
|
|
24
|
+
printHelp();
|
|
25
|
+
return;
|
|
26
|
+
}
|
|
27
|
+
|
|
28
|
+
switch (`${group} ${command ?? ""}`.trim()) {
|
|
29
|
+
case "migrate plan":
|
|
30
|
+
await handleMigratePlan(args);
|
|
31
|
+
return;
|
|
32
|
+
case "migrate apply":
|
|
33
|
+
await handleMigrateApply(args);
|
|
34
|
+
return;
|
|
35
|
+
case "schema print":
|
|
36
|
+
await handleSchemaPrint(args);
|
|
37
|
+
return;
|
|
38
|
+
case "schema drizzle":
|
|
39
|
+
await handleSchemaDrizzle(args);
|
|
40
|
+
return;
|
|
41
|
+
default:
|
|
42
|
+
throw new Error(`Unknown command: ${[group, command].filter(Boolean).join(" ")}`);
|
|
43
|
+
}
|
|
44
|
+
}
|
|
45
|
+
|
|
46
|
+
async function handleMigratePlan(args: ParsedArgs) {
|
|
47
|
+
const layout = resolveLayoutFromArgs(args);
|
|
48
|
+
const { schema, plans } = await materializeProjectMigrations(layout);
|
|
49
|
+
|
|
50
|
+
await writeSchemaSnapshot(schema, getStringOption(args, "snapshot") ?? layout.snapshotPath);
|
|
51
|
+
await writeDrizzleSchema(schema, getStringOption(args, "drizzle-out") ?? layout.drizzlePath);
|
|
52
|
+
|
|
53
|
+
const warnings = plans.flatMap((plan) =>
|
|
54
|
+
plan.sql.warnings.map((warning) => `${plan.migrationId}: ${warning}`),
|
|
55
|
+
);
|
|
56
|
+
const statementCount = plans.reduce((total, plan) => total + plan.sql.statements.length, 0);
|
|
57
|
+
|
|
58
|
+
console.log(`Planned ${plans.length} migration(s)`);
|
|
59
|
+
console.log(`Schema snapshot: ${layout.snapshotPath}`);
|
|
60
|
+
console.log(`Drizzle output: ${layout.drizzlePath}`);
|
|
61
|
+
console.log(`SQL statements: ${statementCount}`);
|
|
62
|
+
console.log(`Schema hash: ${plans.at(-1)?.toSchemaHash ?? "schema_00000000"}`);
|
|
63
|
+
|
|
64
|
+
if (warnings.length > 0) {
|
|
65
|
+
console.log("");
|
|
66
|
+
console.log("Warnings:");
|
|
67
|
+
for (const warning of warnings) console.log(`- ${warning}`);
|
|
68
|
+
}
|
|
69
|
+
|
|
70
|
+
if (hasFlag(args, "sql")) {
|
|
71
|
+
console.log("");
|
|
72
|
+
for (const plan of plans) {
|
|
73
|
+
console.log(`# ${plan.migrationId} ${plan.migrationName}`);
|
|
74
|
+
for (const statement of plan.sql.statements) console.log(statement);
|
|
75
|
+
if (plan.sql.warnings.length > 0) {
|
|
76
|
+
for (const warning of plan.sql.warnings) console.log(`-- warning: ${warning}`);
|
|
77
|
+
}
|
|
78
|
+
console.log("");
|
|
79
|
+
}
|
|
80
|
+
}
|
|
81
|
+
}
|
|
82
|
+
|
|
83
|
+
async function handleMigrateApply(args: ParsedArgs) {
|
|
84
|
+
const layout = resolveLayoutFromArgs(args);
|
|
85
|
+
const { migrations } = await materializeProjectMigrations(layout);
|
|
86
|
+
const url = getStringOption(args, "url") ?? env.LIBSQL_URL;
|
|
87
|
+
const authToken = getStringOption(args, "auth-token") ?? env.LIBSQL_AUTH_TOKEN;
|
|
88
|
+
|
|
89
|
+
if (!url) {
|
|
90
|
+
throw new Error("Missing database URL. Use --url or set LIBSQL_URL.");
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
const result = await applyMigrations({
|
|
94
|
+
client: createLibsqlClient({ url, authToken }),
|
|
95
|
+
migrations,
|
|
96
|
+
});
|
|
97
|
+
|
|
98
|
+
await writeSchemaSnapshot(
|
|
99
|
+
result.currentSchema,
|
|
100
|
+
getStringOption(args, "snapshot") ?? layout.snapshotPath,
|
|
101
|
+
);
|
|
102
|
+
await writeDrizzleSchema(
|
|
103
|
+
result.currentSchema,
|
|
104
|
+
getStringOption(args, "drizzle-out") ?? layout.drizzlePath,
|
|
105
|
+
);
|
|
106
|
+
|
|
107
|
+
console.log(`Applied ${result.appliedPlans.length} migration(s)`);
|
|
108
|
+
console.log(`Skipped ${result.skippedMigrationIds.length} already-applied migration(s)`);
|
|
109
|
+
console.log(`Current schema hash: ${result.currentSchemaHash}`);
|
|
110
|
+
|
|
111
|
+
if (result.appliedPlans.length > 0) {
|
|
112
|
+
console.log("");
|
|
113
|
+
console.log("Applied:");
|
|
114
|
+
for (const plan of result.appliedPlans) {
|
|
115
|
+
console.log(`- ${plan.migrationId} (${plan.sql.statements.length} statement(s))`);
|
|
116
|
+
}
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
|
|
120
|
+
async function handleSchemaPrint(args: ParsedArgs) {
|
|
121
|
+
const layout = resolveLayoutFromArgs(args);
|
|
122
|
+
const { schema } = await materializeProjectMigrations(layout);
|
|
123
|
+
console.log(JSON.stringify(schema, null, 2));
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
async function handleSchemaDrizzle(args: ParsedArgs) {
|
|
127
|
+
const layout = resolveLayoutFromArgs(args);
|
|
128
|
+
const { schema } = await materializeProjectMigrations(layout);
|
|
129
|
+
const source = compileSchemaToDrizzle(schema);
|
|
130
|
+
const outputPath = getStringOption(args, "out");
|
|
131
|
+
|
|
132
|
+
if (outputPath) {
|
|
133
|
+
await writeDrizzleSchema(schema, path.resolve(outputPath));
|
|
134
|
+
console.log(`Wrote Drizzle schema to ${path.resolve(outputPath)}`);
|
|
135
|
+
return;
|
|
136
|
+
}
|
|
137
|
+
|
|
138
|
+
process.stdout.write(source);
|
|
139
|
+
}
|
|
140
|
+
|
|
141
|
+
function resolveLayoutFromArgs(args: ParsedArgs) {
|
|
142
|
+
return resolveDbProjectLayout(getStringOption(args, "dir") ?? "db");
|
|
143
|
+
}
|
|
144
|
+
|
|
145
|
+
function parseArgs(argv: string[]): ParsedArgs {
|
|
146
|
+
const positionals: string[] = [];
|
|
147
|
+
const options = new Map<string, string | boolean>();
|
|
148
|
+
|
|
149
|
+
for (let index = 0; index < argv.length; index += 1) {
|
|
150
|
+
const value = argv[index]!;
|
|
151
|
+
if (!value.startsWith("--")) {
|
|
152
|
+
positionals.push(value);
|
|
153
|
+
continue;
|
|
154
|
+
}
|
|
155
|
+
|
|
156
|
+
const withoutPrefix = value.slice(2);
|
|
157
|
+
const [rawKey, inlineValue] = withoutPrefix.split("=", 2);
|
|
158
|
+
const key = rawKey ?? "";
|
|
159
|
+
if (inlineValue !== undefined) {
|
|
160
|
+
options.set(key, inlineValue);
|
|
161
|
+
continue;
|
|
162
|
+
}
|
|
163
|
+
|
|
164
|
+
const next = argv[index + 1];
|
|
165
|
+
if (next && !next.startsWith("--")) {
|
|
166
|
+
options.set(key, next);
|
|
167
|
+
index += 1;
|
|
168
|
+
continue;
|
|
169
|
+
}
|
|
170
|
+
|
|
171
|
+
options.set(key, true);
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
return {
|
|
175
|
+
positionals,
|
|
176
|
+
options,
|
|
177
|
+
};
|
|
178
|
+
}
|
|
179
|
+
|
|
180
|
+
function hasFlag(args: ParsedArgs, name: string) {
|
|
181
|
+
return args.options.get(name) === true;
|
|
182
|
+
}
|
|
183
|
+
|
|
184
|
+
function getStringOption(args: ParsedArgs, name: string) {
|
|
185
|
+
const value = args.options.get(name);
|
|
186
|
+
return typeof value === "string" && value.length > 0 ? value : null;
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
// Prints CLI usage. Keep the commands/defaults listed here in sync with the
// handlers in main() and the defaults in resolveLayoutFromArgs().
function printHelp() {
  console.log(`sedrino-db

Usage:
  sedrino-db migrate plan [--dir db] [--sql] [--snapshot path] [--drizzle-out path]
  sedrino-db migrate apply --url <libsql-url> [--auth-token token] [--dir db]
  sedrino-db schema print [--dir db]
  sedrino-db schema drizzle [--dir db] [--out path]

Defaults:
  --dir defaults to ./db
  schema snapshot defaults to ./db/schema/schema.snapshot.json
  drizzle output defaults to ./db/schema/schema.generated.ts
`);
}
|
|
204
|
+
|
|
205
|
+
// Top-level entry: any thrown error is reported as a single message on stderr
// and the process exits non-zero (non-Error throws are stringified).
main().catch((error) => {
  const message = error instanceof Error ? error.message : String(error);
  console.error(message);
  exit(1);
});
|
package/src/drizzle.ts
ADDED
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
import type {
|
|
2
|
+
DatabaseSchemaDocument,
|
|
3
|
+
DefaultSpec,
|
|
4
|
+
FieldSpec,
|
|
5
|
+
IndexSpec,
|
|
6
|
+
TableSpec,
|
|
7
|
+
UniqueSpec,
|
|
8
|
+
} from "./types";
|
|
9
|
+
import { tableVariableName, toPascalCase } from "./utils";
|
|
10
|
+
|
|
11
|
+
/**
 * Compiles a schema document into Drizzle ORM TypeScript source for sqlite.
 *
 * Pass 1 walks every field to decide which imports the generated file needs
 * (drizzle-orm column builders, @sedrino/toolkit temporal helpers, ulid for
 * generated ids). Pass 2 renders one table definition per schema table.
 * Logical kinds not listed in the switch contribute no import (their columns
 * come from renderBaseColumn's handling — the two switches must stay in sync).
 */
export function compileSchemaToDrizzle(schema: DatabaseSchemaDocument) {
  const sqliteImports = new Set(["sqliteTable"]);
  const toolkitImports = new Set<string>();
  let needsUlid = false;

  for (const table of schema.tables) {
    for (const field of table.fields) {
      switch (field.logical.kind) {
        // All of these render as text() columns.
        case "id":
        case "string":
        case "text":
        case "enum":
        case "json":
          sqliteImports.add("text");
          break;
        case "boolean":
        case "integer":
          sqliteImports.add("integer");
          break;
        case "number":
          sqliteImports.add("real");
          break;
        // Temporal kinds use toolkit column helpers, not drizzle builders.
        case "temporal.instant":
          toolkitImports.add("temporalInstantEpochMs");
          break;
        case "temporal.plainDate":
          toolkitImports.add("temporalPlainDateText");
          break;
      }

      if (field.default?.kind === "generatedId") needsUlid = true;
      if (field.default?.kind === "now") toolkitImports.add("epochMsNow");
    }

    if (table.indexes.length > 0) sqliteImports.add("index");
    if (table.uniques.length > 0) sqliteImports.add("uniqueIndex");
  }

  const lines: string[] = [];
  // Imports are emitted sorted for stable, diff-friendly output.
  lines.push(
    `import { ${Array.from(sqliteImports).sort().join(", ")} } from "drizzle-orm/sqlite-core";`,
  );
  if (toolkitImports.size > 0) {
    lines.push(
      `import { ${Array.from(toolkitImports).sort().join(", ")} } from "@sedrino/toolkit/drizzle/sqlite";`,
    );
  }
  if (needsUlid) {
    lines.push(`import { ulid } from "ulid";`);
  }
  lines.push("");

  for (const table of schema.tables) {
    lines.push(renderTable(table));
    lines.push("");
  }

  // Normalize to exactly one trailing newline.
  return lines.join("\n").trimEnd() + "\n";
}
|
|
70
|
+
|
|
71
|
+
function renderTable(table: TableSpec) {
|
|
72
|
+
const variableName = tableVariableName(table.name);
|
|
73
|
+
const fieldLines = table.fields.map((field) => ` ${field.name}: ${renderField(field)},`);
|
|
74
|
+
const tableConfig = renderTableConfig(table);
|
|
75
|
+
|
|
76
|
+
if (!tableConfig) {
|
|
77
|
+
return `export const ${variableName} = sqliteTable("${table.name}", {\n${fieldLines.join("\n")}\n});`;
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
return `export const ${variableName} = sqliteTable("${table.name}", {\n${fieldLines.join(
|
|
81
|
+
"\n",
|
|
82
|
+
)}\n}, (table) => [\n${tableConfig}\n]);`;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
function renderTableConfig(table: TableSpec) {
|
|
86
|
+
const lines: string[] = [];
|
|
87
|
+
|
|
88
|
+
for (const index of table.indexes) {
|
|
89
|
+
lines.push(` ${renderIndex(table.name, index)},`);
|
|
90
|
+
}
|
|
91
|
+
|
|
92
|
+
for (const unique of table.uniques) {
|
|
93
|
+
lines.push(` ${renderUnique(table.name, unique)},`);
|
|
94
|
+
}
|
|
95
|
+
|
|
96
|
+
return lines.length > 0 ? lines.join("\n") : null;
|
|
97
|
+
}
|
|
98
|
+
|
|
99
|
+
function renderIndex(tableName: string, index: IndexSpec) {
|
|
100
|
+
const name = index.name ?? `${tableName}_${index.fields.join("_")}_idx`;
|
|
101
|
+
return `index("${name}").on(${index.fields.map((field) => `table.${field}`).join(", ")})`;
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
function renderUnique(tableName: string, unique: UniqueSpec) {
|
|
105
|
+
const name = unique.name ?? `${tableName}_${unique.fields.join("_")}_unique`;
|
|
106
|
+
return `uniqueIndex("${name}").on(${unique.fields.map((field) => `table.${field}`).join(", ")})`;
|
|
107
|
+
}
|
|
108
|
+
|
|
109
|
+
function renderField(field: FieldSpec) {
|
|
110
|
+
let expression = renderBaseColumn(field);
|
|
111
|
+
|
|
112
|
+
if (field.logical.kind === "json") {
|
|
113
|
+
expression += `.$type<${field.logical.tsType}>()`;
|
|
114
|
+
}
|
|
115
|
+
|
|
116
|
+
if (field.primaryKey) expression += ".primaryKey()";
|
|
117
|
+
if (!field.nullable) expression += ".notNull()";
|
|
118
|
+
if (field.unique && !field.primaryKey) expression += ".unique()";
|
|
119
|
+
|
|
120
|
+
const defaultExpression = renderDrizzleDefault(field.default);
|
|
121
|
+
if (defaultExpression) expression += `.default(${defaultExpression})`;
|
|
122
|
+
|
|
123
|
+
if (field.default?.kind === "generatedId") {
|
|
124
|
+
expression += `.$default(() => \`${field.default.prefix}-\${ulid()}\`)`;
|
|
125
|
+
}
|
|
126
|
+
|
|
127
|
+
if (field.references) {
|
|
128
|
+
const targetTable = tableVariableName(field.references.table);
|
|
129
|
+
const referenceParts = [`() => ${targetTable}.${field.references.field}`];
|
|
130
|
+
const options: string[] = [];
|
|
131
|
+
if (field.references.onDelete) options.push(`onDelete: "${field.references.onDelete}"`);
|
|
132
|
+
if (field.references.onUpdate) options.push(`onUpdate: "${field.references.onUpdate}"`);
|
|
133
|
+
if (options.length > 0) {
|
|
134
|
+
referenceParts.push(`{ ${options.join(", ")} }`);
|
|
135
|
+
}
|
|
136
|
+
expression += `.references(${referenceParts.join(", ")})`;
|
|
137
|
+
}
|
|
138
|
+
|
|
139
|
+
return expression;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
function renderBaseColumn(field: FieldSpec) {
|
|
143
|
+
switch (field.logical.kind) {
|
|
144
|
+
case "id":
|
|
145
|
+
case "string":
|
|
146
|
+
case "text":
|
|
147
|
+
return `text("${field.storage.column}")`;
|
|
148
|
+
case "enum":
|
|
149
|
+
return `text("${field.storage.column}", { enum: [${field.logical.values
|
|
150
|
+
.map((value) => JSON.stringify(value))
|
|
151
|
+
.join(", ")}] })`;
|
|
152
|
+
case "json":
|
|
153
|
+
return `text("${field.storage.column}", { mode: "json" })`;
|
|
154
|
+
case "boolean":
|
|
155
|
+
return `integer("${field.storage.column}", { mode: "boolean" })`;
|
|
156
|
+
case "integer":
|
|
157
|
+
return `integer("${field.storage.column}", { mode: "number" })`;
|
|
158
|
+
case "number":
|
|
159
|
+
return `real("${field.storage.column}")`;
|
|
160
|
+
case "temporal.instant":
|
|
161
|
+
return `temporalInstantEpochMs("${field.storage.column}")`;
|
|
162
|
+
case "temporal.plainDate":
|
|
163
|
+
return `temporalPlainDateText("${field.storage.column}")`;
|
|
164
|
+
}
|
|
165
|
+
}
|
|
166
|
+
|
|
167
|
+
function renderDrizzleDefault(defaultValue: DefaultSpec | undefined) {
|
|
168
|
+
if (!defaultValue) return null;
|
|
169
|
+
|
|
170
|
+
switch (defaultValue.kind) {
|
|
171
|
+
case "generatedId":
|
|
172
|
+
return null;
|
|
173
|
+
case "now":
|
|
174
|
+
return "epochMsNow()";
|
|
175
|
+
case "literal":
|
|
176
|
+
return JSON.stringify(defaultValue.value);
|
|
177
|
+
}
|
|
178
|
+
}
|
package/src/index.ts
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
// Public barrel for @sedrino/db-schema. Re-exports, grouped by module:
// schema document types + validators (./types), migration authoring
// (./migration), planning (./planner), code generation (./drizzle, ./sqlite),
// the libsql apply runtime (./apply), project-layout helpers (./project), and
// schema-document utilities (./schema).
export {
  type DatabaseSchemaDocument,
  type DefaultSpec,
  type FieldReferenceSpec,
  type FieldSpec,
  type ForeignKeyAction,
  type IndexSpec,
  type LogicalTypeSpec,
  type SchemaValidationIssue,
  type StorageSpec,
  type TableSpec,
  type UniqueSpec,
  defaultSpecSchema,
  fieldReferenceSpecSchema,
  fieldSpecSchema,
  foreignKeyActionSchema,
  indexSpecSchema,
  logicalTypeSpecSchema,
  schemaDocumentSchema,
  storageSpecSchema,
  tableSpecSchema,
  uniqueSpecSchema,
} from "./types";
export {
  type MigrationDefinition,
  type MigrationMeta,
  type MigrationOperation,
  createMigration,
} from "./migration";
export {
  type PlannedMigration,
  applyOperationsToSchema,
  materializeSchema,
  planMigration,
} from "./planner";
export { compileSchemaToDrizzle } from "./drizzle";
export { compileSchemaToSqlite, renderSqliteMigration } from "./sqlite";
export {
  type ApplyMigrationsResult,
  type LibsqlConnectionOptions,
  applyMigrations,
  createLibsqlClient,
  getSchemaState,
  listAppliedMigrations,
} from "./apply";
export {
  type DbProjectLayout,
  loadMigrationDefinitionsFromDirectory,
  materializeProjectMigrations,
  resolveDbProjectLayout,
  writeDrizzleSchema,
  writeSchemaSnapshot,
} from "./project";
export {
  assertValidSchemaDocument,
  createEmptySchema,
  findField,
  findTable,
  parseSchemaDocument,
  schemaHash,
  validateSchemaDocument,
} from "./schema";
|