@vibeorm/migrate 1.0.0

@@ -0,0 +1,226 @@
+ /**
+  * Schema Printer — Schema IR → .prisma text
+  *
+  * Converts a Schema IR back into Prisma schema language text.
+  * Used by `db pull` to generate a .prisma file from the database.
+  */
+
+ import type {
+   Schema,
+   Model,
+   Field,
+   ScalarField,
+   EnumField,
+   RelationField,
+   Enum,
+   DefaultValue,
+ } from "@vibeorm/parser";
+
+ // ─── Helpers ──────────────────────────────────────────────────────
+
+ function indent(params: { text: string; level?: number }): string {
+   const spaces = " ".repeat(params.level ?? 1);
+   return `${spaces}${params.text}`;
+ }
+
+ function formatDefault(params: { def: DefaultValue }): string {
+   const { def } = params;
+   switch (def.kind) {
+     case "autoincrement":
+       return "@default(autoincrement())";
+     case "now":
+       return "@default(now())";
+     case "uuid":
+       return "@default(uuid())";
+     case "cuid":
+       return "@default(cuid())";
+     case "nanoid":
+       return "@default(nanoid())";
+     case "ulid":
+       return "@default(ulid())";
+     case "dbgenerated":
+       return `@default(dbgenerated("${def.value}"))`;
+     case "literal": {
+       const val = def.value;
+       if (typeof val === "string") return `@default("${val}")`;
+       if (typeof val === "boolean") return `@default(${val})`;
+       return `@default(${val})`;
+     }
+     default:
+       return "";
+   }
+ }
+
+ // ─── Field Printers ───────────────────────────────────────────────
+
+ function printScalarField(params: { field: ScalarField }): string {
+   const { field } = params;
+   const parts: string[] = [field.name];
+
+   // Type
+   let typeName = field.prismaType;
+   if (field.isList) typeName += "[]";
+   else if (!field.isRequired) typeName += "?";
+   parts.push(typeName);
+
+   // Attributes
+   if (field.isId) parts.push("@id");
+   if (field.isUnique) parts.push("@unique");
+   if (field.default) parts.push(formatDefault({ def: field.default }));
+   if (field.isUpdatedAt) parts.push("@updatedAt");
+   if (field.dbName !== field.name) parts.push(`@map("${field.dbName}")`);
+   if (field.nativeType) parts.push(`@db.${field.nativeType}`);
+
+   return indent({ text: parts.join(" ") });
+ }
+
+ function printEnumField(params: { field: EnumField }): string {
+   const { field } = params;
+   const parts: string[] = [field.name];
+
+   let typeName = field.enumName;
+   if (field.isList) typeName += "[]";
+   else if (!field.isRequired) typeName += "?";
+   parts.push(typeName);
+
+   if (field.isId) parts.push("@id");
+   if (field.isUnique) parts.push("@unique");
+   if (field.default) parts.push(formatDefault({ def: field.default }));
+   if (field.dbName !== field.name) parts.push(`@map("${field.dbName}")`);
+
+   return indent({ text: parts.join(" ") });
+ }
+
+ function printRelationField(params: { field: RelationField }): string {
+   const { field } = params;
+   const parts: string[] = [field.name];
+
+   // Type
+   let typeName = field.relatedModel;
+   if (field.isList) typeName += "[]";
+   else if (!field.isRequired) typeName += "?";
+   parts.push(typeName);
+
+   // @relation with fields/references if this side owns the FK
+   if (field.relation.isForeignKey && field.relation.fields.length > 0) {
+     const fieldsStr = field.relation.fields.map((f: string) => f).join(", ");
+     const refsStr = field.relation.references.map((f: string) => f).join(", ");
+     let relAttr = `@relation(fields: [${fieldsStr}], references: [${refsStr}]`;
+     if (field.relation.name) {
+       relAttr = `@relation("${field.relation.name}", fields: [${fieldsStr}], references: [${refsStr}]`;
+     }
+     relAttr += ")";
+     parts.push(relAttr);
+   } else if (field.relation.name) {
+     parts.push(`@relation("${field.relation.name}")`);
+   }
+
+   return indent({ text: parts.join(" ") });
+ }
+
+ function printField(params: { field: Field }): string {
+   switch (params.field.kind) {
+     case "scalar":
+       return printScalarField({ field: params.field });
+     case "enum":
+       return printEnumField({ field: params.field });
+     case "relation":
+       return printRelationField({ field: params.field });
+   }
+ }
+
+ // ─── Model Printer ────────────────────────────────────────────────
+
+ function printModel(params: { model: Model }): string {
+   const { model } = params;
+   const lines: string[] = [];
+
+   lines.push(`model ${model.name} {`);
+
+   // Fields
+   for (const field of model.fields) {
+     lines.push(printField({ field }));
+   }
+
+   // Block-level attributes
+   if (model.dbName !== model.name) {
+     lines.push("");
+     lines.push(indent({ text: `@@map("${model.dbName}")` }));
+   }
+
+   // Composite primary key
+   if (model.primaryKey.isComposite) {
+     const pkFields = model.primaryKey.fields.join(", ");
+     lines.push("");
+     lines.push(indent({ text: `@@id([${pkFields}])` }));
+   }
+
+   // Multi-column unique constraints
+   for (const uc of model.uniqueConstraints) {
+     if (uc.fields.length > 1) {
+       const fields = uc.fields.join(", ");
+       lines.push(indent({ text: `@@unique([${fields}])` }));
+     }
+   }
+
+   // Indexes
+   for (const idx of model.indexes) {
+     const fields = idx.fields.join(", ");
+     lines.push(indent({ text: `@@index([${fields}])` }));
+   }
+
+   lines.push("}");
+   return lines.join("\n");
+ }
+
+ // ─── Enum Printer ─────────────────────────────────────────────────
+
+ function printEnum(params: { enumDef: Enum }): string {
+   const { enumDef } = params;
+   const lines: string[] = [];
+
+   lines.push(`enum ${enumDef.name} {`);
+
+   for (const val of enumDef.values) {
+     let line = indent({ text: val.name });
+     if (val.dbName && val.dbName !== val.name) {
+       line += ` @map("${val.dbName}")`;
+     }
+     lines.push(line);
+   }
+
+   if (enumDef.dbName && enumDef.dbName !== enumDef.name) {
+     lines.push("");
+     lines.push(indent({ text: `@@map("${enumDef.dbName}")` }));
+   }
+
+   lines.push("}");
+   return lines.join("\n");
+ }
+
+ // ─── Main Entry Point ─────────────────────────────────────────────
+
+ export function printSchema(params: { schema: Schema }): string {
+   const { schema } = params;
+   const sections: string[] = [];
+
+   // Datasource block
+   sections.push(
+     `datasource db {\n` +
+       ` provider = "postgresql"\n` +
+       ` url = env("DATABASE_URL")\n` +
+       `}`
+   );
+
+   // Enums
+   for (const enumDef of schema.enums) {
+     sections.push(printEnum({ enumDef }));
+   }
+
+   // Models
+   for (const model of schema.models) {
+     sections.push(printModel({ model }));
+   }
+
+   return sections.join("\n\n") + "\n";
+ }
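
For orientation, here is a minimal usage sketch (not part of the package). It constructs a hypothetical IR fragment that fills in only the properties the printer above reads, so the object is cast to the parser's Schema type, which may require additional fields; the exact output whitespace may also differ from what is shown.

import type { Schema } from "@vibeorm/parser";

// Hypothetical IR fragment; `printSchema` from the module above is assumed to
// be in scope. Only fields read by printModel/printScalarField are populated.
const ir = {
  enums: [],
  models: [
    {
      name: "User",
      dbName: "users",
      fields: [
        {
          kind: "scalar",
          name: "id",
          dbName: "id",
          prismaType: "Int",
          isList: false,
          isRequired: true,
          isId: true,
          isUnique: false,
          isUpdatedAt: false,
          default: { kind: "autoincrement" },
        },
        {
          kind: "scalar",
          name: "email",
          dbName: "email",
          prismaType: "String",
          isList: false,
          isRequired: true,
          isId: false,
          isUnique: true,
          isUpdatedAt: false,
        },
      ],
      primaryKey: { isComposite: false, fields: ["id"] },
      uniqueConstraints: [],
      indexes: [],
    },
  ],
};

const text = printSchema({ schema: ir as unknown as Schema });
// text is roughly:
//
// datasource db {
//  provider = "postgresql"
//  url = env("DATABASE_URL")
// }
//
// model User {
//  id Int @id @default(autoincrement())
//  email String @unique
//
//  @@map("users")
// }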
@@ -0,0 +1,141 @@
+ /**
+  * Snapshot Management — Schema IR → JSON + Journal
+  *
+  * Handles reading/writing JSON snapshots of the Schema IR
+  * and maintaining the migration journal.
+  */
+
+ import type { Schema } from "@vibeorm/parser";
+ import { validateSchema, formatValidationErrors } from "@vibeorm/parser";
+ import { existsSync, mkdirSync, readFileSync, writeFileSync, readdirSync } from "fs";
+ import { join } from "path";
+
+ // ─── Types ────────────────────────────────────────────────────────
+
+ export type JournalEntry = {
+   idx: number;
+   timestamp: string;
+   name: string;
+   checksum: string;
+ };
+
+ export type Journal = {
+   version: number;
+   entries: JournalEntry[];
+ };
+
+ // ─── Snapshot I/O ─────────────────────────────────────────────────
+
+ export function saveSnapshot(params: {
+   schema: Schema;
+   migrationsDir: string;
+   timestamp: string;
+ }): void {
+   const { schema, migrationsDir, timestamp } = params;
+   const metaDir = join(migrationsDir, "meta");
+
+   if (!existsSync(metaDir)) {
+     mkdirSync(metaDir, { recursive: true });
+   }
+
+   const snapshotPath = join(metaDir, `${timestamp}_snapshot.json`);
+   writeFileSync(snapshotPath, JSON.stringify(schema, null, 2), "utf-8");
+ }
+
+ export function loadLatestSnapshot(params: { migrationsDir: string }): Schema | null {
+   const { migrationsDir } = params;
+   const metaDir = join(migrationsDir, "meta");
+
+   if (!existsSync(metaDir)) return null;
+
+   const files = readdirSync(metaDir)
+     .filter((f: string) => f.endsWith("_snapshot.json"))
+     .sort();
+
+   if (files.length === 0) return null;
+
+   const latestFile = files[files.length - 1]!;
+   const content = readFileSync(join(metaDir, latestFile), "utf-8");
+   const schema = JSON.parse(content) as Schema;
+
+   // Validate deserialized snapshot for integrity
+   const validation = validateSchema({ schema });
+   if (!validation.valid) {
+     console.warn(`Warning: Snapshot "${latestFile}" has validation issues:`);
+     console.warn(formatValidationErrors({ result: validation }));
+   }
+
+   return schema;
+ }
+
+ export function loadSnapshot(params: {
+   migrationsDir: string;
+   timestamp: string;
+ }): Schema | null {
+   const { migrationsDir, timestamp } = params;
+   const snapshotPath = join(migrationsDir, "meta", `${timestamp}_snapshot.json`);
+
+   if (!existsSync(snapshotPath)) return null;
+
+   const content = readFileSync(snapshotPath, "utf-8");
+   const schema = JSON.parse(content) as Schema;
+
+   // Validate deserialized snapshot for integrity
+   const validation = validateSchema({ schema });
+   if (!validation.valid) {
+     console.warn(`Warning: Snapshot "${timestamp}" has validation issues:`);
+     console.warn(formatValidationErrors({ result: validation }));
+   }
+
+   return schema;
+ }
+
+ // ─── Journal I/O ──────────────────────────────────────────────────
+
+ export function loadJournal(params: { migrationsDir: string }): Journal {
+   const { migrationsDir } = params;
+   const journalPath = join(migrationsDir, "meta", "_journal.json");
+
+   if (!existsSync(journalPath)) {
+     return { version: 1, entries: [] };
+   }
+
+   const content = readFileSync(journalPath, "utf-8");
+   return JSON.parse(content) as Journal;
+ }
+
+ export function saveJournal(params: {
+   migrationsDir: string;
+   journal: Journal;
+ }): void {
+   const { migrationsDir, journal } = params;
+   const metaDir = join(migrationsDir, "meta");
+
+   if (!existsSync(metaDir)) {
+     mkdirSync(metaDir, { recursive: true });
+   }
+
+   const journalPath = join(metaDir, "_journal.json");
+   writeFileSync(journalPath, JSON.stringify(journal, null, 2), "utf-8");
+ }
+
+ // ─── Timestamp Generation ─────────────────────────────────────────
+
+ export function generateTimestamp(): string {
+   const now = new Date();
+   const year = now.getFullYear();
+   const month = String(now.getMonth() + 1).padStart(2, "0");
+   const day = String(now.getDate()).padStart(2, "0");
+   const hours = String(now.getHours()).padStart(2, "0");
+   const minutes = String(now.getMinutes()).padStart(2, "0");
+   const seconds = String(now.getSeconds()).padStart(2, "0");
+   return `${year}${month}${day}${hours}${minutes}${seconds}`;
+ }
+
+ // ─── Checksum ─────────────────────────────────────────────────────
+
+ export function computeChecksum(params: { content: string }): string {
+   const hasher = new Bun.CryptoHasher("sha256");
+   hasher.update(params.content);
+   return `sha256:${hasher.digest("hex")}`;
+ }
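
For orientation, a sketch of how these helpers could fit together in a hypothetical migration-create step. It is not part of the package: the migrations directory, the `_add_users` name, the `.sql` file layout, and the idx assignment are all assumptions, and the parsed schema is assumed to come from @vibeorm/parser. Because computeChecksum uses the Bun global, this runs under Bun.

import type { Schema } from "@vibeorm/parser";
import { readFileSync } from "fs";
import { join } from "path";

declare const schema: Schema;                    // a parsed Schema, obtained elsewhere

const migrationsDir = "./migrations";            // hypothetical location
const timestamp = generateTimestamp();           // e.g. "20240115093042"
const migrationName = `${timestamp}_add_users`;  // hypothetical naming scheme

// 1. Persist the current Schema IR so a later diff step can compare against it.
saveSnapshot({ schema, migrationsDir, timestamp });

// 2. Append a journal entry, checksumming the generated SQL file (path assumed).
const sqlPath = join(migrationsDir, `${migrationName}.sql`);
const journal = loadJournal({ migrationsDir });
journal.entries.push({
  idx: journal.entries.length,                   // one plausible idx scheme
  timestamp,
  name: migrationName,
  checksum: computeChecksum({ content: readFileSync(sqlPath, "utf-8") }),
});
saveJournal({ migrationsDir, journal });

// Later, loadLatestSnapshot({ migrationsDir }) returns the most recent IR
// (or null on a fresh project) for comparison with the freshly parsed schema.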
@@ -0,0 +1,45 @@
+ /**
+  * SQL Utility Functions
+  *
+  * Shared SQL parsing/splitting utilities used by migration runner and CLI.
+  */
+
+ /**
+  * Split a SQL string into individual statements, handling DO $$ blocks correctly.
+  * DO $$ ... END $$; blocks contain semicolons that are NOT statement terminators.
+  */
+ export function splitSqlStatements(params: { sql: string }): string[] {
+   const { sql } = params;
+   const statements: string[] = [];
+   let current = "";
+   let inDollarBlock = false;
+
+   const lines = sql.split("\n");
+   for (const line of lines) {
+     const trimmed = line.trim();
+
+     if (trimmed.startsWith("DO $$") || trimmed.startsWith("DO $")) {
+       inDollarBlock = true;
+     }
+
+     current += (current ? "\n" : "") + line;
+
+     if (inDollarBlock && trimmed.startsWith("END $$;")) {
+       inDollarBlock = false;
+       statements.push(current.trim());
+       current = "";
+       continue;
+     }
+
+     if (!inDollarBlock && trimmed.endsWith(";")) {
+       const stmt = current.trim();
+       if (stmt) statements.push(stmt);
+       current = "";
+     }
+   }
+
+   const remainder = current.trim();
+   if (remainder) statements.push(remainder);
+
+   return statements.filter((s) => s.length > 0);
+ }
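
A quick illustration of the splitting behaviour (the SQL itself is made up, and splitSqlStatements from the module above is assumed to be in scope): ordinary statements split on their trailing semicolons, while the semicolon inside the DO $$ block does not end a statement.

const sql = `
CREATE TABLE "users" ("id" serial PRIMARY KEY);
DO $$
BEGIN
  ALTER TABLE "users" ADD COLUMN "email" text;
END $$;
CREATE INDEX "users_email_idx" ON "users" ("email");
`;

const statements = splitSqlStatements({ sql });
// statements.length === 3:
//   [0] the CREATE TABLE statement
//   [1] the whole DO $$ ... END $$; block, kept as one statement
//   [2] the CREATE INDEX statement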
package/src/types.ts ADDED
@@ -0,0 +1,13 @@
+ /**
+  * Shared types for the @vibeorm/migrate package.
+  */
+
+ /**
+  * Generic SQL executor function.
+  * Accepts a SQL string and optional parameter values, returns rows.
+  * This decouples all modules from bun:sql for testability.
+  */
+ export type SqlExecutor = (params: {
+   text: string;
+   values?: unknown[];
+ }) => Promise<Record<string, unknown>[]>;
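
As a sketch of the testability angle the comment mentions, here is a hypothetical in-memory implementation of SqlExecutor (the type defined above, assumed in scope) that records statements instead of touching a database. A production executor would instead forward params.text and params.values to the real driver as a parameterized query; this stub is illustrative only.

const executed: { text: string; values?: unknown[] }[] = [];

const stubExecutor: SqlExecutor = async (params) => {
  executed.push(params);   // record every statement so tests can assert on it
  return [];               // no rows; tests could return canned rows instead
};

await stubExecutor({ text: "SELECT 1 AS ok", values: [] });
// executed[0].text === "SELECT 1 AS ok"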