vibeorm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +96 -0
- package/package.json +40 -0
- package/src/commands/db-execute.ts +44 -0
- package/src/commands/db-pull.ts +57 -0
- package/src/commands/db-push.ts +120 -0
- package/src/commands/db-reset.ts +110 -0
- package/src/commands/db-seed.ts +48 -0
- package/src/commands/generate.ts +64 -0
- package/src/commands/migrate-apply.ts +119 -0
- package/src/commands/migrate-generate.ts +134 -0
- package/src/commands/migrate-resolve.ts +89 -0
- package/src/commands/migrate-status.ts +101 -0
- package/src/config.ts +107 -0
- package/src/connection.ts +48 -0
- package/src/index.ts +110 -0
- package/src/schema-loader.ts +50 -0
package/README.md
ADDED
@@ -0,0 +1,96 @@
# vibeorm

CLI for VibeORM — generate clients, run migrations, and manage your PostgreSQL database.

## Installation

```bash
bun add -d vibeorm
```

## Commands

```bash
bunx vibeorm <command> [options]
```

### Code Generation

| Command | Description |
|---------|-------------|
| `generate` | Parse `.prisma` schema and generate TypeScript client files |

### Database Management

| Command | Description |
|---------|-------------|
| `db push` | Diff schema against database and apply changes |
| `db pull` | Introspect database and write `.prisma` schema |
| `db reset` | Drop schema, re-apply migrations, optionally seed |
| `db seed` | Run configured seed script |
| `db execute` | Execute a raw SQL file |

Options for `db push`: `--force`, `--accept-data-loss`, `--dry-run`
Options for `db pull`: `--print`
Options for `db reset`: `--force` (required), `--skip-seed`
Options for `db execute`: `--file <path>`

### Migrations

| Command | Description |
|---------|-------------|
| `migrate generate` | Create migration SQL from schema diff |
| `migrate apply` | Apply pending migrations |
| `migrate status` | Show applied vs pending migrations |
| `migrate resolve` | Repair migration state |

Options for `migrate generate`: `--name <name>`, `--dry-run`
Options for `migrate apply`: `--dry-run`
Options for `migrate resolve`: `--applied <name>`, `--rolled-back <name>`

### Global Options

All commands accept:

- `--schema <path>` — path to `.prisma` schema file or directory
- `--output <path>` — output directory for generated files
- `--migrations <path>` — migrations directory
- `--seed <path>` — seed script path
- `--url <postgres-url>` — database connection string

## Configuration

Config priority (last wins):

1. Defaults (`./prisma/schema.prisma`, `./generated/vibeorm`, `./migrations`)
2. `package.json` `vibeorm` key
3. `vibeorm.config.ts`
4. CLI flags

### package.json

```json
{
  "vibeorm": {
    "schema": "./prisma/schema.prisma",
    "output": "./generated/vibeorm",
    "migrations": "./migrations",
    "seed": "./prisma/seed.ts"
  }
}
```

### vibeorm.config.ts

```ts
export const config = {
  schema: "./prisma/schema.prisma",
  output: "./generated/vibeorm",
  migrations: "./migrations",
  seed: "./prisma/seed.ts",
};
```

## License

[MIT](../../LICENSE)
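The README above documents each command and flag in isolation. As an illustrative sketch only, not part of the published package files, a typical session combining them might look like this; the database URL, schema path, and migration name are placeholder assumptions:

```bash
# connection.ts reads DATABASE_URL when --url is not passed (placeholder URL below)
export DATABASE_URL="postgres://localhost:5432/myapp"

# Generate the TypeScript client from the configured schema
bunx vibeorm generate

# Preview what db push would execute, then create a tracked migration instead
bunx vibeorm db push --dry-run
bunx vibeorm migrate generate --name init
bunx vibeorm migrate apply

# Inspect state and run the configured seed script
bunx vibeorm migrate status
bunx vibeorm db seed
```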
package/package.json
ADDED
@@ -0,0 +1,40 @@
{
  "name": "vibeorm",
  "version": "0.1.0",
  "description": "CLI for VibeORM — generate clients, run migrations, introspect databases",
  "license": "MIT",
  "keywords": ["orm", "cli", "prisma", "postgresql", "bun", "typescript", "migrations"],
  "type": "module",
  "bin": {
    "vibeorm": "./src/index.ts"
  },
  "exports": {
    ".": {
      "default": "./src/index.ts",
      "types": "./src/index.ts"
    }
  },
  "files": ["src"],
  "repository": {
    "type": "git",
    "url": "https://github.com/vibeorm/vibeorm.git",
    "directory": "packages/cli"
  },
  "homepage": "https://github.com/vibeorm/vibeorm",
  "bugs": {
    "url": "https://github.com/vibeorm/vibeorm/issues"
  },
  "publishConfig": {
    "access": "public"
  },
  "engines": {
    "bun": ">=1.1.0"
  },
  "dependencies": {
    "@vibeorm/parser": "0.1.0",
    "@vibeorm/generator": "0.1.0",
    "@vibeorm/migrate": "0.1.0",
    "@vibeorm/runtime": "0.1.0",
    "@vibeorm/adapter-bun": "0.1.0"
  }
}
package/src/commands/db-execute.ts
ADDED
@@ -0,0 +1,44 @@
/**
 * vibeorm db execute
 *
 * Execute a raw .sql file against the database.
 */

import { existsSync, readFileSync } from "fs";
import type { VibeOrmConfig } from "../config.ts";
import { createConnection } from "../connection.ts";

export async function runDbExecute(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { flags } = params;

  const filePath = flags.file;
  if (!filePath) {
    console.error("Error: --file <path> is required.");
    console.error("Usage: vibeorm db execute --file ./scripts/cleanup.sql");
    process.exit(1);
  }

  if (!existsSync(filePath)) {
    console.error(`Error: SQL file not found at ${filePath}`);
    process.exit(1);
  }

  console.log("VibeORM DB Execute");
  console.log(`  File: ${filePath}`);
  console.log("");

  const sql = readFileSync(filePath, "utf-8");
  const conn = createConnection({ url: flags.url });

  try {
    console.log("Executing SQL...");
    const result = await conn.executor({ text: sql });
    console.log(`Executed successfully. Rows returned: ${result.length}`);
    console.log("Done!");
  } finally {
    await conn.close();
  }
}
package/src/commands/db-pull.ts
ADDED
@@ -0,0 +1,57 @@
/**
 * vibeorm db pull
 *
 * Introspect an existing PostgreSQL database and generate a .prisma schema file.
 */

import { introspect, printSchema } from "@vibeorm/migrate";
import { existsSync, mkdirSync, writeFileSync } from "fs";
import { dirname } from "path";
import type { VibeOrmConfig } from "../config.ts";
import { createConnection } from "../connection.ts";

export async function runDbPull(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config, flags } = params;

  const printToStdout = flags.print === "true";
  const outputPath = flags.schema ?? config.schema;

  console.log("VibeORM DB Pull");
  console.log("");

  // Connect to database
  const conn = createConnection({ url: flags.url });

  try {
    // Introspect
    console.log("Introspecting database...");
    const schema = await introspect({ executor: conn.executor });

    console.log(`Found ${schema.models.length} models, ${schema.enums.length} enums`);

    // Convert to .prisma text
    const prismaText = printSchema({ schema });

    if (printToStdout) {
      console.log("");
      console.log(prismaText);
      return;
    }

    // Write to file
    const dir = dirname(outputPath);
    if (!existsSync(dir)) {
      mkdirSync(dir, { recursive: true });
    }

    writeFileSync(outputPath, prismaText, "utf-8");
    console.log("");
    console.log(`Schema written to ${outputPath}`);
    console.log("Done!");
  } finally {
    await conn.close();
  }
}
package/src/commands/db-push.ts
ADDED
@@ -0,0 +1,120 @@
/**
 * vibeorm db push
 *
 * Push schema directly to database (prototyping, no migration files).
 *
 * Flow:
 *   1. Parse .prisma schema → IR
 *   2. Connect to PostgreSQL
 *   3. Introspect current database state → IR
 *   4. Diff the two IRs → DDL operations
 *   5. Classify as safe/destructive
 *   6. If destructive, require --force or --accept-data-loss
 *   7. Execute DDL in a transaction
 */

import { parsePrismaSchema, validateSchema, formatValidationErrors } from "@vibeorm/parser";
import { introspect, diffSchemas, buildDDL, splitSqlStatements } from "@vibeorm/migrate";
import type { DiffOperation } from "@vibeorm/migrate";
import type { VibeOrmConfig } from "../config.ts";
import { createConnection } from "../connection.ts";
import { loadSchemaSource } from "../schema-loader.ts";

export async function runDbPush(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config, flags } = params;

  const force = flags.force === "true" || flags["accept-data-loss"] === "true";
  const dryRun = flags["dry-run"] === "true";

  console.log("VibeORM DB Push");
  console.log(`  Schema: ${config.schema}`);
  console.log("");

  // 1. Parse schema (file or directory)
  const source = loadSchemaSource({ schemaPath: config.schema });
  const currentSchema = parsePrismaSchema({ source });
  console.log(`Parsed ${currentSchema.models.length} models, ${currentSchema.enums.length} enums`);

  // Validate schema
  const validation = validateSchema({ schema: currentSchema });
  if (!validation.valid) {
    console.error("");
    console.error(formatValidationErrors({ result: validation }));
    process.exit(1);
  }
  if (validation.warnings.length > 0) {
    console.warn("");
    console.warn(formatValidationErrors({ result: validation }));
    console.warn("");
  }

  // 2. Connect to database
  const conn = createConnection({ url: flags.url });

  try {
    // 3. Introspect database
    console.log("Introspecting database...");
    const dbSchema = await introspect({ executor: conn.executor });
    console.log(`Found ${dbSchema.models.length} existing tables`);

    // 4. Diff schemas
    const operations = diffSchemas({ previous: dbSchema, current: currentSchema });

    if (operations.length === 0) {
      console.log("");
      console.log("Database is already in sync with the schema. No changes needed.");
      return;
    }

    // 5. Classify operations
    const safeOps = operations.filter((op: DiffOperation) => !op.isDestructive);
    const destructiveOps = operations.filter((op: DiffOperation) => op.isDestructive);

    console.log("");
    console.log(`Changes detected: ${safeOps.length} safe, ${destructiveOps.length} destructive`);
    console.log("");

    // Print all operations
    for (const op of operations) {
      const prefix = op.isDestructive ? "[DESTRUCTIVE]" : "[SAFE]";
      console.log(`  ${prefix} ${op.description}`);
    }

    // 6. Check for destructive operations
    if (destructiveOps.length > 0 && !force) {
      console.log("");
      console.error("Error: Destructive changes detected.");
      console.error("Use --force or --accept-data-loss to apply these changes.");
      process.exit(1);
    }

    // 7. Generate and execute DDL
    // Each operation's sql may contain multiple statements (e.g. join tables).
    // We need to split them carefully — DO $$ blocks contain semicolons internally.
    const allStatements: string[] = [];
    for (const op of operations) {
      splitSqlStatements({ sql: op.sql }).forEach((stmt: string) => allStatements.push(stmt));
    }

    if (dryRun) {
      console.log("");
      console.log("=== DRY RUN — SQL that would be executed ===");
      console.log("");
      console.log(allStatements.join("\n\n"));
      return;
    }

    console.log("");
    console.log("Applying changes...");
    await conn.executeInTransaction({ statements: allStatements });

    console.log("");
    console.log(`Applied ${operations.length} changes successfully.`);
    console.log("Database is now in sync with the schema.");
  } finally {
    await conn.close();
  }
}
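The flow comment at the top of db-push.ts describes a gate on destructive operations: when the diff contains destructive DDL, the command exits unless --force or --accept-data-loss is set. A hedged usage sketch, with a placeholder connection URL:

```bash
# Print the DDL that would run, including operations labelled [DESTRUCTIVE]
bunx vibeorm db push --dry-run --url postgres://localhost:5432/myapp

# Re-run and explicitly accept data loss (for example, a dropped column)
bunx vibeorm db push --accept-data-loss --url postgres://localhost:5432/myapp
```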
package/src/commands/db-reset.ts
ADDED
@@ -0,0 +1,110 @@
/**
 * vibeorm db reset
 *
 * Drop all tables + re-apply all migrations + seed.
 */

import {
  ensureMigrationTable,
  getAppliedMigrations,
  applyMigration,
  loadJournal,
  computeChecksum,
} from "@vibeorm/migrate";
import { existsSync, readFileSync } from "fs";
import { join } from "path";
import type { VibeOrmConfig } from "../config.ts";
import { createConnection } from "../connection.ts";

export async function runDbReset(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config, flags } = params;

  const force = flags.force === "true";
  const skipSeed = flags["skip-seed"] === "true";

  if (!force) {
    console.log("WARNING: This will drop all tables in the public schema and re-apply all migrations.");
    console.log("Use --force to skip this confirmation.");
    console.log("");
    console.error("Aborted. Use --force to proceed.");
    process.exit(1);
  }

  console.log("VibeORM DB Reset");
  console.log("");

  const conn = createConnection({ url: flags.url });

  try {
    // 1. Drop everything in public schema
    console.log("Dropping all tables and types...");
    await conn.executor({ text: "DROP SCHEMA public CASCADE;" });
    await conn.executor({ text: "CREATE SCHEMA public;" });
    console.log("Schema dropped and recreated.");

    // 2. Re-apply all migrations
    console.log("");
    console.log("Re-applying migrations...");

    await ensureMigrationTable({ executor: conn.executor });

    const journal = loadJournal({ migrationsDir: config.migrations });

    if (journal.entries.length === 0) {
      console.log("No migrations found to apply.");
    } else {
      for (const entry of journal.entries) {
        const migrationDir = join(config.migrations, `${entry.timestamp}_${entry.name}`);
        const sqlPath = join(migrationDir, "migration.sql");

        if (!existsSync(sqlPath)) {
          console.error(`Error: Migration file not found: ${sqlPath}`);
          process.exit(1);
        }

        const sql = readFileSync(sqlPath, "utf-8");
        const checksum = computeChecksum({ content: sql });

        console.log(`  Applying ${entry.timestamp}_${entry.name}...`);
        const result = await applyMigration({
          executor: conn.executor,
          migrationName: `${entry.timestamp}_${entry.name}`,
          sql,
          checksum,
        });
        console.log(`    Done in ${result.executionTime}ms`);
      }

      console.log(`Applied ${journal.entries.length} migrations.`);
    }

    // 3. Run seed
    if (!skipSeed && config.seed) {
      console.log("");
      console.log("Running seed...");

      if (!existsSync(config.seed)) {
        console.warn(`Warning: Seed file not found at ${config.seed}. Skipping.`);
      } else {
        const proc = Bun.spawn(["bun", "run", config.seed], {
          stdout: "inherit",
          stderr: "inherit",
        });
        const exitCode = await proc.exited;
        if (exitCode !== 0) {
          console.error(`Seed script exited with code ${exitCode}`);
          process.exit(1);
        }
        console.log("Seed completed.");
      }
    }

    console.log("");
    console.log("Database reset complete.");
  } finally {
    await conn.close();
  }
}
package/src/commands/db-seed.ts
ADDED
@@ -0,0 +1,48 @@
/**
 * vibeorm db seed
 *
 * Run a user-defined seed script.
 */

import { existsSync } from "fs";
import type { VibeOrmConfig } from "../config.ts";

export async function runDbSeed(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config } = params;

  console.log("VibeORM DB Seed");
  console.log("");

  // Resolve seed path
  const seedPath = config.seed;
  if (!seedPath) {
    console.error("Error: No seed script configured.");
    console.error("Set 'seed' in vibeorm.config.ts, package.json vibeorm.seed, or use --seed <path>.");
    process.exit(1);
  }

  if (!existsSync(seedPath)) {
    console.error(`Error: Seed file not found at ${seedPath}`);
    process.exit(1);
  }

  console.log(`Running seed: ${seedPath}`);
  console.log("");

  const proc = Bun.spawn(["bun", "run", seedPath], {
    stdout: "inherit",
    stderr: "inherit",
  });

  const exitCode = await proc.exited;
  if (exitCode !== 0) {
    console.error(`Seed script exited with code ${exitCode}`);
    process.exit(1);
  }

  console.log("");
  console.log("Seed completed.");
}
package/src/commands/generate.ts
ADDED
@@ -0,0 +1,64 @@
/**
 * vibeorm generate
 *
 * Parse .prisma schema → generate TypeScript client files.
 */

import { parsePrismaSchema, validateSchema, formatValidationErrors } from "@vibeorm/parser";
import { generate } from "@vibeorm/generator";
import { join } from "path";
import { existsSync, mkdirSync, writeFileSync } from "fs";
import type { VibeOrmConfig } from "../config.ts";
import { loadSchemaSource } from "../schema-loader.ts";

export async function runGenerate(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config } = params;

  console.log("VibeORM Generate");
  console.log(`  Schema: ${config.schema}`);
  console.log(`  Output: ${config.output}`);
  console.log("");

  // Read schema (file or directory)
  const source = loadSchemaSource({ schemaPath: config.schema });

  // Parse
  console.log("Parsing schema...");
  const schema = parsePrismaSchema({ source });
  console.log(`  Found ${schema.models.length} models, ${schema.enums.length} enums`);

  // Validate
  const validation = validateSchema({ schema });
  if (!validation.valid) {
    console.error("");
    console.error(formatValidationErrors({ result: validation }));
    process.exit(1);
  }
  if (validation.warnings.length > 0) {
    console.warn("");
    console.warn(formatValidationErrors({ result: validation }));
    console.warn("");
  }

  // Generate
  console.log("Generating client...");
  const files = generate({ schema });

  // Write files
  if (!existsSync(config.output)) {
    mkdirSync(config.output, { recursive: true });
  }

  for (const file of files) {
    const filePath = join(config.output, file.filename);
    writeFileSync(filePath, file.content, "utf-8");
    console.log(`  Written: ${file.filename}`);
  }

  console.log("");
  console.log(`Generated ${files.length} files in ${config.output}`);
  console.log("Done!");
}
package/src/commands/migrate-apply.ts
ADDED
@@ -0,0 +1,119 @@
/**
 * vibeorm migrate apply
 *
 * Apply all pending migrations to the database.
 */

import {
  ensureMigrationTable,
  getAppliedMigrations,
  applyMigration,
  loadJournal,
  computeChecksum,
} from "@vibeorm/migrate";
import type { AppliedMigration, JournalEntry } from "@vibeorm/migrate";
import { existsSync, readFileSync } from "fs";
import { join } from "path";
import type { VibeOrmConfig } from "../config.ts";
import { createConnection } from "../connection.ts";

export async function runMigrateApply(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config, flags } = params;
  const dryRun = flags["dry-run"] === "true";

  console.log("VibeORM Migrate Apply");
  console.log(`  Migrations: ${config.migrations}`);
  console.log("");

  // 1. Load journal
  const journal = loadJournal({ migrationsDir: config.migrations });
  if (journal.entries.length === 0) {
    console.log("No migrations found. Run 'vibeorm migrate generate' first.");
    return;
  }

  // 2. Connect to database
  const conn = createConnection({ url: flags.url });

  try {
    // 3. Ensure migration table
    await ensureMigrationTable({ executor: conn.executor });

    // 4. Get applied migrations
    const applied = await getAppliedMigrations({ executor: conn.executor });
    const appliedSet = new Map(applied.map((m: AppliedMigration) => [m.migrationName, m]));

    // 5. Find pending migrations
    const pending: JournalEntry[] = [];
    for (const entry of journal.entries) {
      const migrationName = `${entry.timestamp}_${entry.name}`;
      const appliedMigration = appliedSet.get(migrationName);

      if (appliedMigration) {
        // Check for checksum mismatch
        const sqlPath = join(config.migrations, migrationName, "migration.sql");
        if (existsSync(sqlPath)) {
          const sql = readFileSync(sqlPath, "utf-8");
          const currentChecksum = computeChecksum({ content: sql });
          if (currentChecksum !== appliedMigration.checksum) {
            console.warn(`  WARNING: Checksum mismatch for ${migrationName}`);
            console.warn(`    Expected: ${appliedMigration.checksum}`);
            console.warn(`    Current: ${currentChecksum}`);
            console.warn("  The migration file has been modified after being applied.");
          }
        }
        continue;
      }

      pending.push(entry);
    }

    if (pending.length === 0) {
      console.log("All migrations are already applied. Database is up to date.");
      return;
    }

    console.log(`Found ${pending.length} pending migration(s):`);
    for (const entry of pending) {
      console.log(`  - ${entry.timestamp}_${entry.name}`);
    }

    if (dryRun) {
      console.log("");
      console.log("DRY RUN — No changes applied.");
      return;
    }

    // 6. Apply pending migrations in order
    console.log("");
    for (const entry of pending) {
      const migrationName = `${entry.timestamp}_${entry.name}`;
      const sqlPath = join(config.migrations, migrationName, "migration.sql");

      if (!existsSync(sqlPath)) {
        console.error(`Error: Migration file not found: ${sqlPath}`);
        process.exit(1);
      }

      const sql = readFileSync(sqlPath, "utf-8");
      const checksum = computeChecksum({ content: sql });

      console.log(`Applying ${migrationName}...`);
      const result = await applyMigration({
        executor: conn.executor,
        migrationName,
        sql,
        checksum,
      });
      console.log(`  Done in ${result.executionTime}ms`);
    }

    console.log("");
    console.log(`Applied ${pending.length} migration(s) successfully.`);
  } finally {
    await conn.close();
  }
}
package/src/commands/migrate-generate.ts
ADDED
@@ -0,0 +1,134 @@
/**
 * vibeorm migrate generate
 *
 * Diff the current .prisma schema against the last snapshot
 * and generate a migration .sql file. Does NOT require a database connection.
 */

import { parsePrismaSchema, validateSchema, formatValidationErrors } from "@vibeorm/parser";
import {
  diffSchemas,
  buildDDL,
  loadLatestSnapshot,
  saveSnapshot,
  loadJournal,
  saveJournal,
  generateTimestamp,
  computeChecksum,
} from "@vibeorm/migrate";
import type { DiffOperation, Journal, JournalEntry } from "@vibeorm/migrate";
import { existsSync, mkdirSync, writeFileSync } from "fs";
import { join } from "path";
import type { VibeOrmConfig } from "../config.ts";
import { loadSchemaSource } from "../schema-loader.ts";

export async function runMigrateGenerate(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config, flags } = params;

  const migrationName = flags.name ?? "migration";
  const dryRun = flags["dry-run"] === "true";

  console.log("VibeORM Migrate Generate");
  console.log(`  Schema: ${config.schema}`);
  console.log(`  Migrations: ${config.migrations}`);
  console.log("");

  // 1. Parse current schema (file or directory)
  const source = loadSchemaSource({ schemaPath: config.schema });
  const currentSchema = parsePrismaSchema({ source });
  console.log(`Parsed ${currentSchema.models.length} models, ${currentSchema.enums.length} enums`);

  // Validate schema
  const validation = validateSchema({ schema: currentSchema });
  if (!validation.valid) {
    console.error("");
    console.error(formatValidationErrors({ result: validation }));
    process.exit(1);
  }
  if (validation.warnings.length > 0) {
    console.warn("");
    console.warn(formatValidationErrors({ result: validation }));
    console.warn("");
  }

  // 2. Load latest snapshot (previous state)
  const previousSchema = loadLatestSnapshot({ migrationsDir: config.migrations });
  const emptySchema = { models: [], enums: [] };
  const prevSchema = previousSchema ?? emptySchema;

  if (!previousSchema) {
    console.log("No previous snapshot found. This will be the initial migration.");
  }

  // 3. Diff schemas
  const operations = diffSchemas({ previous: prevSchema, current: currentSchema });

  if (operations.length === 0) {
    console.log("");
    console.log("No changes detected. Schema is up to date.");
    return;
  }

  // 4. Generate SQL
  let migrationSql: string;
  if (!previousSchema) {
    // First migration: use buildDDL for a clean full schema creation
    const ddl = buildDDL({ schema: currentSchema });
    migrationSql = ddl.statements.join("\n\n");
  } else {
    // Incremental migration: use diff operations
    migrationSql = operations.map((op: DiffOperation) => op.sql).join("\n\n");
  }

  // Print summary
  console.log("");
  console.log("Changes:");
  for (const op of operations) {
    const prefix = op.isDestructive ? "[!]" : "[+]";
    console.log(`  ${prefix} ${op.description}`);
  }

  if (dryRun) {
    console.log("");
    console.log("=== DRY RUN — SQL that would be generated ===");
    console.log("");
    console.log(migrationSql);
    return;
  }

  // 5. Create migration files
  const timestamp = generateTimestamp();
  const dirName = `${timestamp}_${migrationName}`;
  const migrationDir = join(config.migrations, dirName);
  const sqlPath = join(migrationDir, "migration.sql");

  mkdirSync(migrationDir, { recursive: true });
  writeFileSync(sqlPath, migrationSql, "utf-8");

  // 6. Save snapshot
  saveSnapshot({ schema: currentSchema, migrationsDir: config.migrations, timestamp });

  // 7. Update journal
  const checksum = computeChecksum({ content: migrationSql });
  const journal = loadJournal({ migrationsDir: config.migrations });

  const entry: JournalEntry = {
    idx: journal.entries.length,
    timestamp,
    name: migrationName,
    checksum,
  };

  journal.entries.push(entry);
  saveJournal({ migrationsDir: config.migrations, journal });

  console.log("");
  console.log(`Created migration: ${dirName}`);
  console.log(`  SQL: ${sqlPath}`);
  console.log("");
  console.log("Generated SQL:");
  console.log(migrationSql);
}
package/src/commands/migrate-resolve.ts
ADDED
@@ -0,0 +1,89 @@
/**
 * vibeorm migrate resolve
 *
 * Repair tool for when migrations get into a bad state.
 *
 *   --applied <name>       Mark a migration as already applied (without running it)
 *   --rolled-back <name>   Remove a migration record from tracking table
 */

import {
  ensureMigrationTable,
  markMigrationApplied,
  removeMigrationRecord,
  loadJournal,
  computeChecksum,
} from "@vibeorm/migrate";
import { existsSync, readFileSync } from "fs";
import { join } from "path";
import type { VibeOrmConfig } from "../config.ts";
import { createConnection } from "../connection.ts";

export async function runMigrateResolve(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config, flags } = params;

  const appliedName = flags.applied;
  const rolledBackName = flags["rolled-back"];

  if (!appliedName && !rolledBackName) {
    console.error("Error: Specify either --applied <name> or --rolled-back <name>");
    console.error("");
    console.error("Usage:");
    console.error("  vibeorm migrate resolve --applied 20250206140000_init");
    console.error("  vibeorm migrate resolve --rolled-back 20250206140000_init");
    process.exit(1);
  }

  if (appliedName && rolledBackName) {
    console.error("Error: Cannot use both --applied and --rolled-back at the same time.");
    process.exit(1);
  }

  console.log("VibeORM Migrate Resolve");
  console.log("");

  const conn = createConnection({ url: flags.url });

  try {
    await ensureMigrationTable({ executor: conn.executor });

    if (appliedName) {
      // Mark as applied
      const sqlPath = join(config.migrations, appliedName, "migration.sql");
      let checksum = "sha256:manual";

      if (existsSync(sqlPath)) {
        const sql = readFileSync(sqlPath, "utf-8");
        checksum = computeChecksum({ content: sql });
      } else {
        console.warn(`Warning: Migration file not found at ${sqlPath}`);
        console.warn("Using placeholder checksum.");
      }

      await markMigrationApplied({
        executor: conn.executor,
        migrationName: appliedName,
        checksum,
      });

      console.log(`Marked "${appliedName}" as applied.`);
    }

    if (rolledBackName) {
      // Remove from tracking
      await removeMigrationRecord({
        executor: conn.executor,
        migrationName: rolledBackName,
      });

      console.log(`Removed "${rolledBackName}" from migration tracking.`);
    }

    console.log("Done!");
  } finally {
    await conn.close();
  }
}
package/src/commands/migrate-status.ts
ADDED
@@ -0,0 +1,101 @@
/**
 * vibeorm migrate status
 *
 * Show which migrations are applied and which are pending.
 */

import {
  ensureMigrationTable,
  getAppliedMigrations,
  loadJournal,
  computeChecksum,
} from "@vibeorm/migrate";
import type { AppliedMigration } from "@vibeorm/migrate";
import { existsSync, readFileSync } from "fs";
import { join } from "path";
import type { VibeOrmConfig } from "../config.ts";
import { createConnection } from "../connection.ts";

export async function runMigrateStatus(params: {
  config: VibeOrmConfig;
  flags: Record<string, string>;
}): Promise<void> {
  const { config, flags } = params;

  console.log("VibeORM Migrate Status");
  console.log(`  Migrations: ${config.migrations}`);
  console.log("");

  // Load journal
  const journal = loadJournal({ migrationsDir: config.migrations });

  if (journal.entries.length === 0) {
    console.log("No migrations found.");
    return;
  }

  // Connect to database
  const conn = createConnection({ url: flags.url });

  try {
    await ensureMigrationTable({ executor: conn.executor });
    const applied = await getAppliedMigrations({ executor: conn.executor });
    const appliedMap = new Map(applied.map((m: AppliedMigration) => [m.migrationName, m]));

    // Print header
    const nameWidth = 45;
    const statusWidth = 15;
    const dateWidth = 22;

    console.log(
      "Migration".padEnd(nameWidth) +
      "Status".padEnd(statusWidth) +
      "Applied At"
    );
    console.log("-".repeat(nameWidth + statusWidth + dateWidth));

    // Print each migration
    for (const entry of journal.entries) {
      const migrationName = `${entry.timestamp}_${entry.name}`;
      const appliedMigration = appliedMap.get(migrationName);

      let status: string;
      let appliedAt: string;

      if (appliedMigration) {
        status = "Applied";
        appliedAt = appliedMigration.appliedAt.toISOString().replace("T", " ").slice(0, 19);

        // Check for checksum mismatch
        const sqlPath = join(config.migrations, migrationName, "migration.sql");
        if (existsSync(sqlPath)) {
          const sql = readFileSync(sqlPath, "utf-8");
          const currentChecksum = computeChecksum({ content: sql });
          if (currentChecksum !== appliedMigration.checksum) {
            status = "Modified!";
          }
        }
      } else {
        status = "Pending";
        appliedAt = "-";
      }

      console.log(
        migrationName.padEnd(nameWidth) +
        status.padEnd(statusWidth) +
        appliedAt
      );
    }

    // Summary
    const pendingCount = journal.entries.filter(
      (e) => !appliedMap.has(`${e.timestamp}_${e.name}`)
    ).length;
    const appliedCount = journal.entries.length - pendingCount;

    console.log("");
    console.log(`Total: ${journal.entries.length} migrations (${appliedCount} applied, ${pendingCount} pending)`);
  } finally {
    await conn.close();
  }
}
package/src/config.ts
ADDED
@@ -0,0 +1,107 @@
/**
 * Unified config resolution for the VibeORM CLI.
 *
 * Priority (later overrides earlier):
 *   1. Defaults
 *   2. package.json → vibeorm key
 *   3. vibeorm.config.ts file (if exists)
 *   4. CLI flags (--schema, --output, etc.)
 */

import { resolve } from "path";
import { existsSync, readFileSync } from "fs";

export type VibeOrmConfig = {
  schema: string;
  output: string;
  migrations: string;
  seed?: string;
};

const DEFAULTS: VibeOrmConfig = {
  schema: "./prisma/schema.prisma",
  output: "./generated/vibeorm",
  migrations: "./migrations",
};

export function parseFlags(params: { args: string[] }): Record<string, string> {
  const { args } = params;
  const flags: Record<string, string> = {};
  for (let i = 0; i < args.length; i++) {
    const arg = args[i]!;
    if (arg.startsWith("--") && args[i + 1] && !args[i + 1]!.startsWith("--")) {
      flags[arg.slice(2)] = args[i + 1]!;
      i++;
    } else if (arg.startsWith("--")) {
      // Boolean flag like --force, --dry-run
      flags[arg.slice(2)] = "true";
    }
  }
  return flags;
}

function loadPackageJsonConfig(): Partial<VibeOrmConfig> {
  const pkgJsonPath = resolve(process.cwd(), "package.json");
  if (!existsSync(pkgJsonPath)) return {};

  try {
    const pkgJson = JSON.parse(readFileSync(pkgJsonPath, "utf-8"));
    if (pkgJson.vibeorm && typeof pkgJson.vibeorm === "object") {
      return pkgJson.vibeorm as Partial<VibeOrmConfig>;
    }
  } catch {
    // Ignore parse errors
  }
  return {};
}

async function loadConfigFile(): Promise<Partial<VibeOrmConfig>> {
  const configPath = resolve(process.cwd(), "vibeorm.config.ts");
  if (!existsSync(configPath)) return {};

  try {
    const mod = await import(configPath);
    const config = mod.default ?? mod;
    if (config && typeof config === "object") {
      return config as Partial<VibeOrmConfig>;
    }
  } catch {
    // Ignore import errors
  }
  return {};
}

export async function resolveConfig(params: { flags?: Record<string, string> }): Promise<VibeOrmConfig> {
  const { flags = {} } = params;

  // 1. Start with defaults
  const config: VibeOrmConfig = { ...DEFAULTS };

  // 2. Merge package.json config
  const pkgConfig = loadPackageJsonConfig();
  if (pkgConfig.schema) config.schema = pkgConfig.schema;
  if (pkgConfig.output) config.output = pkgConfig.output;
  if (pkgConfig.migrations) config.migrations = pkgConfig.migrations;
  if (pkgConfig.seed) config.seed = pkgConfig.seed;

  // 3. Merge vibeorm.config.ts
  const fileConfig = await loadConfigFile();
  if (fileConfig.schema) config.schema = fileConfig.schema;
  if (fileConfig.output) config.output = fileConfig.output;
  if (fileConfig.migrations) config.migrations = fileConfig.migrations;
  if (fileConfig.seed) config.seed = fileConfig.seed;

  // 4. Merge CLI flags (highest priority)
  if (flags.schema) config.schema = flags.schema;
  if (flags.output) config.output = flags.output;
  if (flags.migrations) config.migrations = flags.migrations;
  if (flags.seed) config.seed = flags.seed;

  // Resolve all paths relative to cwd
  return {
    schema: resolve(process.cwd(), config.schema),
    output: resolve(process.cwd(), config.output),
    migrations: resolve(process.cwd(), config.migrations),
    seed: config.seed ? resolve(process.cwd(), config.seed) : undefined,
  };
}
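config.ts merges settings in the order defaults, then the package.json "vibeorm" key, then vibeorm.config.ts, then CLI flags, and parseFlags turns `--flag value` into a string entry while a bare `--flag` becomes the string "true". A small illustrative sketch of that behaviour, with placeholder paths:

```bash
# Even if package.json or vibeorm.config.ts sets "migrations": "./migrations",
# the flag below wins because CLI flags are merged last:
bunx vibeorm migrate apply --migrations ./db/migrations --dry-run
# parseFlags produces: { migrations: "./db/migrations", "dry-run": "true" }
```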
package/src/connection.ts
ADDED
@@ -0,0 +1,48 @@
/**
 * Database connection helper for CLI commands.
 *
 * Creates a database adapter and wraps it in an SqlExecutor
 * compatible with @vibeorm/migrate modules.
 */

import type { SqlExecutor } from "@vibeorm/migrate";
import { bunAdapter } from "@vibeorm/adapter-bun";
import { toSqlExecutor } from "@vibeorm/runtime";
import type { DatabaseAdapter } from "@vibeorm/runtime";

export type Connection = {
  executor: SqlExecutor;
  /** Execute multiple SQL statements in a transaction */
  executeInTransaction: (params: { statements: string[] }) => Promise<void>;
  close: () => Promise<void>;
};

export function createConnection(params: { url?: string }): Connection {
  const { url } = params;
  const dbUrl = url ?? process.env.DATABASE_URL;

  if (!dbUrl) {
    console.error("Error: No database URL provided.");
    console.error("Set DATABASE_URL environment variable or use --url flag.");
    process.exit(1);
  }

  const adapter = bunAdapter({ url: dbUrl });
  const executor = toSqlExecutor({ adapter });

  const executeInTransaction = async (txParams: { statements: string[] }) => {
    await adapter.transaction(async (txAdapter) => {
      for (const stmt of txParams.statements) {
        if (stmt.trim()) {
          await txAdapter.executeUnsafe({ text: stmt });
        }
      }
    });
  };

  return {
    executor,
    executeInTransaction,
    close: () => adapter.disconnect(),
  };
}
package/src/index.ts
ADDED
@@ -0,0 +1,110 @@
#!/usr/bin/env bun

/**
 * VibeORM CLI
 *
 * Usage:
 *   vibeorm generate                                             Parse .prisma → generate TypeScript client
 *   vibeorm db push [--force] [--accept-data-loss] [--dry-run]   Push schema directly to DB
 *   vibeorm db pull [--print]                                    Introspect DB → generate .prisma schema
 *   vibeorm db reset [--force] [--skip-seed]                     Drop all + re-apply migrations + seed
 *   vibeorm db seed                                              Run seed script
 *   vibeorm db execute --file <path>                             Execute raw .sql file
 *   vibeorm migrate generate [--name <n>] [--dry-run]            Generate migration from schema diff
 *   vibeorm migrate apply [--dry-run]                            Apply pending migrations
 *   vibeorm migrate status                                       Show migration state
 *   vibeorm migrate resolve --applied <name> | --rolled-back <name>   Repair migration state
 */

import { resolveConfig, parseFlags } from "./config.ts";
import type { VibeOrmConfig } from "./config.ts";

import { runGenerate } from "./commands/generate.ts";
import { runDbPush } from "./commands/db-push.ts";
import { runDbPull } from "./commands/db-pull.ts";
import { runDbReset } from "./commands/db-reset.ts";
import { runDbSeed } from "./commands/db-seed.ts";
import { runDbExecute } from "./commands/db-execute.ts";
import { runMigrateGenerate } from "./commands/migrate-generate.ts";
import { runMigrateApply } from "./commands/migrate-apply.ts";
import { runMigrateStatus } from "./commands/migrate-status.ts";
import { runMigrateResolve } from "./commands/migrate-resolve.ts";

// ─── Command Router ───────────────────────────────────────────────

const COMMANDS: Record<string, (params: { config: VibeOrmConfig; flags: Record<string, string> }) => Promise<void>> = {
  "generate": runGenerate,
  "db push": runDbPush,
  "db pull": runDbPull,
  "db reset": runDbReset,
  "db seed": runDbSeed,
  "db execute": runDbExecute,
  "migrate generate": runMigrateGenerate,
  "migrate apply": runMigrateApply,
  "migrate status": runMigrateStatus,
  "migrate resolve": runMigrateResolve,
};

function printHelp(): void {
  console.log("VibeORM CLI");
  console.log("");
  console.log("Usage: vibeorm <command> [options]");
  console.log("");
  console.log("Commands:");
  console.log("  generate                                             Parse .prisma schema and generate TypeScript client");
  console.log("  db push [--force] [--accept-data-loss] [--dry-run]   Push schema directly to database (prototyping)");
  console.log("  db pull [--print]                                    Introspect database and generate .prisma schema");
  console.log("  db reset [--force] [--skip-seed]                     Drop all tables, re-apply migrations, and seed");
  console.log("  db seed                                              Run the configured seed script");
  console.log("  db execute --file <path>                             Execute a raw .sql file against the database");
  console.log("  migrate generate [--name <n>] [--dry-run]            Generate a new migration from schema changes");
  console.log("  migrate apply [--dry-run]                            Apply all pending migrations");
  console.log("  migrate status                                       Show which migrations are applied/pending");
  console.log("  migrate resolve --applied <name> | --rolled-back <name>   Repair migration state");
  console.log("");
  console.log("Options:");
  console.log("  --schema <path>       Path to .prisma schema file");
  console.log("  --output <path>       Path to generated client output directory");
  console.log("  --migrations <path>   Path to migrations directory");
  console.log("  --seed <path>         Path to seed script");
  console.log("  --url <url>           Database connection URL");
  console.log("  --help                Show this help message");
}

async function main(): Promise<void> {
  const args = process.argv.slice(2);

  if (args.length === 0 || args[0] === "--help" || args[0] === "-h") {
    printHelp();
    process.exit(0);
  }

  // Determine command: try two-word commands first, then single-word
  let command: string | undefined;
  let flagArgs: string[];

  const twoWord = `${args[0]} ${args[1]}`;
  if (COMMANDS[twoWord]) {
    command = twoWord;
    flagArgs = args.slice(2);
  } else if (COMMANDS[args[0]!]) {
    command = args[0]!;
    flagArgs = args.slice(1);
  } else {
    console.error(`Unknown command: ${args[0]}${args[1] ? " " + args[1] : ""}`);
    console.error("");
    printHelp();
    process.exit(1);
  }

  const flags = parseFlags({ args: flagArgs });
  const config = await resolveConfig({ flags });

  const handler = COMMANDS[command]!;
  await handler({ config, flags });
}

main().catch((err) => {
  console.error("Fatal error:", err.message ?? err);
  process.exit(1);
});
package/src/schema-loader.ts
ADDED
@@ -0,0 +1,50 @@
/**
 * Schema Loader — supports both single .prisma files and directories
 * containing multiple .prisma files.
 *
 * If the path is a file, reads it directly.
 * If the path is a directory, reads all *.prisma files within it
 * (sorted alphabetically) and concatenates them.
 */

import { existsSync, readFileSync, readdirSync, statSync } from "fs";
import { join } from "path";

export function loadSchemaSource(params: { schemaPath: string }): string {
  const { schemaPath } = params;

  if (!existsSync(schemaPath)) {
    console.error(`Error: Schema path not found at ${schemaPath}`);
    console.error("Create a schema.prisma file or specify --schema <path>");
    process.exit(1);
  }

  const stat = statSync(schemaPath);

  if (stat.isFile()) {
    return readFileSync(schemaPath, "utf-8");
  }

  if (stat.isDirectory()) {
    const files = readdirSync(schemaPath)
      .filter((f: string) => f.endsWith(".prisma"))
      .sort();

    if (files.length === 0) {
      console.error(`Error: No .prisma files found in ${schemaPath}`);
      process.exit(1);
    }

    const sources: string[] = [];
    for (const file of files) {
      const filePath = join(schemaPath, file);
      sources.push(`// --- ${file} ---\n` + readFileSync(filePath, "utf-8"));
    }

    console.log(`  Loaded ${files.length} schema files from ${schemaPath}`);
    return sources.join("\n\n");
  }

  console.error(`Error: ${schemaPath} is neither a file nor a directory`);
  process.exit(1);
}