@prisma-next/cli 0.5.0-dev.4 → 0.5.0-dev.41
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +56 -21
- package/dist/agent-skill-mongo.md +63 -31
- package/dist/agent-skill-postgres.md +1 -1
- package/dist/cli-errors-By1iVE3z.mjs +34 -0
- package/dist/cli-errors-By1iVE3z.mjs.map +1 -0
- package/dist/{cli-errors-C0JhVj0c.d.mts → cli-errors-DDeVsP2Y.d.mts} +1 -0
- package/dist/cli.mjs +123 -15
- package/dist/cli.mjs.map +1 -1
- package/dist/{client-TG7rbCWT.mjs → client-1JqqkiC7.mjs} +45 -20
- package/dist/client-1JqqkiC7.mjs.map +1 -0
- package/dist/commands/contract-emit.d.mts.map +1 -1
- package/dist/commands/contract-emit.mjs +2 -2
- package/dist/commands/contract-infer.d.mts.map +1 -1
- package/dist/commands/contract-infer.mjs +2 -2
- package/dist/commands/db-init.d.mts.map +1 -1
- package/dist/commands/db-init.mjs +10 -9
- package/dist/commands/db-init.mjs.map +1 -1
- package/dist/commands/db-schema.mjs +5 -5
- package/dist/commands/db-sign.mjs +7 -7
- package/dist/commands/db-update.mjs +9 -9
- package/dist/commands/db-update.mjs.map +1 -1
- package/dist/commands/db-verify.mjs +9 -9
- package/dist/commands/migration-apply.d.mts +5 -2
- package/dist/commands/migration-apply.d.mts.map +1 -1
- package/dist/commands/migration-apply.mjs +55 -56
- package/dist/commands/migration-apply.mjs.map +1 -1
- package/dist/commands/migration-new.d.mts.map +1 -1
- package/dist/commands/migration-new.mjs +26 -32
- package/dist/commands/migration-new.mjs.map +1 -1
- package/dist/commands/migration-plan.d.mts +14 -5
- package/dist/commands/migration-plan.d.mts.map +1 -1
- package/dist/commands/migration-plan.mjs +45 -48
- package/dist/commands/migration-plan.mjs.map +1 -1
- package/dist/commands/migration-ref.d.mts +1 -1
- package/dist/commands/migration-ref.d.mts.map +1 -1
- package/dist/commands/migration-ref.mjs +6 -10
- package/dist/commands/migration-ref.mjs.map +1 -1
- package/dist/commands/migration-show.d.mts +13 -7
- package/dist/commands/migration-show.d.mts.map +1 -1
- package/dist/commands/migration-show.mjs +27 -29
- package/dist/commands/migration-show.mjs.map +1 -1
- package/dist/commands/migration-status.d.mts +23 -5
- package/dist/commands/migration-status.d.mts.map +1 -1
- package/dist/commands/migration-status.mjs +3 -3
- package/dist/{config-loader-_W4T21X1.mjs → config-loader-ih8ViDb_.mjs} +2 -2
- package/dist/config-loader-ih8ViDb_.mjs.map +1 -0
- package/dist/config-loader.mjs +1 -1
- package/dist/contract-emit-LjzCoicC.mjs +4 -0
- package/dist/contract-emit-RZBWzkop.mjs +329 -0
- package/dist/contract-emit-RZBWzkop.mjs.map +1 -0
- package/dist/contract-emit-rt_Nmdwq.mjs +150 -0
- package/dist/contract-emit-rt_Nmdwq.mjs.map +1 -0
- package/dist/{contract-enrichment-CGW6mm-E.mjs → contract-enrichment-4Ptgw3Pe.mjs} +1 -1
- package/dist/{contract-enrichment-CGW6mm-E.mjs.map → contract-enrichment-4Ptgw3Pe.mjs.map} +1 -1
- package/dist/{contract-infer-BS4kIX9c.mjs → contract-infer-Cf5J2wVg.mjs} +11 -19
- package/dist/contract-infer-Cf5J2wVg.mjs.map +1 -0
- package/dist/exports/control-api.d.mts +86 -21
- package/dist/exports/control-api.d.mts.map +1 -1
- package/dist/exports/control-api.mjs +5 -5
- package/dist/exports/index.mjs +3 -3
- package/dist/exports/init-output.d.mts +39 -0
- package/dist/exports/init-output.d.mts.map +1 -0
- package/dist/exports/init-output.mjs +3 -0
- package/dist/{framework-components-DfZKQBQ2.mjs → framework-components-Bgcre3Z6.mjs} +2 -2
- package/dist/{framework-components-DfZKQBQ2.mjs.map → framework-components-Bgcre3Z6.mjs.map} +1 -1
- package/dist/init-C7dE9KOJ.mjs +2062 -0
- package/dist/init-C7dE9KOJ.mjs.map +1 -0
- package/dist/{inspect-live-schema-BsoFVoS1.mjs → inspect-live-schema-LWtXfxm_.mjs} +9 -9
- package/dist/inspect-live-schema-LWtXfxm_.mjs.map +1 -0
- package/dist/migration-cli.d.mts +41 -11
- package/dist/migration-cli.d.mts.map +1 -1
- package/dist/migration-cli.mjs +308 -84
- package/dist/migration-cli.mjs.map +1 -1
- package/dist/{migration-command-scaffold-DOXnheFa.mjs → migration-command-scaffold-CU452v9h.mjs} +7 -7
- package/dist/{migration-command-scaffold-DOXnheFa.mjs.map → migration-command-scaffold-CU452v9h.mjs.map} +1 -1
- package/dist/{migration-status-Ry3TnEya.mjs → migration-status-DoPrFIOQ.mjs} +114 -57
- package/dist/migration-status-DoPrFIOQ.mjs.map +1 -0
- package/dist/{migrations-fU0xoKjS.mjs → migrations-MEoKMiV5.mjs} +42 -21
- package/dist/migrations-MEoKMiV5.mjs.map +1 -0
- package/dist/output-BpcQrnnq.mjs +103 -0
- package/dist/output-BpcQrnnq.mjs.map +1 -0
- package/dist/{progress-adapter-B-YvmcDu.mjs → progress-adapter-DgRGldpT.mjs} +1 -1
- package/dist/{progress-adapter-B-YvmcDu.mjs.map → progress-adapter-DgRGldpT.mjs.map} +1 -1
- package/dist/quick-reference-mongo.md +34 -13
- package/dist/quick-reference-postgres.md +11 -9
- package/dist/{result-handler-BJwA7ufw.mjs → result-handler-Ch6hVnOo.mjs} +35 -93
- package/dist/result-handler-Ch6hVnOo.mjs.map +1 -0
- package/dist/{terminal-ui-C5k88MmW.mjs → terminal-ui-u2YgKghu.mjs} +76 -2
- package/dist/terminal-ui-u2YgKghu.mjs.map +1 -0
- package/dist/{verify-bl__PkXk.mjs → verify-BT9tgCOH.mjs} +2 -2
- package/dist/{verify-bl__PkXk.mjs.map → verify-BT9tgCOH.mjs.map} +1 -1
- package/package.json +22 -16
- package/src/cli.ts +32 -6
- package/src/commands/contract-emit.ts +67 -163
- package/src/commands/contract-infer.ts +7 -20
- package/src/commands/db-init.ts +1 -0
- package/src/commands/db-update.ts +1 -1
- package/src/commands/init/detect-pnpm-catalog.ts +141 -0
- package/src/commands/init/errors.ts +254 -0
- package/src/commands/init/exit-codes.ts +62 -0
- package/src/commands/init/hygiene-gitattributes.ts +97 -0
- package/src/commands/init/hygiene-gitignore.ts +48 -0
- package/src/commands/init/hygiene-package-scripts.ts +91 -0
- package/src/commands/init/index.ts +112 -7
- package/src/commands/init/init.ts +766 -144
- package/src/commands/init/inputs.ts +421 -0
- package/src/commands/init/output.ts +147 -0
- package/src/commands/init/probe-db.ts +308 -0
- package/src/commands/init/reinit-cleanup.ts +83 -0
- package/src/commands/init/templates/agent-skill-mongo.md +63 -31
- package/src/commands/init/templates/agent-skill-postgres.md +1 -1
- package/src/commands/init/templates/agent-skill.ts +25 -3
- package/src/commands/init/templates/code-templates.ts +125 -32
- package/src/commands/init/templates/env.ts +80 -0
- package/src/commands/init/templates/quick-reference-mongo.md +34 -13
- package/src/commands/init/templates/quick-reference-postgres.md +11 -9
- package/src/commands/init/templates/quick-reference.ts +42 -3
- package/src/commands/init/templates/tsconfig.ts +167 -5
- package/src/commands/inspect-live-schema.ts +10 -5
- package/src/commands/migration-apply.ts +84 -63
- package/src/commands/migration-new.ts +28 -34
- package/src/commands/migration-plan.ts +80 -56
- package/src/commands/migration-ref.ts +8 -7
- package/src/commands/migration-show.ts +53 -36
- package/src/commands/migration-status.ts +194 -58
- package/src/config-path-validation.ts +0 -1
- package/src/control-api/client.ts +21 -0
- package/src/control-api/operations/contract-emit.ts +198 -115
- package/src/control-api/operations/db-init.ts +10 -6
- package/src/control-api/operations/db-update.ts +10 -6
- package/src/control-api/operations/migration-apply.ts +30 -9
- package/src/control-api/types.ts +69 -7
- package/src/exports/control-api.ts +2 -1
- package/src/exports/init-output.ts +10 -0
- package/src/migration-cli.ts +445 -122
- package/src/utils/cli-errors.ts +49 -2
- package/src/utils/command-helpers.ts +45 -23
- package/src/utils/emit-queue.ts +26 -0
- package/src/utils/formatters/graph-migration-mapper.ts +7 -3
- package/src/utils/formatters/migrations.ts +62 -26
- package/src/utils/publish-contract-artifact-pair.ts +134 -0
- package/dist/cli-errors-DHq6GQGu.mjs +0 -5
- package/dist/client-TG7rbCWT.mjs.map +0 -1
- package/dist/config-loader-_W4T21X1.mjs.map +0 -1
- package/dist/contract-emit-CQfj7xJn.mjs +0 -122
- package/dist/contract-emit-CQfj7xJn.mjs.map +0 -1
- package/dist/contract-emit-DpPjuFy-.mjs +0 -195
- package/dist/contract-emit-DpPjuFy-.mjs.map +0 -1
- package/dist/contract-emit-fhNwwhkQ.mjs +0 -4
- package/dist/contract-infer-BS4kIX9c.mjs.map +0 -1
- package/dist/extract-operation-statements-DZUJNmL3.mjs +0 -13
- package/dist/extract-operation-statements-DZUJNmL3.mjs.map +0 -1
- package/dist/extract-sql-ddl-DDMX-9mz.mjs +0 -26
- package/dist/extract-sql-ddl-DDMX-9mz.mjs.map +0 -1
- package/dist/init-CQfo_4Ro.mjs +0 -430
- package/dist/init-CQfo_4Ro.mjs.map +0 -1
- package/dist/inspect-live-schema-BsoFVoS1.mjs.map +0 -1
- package/dist/migration-status-Ry3TnEya.mjs.map +0 -1
- package/dist/migrations-fU0xoKjS.mjs.map +0 -1
- package/dist/result-handler-BJwA7ufw.mjs.map +0 -1
- package/dist/terminal-ui-C5k88MmW.mjs.map +0 -1
- package/dist/validate-contract-deps-esa-VQ0h.mjs +0 -37
- package/dist/validate-contract-deps-esa-VQ0h.mjs.map +0 -1
- package/src/control-api/operations/extract-operation-statements.ts +0 -14
- package/src/control-api/operations/extract-sql-ddl.ts +0 -47
|
@@ -0,0 +1,2062 @@
|
|
|
1
|
+
import { t as CliStructuredError } from "./cli-errors-By1iVE3z.mjs";
|
|
2
|
+
import { i as formatErrorOutput, r as formatErrorJson, t as TerminalUI } from "./terminal-ui-u2YgKghu.mjs";
|
|
3
|
+
import { a as INIT_EXIT_PRECONDITION, i as INIT_EXIT_OK, n as INIT_EXIT_INSTALL_FAILED, o as INIT_EXIT_USER_ABORTED, r as INIT_EXIT_INTERNAL_ERROR, t as INIT_EXIT_EMIT_FAILED } from "./cli.mjs";
|
|
4
|
+
import { i as renderInitOutro, n as buildNextSteps, r as formatInitJson, t as InitOutputSchema } from "./output-BpcQrnnq.mjs";
|
|
5
|
+
import { createRequire } from "node:module";
|
|
6
|
+
import { dirname, extname, isAbsolute, join, normalize } from "pathe";
|
|
7
|
+
import * as clack from "@clack/prompts";
|
|
8
|
+
import { existsSync, mkdirSync, readFileSync, unlinkSync, writeFileSync } from "node:fs";
|
|
9
|
+
import { execFile } from "node:child_process";
|
|
10
|
+
import { promisify } from "node:util";
|
|
11
|
+
import { detect } from "package-manager-detector/detect";
|
|
12
|
+
import { applyEdits, modify, parse, printParseErrorCode } from "jsonc-parser";
|
|
13
|
+
|
|
14
|
+
//#region src/commands/init/detect-package-manager.ts
/** Package managers `init` knows how to drive. Anything outside this set falls back to npm. */
const KNOWN = new Set([
  "pnpm",
  "npm",
  "yarn",
  "bun",
  "deno"
]);
/**
 * Detects the package manager governing `cwd` via `package-manager-detector`.
 * Returns the detected name when it is one of the supported managers, and
 * `"npm"` when detection fails or reports an unsupported manager.
 */
async function detectPackageManager(cwd) {
  const detected = await detect({ cwd });
  if (detected == null) return "npm";
  return KNOWN.has(detected.name) ? detected.name : "npm";
}
|
|
27
|
+
/**
 * True when `cwd` contains a recognised project manifest: `package.json`,
 * `deno.json`, or `deno.jsonc`.
 */
function hasProjectManifest(cwd) {
  const manifests = ["package.json", "deno.json", "deno.jsonc"];
  return manifests.some((file) => existsSync(join(cwd, file)));
}
|
|
30
|
+
/**
 * Renders the user-facing "run this binary" command for the given package
 * manager: `npx` for npm, `deno run npm:` for deno, `<pm> <bin>` otherwise.
 */
function formatRunCommand(pm, bin, args) {
  switch (pm) {
    case "npm":
      return `npx ${bin} ${args}`;
    case "deno":
      return `deno run npm:${bin} ${args}`;
    default:
      return `${pm} ${bin} ${args}`;
  }
}
|
|
35
|
+
/**
 * Argv for installing runtime dependencies. Deno requires the `npm:` specifier
 * prefix on each package; every other manager takes the bare name.
 */
function formatAddArgs(pm, packages) {
  const specs = pm === "deno" ? packages.map((pkg) => `npm:${pkg}`) : packages;
  return ["add", ...specs];
}
|
|
39
|
+
/**
 * Argv for installing dev dependencies. Deno spells the dev flag `--dev` and
 * needs `npm:`-prefixed specifiers; every other manager takes `-D` and bare
 * package names.
 */
function formatAddDevArgs(pm, packages) {
  if (pm === "deno") {
    return ["add", "--dev", ...packages.map((pkg) => `npm:${pkg}`)];
  }
  return ["add", "-D", ...packages];
}
|
|
51
|
+
|
|
52
|
+
//#endregion
|
|
53
|
+
//#region src/commands/init/detect-pnpm-catalog.ts
/**
 * Walks up from `baseDir` to the nearest `pnpm-workspace.yaml` and reports
 * which of `packages` have an entry in its top-level `catalog:` block
 * (FR7.3 / Spec Decision 8, honour-and-warn): pnpm silently installs the
 * catalog version instead of the published `latest`, so `init` surfaces a
 * structured warning rather than letting the override go unnoticed.
 *
 * Scope notes:
 * - Only the unnamed top-level `catalog:` block is inspected. Named
 *   `catalogs:` (plural, referenced via `catalog:foo` specifiers) do not
 *   apply to a plain `pnpm add prisma-next`, so they are skipped.
 * - YAML is not parsed exhaustively; the pnpm file format is line-oriented
 *   and a minimal scanner is more robust than a YAML dependency for one
 *   warning (see `extractCatalogBlock`).
 * - No registry comparison is made — pnpm uses the catalog version
 *   unconditionally, so a match alone is enough to warn.
 *
 * @returns `null` when no workspace file exists; otherwise
 *   `{ workspaceFile, entries }` where `entries` lists the matched
 *   `{ name, version }` pairs (possibly empty).
 */
function detectPnpmCatalogOverrides(baseDir, packages) {
  const workspaceFile = findNearestPnpmWorkspaceFile(baseDir);
  if (workspaceFile === null) return null;
  const catalog = extractCatalogBlock(readFileSync(workspaceFile, "utf-8"));
  const wanted = new Set(packages);
  const entries = catalog === null
    ? []
    : catalog
        .filter(([name]) => wanted.has(name))
        .map(([name, version]) => ({ name, version }));
  return { workspaceFile, entries };
}
|
|
97
|
+
/**
 * Ascends from `baseDir` toward the filesystem root and returns the path of
 * the first `pnpm-workspace.yaml` found, or `null` when none exists. The
 * root directory itself is checked (the walk stops once `dirname` is a
 * fixed point).
 */
function findNearestPnpmWorkspaceFile(baseDir) {
  let current = baseDir;
  for (;;) {
    const candidate = join(current, "pnpm-workspace.yaml");
    if (existsSync(candidate)) return candidate;
    const parent = dirname(current);
    if (parent === current) return null;
    current = parent;
  }
}
|
|
108
|
+
/**
 * Returns the entries of the top-level `catalog:` block as `[name, version]`
 * pairs in document order, or `null` when no such block exists.
 *
 * Intentionally minimal line scanner: find the unindented `catalog:` line,
 * then collect each indented `<key>: <value>` line until the next top-level
 * key or end of file. Blank lines and `#` comment lines are skipped; quotes
 * around keys and values are stripped; entries with an empty value are
 * dropped.
 */
function extractCatalogBlock(contents) {
  const lines = contents.split(/\r?\n/);
  const start = lines.findIndex((line) => /^catalog\s*:\s*$/.test(line));
  if (start === -1) return null;
  const entries = [];
  for (const raw of lines.slice(start + 1)) {
    if (raw.trim() === "" || /^\s*#/.test(raw)) continue;
    // An unindented line begins the next top-level key: the block is over.
    if (!/^\s/.test(raw)) break;
    const match = /^\s+(?:'([^']+)'|"([^"]+)"|([^:\s'"]+))\s*:\s*(.*?)\s*(?:#.*)?$/.exec(raw);
    if (match === null) continue;
    const name = match[1] ?? match[2] ?? match[3];
    if (name === undefined) continue;
    const version = stripQuotes((match[4] ?? "").trim());
    if (version !== "") entries.push([name, version]);
  }
  return entries;
}
/** Removes one matching pair of surrounding single or double quotes, if present. */
function stripQuotes(value) {
  if (value.length < 2) return value;
  const doubleQuoted = value.startsWith("\"") && value.endsWith("\"");
  const singleQuoted = value.startsWith("'") && value.endsWith("'");
  return doubleQuoted || singleQuoted ? value.slice(1, -1) : value;
}
|
|
145
|
+
|
|
146
|
+
//#endregion
|
|
147
|
+
//#region src/commands/init/errors.ts
/**
 * No `package.json` / `deno.json` / `deno.jsonc` in the target directory.
 * `init` attaches to an existing project and cannot bootstrap a bare
 * directory (NG1 — tracked separately); the remedy is running `npm init`
 * (or the manager's equivalent) first.
 */
function errorInitMissingManifest() {
  const details = {
    domain: "CLI",
    why: "No package.json or deno.json found in the target directory. `prisma-next init` requires an existing project to attach to.",
    fix: "Initialize your project first (e.g. `npm init -y` or `deno init`), then re-run `prisma-next init`.",
    docsUrl: "https://prisma-next.dev/docs/cli/init"
  };
  return new CliStructuredError("5001", "No project manifest found", details);
}
|
|
163
|
+
/**
 * Re-init attempted in non-interactive mode without `--force`. Distinct from
 * `errorInitUserAborted`: there the user declined a prompt; here no prompt
 * was ever offered — `--force` is the non-interactive contract.
 */
function errorInitReinitNeedsForce() {
  const details = {
    domain: "CLI",
    why: "A `prisma-next.config.ts` already exists in this directory. Re-running `init` would overwrite the scaffolded files; in non-interactive mode `init` will not do that without `--force`.",
    fix: "Pass `--force` to overwrite the existing scaffold, or run `init` interactively to confirm.",
    docsUrl: "https://prisma-next.dev/docs/cli/init"
  };
  return new CliStructuredError("5002", "Project is already initialized", details);
}
|
|
176
|
+
/**
 * Non-interactive mode is missing required inputs. Every missing flag is
 * listed both in the message and in `meta.missingFlags` so agents / CI can
 * react without parsing prose.
 *
 * @param options.missing - kebab-case flag names, no leading dashes
 * @param options.why - context for why interactive fallback was skipped
 *   (e.g. "stdin is not a TTY")
 */
function errorInitMissingFlags(options) {
  // Example value syntax per known flag; unknown flags get a generic placeholder.
  const exampleFor = new Map([
    ["target", "--target postgres|mongodb"],
    ["authoring", "--authoring psl|typescript"],
    ["schema-path", "--schema-path <path>"]
  ]);
  const flagList = options.missing.map((flag) => `--${flag}`).join(", ");
  const fixList = options.missing.map((flag) => exampleFor.get(flag) ?? `--${flag} <value>`).join(" ");
  return new CliStructuredError("5003", "Missing required flags", {
    domain: "CLI",
    why: `${options.why} Missing required flag(s): ${flagList}.`,
    fix: `Re-run with the missing flag(s) supplied, e.g. \`prisma-next init --yes ${fixList}\`. Use \`prisma-next init --help\` to see every flag.`,
    docsUrl: "https://prisma-next.dev/docs/cli/init",
    meta: { missingFlags: options.missing }
  });
}
|
|
203
|
+
/**
 * A flag was given a value outside its allowed set. The allowed values are
 * echoed in `meta` for machine-readable consumption.
 */
function errorInitInvalidFlagValue(options) {
  const { flag, value, allowed } = options;
  return new CliStructuredError("5004", `Invalid value for --${flag}`, {
    domain: "CLI",
    why: `\`--${flag} ${value}\` is not one of: ${allowed.join(", ")}.`,
    fix: `Use one of: ${allowed.map((v) => `--${flag} ${v}`).join(", ")}.`,
    docsUrl: "https://prisma-next.dev/docs/cli/init",
    meta: { flag, value, allowed }
  });
}
|
|
220
|
+
/**
 * The user cancelled an interactive prompt (Ctrl-C, escape, declined).
 * The generic "user said no" path — contrast `errorInitReinitNeedsForce`,
 * which covers non-interactive runs where no choice was offered. Severity
 * is informational; maps to exit code 3 (USER_ABORTED).
 */
function errorInitUserAborted() {
  const details = {
    domain: "CLI",
    why: "The interactive prompt was cancelled before all required inputs were supplied. No files were modified.",
    fix: "Re-run `prisma-next init` and complete the prompts, or pass the required inputs as flags (see `--help`) for a non-interactive run.",
    severity: "info"
  };
  return new CliStructuredError("5006", "Init cancelled", details);
}
|
|
235
|
+
/**
 * `--strict-probe` without `--probe-db`. Per FR8.3 / NFR9 the combination
 * would otherwise silently do nothing — no probe runs unless `--probe-db`
 * opts in — which is exactly the "looks like it worked" trap a strict mode
 * exists to prevent, so the contradiction is reported instead of ignored.
 */
function errorInitStrictProbeWithoutProbe() {
  const details = {
    domain: "CLI",
    why: "`--strict-probe` only changes how a *failed* probe is reported; without `--probe-db` no probe is attempted in the first place. (`init` is offline-by-default — it never opens a connection to your database without explicit consent.)",
    fix: "Add `--probe-db` to opt in to the probe, or drop `--strict-probe` if you do not need the version check.",
    docsUrl: "https://prisma-next.dev/docs/cli/init"
  };
  return new CliStructuredError("5005", "`--strict-probe` requires `--probe-db`", details);
}
|
|
251
|
+
/**
 * Dependency installation failed and the pnpm → npm fallback (FR7.2) either
 * did not apply or also failed. Files written before the install step remain
 * on disk; `meta.filesWritten` carries the list so a follow-up agent can
 * resume manually. Maps to exit code `4 = INSTALL_FAILED`.
 *
 * @param options.stderrLines - raw stderr lines; the first non-empty trimmed
 *   line is surfaced in `why`, the full trimmed list goes into `meta.stderr`
 */
function errorInitInstallFailed(options) {
  const stderr = options.stderrLines.map((line) => line.trim()).filter((line) => line.length > 0);
  const why = stderr.length === 0
    ? "The package manager exited with an error and no recoverable fallback applied."
    : `The package manager exited with: ${stderr[0]}`;
  return new CliStructuredError("5007", "Failed to install dependencies", {
    domain: "CLI",
    why,
    fix: `Install manually:\n ${options.addCommand}\n ${options.addDevCommand}\nThen run \`${options.emitCommand}\` to emit the contract.`,
    docsUrl: "https://prisma-next.dev/docs/cli/init",
    meta: {
      filesWritten: options.filesWritten,
      stderr
    }
  });
}
|
|
271
|
+
/**
 * The project manifest (typically `package.json`) is not valid JSON. Init
 * reads it to merge `scripts` (FR3.5) and to skip `@types/node` when already
 * declared (FR2.1); without this mapping the failure would surface as an
 * INTERNAL_ERROR with a raw `SyntaxError` stack, violating the FR1.6
 * stable-exit-code contract. Maps to exit code `2 = PRECONDITION`.
 */
function errorInitInvalidManifest(options) {
  const { path, cause } = options;
  return new CliStructuredError("5010", `Failed to parse ${path}`, {
    domain: "CLI",
    why: `\`${path}\` is not valid JSON: ${cause}`,
    fix: `Fix the JSON syntax in \`${path}\` (a missing comma or unbalanced brace is the most common cause), then re-run \`prisma-next init\`.`,
    docsUrl: "https://prisma-next.dev/docs/cli/init",
    meta: { path, cause }
  });
}
|
|
294
|
+
/**
 * The existing `tsconfig.json` could not be parsed even with JSONC tolerance
 * (comments + trailing commas). Init merges the compiler options the
 * scaffold needs (FR2.2), so an unreadable tsconfig is a hard precondition
 * failure — it must be raised *before* any scaffold file is written so the
 * working tree stays byte-identical (FR6.2 / NFR3; see the precondition
 * gate in `runInit`). Maps to exit code `2 = PRECONDITION`.
 */
function errorInitInvalidTsconfig(options) {
  const { path, cause } = options;
  return new CliStructuredError("5011", `Failed to parse ${path}`, {
    domain: "CLI",
    why: `\`${path}\` is not valid JSON or JSONC: ${cause}`,
    fix: `Fix the syntax in \`${path}\` and re-run \`prisma-next init\`. \`init\` accepts JSONC (comments and trailing commas) but cannot recover from unbalanced braces or missing commas.`,
    docsUrl: "https://prisma-next.dev/docs/cli/init",
    meta: { path, cause }
  });
}
|
|
320
|
+
/**
 * `--probe-db --strict-probe` was set and the probe could not complete
 * (missing `DATABASE_URL`, network/auth error, driver not installed, …).
 * Without `--strict-probe` these are warnings; strict mode escalates them so
 * CI can rely on the exit code (FR8.3). Project files are already on disk —
 * the probe runs after the write phase — so `meta.filesWritten` is included
 * for manual resumption. Maps to exit code `2 = PRECONDITION`.
 */
function errorInitProbeFailed(options) {
  return new CliStructuredError("5012", "Database probe failed", {
    domain: "CLI",
    why: `\`--probe-db\` could not complete and \`--strict-probe\` was set: ${options.cause}`,
    fix: "Confirm `DATABASE_URL` points at a reachable server, or drop `--strict-probe` to treat probe failures as warnings.",
    docsUrl: "https://prisma-next.dev/docs/cli/init",
    meta: {
      filesWritten: options.filesWritten,
      cause: options.cause
    }
  });
}
|
|
346
|
+
/**
 * `prisma-next contract emit` failed after a successful install. The
 * underlying cause is surfaced so the user can fix and re-run; scaffolded
 * files and installed dependencies are left untouched. Maps to exit code
 * `5 = EMIT_FAILED`.
 */
function errorInitEmitFailed(options) {
  return new CliStructuredError("5008", "Failed to emit contract", {
    domain: "CLI",
    why: `\`prisma-next contract emit\` failed: ${options.cause}`,
    fix: `Inspect your contract file, fix the underlying issue, then re-run \`${options.emitCommand}\`. Pass \`-v\` for the full error envelope.`,
    docsUrl: "https://prisma-next.dev/docs/cli/contract-emit",
    meta: {
      filesWritten: options.filesWritten,
      cause: options.cause
    }
  });
}
|
|
364
|
+
|
|
365
|
+
//#endregion
|
|
366
|
+
//#region src/commands/init/hygiene-gitattributes.ts
/**
 * Artefact filenames marked `linguist-generated` in a fresh scaffold (FR3.4).
 *
 * - `contract.json` / `contract.d.ts` are emitted on every
 *   `prisma-next contract emit` today; marking them generated keeps GitHub's
 *   diff stats honest and collapses them in review.
 * - `end-contract.*`, `start-contract.*`, `ops.json`, `migration.json` are
 *   forward-looking (Decision 5): produced by adjacent lower / migration
 *   tooling rather than `init` itself, listed now so this file never needs
 *   amending when those artefacts land.
 */
const ARTEFACT_FILENAMES$1 = [
  "contract.json",
  "contract.d.ts",
  "end-contract.json",
  "end-contract.d.ts",
  "start-contract.json",
  "start-contract.d.ts",
  "ops.json",
  "migration.json"
];
const ATTRIBUTE = "linguist-generated";
/**
 * The `.gitattributes` lines this scaffold owns, each shaped
 * `<path> linguist-generated`. Paths are relative to the schema directory so
 * a custom `--schema-path` yields e.g. `db/contract.json linguist-generated`
 * rather than a glob (which would over-match) or a hard-coded `prisma/`
 * prefix (which would break for non-default paths). `_target` is unused but
 * kept for signature symmetry with the other hygiene helpers and to leave
 * room for target-specific entries without a break.
 */
function requiredGitattributesLines(schemaDir, _target) {
  const normalized = schemaDir === "." ? "" : schemaDir.replace(/\/+$/, "");
  const prefix = normalized === "" ? "" : `${normalized}/`;
  return ARTEFACT_FILENAMES$1.map((file) => `${prefix}${file} ${ATTRIBUTE}`);
}
|
|
413
|
+
/**
|
|
414
|
+
* Idempotent `.gitattributes` merge (FR3.4 / FR9.3). Returns the new file
|
|
415
|
+
* content given the existing content (or `undefined` if the file does
|
|
416
|
+
* not yet exist).
|
|
417
|
+
*
|
|
418
|
+
* Equivalence is exact-line: a user-customised line like
|
|
419
|
+
* `prisma/*.json linguist-generated` is *not* recognised as covering
|
|
420
|
+
* `prisma/contract.json linguist-generated`. We accept that
|
|
421
|
+
* over-specification — preserving the user's broad pattern *and*
|
|
422
|
+
* appending the narrow one — because the narrow lines are what the
|
|
423
|
+
* acceptance criteria pin (FR3.4 AC).
|
|
424
|
+
*
|
|
425
|
+
* Returns `null` when no changes are required (file already contains
|
|
426
|
+
* every required entry).
|
|
427
|
+
*/
|
|
428
|
+
/**
 * Idempotent `.gitattributes` merge (FR3.4 / FR9.3). Returns the full new
 * file content, or `null` when every required line is already present.
 * When the file does not yet exist (`existing === undefined`) the required
 * lines become the whole file. Equivalence is exact-line: a broad user
 * pattern like `prisma/*.json linguist-generated` is *not* recognised as
 * covering a specific artefact line — the narrow lines are what the
 * acceptance criteria pin (FR3.4 AC), so over-specification is accepted.
 */
function mergeGitattributes(existing, required) {
	if (existing === void 0) return `${required.join("\n")}\n`;
	const known = new Set();
	for (const rawLine of existing.split("\n")) {
		const line = rawLine.trim();
		// Comments and blank lines never satisfy a required entry.
		if (line.length > 0 && !line.startsWith("#")) known.add(line);
	}
	const toAppend = required.filter((line) => !known.has(line));
	if (toAppend.length === 0) return null;
	// Only insert a separator when the existing content lacks a final newline.
	const joiner = existing.length === 0 || existing.endsWith("\n") ? "" : "\n";
	return `${existing}${joiner}${toAppend.join("\n")}\n`;
}
|
|
435
|
+
|
|
436
|
+
//#endregion
|
|
437
|
+
//#region src/commands/init/hygiene-gitignore.ts
|
|
438
|
+
/**
|
|
439
|
+
* The minimal `.gitignore` lines a Prisma Next scaffold needs (FR3.3).
|
|
440
|
+
* Order matches what Node tooling typically writes today.
|
|
441
|
+
*
|
|
442
|
+
* `node_modules/` first because it's the byte-largest miss; `dist/`
|
|
443
|
+
* because the scaffolded `tsconfig.json` writes there; `.env` last so
|
|
444
|
+
* the secret-bearing file is the one most-recently visible in any diff
|
|
445
|
+
* (a paranoid-correct ordering — humans skim from the top).
|
|
446
|
+
*/
|
|
447
|
+
/**
 * The minimal `.gitignore` lines a Prisma Next scaffold needs (FR3.3):
 * `node_modules/` first (the byte-largest miss), `dist/` because the
 * scaffolded `tsconfig.json` writes there, `.env` last so the
 * secret-bearing entry stays most visible at the bottom of a diff.
 */
const REQUIRED_GITIGNORE_ENTRIES = [
	"node_modules/",
	"dist/",
	".env"
];
/**
 * Idempotent `.gitignore` merge (FR3.3 / FR9.3). Produces the new file
 * content, `null` when nothing is missing, or a fresh file when
 * `existing` is `undefined`. Matching is line-literal: `node_modules`
 * and `node_modules/` are different entries (git treats the trailing
 * slash as directory-only, and the AC pins the trailing-slash form).
 * Existing comments and blank lines are preserved verbatim — any line
 * modification risks changing git's semantics.
 */
function mergeGitignore(existing) {
	if (existing === void 0) return `${REQUIRED_GITIGNORE_ENTRIES.join("\n")}\n`;
	const seen = new Set();
	for (const rawLine of existing.split("\n")) {
		const entry = rawLine.trim();
		// Ignore comments and blanks when deciding what is already covered.
		if (entry.length > 0 && !entry.startsWith("#")) seen.add(entry);
	}
	const toAdd = REQUIRED_GITIGNORE_ENTRIES.filter((entry) => !seen.has(entry));
	if (toAdd.length === 0) return null;
	// Append after the existing content, adding a newline only if needed.
	const glue = existing.length === 0 || existing.endsWith("\n") ? "" : "\n";
	return `${existing}${glue}${toAdd.join("\n")}\n`;
}
|
|
476
|
+
|
|
477
|
+
//#endregion
|
|
478
|
+
//#region src/commands/init/hygiene-package-scripts.ts
|
|
479
|
+
/** Scripts `init` guarantees in `package.json#scripts`. */
const REQUIRED_SCRIPTS = [{
	name: "contract:emit",
	command: "prisma-next contract emit"
}];
/**
 * Idempotent `package.json#scripts` merge with collision detection
 * (FR3.5 / FR9.3):
 *
 * - A missing required script is appended.
 * - An identical existing script leaves the file alone (idempotency).
 * - A script present with a *different* command is kept as-is and a
 *   structured warning is surfaced — `init` never silently overwrites a
 *   custom build pipeline.
 *
 * Existing key order is preserved; new entries append at the end.
 * Returns `{ content, warnings }` where `content` is `null` when no
 * change is needed, otherwise the re-stringified JSON (2-space indent)
 * with the input's trailing-newline behaviour mirrored. Comments are not
 * preserved — package.json does not support them per spec.
 */
function mergePackageScripts(existing, required = REQUIRED_SCRIPTS) {
	const pkg = JSON.parse(existing);
	const hasScriptsObject = typeof pkg["scripts"] === "object" && pkg["scripts"] !== null;
	const scripts = hasScriptsObject ? { ...pkg["scripts"] } : {};
	const warnings = [];
	let changed = false;
	for (const { name, command } of required) {
		const current = scripts[name];
		if (current === void 0) {
			scripts[name] = command;
			changed = true;
		} else if (current !== command) {
			warnings.push(`package.json already has a "${name}" script with a different command — keeping yours.\n existing: ${current}\n expected: ${command}\nIf you want the default, remove your "${name}" script and re-run \`init\`.`);
		}
	}
	if (!changed) return {
		content: null,
		warnings
	};
	pkg["scripts"] = scripts;
	const tail = existing.endsWith("\n") ? "\n" : "";
	return {
		content: `${JSON.stringify(pkg, null, 2)}${tail}`,
		warnings
	};
}
|
|
529
|
+
|
|
530
|
+
//#endregion
|
|
531
|
+
//#region src/commands/init/templates/code-templates.ts
|
|
532
|
+
/** Facade package name for the chosen target database. */
function targetPackageName(target) {
	if (target === "postgres") return "@prisma-next/postgres";
	return "@prisma-next/mongo";
}
|
|
535
|
+
/** Human-readable product label for the chosen target database. */
function targetLabel(target) {
	if (target === "postgres") return "PostgreSQL";
	return "MongoDB";
}
|
|
538
|
+
/** Default schema-file location for the chosen authoring style. */
function defaultSchemaPath(authoring) {
	return authoring === "typescript" ? "prisma/contract.ts" : "prisma/contract.prisma";
}
|
|
542
|
+
/**
 * Full starter schema content for the chosen target/authoring pair,
 * dispatching to the matching template builder.
 */
function starterSchema(target, authoring) {
	const tsVariant = target === "mongo" ? starterSchemaTsMongo : starterSchemaTsPostgres;
	const pslVariant = target === "mongo" ? starterSchemaPslMongo : starterSchemaPslPostgres;
	return authoring === "typescript" ? tsVariant() : pslVariant();
}
|
|
546
|
+
/**
|
|
547
|
+
* Renders a short authoring-appropriate schema sample (FR5.1) for embedding
|
|
548
|
+
* in `prisma-next.md`. Returns a complete fenced markdown code block.
|
|
549
|
+
*
|
|
550
|
+
* The sample intentionally shows just one model: it's illustrative, not
|
|
551
|
+
* a substitute for the full scaffolded contract file. The TS samples use
|
|
552
|
+
* the same outer shape as `starterSchemaTs*` (FR5.3) so a user reading
|
|
553
|
+
* the doc and the file side-by-side sees the same structure.
|
|
554
|
+
*/
|
|
555
|
+
/**
 * Short authoring-appropriate fenced schema sample (FR5.1) for embedding
 * in `prisma-next.md`, dispatching on target and authoring style.
 */
function schemaSample(target, authoring) {
	const useTs = authoring === "typescript";
	if (target === "mongo") return useTs ? schemaSampleTsMongo() : schemaSamplePslMongo();
	return useTs ? schemaSampleTsPostgres() : schemaSamplePslPostgres();
}
|
|
559
|
+
/**
 * Markdown-fenced PSL sample for a Postgres project — one illustrative
 * model, not a full contract.
 * NOTE(review): intra-string column alignment reconstructed from a
 * whitespace-mangled extraction — verify spacing against the shipped file.
 */
function schemaSamplePslPostgres() {
	return `\`\`\`prisma
model User {
  id Int @id @default(autoincrement())
  email String @unique
  name String?
}
\`\`\``;
}
/** Markdown-fenced PSL sample for a Mongo project (mapped `_id` / collection). */
function schemaSamplePslMongo() {
	return `\`\`\`prisma
model User {
  id ObjectId @id @map("_id")
  email String @unique
  name String?
  @@map("users")
}
\`\`\``;
}
/**
 * Markdown-fenced TypeScript sample for a Postgres project. Uses the same
 * outer `defineContract` shape as `starterSchemaTsPostgres` (FR5.3) so the
 * doc and the scaffolded file read side-by-side with the same structure.
 */
function schemaSampleTsPostgres() {
	return `\`\`\`typescript
import sqlFamily from '@prisma-next/family-sql/pack';
import { defineContract } from '@prisma-next/sql-contract-ts/contract-builder';
import postgresPack from '@prisma-next/target-postgres/pack';

export const contract = defineContract(
  { family: sqlFamily, target: postgresPack },
  ({ field, model }) => ({
    models: {
      User: model('User', {
        fields: {
          id: field.id.uuidv7(),
          email: field.text().unique(),
          name: field.text().optional(),
        },
      }),
    },
  }),
);
\`\`\``;
}
/** Markdown-fenced TypeScript sample for a Mongo project (same outer shape as the Mongo starter). */
function schemaSampleTsMongo() {
	return `\`\`\`typescript
import mongoFamily from '@prisma-next/family-mongo/pack';
import { defineContract } from '@prisma-next/mongo-contract-ts/contract-builder';
import mongoTarget from '@prisma-next/target-mongo/pack';

export const contract = defineContract(
  { family: mongoFamily, target: mongoTarget },
  ({ field, model }) => ({
    models: {
      User: model('User', {
        collection: 'users',
        fields: {
          _id: field.objectId(),
          email: field.string(),
          name: field.string().optional(),
        },
      }),
    },
  }),
);
\`\`\``;
}
|
|
623
|
+
/**
 * Full PSL starter schema for Postgres: a User/Post pair with a relation,
 * written to the scaffolded `.prisma` file.
 * NOTE(review): intra-string column alignment reconstructed from a
 * whitespace-mangled extraction — verify spacing against the shipped file.
 */
function starterSchemaPslPostgres() {
	return `model User {
  id Int @id @default(autoincrement())
  email String @unique
  name String?
  posts Post[]
  createdAt DateTime @default(now())
}

model Post {
  id Int @id @default(autoincrement())
  title String
  content String?
  author User @relation(fields: [authorId], references: [id])
  authorId Int
  createdAt DateTime @default(now())
}
`;
}
/** Full PSL starter schema for Mongo: mapped `_id` fields and `@@map`ped collections. */
function starterSchemaPslMongo() {
	return `model User {
  id ObjectId @id @map("_id")
  email String @unique
  name String?
  posts Post[]
  @@map("users")
}

model Post {
  id ObjectId @id @map("_id")
  title String
  content String?
  author User @relation(fields: [authorId], references: [id])
  authorId ObjectId
  @@map("posts")
}
`;
}
/**
 * Full TypeScript starter contract for Postgres: User/Post with a
 * hasMany/belongsTo relation pair, written to the scaffolded `.ts` file.
 */
function starterSchemaTsPostgres() {
	return `import sqlFamily from '@prisma-next/family-sql/pack';
import { defineContract } from '@prisma-next/sql-contract-ts/contract-builder';
import postgresPack from '@prisma-next/target-postgres/pack';

export const contract = defineContract(
  { family: sqlFamily, target: postgresPack },
  ({ field, model, rel }) => ({
    models: {
      User: model('User', {
        fields: {
          id: field.id.uuidv7(),
          email: field.text().unique(),
          name: field.text().optional(),
          createdAt: field.createdAt(),
        },
        relations: {
          posts: rel.hasMany('Post', { by: 'authorId' }),
        },
      }),

      Post: model('Post', {
        fields: {
          id: field.id.uuidv7(),
          title: field.text(),
          content: field.text().optional(),
          authorId: field.uuid(),
          createdAt: field.createdAt(),
        },
        relations: {
          author: rel.belongsTo('User', { from: 'authorId', to: 'id' }),
        },
      }),
    },
  }),
);
`;
}
/** Full TypeScript starter contract for Mongo: explicit collections and ObjectId keys. */
function starterSchemaTsMongo() {
	return `import mongoFamily from '@prisma-next/family-mongo/pack';
import { defineContract } from '@prisma-next/mongo-contract-ts/contract-builder';
import mongoTarget from '@prisma-next/target-mongo/pack';

export const contract = defineContract(
  { family: mongoFamily, target: mongoTarget },
  ({ field, model, rel }) => ({
    models: {
      User: model('User', {
        collection: 'users',
        fields: {
          _id: field.objectId(),
          email: field.string(),
          name: field.string().optional(),
        },
        relations: {
          posts: rel.hasMany('Post', { from: '_id', to: 'authorId' }),
        },
      }),

      Post: model('Post', {
        collection: 'posts',
        fields: {
          _id: field.objectId(),
          title: field.string(),
          content: field.string().optional(),
          authorId: field.objectId(),
        },
        relations: {
          author: rel.belongsTo('User', { from: 'authorId', to: '_id' }),
        },
      }),
    },
  }),
);
`;
}
|
|
737
|
+
/**
 * Renders `prisma-next.config.ts` for the chosen target. `contractPath`
 * is JSON-stringified so quoting/escaping in the emitted TS literal is
 * always valid. The connection string is read from `DATABASE_URL` via
 * `dotenv/config` at the top of the generated file.
 */
function configFile(target, contractPath) {
	return `import 'dotenv/config';
import { defineConfig } from '${targetPackageName(target)}/config';

export default defineConfig({
  contract: ${JSON.stringify(contractPath)},
  db: {
    connection: process.env['DATABASE_URL']!,
  },
});
`;
}
/**
 * Renders the `db.ts` facade module. The Postgres runtime takes only the
 * contract JSON here, while the Mongo runtime additionally receives the
 * connection `url` from `DATABASE_URL` — the two generated files differ
 * only in that shape.
 */
function dbFile(target) {
	if (target === "postgres") return `import postgres from '@prisma-next/postgres/runtime';
import type { Contract } from './contract.d';
import contractJson from './contract.json' with { type: 'json' };

export const db = postgres<Contract>({ contractJson });
`;
	return `import mongo from '@prisma-next/mongo/runtime';
import type { Contract } from './contract.d';
import contractJson from './contract.json' with { type: 'json' };

export const db = mongo<Contract>({
  contractJson,
  url: process.env['DATABASE_URL']!,
});
`;
}
|
|
766
|
+
|
|
767
|
+
//#endregion
|
|
768
|
+
//#region src/commands/init/inputs.ts
|
|
769
|
+
// Accepted `--target` spellings mapped to the canonical target id; both
// the short and the long vendor names are accepted (case-insensitive at
// the lookup site).
const TARGET_ALIASES = new Map([
	["postgres", "postgres"],
	["postgresql", "postgres"],
	["mongo", "mongo"],
	["mongodb", "mongo"]
]);
// Accepted `--authoring` spellings mapped to the canonical authoring id.
const AUTHORING_VALUES = new Map([
	["psl", "psl"],
	["typescript", "typescript"],
	["ts", "typescript"]
]);
|
|
780
|
+
/**
|
|
781
|
+
* Resolves every required input for `runInit`. In interactive mode, missing
|
|
782
|
+
* inputs are prompted via clack; in non-interactive mode, missing required
|
|
783
|
+
* inputs throw a structured error listing exactly which flags are missing
|
|
784
|
+
* (FR1.4). Throws `CliStructuredError` on any unrecoverable input issue.
|
|
785
|
+
*
|
|
786
|
+
* `canPrompt` is decoupled from `flags.interactive` so the action handler
|
|
787
|
+
* (`./index.ts`) owns the merge of stdout-TTY (decoration) and stdin-TTY
|
|
788
|
+
* (prompts). `flags.interactive` continues to gate `TerminalUI` decoration
|
|
789
|
+
* — see [Style Guide § Interactivity](../../../../../../../docs/CLI%20Style%20Guide.md#interactivity).
|
|
790
|
+
*/
|
|
791
|
+
async function resolveInitInputs(ctx) {
	const { baseDir, options, flags, canPrompt } = ctx;
	const force = Boolean(options.force);
	const autoAcceptPrompts = Boolean(flags.yes);
	// `--strict-probe` is meaningless without `--probe-db`; fail fast.
	if (options.strictProbe && !options.probeDb) throw errorInitStrictProbeWithoutProbe();
	// Re-init detection happens first so an aborted confirm does no work.
	const reinit = await resolveReinit({
		baseDir,
		force,
		canPrompt,
		autoAcceptPrompts
	});
	const target = resolveTarget(options.target);
	const authoring = resolveAuthoring(options.authoring);
	// Collect every missing required flag so the non-interactive error
	// (FR1.4) names all of them at once instead of one per run.
	const missing = [];
	if (target === void 0) missing.push("target");
	if (authoring === void 0) missing.push("authoring");
	if (!canPrompt && missing.length > 0) throw errorInitMissingFlags({
		missing,
		why: process.stdin.isTTY ? "Non-interactive mode is active (`--no-interactive` or stdout is piped)." : "stdin is not a TTY, so `init` cannot prompt interactively."
	});
	// From here prompting is allowed (or nothing is missing).
	const finalTarget = target ?? await promptTarget();
	const finalAuthoring = authoring ?? await promptAuthoring();
	// Flag value is validated; otherwise prompt when possible, else default.
	const finalSchemaPath = options.schemaPath !== void 0 ? validateSchemaPath(options.schemaPath, finalAuthoring) : canPrompt ? await promptSchemaPath(finalAuthoring) : defaultSchemaPath(finalAuthoring);
	const writeEnv = await resolveWriteEnv({
		flag: options.writeEnv,
		canPrompt,
		autoAcceptPrompts
	});
	// FR9.2 — offer to drop the other target's facade on a target switch.
	const removePreviousFacade = await resolveRemovePreviousFacade({
		baseDir,
		target: finalTarget,
		reinit,
		force,
		canPrompt,
		autoAcceptPrompts
	});
	return {
		target: finalTarget,
		authoring: finalAuthoring,
		schemaPath: finalSchemaPath,
		// `--no-install` is the only way to turn installation off.
		install: options.install !== false,
		writeEnv,
		probeDb: Boolean(options.probeDb),
		strictProbe: Boolean(options.strictProbe),
		reinit,
		removePreviousFacade
	};
}
|
|
839
|
+
/**
 * Resolves the write-env decision: an explicit flag always wins;
 * otherwise non-promptable runs and `--yes` runs default to `false`, and
 * interactive runs confirm via clack (rendered on stderr). Cancelling
 * the prompt throws the structured "user aborted" error.
 */
async function resolveWriteEnv(opts) {
	const { flag, canPrompt, autoAcceptPrompts } = opts;
	if (flag !== void 0) return Boolean(flag);
	const shouldAsk = canPrompt && !autoAcceptPrompts;
	if (!shouldAsk) return false;
	const answer = await clack.confirm({
		message: "Also write a .env file from .env.example? (gitignored)",
		initialValue: false,
		output: process.stderr
	});
	if (clack.isCancel(answer)) throw errorInitUserAborted();
	return Boolean(answer);
}
|
|
850
|
+
/**
|
|
851
|
+
* FR9.2 — detects whether re-init is switching targets (the previous
|
|
852
|
+
* facade differs from the chosen target's facade) and resolves the
|
|
853
|
+
* remove-or-keep question.
|
|
854
|
+
*
|
|
855
|
+
* The non-interactive contract is the same as the `--force` re-init
|
|
856
|
+
* gate above: a non-interactive run that reaches this helper always
|
|
857
|
+
* has `--force` (otherwise `resolveReinit` would have thrown 5002), so
|
|
858
|
+
* the removal proceeds without further prompting. Interactive runs see
|
|
859
|
+
* a `clack.confirm` with `initialValue: true` — the destructive default
|
|
860
|
+
* is correct because keeping both facades produces a project that
|
|
861
|
+
* imports from one but pays for both in the lockfile, which is a
|
|
862
|
+
* silent foot-gun the user almost never wants.
|
|
863
|
+
*
|
|
864
|
+
* Returns the previous facade package name when the user consented (or
|
|
865
|
+
* was force-ed) to remove it, otherwise `null`. Parse failures on
|
|
866
|
+
* `package.json` resolve to `null` here — `runInit`'s precondition
|
|
867
|
+
* gate surfaces a structured 5010 error for the same file shortly
|
|
868
|
+
* after, so we avoid double-reporting and keep this helper side-effect
|
|
869
|
+
* free under hostile inputs.
|
|
870
|
+
*/
|
|
871
|
+
async function resolveRemovePreviousFacade(opts) {
	// Only relevant on re-init; a fresh init has no previous facade.
	if (!opts.reinit) return null;
	const packageJsonPath = join(opts.baseDir, "package.json");
	if (!existsSync(packageJsonPath)) return null;
	// The "previous" facade is simply the other target's package.
	const otherTarget = opts.target === "postgres" ? "mongo" : "postgres";
	const otherFacade = targetPackageName(otherTarget);
	let parsed;
	try {
		parsed = JSON.parse(readFileSync(packageJsonPath, "utf-8"));
	} catch {
		// Unparseable package.json: stay side-effect free here; runInit's
		// precondition gate reports the same file with a structured error.
		return null;
	}
	const deps = parsed["dependencies"];
	if (deps === null || typeof deps !== "object" || Array.isArray(deps)) return null;
	if (!Object.hasOwn(deps, otherFacade)) return null;
	// `--force` or `--yes` (when prompting is possible) consent implicitly.
	if (opts.force || opts.canPrompt && opts.autoAcceptPrompts) return otherFacade;
	// Non-interactive runs reaching this point always had `--force`-level
	// consent (see resolveReinit), so removal proceeds without prompting.
	if (!opts.canPrompt) return otherFacade;
	// Destructive default (`initialValue: true`): keeping both facades
	// leaves a lockfile paying for an unused driver — a silent foot-gun.
	const result = await clack.confirm({
		message: `Switching from ${targetLabel(otherTarget)} to ${targetLabel(opts.target)} — remove ${otherFacade} from package.json dependencies?`,
		initialValue: true,
		output: process.stderr
	});
	if (clack.isCancel(result)) throw errorInitUserAborted();
	return result === true ? otherFacade : null;
}
|
|
896
|
+
/**
 * Detects an already-initialised project (presence of
 * `prisma-next.config.ts`) and resolves whether re-initialisation may
 * proceed: `--force` and `--yes` consent implicitly, a non-interactive
 * run without `--force` fails with a structured error, and an
 * interactive run must explicitly confirm (declining or cancelling
 * aborts). Returns `true` when this run is a re-init.
 */
async function resolveReinit(opts) {
	const configPath = join(opts.baseDir, "prisma-next.config.ts");
	if (!existsSync(configPath)) return false;
	if (opts.force) return true;
	if (!opts.canPrompt) throw errorInitReinitNeedsForce();
	if (opts.autoAcceptPrompts) return true;
	const answer = await clack.confirm({
		message: "This project is already initialized. Re-initialize? This will overwrite all generated files.",
		initialValue: false,
		output: process.stderr
	});
	// Anything other than an explicit "yes" aborts the whole init.
	if (clack.isCancel(answer) || answer !== true) throw errorInitUserAborted();
	return true;
}
|
|
909
|
+
/**
 * Canonicalises the `--target` flag via TARGET_ALIASES. `undefined`
 * passes through (the caller may prompt later); an unrecognised value
 * throws a structured invalid-flag error naming the accepted spellings.
 */
function resolveTarget(value) {
	if (value === void 0) return void 0;
	const canonical = TARGET_ALIASES.get(value.toLowerCase());
	if (canonical !== void 0) return canonical;
	throw errorInitInvalidFlagValue({
		flag: "target",
		value,
		allowed: ["postgres", "mongodb"]
	});
}
|
|
919
|
+
/**
 * Canonicalises the `--authoring` flag via AUTHORING_VALUES. `undefined`
 * passes through (the caller may prompt later); an unrecognised value
 * throws a structured invalid-flag error.
 */
function resolveAuthoring(value) {
	if (value === void 0) return void 0;
	const canonical = AUTHORING_VALUES.get(value.toLowerCase());
	if (canonical !== void 0) return canonical;
	throw errorInitInvalidFlagValue({
		flag: "authoring",
		value,
		allowed: ["psl", "typescript"]
	});
}
|
|
929
|
+
/**
|
|
930
|
+
* Validates `--schema-path` against the chosen `--authoring` style: PSL
|
|
931
|
+
* authoring requires a `.prisma` file and TypeScript authoring requires a
|
|
932
|
+
* `.ts` file. Mismatched combinations would silently scaffold PSL content
|
|
933
|
+
* into a `.ts` file (or vice versa); this validator surfaces the mistake
|
|
934
|
+
* as a precondition error naming both flags.
|
|
935
|
+
*/
|
|
936
|
+
/**
 * Validates `--schema-path` against the chosen authoring style: PSL
 * authoring requires a `.prisma` file and TypeScript authoring a `.ts`
 * file — a mismatch would silently scaffold the wrong content, so it is
 * surfaced as a structured error naming both flags. Also rejects empty
 * values and directory-looking paths. Returns the normalised path.
 */
function validateSchemaPath(value, authoring) {
	const candidate = value.trim();
	if (candidate.length === 0) {
		throw errorInitInvalidFlagValue({
			flag: "schema-path",
			value,
			allowed: ["<non-empty file path with .prisma or .ts extension>"]
		});
	}
	const looksLikeDirectory = candidate.endsWith("/") || candidate.endsWith("\\");
	if (looksLikeDirectory) {
		throw errorInitInvalidFlagValue({
			flag: "schema-path",
			value,
			allowed: ["<file path, not a directory>"]
		});
	}
	const requiredExt = authoring === "typescript" ? ".ts" : ".prisma";
	if (extname(candidate).toLowerCase() !== requiredExt) {
		throw errorInitInvalidFlagValue({
			flag: "schema-path",
			value,
			allowed: [`<file path ending in ${requiredExt} for --authoring ${authoring}>`]
		});
	}
	return normalize(candidate);
}
|
|
957
|
+
/**
 * Interactive target picker (PostgreSQL vs MongoDB). Rendered on stderr
 * so stdout stays clean; cancelling throws the structured user-aborted
 * error. Returns the canonical target id.
 */
async function promptTarget() {
	const result = await clack.select({
		message: "What database are you using?",
		options: [{
			value: "postgres",
			label: "PostgreSQL"
		}, {
			value: "mongo",
			label: "MongoDB"
		}],
		output: process.stderr
	});
	if (clack.isCancel(result)) throw errorInitUserAborted();
	return result;
}
|
|
972
|
+
/**
 * Interactive authoring-style picker (PSL vs TypeScript). Rendered on
 * stderr; cancelling throws the structured user-aborted error. Returns
 * the canonical authoring id.
 */
async function promptAuthoring() {
	const result = await clack.select({
		message: "How do you want to write your schema?",
		options: [{
			value: "psl",
			label: "Prisma Schema Language (.prisma)"
		}, {
			value: "typescript",
			label: "TypeScript (.ts)"
		}],
		output: process.stderr
	});
	if (clack.isCancel(result)) throw errorInitUserAborted();
	return result;
}
|
|
987
|
+
/**
 * Interactive schema-path prompt. The inline validator mirrors
 * `validateSchemaPath`'s rules (non-empty, not a directory, extension
 * matching the authoring style) so the user gets immediate feedback;
 * returning `undefined` from `validate` signals "valid" to clack. The
 * accepted value is still run through `validateSchemaPath` afterwards so
 * the returned path is normalised by the single source of truth.
 */
async function promptSchemaPath(authoring) {
	const expectedExt = authoring === "typescript" ? ".ts" : ".prisma";
	const result = await clack.text({
		message: "Where should the schema file go?",
		initialValue: defaultSchemaPath(authoring),
		validate(value = "") {
			const trimmed = value.trim();
			if (trimmed.length === 0) return "Path cannot be empty";
			if (trimmed.endsWith("/") || trimmed.endsWith("\\")) return "Path must be a file, not a directory";
			const ext = extname(trimmed).toLowerCase();
			if (ext === "") return "Path must include a file extension (e.g. .prisma or .ts)";
			if (ext !== expectedExt) return `Schema path must end in ${expectedExt} for --authoring ${authoring} (got ${ext}).`;
		},
		output: process.stderr
	});
	if (clack.isCancel(result)) throw errorInitUserAborted();
	return validateSchemaPath(result, authoring);
}
|
|
1005
|
+
|
|
1006
|
+
//#endregion
|
|
1007
|
+
//#region src/commands/init/probe-db.ts
|
|
1008
|
+
/**
|
|
1009
|
+
* Connects (when configured) to the user's database and returns a
|
|
1010
|
+
* structured outcome describing whether the server meets the declared
|
|
1011
|
+
* minimum (FR8.1). Pure with respect to its inputs: no I/O happens
|
|
1012
|
+
* unless `databaseUrl` is set.
|
|
1013
|
+
*
|
|
1014
|
+
* The outcome is shaped so that `--strict-probe` can branch on the
|
|
1015
|
+
* `kind`/`meetsMinimum` pair without re-stringifying the message:
|
|
1016
|
+
*
|
|
1017
|
+
* - `ok` — informational; `init` continues.
|
|
1018
|
+
* - `below-minimum` — warning; `init` continues regardless of
|
|
1019
|
+
* `--strict-probe` (the spec scopes strict-probe to "probe
|
|
1020
|
+
* *failures*", and a successful probe that finds an old server is
|
|
1021
|
+
* not a failure).
|
|
1022
|
+
* - `no-database-url` / `connection-failed` / `driver-missing` —
|
|
1023
|
+
* warning by default, fatal under `--strict-probe`.
|
|
1024
|
+
*/
|
|
1025
|
+
async function probeServerVersion(ctx, overrides = {}) {
	const { databaseUrl, minVersion, target } = ctx;
	// No URL, no probe: the probe deliberately does not read .env itself.
	if (databaseUrl === void 0 || databaseUrl.trim().length === 0) return {
		kind: "no-database-url",
		minVersion,
		meetsMinimum: null,
		message: "Skipped --probe-db: DATABASE_URL is not set in the current shell environment. (init does not read .env for the probe; export the variable or drop --probe-db.)"
	};
	let driverResult;
	try {
		// Test seams: `overrides.probePostgres` / `overrides.probeMongo`
		// replace the real driver-backed probes when supplied.
		if (target === "postgres") driverResult = overrides.probePostgres !== void 0 ? await overrides.probePostgres(databaseUrl) : await defaultProbePostgres(databaseUrl, ctx.baseDir, overrides);
		else driverResult = overrides.probeMongo !== void 0 ? await overrides.probeMongo(databaseUrl) : await defaultProbeMongo(databaseUrl, ctx.baseDir, overrides);
	} catch (err) {
		// Missing peer driver is a distinct, actionable outcome.
		if (err instanceof DriverMissingError) return {
			kind: "driver-missing",
			minVersion,
			meetsMinimum: null,
			cause: err.message,
			message: `Skipped --probe-db: ${err.message}. (Run with install enabled, or install the driver yourself, then re-run \`prisma-next init --probe-db\`.)`
		};
		// Driver errors echo the raw URL back; redact userinfo before surfacing.
		const cause = redactDatabaseUrlSecrets(causeMessage$1(err));
		return {
			kind: "connection-failed",
			minVersion,
			meetsMinimum: null,
			cause,
			message: `--probe-db could not connect: ${cause}.`
		};
	}
	// A reachable-but-old server is a warning, never a strict-probe failure.
	if (compareVersionPrefix(driverResult.serverVersion, minVersion) < 0) return {
		kind: "below-minimum",
		serverVersion: driverResult.serverVersion,
		minVersion,
		meetsMinimum: false,
		message: `--probe-db: server reports version ${driverResult.serverVersion}, below the declared minimum (${minVersion}). Some queries may fail until the server is upgraded.`
	};
	return {
		kind: "ok",
		serverVersion: driverResult.serverVersion,
		minVersion,
		meetsMinimum: true,
		message: `--probe-db: server reports version ${driverResult.serverVersion} (>= ${minVersion}).`
	};
}
|
|
1069
|
+
/**
|
|
1070
|
+
* Compares two semver-prefix strings ("14", "14.2", "6.0", …) by
|
|
1071
|
+
* numeric components left-to-right. Returns a negative number when `a`
|
|
1072
|
+
* is older than `b`, zero when both versions agree on every numeric
|
|
1073
|
+
* component (treating missing trailing components as `0`), and a
|
|
1074
|
+
* positive number when `a` is newer.
|
|
1075
|
+
*
|
|
1076
|
+
* The loop runs over the **longer** of the two prefixes so that
|
|
1077
|
+
* `'14'` compares less than `'14.1'` — without that, the shorter
|
|
1078
|
+
* prefix would be silently accepted whenever the configured minimum
|
|
1079
|
+
* has a non-zero minor or patch.
|
|
1080
|
+
*
|
|
1081
|
+
* Exported for unit tests.
|
|
1082
|
+
*/
|
|
1083
|
+
/**
 * Compares two semver-prefix strings ("14", "14.2", "6.0", …) by numeric
 * components left-to-right: negative when `a` is older than `b`, zero
 * when every component agrees (missing trailing components count as 0),
 * positive when `a` is newer. The loop spans the *longer* prefix so
 * `"14"` compares less than `"14.1"`. Exported for unit tests.
 */
function compareVersionPrefix(a, b) {
	const left = parseNumericParts(a);
	const right = parseNumericParts(b);
	const width = Math.max(left.length, right.length);
	let i = 0;
	while (i < width) {
		const delta = (left[i] ?? 0) - (right[i] ?? 0);
		if (delta !== 0) return delta;
		i += 1;
	}
	return 0;
}
/**
 * Extracts up to four leading numeric components from a version string,
 * skipping any non-digit prefix. Returns `[]` when no digits are found.
 */
function parseNumericParts(version) {
	const matched = /^[^\d]*(\d+(?:\.\d+){0,3})/.exec(version);
	if (matched === null) return [];
	const digits = matched[1] ?? "";
	return digits.split(".").map((part) => Number.parseInt(part, 10));
}
|
|
1099
|
+
var DriverMissingError = class extends Error {};
|
|
1100
|
+
/** Best-effort error-to-string: `Error#message` when available, otherwise `String(err)`. */
function causeMessage$1(err) {
	return err instanceof Error ? err.message : String(err);
}
|
|
1104
|
+
/**
|
|
1105
|
+
* Strips `user:password@` userinfo from any URL-shaped substring before
|
|
1106
|
+
* we surface the cause to the user. Mirrors `redactSecrets` in
|
|
1107
|
+
* `init.ts` — the probe path has its own redactor because the inputs
|
|
1108
|
+
* here include the raw connection string by construction (driver
|
|
1109
|
+
* errors echo the URL back).
|
|
1110
|
+
*
|
|
1111
|
+
* Exported for unit tests.
|
|
1112
|
+
*/
|
|
1113
|
+
/**
 * Strips `user:password@` userinfo from every URL-shaped substring before
 * the text is shown to the user — driver errors echo the raw connection
 * string back by construction. Falsy input is returned unchanged.
 * Exported for unit tests.
 */
function redactDatabaseUrlSecrets(text) {
	if (!text) return text;
	const userinfoPattern = /([a-zA-Z][a-zA-Z0-9+.-]*:\/\/)([^/@\s]+)@/g;
	return text.replace(userinfoPattern, "$1***@");
}
|
|
1117
|
+
/**
 * Probes a PostgreSQL server using the user's own `pg` driver (resolved
 * from their project via `requirePeer`, not from the CLI bundle) and
 * returns `{ serverVersion }` parsed out of `SELECT version()`.
 */
async function defaultProbePostgres(databaseUrl, baseDir, overrides) {
	const client = new (requirePeer("pg", baseDir, overrides)).Client({ connectionString: databaseUrl });
	await client.connect();
	try {
		const result = await client.query("SELECT version() as version");
		return { serverVersion: parsePostgresVersion(String(result?.rows?.[0]?.version ?? "")) };
	} finally {
		// Always release the connection; a close failure must not mask the result.
		await client.end().catch(() => void 0);
	}
}
|
|
1127
|
+
/**
 * Extracts the numeric prefix from a Postgres `version()` row, e.g.
 *
 *   `PostgreSQL 14.10 on x86_64-pc-linux-gnu, ...` → `"14.10"`
 *   `PostgreSQL 16beta1 on …`                      → `"16"`
 *
 * Pre-release suffixes are conservatively dropped: minimum-version
 * comparisons treat 16beta1 as 16, which is what every reasonable user
 * expects.
 *
 * Throws when the row does not look like Postgres output.
 *
 * Exported for unit tests.
 */
function parsePostgresVersion(versionString) {
	const found = /PostgreSQL\s+(\d+(?:\.\d+)?)/i.exec(versionString);
	const version = found?.[1];
	if (version === void 0) throw new Error(`Could not parse PostgreSQL version from \`${versionString}\``);
	return version;
}
|
|
1143
|
+
/**
 * Default Mongo probe: connects with the project's own `mongodb`
 * driver, asks the server for `buildInfo` and returns its `version`
 * string verbatim. The connection is always closed, and close failures
 * are deliberately ignored so they cannot mask the probe result.
 */
async function defaultProbeMongo(databaseUrl, baseDir, overrides) {
	const { MongoClient } = requirePeer("mongodb", baseDir, overrides);
	const client = new MongoClient(databaseUrl);
	await client.connect();
	try {
		const buildInfo = await client.db().admin().command({ buildInfo: 1 });
		const serverVersion = String(buildInfo.version ?? "");
		if (serverVersion.length === 0) throw new Error("buildInfo did not include a `version` field");
		return { serverVersion };
	} finally {
		// Best-effort close: never let `close()` failures surface.
		await client.close().catch(() => void 0);
	}
}
|
|
1155
|
+
/**
 * Loads a peer driver (`pg` / `mongodb`) from the user's project
 * `node_modules`. Resolution deliberately starts from `baseDir` rather
 * than from the CLI bundle — the CLI does not depend on `pg` or
 * `mongodb` directly, but the user's `init`-generated `package.json`
 * does (via the target facade). Any resolution failure is folded into a
 * typed `DriverMissingError` so `probeServerVersion` can map it to a
 * `driver-missing` outcome rather than letting a `MODULE_NOT_FOUND`
 * leak as a generic connection failure.
 */
function requirePeer(moduleId, baseDir, overrides) {
	try {
		// Test seam: an injected resolver takes precedence over Node's.
		if (overrides.requireFromBaseDir !== void 0) return overrides.requireFromBaseDir(baseDir, moduleId);
		const requireFromProject = createRequire(join(baseDir, "package.json"));
		return requireFromProject(moduleId);
	} catch (err) {
		throw new DriverMissingError(`\`${moduleId}\` is not installed in this project (resolved from ${baseDir}; cause: ${causeMessage$1(err)})`);
	}
}
|
|
1173
|
+
|
|
1174
|
+
//#endregion
|
|
1175
|
+
//#region src/commands/init/reinit-cleanup.ts
|
|
1176
|
+
/**
 * Filenames the contract pipeline emits next to the user's schema source
 * (`<schemaDir>/contract.json`, `<schemaDir>/contract.d.ts`, …). Mirrors
 * `ARTEFACT_FILENAMES` in `hygiene-gitattributes.ts`; kept as a separate
 * constant here because the cleanup contract is target-agnostic and we
 * deliberately do not want a stale `start-contract.json` from a previous
 * target lingering after a re-init.
 *
 * If a future emit pipeline produces an additional artefact, add it here
 * **and** to the gitattributes list — the two stay in lockstep so the
 * file `init` advertises as `linguist-generated` is exactly the file
 * `init` is willing to delete on re-init.
 *
 * List order matters only for reporting: `findStaleArtefacts` iterates
 * this array in order, so deletion logs follow this sequence.
 */
const ARTEFACT_FILENAMES = [
	"contract.json",
	"contract.d.ts",
	"end-contract.json",
	"end-contract.d.ts",
	"start-contract.json",
	"start-contract.d.ts",
	"ops.json",
	"migration.json"
];
|
|
1199
|
+
/**
 * Returns the schema-relative paths of stale contract artefacts the
 * previous `init` run (or a `contract emit`) left behind in `schemaDir`.
 * Paths are returned relative to `baseDir` so the caller can plumb them
 * into `filesWritten`-style logging without re-deriving the path.
 *
 * Pure function: no filesystem mutation. Used by `runInit`'s
 * precondition phase (FR6.2 / NFR3 atomicity) so a downstream parse
 * failure leaves the artefacts on disk and the project byte-identical
 * to its pre-init state.
 */
function findStaleArtefacts(baseDir, schemaDir) {
	return ARTEFACT_FILENAMES
		.map((filename) => join(schemaDir, filename))
		.filter((rel) => existsSync(join(baseDir, rel)));
}
|
|
1218
|
+
/**
 * Drops a single key from `package.json#dependencies`, returning the new
 * file content. Returns `null` when there is nothing to change — the
 * dependency is absent, `dependencies` is not a plain object, or the
 * manifest root itself is not a plain object — so the caller can skip
 * the write to keep re-init idempotent (FR9.3).
 *
 * Used by `runInit` for the FR9.2 target-switch path: when the user
 * re-inits a project from `--target postgres` to `--target mongodb` (or
 * vice versa), the previous facade is removed from `dependencies` so the
 * resulting project depends only on the chosen target's facade.
 *
 * Devs/peers/optional dep groups are intentionally *not* touched — the
 * facades are only ever in `dependencies` (FR4 / FR7), and broadening
 * the search would risk clobbering an unrelated dep with the same name
 * in `peerDependencies`.
 *
 * Throws `SyntaxError` if `existing` is not parseable as JSON; the
 * caller (`runInit`) already guards on that with a structured 5010
 * error before this helper is reached.
 */
function removeDependency(existing, depName) {
	const parsed = JSON.parse(existing);
	// Robustness fix: `JSON.parse` can legitimately yield `null`, a
	// number, a string, or an array (all valid JSON). Those have no
	// dependencies to remove — treat them as a no-op instead of crashing
	// with a TypeError on property access below.
	if (parsed === null || typeof parsed !== "object" || Array.isArray(parsed)) return null;
	const deps = parsed["dependencies"];
	if (deps === null || typeof deps !== "object" || Array.isArray(deps)) return null;
	if (!Object.hasOwn(deps, depName)) return null;
	// Copy-on-write: never mutate the parsed object's original map.
	const next = { ...deps };
	delete next[depName];
	parsed["dependencies"] = next;
	// Preserve the file's trailing-newline convention on re-serialize.
	const trailingNewline = existing.endsWith("\n") ? "\n" : "";
	return `${JSON.stringify(parsed, null, 2)}${trailingNewline}`;
}
|
|
1248
|
+
|
|
1249
|
+
//#endregion
|
|
1250
|
+
//#region src/commands/init/templates/render.ts
|
|
1251
|
+
/**
 * Reads a template shipped next to the CLI bundle and substitutes every
 * `{{name}}` placeholder listed in `variableNames` with its value from
 * `vars`. A listed variable missing from `vars` throws, so template/vars
 * drift fails loudly instead of shipping a literal `{{name}}`.
 */
function renderTemplate(templateFile, variableNames, vars) {
	const template = readFileSync(join(import.meta.dirname, templateFile), "utf-8");
	return variableNames.reduce((rendered, key) => {
		const value = vars[key];
		if (value === void 0) throw new Error(`Template variable '${key}' is not defined`);
		return rendered.replaceAll(`{{${key}}}`, value);
	}, template);
}
|
|
1260
|
+
|
|
1261
|
+
//#endregion
|
|
1262
|
+
//#region src/commands/init/templates/agent-skill.ts
|
|
1263
|
+
// Placeholder names the `agent-skill-<target>.md` templates expect.
// Every entry must be supplied to `renderTemplate` by `agentSkillMd`
// below, which builds the matching `vars` object.
const variables$1 = [
	"schemaPath",
	"schemaDir",
	"dbImportPath",
	"pkgRun",
	"authoringLabel"
];
|
|
1270
|
+
/**
 * Renders the per-project agent skill (FR5.2). The skill template is
 * target-specific (Postgres vs Mongo query syntax differs); the
 * authoring style enters via:
 *
 * - `schemaPath` — already routed through this function's caller (the
 *   AC says a TS-authoring scaffold must reference `prisma/contract.ts`).
 * - `authoringLabel` — a short human-readable note (`PSL` / `TypeScript`)
 *   the skill template uses when describing the contract file.
 */
function agentSkillMd(target, authoring, schemaPath, pkgRun) {
	const schemaDir = dirname(schemaPath);
	const authoringLabel = authoring === "typescript" ? "TypeScript" : "PSL";
	return renderTemplate(`agent-skill-${target}.md`, variables$1, {
		schemaPath,
		schemaDir,
		dbImportPath: `./${schemaDir}/db`,
		pkgRun,
		authoringLabel
	});
}
|
|
1291
|
+
|
|
1292
|
+
//#endregion
|
|
1293
|
+
//#region src/commands/init/templates/env.ts
|
|
1294
|
+
/**
 * The minimum supported server version for each target (FR8.1). The
 * authoritative source of truth is each target package's
 * `package.json#prismaNext.minServerVersion` field — this module
 * mirrors those values and a workspace-level test asserts the two
 * never drift (`templates/tsconfig-env.test.ts`).
 *
 * Bumping a value here in isolation is **not** safe: edit the
 * corresponding target package's `package.json` first, then mirror
 * here. The scaffold's `.env.example` (FR3.1, FR8.2) and the
 * "Requirements" section of `prisma-next.md` both read from this
 * constant, so a stale value lies to every freshly initialised user.
 */
const MIN_SERVER_VERSION = {
	postgres: "14", // mirrored from the postgres target package's manifest — see note above
	mongo: "6.0" // mirrored from the mongo target package's manifest — see note above
};
|
|
1311
|
+
// Human-readable product names keyed by target id; used when rendering
// env-file comments and the quick-reference "Requirements" block.
const TARGET_LABEL = {
	postgres: "PostgreSQL",
	mongo: "MongoDB"
};
|
|
1315
|
+
/**
 * Renders the placeholder body shared by `.env` and `.env.example`:
 * the target-specific connection-string requirement comments and the
 * commented-shape `DATABASE_URL` line. The output is identical for both
 * authoring styles — the env file is orthogonal to PSL vs TS schema
 * authoring.
 */
function envPlaceholderBody(target) {
	const label = TARGET_LABEL[target];
	const sampleUrl = target === "postgres"
		? "DATABASE_URL=\"postgresql://user:password@localhost:5432/mydb\""
		: "DATABASE_URL=\"mongodb://localhost:27017/mydb\"";
	return [
		`# Connection string for ${label}.`,
		`# Requires ${label} >= ${MIN_SERVER_VERSION[target]}.`,
		"",
		sampleUrl,
		""
	].join("\n");
}
|
|
1334
|
+
/**
 * Renders the `.env.example` content for a given target (FR3.1):
 *
 * - Carries a "Copy this file to `.env`…" intro that only makes sense
 *   for the example file (the real `.env` is the destination of that
 *   copy and so does not get the same intro).
 * - Documents the `DATABASE_URL` placeholder in the target's native URL
 *   shape (Postgres: standard `postgresql://`, Mongo: `mongodb://` plus
 *   a `mydb` database segment so the lazy facade has a `dbName`).
 * - Carries a `# Requires <db> >= <version>` comment so a fresh user
 *   knows the minimum supported server before they first try to
 *   connect (FR8.2).
 */
function envExampleContent(target) {
	const intro = "# Copy this file to `.env` and replace the placeholder with your real connection string.";
	return `${intro}\n${envPlaceholderBody(target)}`;
}
|
|
1353
|
+
/**
 * Renders the initial `.env` content for `--write-env` / interactive
 * opt-in (FR3.2). Same placeholder body as `.env.example`, **without**
 * the example file's "Copy this file to `.env`…" intro: the real `.env`
 * is the destination of that copy, so the line would lie. The written
 * file is kept out of version control (FR3.3 ensures `.env` lands in
 * `.gitignore`).
 */
function envFileContent(target) {
	return envPlaceholderBody(target);
}
|
|
1363
|
+
|
|
1364
|
+
//#endregion
|
|
1365
|
+
//#region src/commands/init/templates/quick-reference.ts
|
|
1366
|
+
// Placeholder names the `quick-reference-<target>.md` templates expect.
// Every entry must be supplied to `renderTemplate` by `quickReferenceMd`
// below, which builds the matching `vars` object.
const variables = [
	"schemaPath",
	"schemaDir",
	"dbImportPath",
	"pkgRun",
	"schemaSample",
	"requirements"
];
|
|
1374
|
+
/**
 * Renders the user-facing quick reference (`prisma-next.md`) for the
 * chosen target/authoring pair: a target-specific template filled with
 * the schema location, the db import path, the package-runner prefix, a
 * schema sample matching the authoring style, and the FR8.2
 * "Requirements" block.
 */
function quickReferenceMd(target, authoring, schemaPath, pkgRun) {
	const schemaDir = dirname(schemaPath);
	return renderTemplate(`quick-reference-${target}.md`, variables, {
		schemaPath,
		schemaDir,
		dbImportPath: `./${schemaDir}/db`,
		pkgRun,
		schemaSample: schemaSample(target, authoring),
		requirements: requirementsBlock(target)
	});
}
|
|
1386
|
+
/**
 * Renders the FR8.2 "Requirements" block injected into `prisma-next.md`
 * (the user-facing quick reference). Sources the minimum server
 * version from `MIN_SERVER_VERSION` — itself mirrored from each
 * target package's `package.json#prismaNext.minServerVersion` (FR8.1).
 *
 * The verification command is target-specific — Postgres scaffolds
 * shouldn't ship Mongo's `db.runCommand` (and vice versa) just because
 * we couldn't be bothered to branch.
 */
function requirementsBlock(target) {
	const verifyCommand = target === "postgres" ? "`SELECT version()`" : "`db.runCommand({ buildInfo: 1 })`";
	const lines = [
		"## Requirements",
		"",
		`- **${TARGET_LABEL[target]} ${MIN_SERVER_VERSION[target]} or newer.** Older servers are not supported. Run ${verifyCommand} against your server to verify.`,
		"- The CLI never connects to your database without explicit consent. Pass `--probe-db` to `prisma-next init` if you want `init` to verify the server version itself."
	];
	return lines.join("\n");
}
|
|
1405
|
+
|
|
1406
|
+
//#endregion
|
|
1407
|
+
//#region src/commands/init/templates/tsconfig.ts
|
|
1408
|
+
/**
 * Compiler options the scaffolded `prisma-next.config.ts` and `db.ts` need
 * to typecheck:
 *
 * - `module: 'preserve'` + `moduleResolution: 'bundler'` align with how
 *   modern bundlers (and `tsdown`) consume our facade packages.
 * - `resolveJsonModule` lets `db.ts` import `contract.json with { type:
 *   'json' }` — the runtime path the facades document (FR4).
 *
 * `types: ['node']` is FR2.2 territory and lives in
 * `REQUIRED_COMPILER_OPTIONS_TYPES` because TS only honours an _array_
 * there, and a string-keyed merge would clobber any user-specified
 * entries. Merge handling preserves any extra `types` the user added.
 */
const REQUIRED_COMPILER_OPTIONS = {
	module: "preserve", // bundler-style module output expected by the facades
	moduleResolution: "bundler",
	resolveJsonModule: true // db.ts imports contract.json at runtime (FR4)
};
|
|
1427
|
+
/**
 * Types that must be present in `compilerOptions.types` for the scaffold
 * to typecheck. With `moduleResolution: 'bundler'`, TypeScript does not
 * implicitly include all `@types/*` packages — `process.env` only resolves
 * when `node` is in this array (or `types` is omitted, but then any other
 * type listed here would force the same behaviour). Listing `node`
 * explicitly is the documented escape hatch (FR2.2).
 */
const REQUIRED_COMPILER_OPTIONS_TYPES = ["node"];
|
|
1436
|
+
/**
 * Builds the scaffold's default `tsconfig.json` (pretty-printed,
 * 2-space indent) for projects that have none yet: strict ES2022
 * settings plus the required compiler options and `types` entries the
 * generated code needs to typecheck.
 */
function defaultTsConfig() {
	const compilerOptions = {
		target: "ES2022",
		...REQUIRED_COMPILER_OPTIONS,
		types: [...REQUIRED_COMPILER_OPTIONS_TYPES],
		strict: true,
		skipLibCheck: true,
		esModuleInterop: true,
		outDir: "dist"
	};
	const config = {
		compilerOptions,
		include: ["**/*.ts"]
	};
	return JSON.stringify(config, null, 2);
}
|
|
1450
|
+
/**
 * Thrown by `mergeTsConfig` when the user's existing `tsconfig.json` is
 * not parseable as JSONC (TypeScript's actual configured dialect — see
 * FR6.1). Carries the raw parse errors so the caller can render an
 * actionable, location-aware message.
 *
 * `runInit` catches this exception during the precondition phase and
 * maps it to a `CliStructuredError(5011)` so the user's working tree
 * stays byte-identical when init bails (FR6.2 / NFR3).
 */
var TsConfigParseError = class extends Error {
	// Raw parse errors (each with an `error` code and `offset`) —
	// preserved so callers can render location-aware diagnostics.
	errors;
	constructor(errors) {
		// The formatted summary doubles as the Error message.
		super(formatTsConfigParseErrors(errors));
		this.errors = errors;
		this.name = "TsConfigParseError";
	}
};
|
|
1468
|
+
/**
 * Renders JSONC parse errors as one human-readable line. An empty error
 * list means parsing "succeeded" but did not yield an object — reported
 * with a dedicated message.
 */
function formatTsConfigParseErrors(errors) {
	if (errors.length === 0) return "tsconfig.json is empty or not an object";
	const rendered = [];
	for (const e of errors) rendered.push(`${printParseErrorCode(e.error)} at offset ${e.offset}`);
	return rendered.join("; ");
}
|
|
1472
|
+
/**
 * Merges the required compiler options into an existing `tsconfig.json`.
 *
 * Parsing is delegated to `jsonc-parser` so JSONC inputs (comments,
 * trailing commas) — TypeScript's real configuration dialect — survive
 * unchanged: edits are applied as text patches via `modify` /
 * `applyEdits`, preserving the user's formatting, key ordering, and
 * comments wherever the touched paths permit (FR6.1, AC "Hostile
 * inputs").
 *
 * Throws `TsConfigParseError` when the input is not parseable as JSONC.
 * The caller must catch this and surface a structured error before
 * writing any scaffold files (FR6.2 atomicity).
 */
function mergeTsConfig(existing) {
	const { config } = parseTsConfigText(existing);
	// Match the user's own indentation and line endings in emitted edits.
	const formattingOptions = {
		tabSize: detectIndent(existing),
		insertSpaces: true,
		eol: existing.includes("\r\n") ? "\r\n" : "\n"
	};
	const applyPatch = (text, path, value) => applyEdits(text, modify(text, path, value, { formattingOptions }));
	let result = existing;
	for (const [key, value] of Object.entries(REQUIRED_COMPILER_OPTIONS)) {
		result = applyPatch(result, ["compilerOptions", key], value);
	}
	// `types` is merged (not overwritten) so user-added entries survive.
	const mergedTypes = mergeTypesArray(config["compilerOptions"]?.["types"]);
	result = applyPatch(result, ["compilerOptions", "types"], mergedTypes);
	return result;
}
|
|
1504
|
+
/**
 * Parses an existing `tsconfig.json` (JSONC) and returns the structured
 * config. Throws `TsConfigParseError` if the input cannot be parsed at
 * all, produced parse errors, or does not resolve to a JSON object —
 * in every one of those cases we cannot safely apply edits.
 *
 * Exposed independently so callers (notably `runInit`'s precondition
 * gate) can validate the file *before* any scaffold file is written.
 */
function parseTsConfigText(text) {
	const errors = [];
	const value = parse(text, errors, {
		allowTrailingComma: true,
		disallowComments: false,
		allowEmptyContent: false
	});
	const isPlainObject = value !== void 0 && value !== null && typeof value === "object" && !Array.isArray(value);
	if (!isPlainObject || errors.length > 0) throw new TsConfigParseError(errors);
	return { config: value };
}
|
|
1525
|
+
/**
 * Infers the indent width of an existing JSON/JSONC document so merged
 * edits blend in with the user's formatting: tab-indented files report
 * a tab size of 1 (one tab per level), space-indented files report the
 * width of the first indented line, and unindented files default to 2.
 */
function detectIndent(text) {
	const match = text.match(/^([ \t]+)\S/m);
	if (match === null) return 2;
	const indent = match[1] ?? "";
	// Fix: this check must target the tab character. Matching a space
	// here would send every space-indented file down the width-1 path
	// and make the `indent.length` branch unreachable for space indents.
	if (indent.startsWith("\t")) return 1;
	return indent.length || 2;
}
|
|
1532
|
+
/**
 * Merges `REQUIRED_COMPILER_OPTIONS_TYPES` into the user's existing
 * `compilerOptions.types` array. Preserves order and dedupes. If the
 * user has no `types` array (or has set it to a non-array), we replace
 * with the required minimum — overwriting a non-array `types` is the
 * correct fix because anything other than a string array is invalid TS
 * config.
 */
function mergeTypesArray(existing) {
	const userTypes = Array.isArray(existing) ? existing.filter((item) => typeof item === "string") : [];
	const merged = [];
	for (const entry of [...userTypes, ...REQUIRED_COMPILER_OPTIONS_TYPES]) {
		if (!merged.includes(entry)) merged.push(entry);
	}
	return merged;
}
|
|
1548
|
+
|
|
1549
|
+
//#endregion
|
|
1550
|
+
//#region src/commands/init/init.ts
|
|
1551
|
+
/**
 * Runs the `init` command end-to-end and returns the exit code. Catches
 * structured CLI errors raised at every phase (input resolution, install,
 * emit) and renders them via the same UI surface as success output
 * (`--json` to stdout, human to stderr). Exit codes follow the documented
 * stable set in `./exit-codes.ts` (FR1.6) and the
 * [Style Guide § Exit Codes](../../../../../../../docs/CLI%20Style%20Guide.md#exit-codes).
 *
 * Layered for testability: the action handler in `./index.ts` is
 * responsible for parsing flags and constructing `runOptions`; this
 * function does no flag parsing of its own.
 */
async function runInit(baseDir, runOptions) {
	const { options, flags, canPrompt, probeOverrides } = runOptions;
	const ui = new TerminalUI({
		color: flags.color,
		interactive: flags.interactive
	});
	// Accumulators surfaced in the final output document.
	const warnings = [];
	const filesWritten = [];
	const filesDeleted = [];
	if (!flags.json && !flags.quiet) clack.intro("prisma-next init", { output: process.stderr });
	// Precondition: refuse to scaffold where no project manifest exists.
	if (!hasProjectManifest(baseDir)) return emitError(ui, flags, errorInitMissingManifest());
	let inputs;
	try {
		inputs = await resolveInitInputs({
			baseDir,
			options,
			flags,
			canPrompt
		});
	} catch (error) {
		// Structured errors map to exit codes; anything else is a bug — rethrow.
		if (CliStructuredError.is(error)) return emitError(ui, flags, error);
		throw error;
	}
	const pm = await detectPackageManager(baseDir);
	const pkgRun = formatRunCommand(pm, "prisma-next", "").trimEnd();
	const schemaDir = dirname(inputs.schemaPath);
	// Relative schema paths get a `./` prefix for the generated config.
	const configContractPath = isAbsolute(inputs.schemaPath) ? inputs.schemaPath : `./${inputs.schemaPath}`;
	// Plan phase: collect every write before touching the disk so any
	// precondition failure below leaves the project byte-identical.
	const filesToWrite = [
		{
			path: inputs.schemaPath,
			content: starterSchema(inputs.target, inputs.authoring)
		},
		{
			path: "prisma-next.config.ts",
			content: configFile(inputs.target, configContractPath)
		},
		{
			path: join(schemaDir, "db.ts"),
			content: dbFile(inputs.target)
		},
		{
			path: "prisma-next.md",
			content: quickReferenceMd(inputs.target, inputs.authoring, inputs.schemaPath, pkgRun)
		},
		{
			path: ".agents/skills/prisma-next/SKILL.md",
			content: agentSkillMd(inputs.target, inputs.authoring, inputs.schemaPath, pkgRun)
		},
		{
			path: ".env.example",
			content: envExampleContent(inputs.target)
		}
	];
	// Re-init only: schedule stale contract artefacts for deletion.
	const filesToDelete = inputs.reinit ? [...findStaleArtefacts(baseDir, schemaDir)] : [];
	// NOTE: the `else` binds to the inner existsSync check — an existing
	// `.env` is never overwritten, only warned about.
	if (inputs.writeEnv) if (!existsSync(join(baseDir, ".env"))) filesToWrite.push({
		path: ".env",
		content: envFileContent(inputs.target)
	});
	else warnings.push(".env already exists; leaving it untouched. Compare with .env.example for any new keys.");
	// tsconfig.json: merge required options into an existing file, or
	// scaffold a default; a JSONC parse failure aborts before any write.
	const tsconfigPath = join(baseDir, "tsconfig.json");
	if (existsSync(tsconfigPath)) {
		const existing = readFileSync(tsconfigPath, "utf-8");
		let merged;
		try {
			merged = mergeTsConfig(existing);
		} catch (err) {
			if (err instanceof TsConfigParseError) return emitError(ui, flags, errorInitInvalidTsconfig({
				path: "tsconfig.json",
				cause: err.message
			}));
			throw err;
		}
		filesToWrite.push({
			path: "tsconfig.json",
			content: merged,
			logMessage: "Updated tsconfig.json with required compiler options."
		});
	} else filesToWrite.push({
		path: "tsconfig.json",
		content: defaultTsConfig()
	});
	// .gitignore / .gitattributes merges return null when no change is needed.
	const gitignorePath = join(baseDir, ".gitignore");
	const newGitignore = mergeGitignore(existsSync(gitignorePath) ? readFileSync(gitignorePath, "utf-8") : void 0);
	if (newGitignore !== null) filesToWrite.push({
		path: ".gitignore",
		content: newGitignore
	});
	const gitattributesPath = join(baseDir, ".gitattributes");
	const newGitattributes = mergeGitattributes(existsSync(gitattributesPath) ? readFileSync(gitattributesPath, "utf-8") : void 0, requiredGitattributesLines(schemaDir, inputs.target));
	if (newGitattributes !== null) filesToWrite.push({
		path: ".gitattributes",
		content: newGitattributes
	});
	// package.json: drop a previous target's facade (target switch) and
	// merge required scripts; write only when something actually changed.
	const packageJsonPath = join(baseDir, "package.json");
	let parsedPackageJson = null;
	if (existsSync(packageJsonPath)) {
		const pkgRaw = readFileSync(packageJsonPath, "utf-8");
		try {
			parsedPackageJson = JSON.parse(pkgRaw);
		} catch (err) {
			if (err instanceof SyntaxError) return emitError(ui, flags, errorInitInvalidManifest({
				path: "package.json",
				cause: err.message
			}));
			throw err;
		}
		let workingPkg = pkgRaw;
		let pkgChanged = false;
		if (inputs.removePreviousFacade !== null) {
			const next = removeDependency(workingPkg, inputs.removePreviousFacade);
			if (next !== null) {
				workingPkg = next;
				pkgChanged = true;
			}
		}
		const { content: nextPkg, warnings: scriptWarnings } = mergePackageScripts(workingPkg, REQUIRED_SCRIPTS);
		if (nextPkg !== null) {
			workingPkg = nextPkg;
			pkgChanged = true;
		}
		if (pkgChanged) filesToWrite.push({
			path: "package.json",
			content: workingPkg
		});
		warnings.push(...scriptWarnings);
	}
	// Execute phase: preconditions passed — write the planned files.
	for (const file of filesToWrite) {
		const fullPath = join(baseDir, file.path);
		mkdirSync(dirname(fullPath), { recursive: true });
		writeFileSync(fullPath, file.content, "utf-8");
		filesWritten.push(file.path);
		if (file.logMessage !== void 0 && !flags.json && !flags.quiet) ui.log(file.logMessage);
	}
	for (const rel of filesToDelete) {
		const fullPath = join(baseDir, rel);
		if (!existsSync(fullPath)) continue;
		try {
			unlinkSync(fullPath);
			filesDeleted.push(rel);
		} catch (err) {
			// A concurrent deletion (ENOENT) is tolerated; anything else is real.
			if (!(err instanceof Error && "code" in err && err.code === "ENOENT")) throw err;
		}
	}
	const emitCommand = formatRunCommand(pm, "prisma-next", "contract emit");
	// Install the target facade, then (unless the install was skipped)
	// emit the contract.
	let install;
	try {
		install = await runInstall({
			baseDir,
			pm,
			target: inputs.target,
			install: inputs.install,
			flags,
			ui,
			filesWritten,
			hasTypesNode: parsedPackageJson !== null ? hasDirectDep(parsedPackageJson, "@types/node") : false
		});
	} catch (error) {
		if (CliStructuredError.is(error)) return emitError(ui, flags, error);
		throw error;
	}
	warnings.push(...install.warnings);
	let contractEmitted = false;
	if (!install.skipped) try {
		await runEmit({
			baseDir,
			ui,
			filesWritten,
			emitCommand
		});
		contractEmitted = true;
	} catch (error) {
		if (CliStructuredError.is(error)) return emitError(ui, flags, error);
		throw error;
	}
	// Optional, consent-gated server probe; the outcome may be escalated
	// to an error when strict probing was requested.
	if (inputs.probeDb) {
		const escalated = applyProbeOutcome(await probeServerVersion({
			baseDir,
			target: inputs.target,
			databaseUrl: process.env["DATABASE_URL"],
			minVersion: MIN_SERVER_VERSION[inputs.target]
		}, probeOverrides ?? {}), {
			strictProbe: inputs.strictProbe,
			warnings
		});
		if (escalated !== null) return emitError(ui, flags, errorInitProbeFailed({
			cause: escalated,
			filesWritten
		}));
	}
	// Build, schema-validate, and render the success document.
	const output = {
		ok: true,
		target: inputs.target === "mongo" ? "mongodb" : "postgres",
		authoring: inputs.authoring,
		schemaPath: inputs.schemaPath,
		filesWritten,
		filesDeleted,
		packagesInstalled: {
			skipped: install.skipped,
			deps: [...install.deps],
			devDeps: [...install.devDeps]
		},
		contractEmitted,
		nextSteps: buildNextSteps({
			target: inputs.target === "mongo" ? "mongodb" : "postgres",
			contractEmitted,
			emitCommand,
			schemaPath: inputs.schemaPath
		}),
		warnings
	};
	const validated = InitOutputSchema(output);
	// A validation failure here is a prisma-next bug, surfaced as 5009.
	if (validated instanceof Error || validated.problems !== void 0) return emitError(ui, flags, new CliStructuredError("5009", "Init produced an invalid output document", {
		domain: "CLI",
		why: `The success document failed schema validation: ${String(validated)}`,
		fix: "This is a bug in prisma-next. Please report it with the full `-v` output.",
		docsUrl: "https://prisma-next.dev/docs/cli/init"
	}));
	if (flags.json) ui.output(formatInitJson(output));
	else {
		renderInitOutro(ui, output, flags);
		if (!flags.quiet) clack.outro("Done. Open prisma-next.md to get started.", { output: process.stderr });
	}
	return INIT_EXIT_OK;
}
|
|
1787
|
+
/**
 * Renders a structured CLI error to the correct channel and returns the
 * exit code derived from its PN code. JSON-mode errors go to stdout so
 * machine consumers always parse from one stream; human-mode errors go
 * to stderr. Mirrors `handleResult`, but maps to init-specific exit
 * codes rather than the CLI/RUN binary ones.
 */
function emitError(ui, flags, error) {
	const envelope = error.toEnvelope();
	if (flags.json) {
		// Machine mode: the envelope is the stdout payload.
		ui.output(formatErrorJson(envelope));
		return exitCodeForError(error);
	}
	// Human mode: formatted envelope goes to stderr.
	ui.error(formatErrorOutput(envelope, flags));
	return exitCodeForError(error);
}
|
|
1800
|
+
/**
 * Maps a structured init error to its documented exit code. Centralised
 * so the error → exit-code contract lives next to the codes themselves.
 *
 * `5009` (and any unrecognised code) routes to
 * `INIT_EXIT_INTERNAL_ERROR` because those represent prisma-next bugs
 * the user did not cause — surfacing them as `PRECONDITION` would
 * mislead automation into thinking the caller mis-invoked the CLI.
 *
 * See [exit-codes.ts](./exit-codes.ts) for the canonical list and
 * [Style Guide § Exit Codes](../../../../../../../docs/CLI%20Style%20Guide.md#exit-codes)
 * for the reservation policy.
 *
 * Exported for unit tests so the mapping can be asserted without
 * round-tripping a full `runInit` invocation.
 */
function exitCodeForError(error) {
	// Caller/environment problems the user can fix before re-running.
	const preconditionCodes = [
		"5001",
		"5002",
		"5003",
		"5004",
		"5005",
		"5010",
		"5011",
		"5012"
	];
	if (preconditionCodes.includes(error.code)) return INIT_EXIT_PRECONDITION;
	if (error.code === "5006") return INIT_EXIT_USER_ABORTED;
	if (error.code === "5007") return INIT_EXIT_INSTALL_FAILED;
	if (error.code === "5008") return INIT_EXIT_EMIT_FAILED;
	// 5009 and anything unrecognised are internal prisma-next bugs.
	return INIT_EXIT_INTERNAL_ERROR;
}
|
|
1833
|
+
/**
 * Folds a `ProbeOutcome` into init's warning channel and returns the
 * fatal cause string when `--strict-probe` should escalate. Mirrors
 * the FR8.3 contract:
 *
 * - `ok` — informational; nothing surfaced unless verbose. (We could
 *   plumb a `note` here, but the spec only requires the warning side
 *   of the contract; an "all good" line would just be noise on the
 *   common path.)
 * - `below-minimum` — warning regardless of `--strict-probe`. The
 *   probe ran successfully and found an old server; that is not a
 *   probe *failure* (which is what `--strict-probe` escalates), it
 *   is the probe doing its job.
 * - `no-database-url` / `connection-failed` / `driver-missing` —
 *   warning by default, fatal under `--strict-probe`.
 *
 * Exported for unit tests so the branching contract can be asserted
 * without spinning up a full `runInit` round trip.
 */
function applyProbeOutcome(outcome, ctx) {
	const { kind } = outcome;
	// Healthy probe: nothing to surface.
	if (kind === "ok") return null;
	if (kind === "below-minimum") {
		// The probe worked and found an old server; always a warning,
		// never escalated — strict mode only escalates probe failures.
		ctx.warnings.push(outcome.message);
		return null;
	}
	const isProbeFailure = kind === "no-database-url" || kind === "connection-failed" || kind === "driver-missing";
	if (isProbeFailure) {
		if (ctx.strictProbe) return outcome.message;
		ctx.warnings.push(outcome.message);
		return null;
	}
	// Unreachable for well-formed outcomes; unknown kinds yield undefined.
	return undefined;
}
|
|
1866
|
+
/**
 * Drives the `pnpm add` / `npm install` step. Failures are escalated to
 * a structured `errorInitInstallFailed` (exit code 4) — the spec treats
 * an unrecoverable install as a hard outcome rather than a warning so
 * CI/agents can branch on the exit code (FR1.6).
 *
 * For pnpm specifically, we additionally implement the FR7.2 fallback:
 * if pnpm fails with a recognised workspace/catalog resolution error
 * class (typically caused by a registry version that leaked
 * `workspace:*` or `catalog:` specifiers), we retry the install using
 * `npm` and surface a non-fatal warning explaining the swap.
 *
 * @param ctx carries the project dir, chosen package manager, the
 *   install opt-in flag, output flags, the UI handle, the running
 *   `filesWritten` list (threaded into structured errors), and
 *   `hasTypesNode` (whether `@types/node` is already a direct dep).
 * @returns `{ skipped, deps, devDeps, warnings }` describing what was
 *   (or was not) installed.
 * @throws the structured `errorInitInstallFailed` when the install —
 *   and, on the pnpm path, the npm fallback — fails.
 */
async function runInstall(ctx) {
	const { baseDir, pm, target, install, flags, ui, filesWritten, hasTypesNode } = ctx;
	// Runtime deps: the target-specific client package plus dotenv.
	const deps = [targetPackageName(target), "dotenv"];
	// FR2.1: only add @types/node when the project does not already declare it.
	const devDeps = hasTypesNode ? ["prisma-next"] : ["prisma-next", "@types/node"];
	// Human-readable command strings, reused both in the manual-steps note
	// below and in structured-error metadata.
	const addCommand = `${pm} ${formatAddArgs(pm, deps).join(" ")}`;
	const addDevCommand = `${pm} ${formatAddDevArgs(pm, devDeps).join(" ")}`;
	const emitCommand = formatRunCommand(pm, "prisma-next", "contract emit");
	// FR7.3: for pnpm, warn when a workspace catalog pins any of these
	// packages to something other than `latest`.
	const catalogWarnings = pm === "pnpm" ? buildCatalogWarnings(baseDir, [...deps, ...devDeps]) : [];
	if (!install) {
		// Install opted out: print the manual steps (human mode only) and
		// report the skip so the output document reflects it.
		if (!flags.json && !flags.quiet) ui.note([
			"Run the following commands to complete setup:",
			"",
			" 1. Install dependencies:",
			` ${addCommand}`,
			` ${addDevCommand}`,
			"",
			" 2. Emit the contract:",
			` ${emitCommand}`
		].join("\n"), "Manual steps");
		return {
			skipped: true,
			deps: [],
			devDeps: [],
			warnings: catalogWarnings
		};
	}
	// promisify(execFile) rejects with an error object carrying the child's
	// stderr, which the catch blocks below read via readChildStderr.
	const exec = promisify(execFile);
	// Runs the dependency + devDependency installs for a given manager.
	const runPair = async (manager) => {
		await exec(manager, formatAddArgs(manager, deps), { cwd: baseDir });
		await exec(manager, formatAddDevArgs(manager, devDeps), { cwd: baseDir });
	};
	const allPackages = [...deps, ...devDeps].join(", ");
	const spinner = ui.spinner();
	spinner.start(`Installing ${allPackages}...`);
	try {
		await runPair(pm);
		spinner.stop(`Installed ${allPackages}`);
		return {
			skipped: false,
			deps,
			devDeps,
			warnings: catalogWarnings
		};
	} catch (err) {
		// Redact credentials before stderr text can reach a warning or error.
		const stderrText = redactSecrets(readChildStderr(err));
		if (pm === "pnpm" && isRecognisedPnpmResolutionError(stderrText)) {
			// FR7.2: the published artefact leaked workspace/catalog
			// specifiers; retry the same install pair with npm.
			spinner.message("pnpm could not resolve a workspace/catalog dependency, retrying with npm...");
			try {
				await runPair("npm");
				spinner.stop(`Installed ${allPackages} via npm (pnpm fallback)`);
				// NOTE(review): catalogWarnings are not carried through on this
				// path — only the fallback explanation is surfaced; confirm
				// that dropping them here is intentional.
				return {
					skipped: false,
					deps,
					devDeps,
					warnings: [[
						"pnpm could not install: a published Prisma Next dependency leaked a `workspace:*` or `catalog:` specifier.",
						"Falling back to `npm install` so init can complete.",
						stderrText ? ` pnpm error: ${stderrText.trim().split("\n")[0]}` : "",
						"Once the offending package republishes a clean version, re-run `pnpm install` to switch back."
					].filter(Boolean).join("\n")]
				};
			} catch (npmErr) {
				// Both managers failed: escalate with both stderr captures.
				spinner.stop("Installation failed");
				throw errorInitInstallFailed({
					addCommand,
					addDevCommand,
					emitCommand,
					filesWritten,
					stderrLines: [stderrText, redactSecrets(readChildStderr(npmErr))]
				});
			}
		}
		// Non-pnpm failure, or a pnpm failure outside the recognised
		// fallback class: escalate directly.
		spinner.stop("Installation failed");
		throw errorInitInstallFailed({
			addCommand,
			addDevCommand,
			emitCommand,
			filesWritten,
			stderrLines: [stderrText]
		});
	}
}
|
|
1960
|
+
/**
 * Builds the FR7.3 catalog-honoured warning(s) for the surrounding pnpm
 * workspace, if any. Returns an empty array when no
 * `pnpm-workspace.yaml` exists in any ancestor, or when the workspace's
 * catalog has no entry for any of the packages `init` is about to
 * install.
 *
 * Exported for unit tests.
 */
function buildCatalogWarnings(baseDir, packages) {
	const overrides = detectPnpmCatalogOverrides(baseDir, packages);
	if (overrides === null) return [];
	if (overrides.entries.length === 0) return [];
	return [formatCatalogWarning(overrides.workspaceFile, overrides.entries)];
}
|
|
1973
|
+
/**
 * Formats the FR7.3 warning text: a header, one bullet per
 * catalog-pinned package, the workspace file the pins came from, and a
 * pointer at how to opt back into `latest`.
 */
function formatCatalogWarning(workspaceFile, entries) {
	const pinnedList = entries.map((entry) => ` • ${entry.name}: ${entry.version}`).join("\n");
	const lines = [
		"pnpm workspace catalog overrides detected — pnpm will install these versions instead of `latest`:",
		pinnedList,
		`Catalog source: ${workspaceFile}`,
		"To use the published `latest` instead, remove or update the catalog entry, then re-run `pnpm install`."
	];
	return lines.join("\n");
}
|
|
1981
|
+
/**
 * Recognised pnpm error signatures that justify a fallback to npm.
 *
 * These patterns indicate the published artefact itself is at fault
 * (a leaked `workspace:*` or `catalog:` specifier), not the user's
 * environment — pnpm is faithfully reporting "I cannot resolve this
 * registry version", and npm is willing to install it because npm
 * doesn't care about the protocol prefix when there's a fallback range.
 *
 * Exported for unit tests; do not depend on this from outside the init
 * command.
 */
function isRecognisedPnpmResolutionError(stderr) {
	if (!stderr) return false;
	// Exact pnpm error codes: cheap substring checks first.
	if (stderr.includes("ERR_PNPM_WORKSPACE_PKG_NOT_FOUND")) return true;
	if (stderr.includes("ERR_PNPM_NO_MATCHING_VERSION")) return true;
	// Free-form resolution messages that carry the leaked specifier.
	const resolutionPatterns = [
		/No matching version found for .* in the catalog/i,
		/workspace:[^\s]+ is not a valid (version|spec)/i,
		/catalog:[^\s]* is not a valid (version|spec)/i
	];
	return resolutionPatterns.some((pattern) => pattern.test(stderr));
}
|
|
1997
|
+
/**
 * FR2.1 — true when the parsed `package.json` declares `name` directly
 * in either `dependencies` or `devDependencies`. We deliberately don't
 * inspect `peerDependencies` (irrelevant for a leaf project) or the
 * lockfile (transitive presence is brittle to detect and not the
 * realistic clobber-risk path).
 *
 * Uses `Object.hasOwn` rather than the `in` operator: `in` walks the
 * prototype chain, so a package name that collides with an
 * `Object.prototype` member (`"constructor"`, `"toString"`, …) would be
 * falsely reported as an already-declared dependency.
 *
 * Exported for unit tests.
 */
function hasDirectDep(parsed, name) {
	for (const field of ["dependencies", "devDependencies"]) {
		const value = parsed[field];
		// Only own keys of an actual object count as declarations.
		if (value !== null && typeof value === "object" && Object.hasOwn(value, name)) return true;
	}
	return false;
}
|
|
2013
|
+
/**
 * Extracts the captured stderr text from a child-process rejection.
 * Returns "" for anything that is not an `Error` carrying a `stderr`
 * property; a nullish `stderr` also collapses to "".
 */
function readChildStderr(err) {
	const isChildProcessError = err instanceof Error && "stderr" in err;
	return isChildProcessError ? String(err.stderr ?? "") : "";
}
|
|
2017
|
+
/**
 * Redacts userinfo (`user:password@`) from any URL-shaped substring
 * inside package-manager stderr before we surface it in a warning or
 * error meta. pnpm and npm both include the offending registry URL in
 * resolve errors, and that URL can carry an auth token (e.g. corporate
 * registry mirrors that bake `_authToken` into the URL). The Style
 * Guide (Testing & Accessibility — "Security: never print secrets")
 * requires we never surface those.
 *
 * Exported for unit tests.
 */
function redactSecrets(stderr) {
	// Preserve the original falsy value (undefined / null / "") untouched.
	if (!stderr) return stderr;
	// scheme:// followed by a userinfo segment terminated by "@".
	const userinfoInUrl = /([a-zA-Z][a-zA-Z0-9+.-]*:\/\/)([^/@\s]+)@/g;
	return stderr.replace(userinfoInUrl, "$1***@");
}
|
|
2032
|
+
/**
 * Drives `prisma-next contract emit` against the freshly scaffolded
 * project. On failure, throws `errorInitEmitFailed` with the underlying
 * cause embedded in `meta.cause` so the user can re-run with `-v` to see
 * the full envelope and follow the fix steps. Maps to exit code
 * `5 = EMIT_FAILED` (FR1.6).
 */
async function runEmit(ctx) {
	const { baseDir, ui, emitCommand, filesWritten } = ctx;
	const spinner = ui.spinner();
	spinner.start("Emitting contract...");
	try {
		// Deferred import: the emit module is loaded only when emit runs.
		const { executeContractEmit } = await import("./contract-emit-LjzCoicC.mjs");
		await executeContractEmit({ configPath: join(baseDir, "prisma-next.config.ts") });
		spinner.stop("Contract emitted");
	} catch (err) {
		spinner.stop("Contract emission failed");
		throw errorInitEmitFailed({
			emitCommand,
			filesWritten,
			cause: causeMessage(err)
		});
	}
}
|
|
2055
|
+
/**
 * Extracts a human-readable message from an unknown thrown value:
 * `Error` instances contribute their `.message`; anything else is
 * stringified.
 */
function causeMessage(err) {
	return err instanceof Error ? err.message : String(err);
}
|
|
2059
|
+
|
|
2060
|
+
//#endregion
|
|
2061
|
+
export { runInit };
|
|
2062
|
+
//# sourceMappingURL=init-C7dE9KOJ.mjs.map
|