@wrkspace-co/env 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +107 -0
- package/dist/chunk-32DQKIYR.js +4474 -0
- package/dist/chunk-32DQKIYR.js.map +1 -0
- package/dist/cli.js +4299 -0
- package/dist/cli.js.map +1 -0
- package/dist/index.d.ts +707 -0
- package/dist/index.js +123 -0
- package/dist/index.js.map +1 -0
- package/dist/plugin-Dj0BMNPC.d.ts +241 -0
- package/dist/plugin.d.ts +2 -0
- package/dist/plugin.js +47 -0
- package/dist/plugin.js.map +1 -0
- package/package.json +68 -0
|
@@ -0,0 +1,4474 @@
|
|
|
1
|
+
// src/plugin.ts
|
|
2
|
+
import { Command } from "commander";
|
|
3
|
+
|
|
4
|
+
// src/audit-command.ts
|
|
5
|
+
import { access as access4, readFile as readFile4 } from "fs/promises";
|
|
6
|
+
import path4 from "path";
|
|
7
|
+
|
|
8
|
+
// src/dotenv.ts
|
|
9
|
+
// Determine which newline sequence a file uses ("\r\n" or "\n") by looking
// at the first line break; files with no line break default to "\n".
function detectNewline(content) {
  const firstBreak = content.match(/\r\n|\n/);
  if (firstBreak === null) {
    return "\n";
  }
  return firstBreak[0] === "\r\n" ? "\r\n" : "\n";
}
|
|
16
|
+
// Decode the escape sequences supported inside a double-quoted dotenv value:
// \n, \r, \t, \" and \\. The regex only matches these tokens, so any other
// backslash pair passes through untouched.
function decodeDoubleQuoted(input) {
  return input.replace(/\\([nrt"\\])/g, (_fullMatch, token) => {
    switch (token) {
      case "n":
        return "\n";
      case "r":
        return "\r";
      case "t":
        // Must decode to a real tab character so values round-trip with
        // encodeDoubleQuoted (which emits "\\t" for tabs); the published
        // build returned a plain space here, corrupting tab-containing values.
        return "\t";
      case '"':
        return '"';
      case "\\":
        return "\\";
      default:
        return token;
    }
  });
}
|
|
34
|
+
// Escape a raw value for placement inside a double-quoted dotenv string.
// Backslashes are escaped first so later substitutions cannot double-escape.
function encodeDoubleQuoted(input) {
  let encoded = input.replace(/\\/g, "\\\\");
  encoded = encoded.replace(/\n/g, "\\n");
  encoded = encoded.replace(/\r/g, "\\r");
  encoded = encoded.replace(/\t/g, "\\t");
  return encoded.replace(/"/g, '\\"');
}
|
|
37
|
+
// Extract a trailing inline comment ("# ...") from the text that follows a
// value; returns undefined when the remainder is not a comment.
function parseInlineComment(input) {
  const candidate = input.trimStart();
  return candidate.startsWith("#") ? candidate : void 0;
}
|
|
44
|
+
// Parse an unquoted dotenv value. A "#" starts an inline comment only when it
// is the first character or is preceded by whitespace; "\n" escape sequences
// in the value are expanded to real newlines.
function parseUnquotedValue(input) {
  let commentStart = input.length;
  for (let index = 0; index < input.length; index += 1) {
    if (input[index] === "#" && (index === 0 || /\s/.test(input[index - 1]))) {
      commentStart = index;
      break;
    }
  }
  const rawValue = input.slice(0, commentStart).trimEnd();
  const remainder = input.slice(commentStart);
  return {
    value: rawValue.replace(/\\n/g, "\n"),
    quote: "none",
    inlineComment: parseInlineComment(remainder)
  };
}
|
|
63
|
+
// Parse a value that starts with `quote` (either '"' or "'"). Double quotes
// support backslash escapes; single quotes are taken literally. If the
// closing quote is never found, the whole input is treated as unquoted.
function parseQuotedValue(input, quote) {
  const supportsEscapes = quote === '"';
  let position = 1;
  let pendingEscape = false;
  // Scan for the terminating quote, skipping escaped characters.
  while (position < input.length) {
    const character = input[position];
    if (supportsEscapes && !pendingEscape && character === "\\") {
      pendingEscape = true;
      position += 1;
      continue;
    }
    if (!pendingEscape && character === quote) {
      break;
    }
    pendingEscape = false;
    position += 1;
  }
  // Unterminated quote: fall back to treating the raw text as-is.
  if (position >= input.length || input[position] !== quote) {
    return {
      value: input,
      quote: "none"
    };
  }
  const body = input.slice(1, position);
  const trailing = input.slice(position + 1);
  return {
    value: supportsEscapes ? decodeDoubleQuoted(body) : body,
    quote: supportsEscapes ? "double" : "single",
    inlineComment: parseInlineComment(trailing)
  };
}
|
|
94
|
+
// Dispatch on the first non-space character of the value segment: double
// quote, single quote, or unquoted text.
function parseValueSegment(rawValueSegment) {
  const segment = rawValueSegment.trimStart();
  switch (segment[0]) {
    case '"':
      return parseQuotedValue(segment, '"');
    case "'":
      return parseQuotedValue(segment, "'");
    default:
      return parseUnquotedValue(segment);
  }
}
|
|
104
|
+
// Parse dotenv text into a lossless document model: every physical line is
// preserved verbatim in `raw` and classified as blank / comment / pair /
// unknown, so the file can later be re-rendered (see writeDotenv) without
// disturbing formatting the user wrote.
function parseDotenv(content) {
  const newline = detectNewline(content);
  const hasTrailingNewline = content.endsWith(newline);
  // split(/\r?\n/) tolerates files that mix newline styles even though one
  // dominant style is recorded.
  const rawLines = content.length > 0 ? content.split(/\r?\n/) : [];
  if (hasTrailingNewline) {
    // Drop the empty trailing element produced by splitting a final newline.
    rawLines.pop();
  }
  const lines = rawLines.map((rawLine) => {
    const trimmed = rawLine.trim();
    if (trimmed.length === 0) {
      return {
        type: "blank",
        raw: rawLine
      };
    }
    if (trimmed.startsWith("#")) {
      return {
        type: "comment",
        raw: rawLine
      };
    }
    // KEY=VALUE with an optional "export " prefix; keys must be valid
    // identifier-style names.
    const pairMatch = rawLine.match(/^\s*(export\s+)?([A-Za-z_][A-Za-z0-9_]*)\s*=(.*)$/);
    if (!pairMatch) {
      // Preserved verbatim but not interpreted.
      return {
        type: "unknown",
        raw: rawLine
      };
    }
    const exported = Boolean(pairMatch[1]);
    const key = pairMatch[2];
    const parsed = parseValueSegment(pairMatch[3] ?? "");
    return {
      type: "pair",
      raw: rawLine,
      key,
      value: parsed.value,
      quote: parsed.quote,
      exported,
      inlineComment: parsed.inlineComment
    };
  });
  return {
    lines,
    newline,
    hasTrailingNewline
  };
}
|
|
151
|
+
// A value may be written without quotes only when every character is in this
// conservative allow-list (letters, digits, and _ . / : @ + -).
function canUseUnquoted(value) {
  const UNQUOTED_SAFE = /^[A-Za-z0-9_./:@+-]*$/;
  return UNQUOTED_SAFE.test(value);
}
|
|
154
|
+
// Render a value for a dotenv file, honoring the quoting style it previously
// had when possible: single quotes (if the value stays representable),
// unquoted (if every character is safe), otherwise escaped double quotes.
function formatDotenvValue(value, preferredQuote = "none") {
  if (value.length === 0) {
    return "";
  }
  const singleQuoteSafe = !value.includes("'") && !value.includes("\n");
  if (preferredQuote === "single" && singleQuoteSafe) {
    return `'${value}'`;
  }
  if (preferredQuote === "none" && canUseUnquoted(value)) {
    return value;
  }
  // Covers preferredQuote === "double" and every fallback case.
  return `"${encodeDoubleQuoted(value)}"`;
}
|
|
169
|
+
// Re-render a parsed dotenv document, applying `options.updates` in place.
// Existing pairs keep their quoting style, `export ` prefix, and inline
// comment; keys in `updates` that do not appear in the document are appended
// at the end (sorted unless options.sortNewKeys === false), separated from
// existing content by one blank line.
function writeDotenv(document, options = {}) {
  const updates = options.updates ?? {};
  // Keys from `updates` that were matched against an existing pair line.
  const seenKeys = /* @__PURE__ */ new Set();
  const renderedLines = [];
  for (const line of document.lines) {
    // Non-pair lines (blank/comment/unknown) pass through untouched.
    if (line.type !== "pair") {
      renderedLines.push(line.raw);
      continue;
    }
    if (!Object.hasOwn(updates, line.key)) {
      renderedLines.push(line.raw);
      continue;
    }
    seenKeys.add(line.key);
    // An update of undefined clears the value rather than deleting the line.
    const updatedValue = updates[line.key] ?? "";
    const renderedValue = formatDotenvValue(updatedValue, line.quote);
    const commentSuffix = line.inlineComment ? ` ${line.inlineComment}` : "";
    const exportPrefix = line.exported ? "export " : "";
    renderedLines.push(`${exportPrefix}${line.key}=${renderedValue}${commentSuffix}`);
  }
  // Keys that were not present in the document get appended below.
  let pendingKeys = Object.keys(updates).filter((key) => !seenKeys.has(key));
  if (options.sortNewKeys !== false) {
    pendingKeys = pendingKeys.sort();
  }
  // Separate appended keys from existing content with a single blank line.
  if (pendingKeys.length > 0 && renderedLines.length > 0 && renderedLines.at(-1) !== "") {
    renderedLines.push("");
  }
  for (const key of pendingKeys) {
    const value = updates[key] ?? "";
    renderedLines.push(`${key}=${formatDotenvValue(value)}`);
  }
  let output = renderedLines.join(document.newline);
  // Preserve the document's trailing-newline convention; appended keys always
  // get a final newline.
  if (document.hasTrailingNewline || pendingKeys.length > 0) {
    output += document.newline;
  }
  return output;
}
|
|
206
|
+
// Flatten a parsed dotenv document to a plain key/value object; later
// occurrences of a key overwrite earlier ones.
function dotenvToObject(document) {
  const result = {};
  for (const line of document.lines) {
    if (line.type !== "pair") {
      continue;
    }
    result[line.key] = line.value;
  }
  return result;
}
|
|
215
|
+
|
|
216
|
+
// src/env.ts
|
|
217
|
+
// The three supported deployment targets, in canonical order.
var TARGETS = ["dev", "preview", "prod"];
// De-duplicate and validate a requiredIn list; an empty or missing list means
// the variable is required in every target.
function normalizeTargets(requiredIn) {
  if (!requiredIn || requiredIn.length === 0) {
    return TARGETS;
  }
  const uniqueTargets = [...new Set(requiredIn)];
  const unknown = uniqueTargets.find((target) => !TARGETS.includes(target));
  if (unknown !== void 0) {
    throw new Error(`Unknown env target "${unknown}". Expected one of: ${TARGETS.join(", ")}.`);
  }
  return uniqueTargets;
}
|
|
230
|
+
// Normalize a user-supplied variable config into a complete definition object.
// Nested objects/arrays (deprecated, provider, tags) are shallow-copied so
// callers mutating their config afterwards cannot corrupt the schema.
// Scope defaults to "server"; requiredIn is validated via normalizeTargets.
function createBaseDefinition(kind, config = {}) {
  return {
    kind,
    scope: config.scope ?? "server",
    requiredIn: normalizeTargets(config.requiredIn),
    description: config.description,
    example: config.example,
    sensitive: config.sensitive,
    owner: config.owner,
    rotationDays: config.rotationDays,
    deprecated: config.deprecated ? { ...config.deprecated } : void 0,
    provider: config.provider ? { ...config.provider } : void 0,
    tags: config.tags ? [...config.tags] : void 0,
    defaultValue: config.defaultValue
  };
}
|
|
246
|
+
// Schema builder: plain string variable.
function string(config = {}) {
  return createBaseDefinition("string", config);
}
// Schema builder: URL variable (parsed with `new URL` at validation time).
function url(config = {}) {
  return createBaseDefinition("url", config);
}
// Schema builder: finite-number variable.
function number(config = {}) {
  return createBaseDefinition("number", config);
}
// Schema builder: boolean variable (accepts true/false/1/0/yes/no/on/off).
function boolean(config = {}) {
  return createBaseDefinition("boolean", config);
}
// Schema builder: enum variable restricted to `values` (copied defensively).
function enumBuilder(values, config = {}) {
  return {
    ...createBaseDefinition("enum", config),
    values: [...values]
  };
}
// Schema builder: JSON variable (parsed with JSON.parse at validation time).
function json(config = {}) {
  return createBaseDefinition("json", config);
}
// Public builder namespace; `enum` is a reserved word, hence enumBuilder.
var builders = {
  string,
  url,
  number,
  boolean,
  enum: enumBuilder,
  json
};
|
|
275
|
+
// Identity helper: returns the schema unchanged. Exists so the TypeScript
// source gets type checking/inference on schema literals at authoring time.
function defineSchema(schema) {
  return schema;
}
|
|
278
|
+
// Built-in framework prefix policies: the key prefixes that mark a variable
// as publicly exposable to client-side code for each framework.
var prefixPolicies = {
  nextjs: {
    name: "nextjs",
    publicPrefixes: ["NEXT_PUBLIC_"]
  },
  vite: {
    name: "vite",
    publicPrefixes: ["VITE_"]
  }
};
|
|
288
|
+
// Convert a raw string from the environment into the typed value described by
// `definition.kind`. Throws an Error whose message describes the expected
// shape when the raw value does not conform.
function parseVariableValue(definition, rawValue) {
  const kind = definition.kind;
  if (kind === "string") {
    return rawValue;
  }
  if (kind === "url") {
    // `new URL` throws on malformed input, which the caller reports.
    return new URL(rawValue);
  }
  if (kind === "number") {
    // Guard the empty string explicitly: Number("") would yield 0.
    const numeric = rawValue.trim().length === 0 ? NaN : Number(rawValue);
    if (!Number.isFinite(numeric)) {
      throw new Error("must be a finite number");
    }
    return numeric;
  }
  if (kind === "boolean") {
    const normalized = rawValue.trim().toLowerCase();
    if (["true", "1", "yes", "on"].includes(normalized)) {
      return true;
    }
    if (["false", "0", "no", "off"].includes(normalized)) {
      return false;
    }
    throw new Error('must be a boolean ("true" or "false")');
  }
  if (kind === "enum") {
    if (!definition.values.includes(rawValue)) {
      throw new Error(`must be one of: ${definition.values.join(", ")}`);
    }
    return rawValue;
  }
  if (kind === "json") {
    return JSON.parse(rawValue);
  }
  throw new Error(`Unsupported variable kind "${String(kind)}".`);
}
|
|
328
|
+
// Parse a deprecation deadline. A bare YYYY-MM-DD is interpreted as the very
// end of that day in UTC; other strings go through Date parsing, with
// undefined returned for anything unparseable.
function parseRemoveAfter(removeAfter) {
  const dateOnly = /^\d{4}-\d{2}-\d{2}$/;
  if (dateOnly.test(removeAfter)) {
    return new Date(`${removeAfter}T23:59:59.999Z`);
  }
  const parsed = new Date(removeAfter);
  return Number.isNaN(parsed.getTime()) ? void 0 : parsed;
}
|
|
338
|
+
// True when the key starts with any of the policy's public prefixes.
function hasPublicPrefix(key, prefixes) {
  for (const prefix of prefixes) {
    if (key.startsWith(prefix)) {
      return true;
    }
  }
  return false;
}
|
|
341
|
+
// A raw environment value counts as unset when it is absent or only whitespace.
function isUnsetRawValue(rawValue) {
  if (rawValue === void 0) {
    return true;
  }
  return rawValue.trim().length === 0;
}
|
|
344
|
+
// Build a prefix/scope validation issue; severity depends on strict mode.
function createScopeIssue(key, target, scope, strict, code, message, fix) {
  const severity = strict ? "error" : "warning";
  return { key, target, scope, code, message, fix, severity };
}
|
|
355
|
+
// Render one issue as a bullet line, appending the suggested fix when present.
function formatIssue(issue) {
  const base = `- ${issue.key}: ${issue.message}`;
  return issue.fix ? `${base} Fix: ${issue.fix}` : base;
}
|
|
361
|
+
// Build the multi-line message for EnvValidationError: issues grouped by
// (target, scope), each group under its own header, followed by generic fix
// steps. Insertion order of groups and issues is preserved.
function formatValidationIssues(issues) {
  if (issues.length === 0) {
    return "Environment validation failed.";
  }
  const grouped = new Map();
  for (const issue of issues) {
    const groupKey = `${issue.target}|${issue.scope}`;
    const bucket = grouped.get(groupKey) ?? [];
    bucket.push(issue);
    grouped.set(groupKey, bucket);
  }
  const lines = ["Environment validation failed:"];
  for (const [groupKey, groupIssues] of grouped) {
    const [target, scope] = groupKey.split("|");
    lines.push("", `[target=${target} scope=${scope}]`);
    for (const issue of groupIssues) {
      lines.push(formatIssue(issue));
    }
  }
  lines.push("", "Fix steps:");
  lines.push("1) Add missing required values for the selected target environment.");
  lines.push("2) Align variable names with your prefix policy and variable scope.");
  return lines.join("\n");
}
|
|
390
|
+
// Error thrown when environment validation produces at least one
// error-severity issue. The message is the human-readable grouped report;
// `issues` carries a defensive copy of every collected issue (warnings
// included) for programmatic inspection.
var EnvValidationError = class extends Error {
  // All issues from the failed validation run, not just the errors.
  issues;
  constructor(issues) {
    super(formatValidationIssues(issues));
    this.name = "EnvValidationError";
    this.issues = [...issues];
  }
};
|
|
398
|
+
// Validate the environment (options.values, defaulting to process.env)
// against `schema` for the chosen target and return a typed accessor object
// { get, optional, assert, pick, all, meta }. Validation runs in two passes:
// first deprecated-key handling (aliasing old keys onto their replacements,
// flagging expired deprecations), then per-key prefix-policy checks, required
// checks, and typed parsing. Warnings are forwarded to options.onWarning;
// errors throw EnvValidationError unless options.throwOnError is false.
function createEnv(schema, options) {
  const strict = options.strict ?? true;
  const throwOnError = options.throwOnError ?? true;
  const envValues = options.values ?? process.env;
  // Injectable clock so deprecation deadlines are testable.
  const now = options.now ?? /* @__PURE__ */ new Date();
  const issues = [];
  const values = {};
  // Mutable copy: pass 1 may copy a deprecated key's value onto its
  // replacement key before pass 2 reads it.
  const rawValues = { ...envValues };
  const prefixes = options.prefixPolicy?.publicPrefixes ?? [];
  const warnedDeprecatedKeys = /* @__PURE__ */ new Set();
  // Pass 1: deprecated keys that are actually set in the environment.
  for (const [key, definition] of Object.entries(schema)) {
    const deprecated = definition.deprecated;
    const deprecatedRawValue = envValues[String(key)];
    if (!deprecated || isUnsetRawValue(deprecatedRawValue)) {
      continue;
    }
    const replacementKey = deprecated.replacedBy;
    const hasReplacementKey = Boolean(replacementKey && replacementKey in schema);
    const replacementRawValue = replacementKey ? rawValues[replacementKey] : void 0;
    // Alias: let the old key's value satisfy the new key when the new key is
    // not set itself.
    if (hasReplacementKey && replacementKey && isUnsetRawValue(replacementRawValue)) {
      rawValues[replacementKey] = deprecatedRawValue;
    }
    const parsedDeadline = deprecated.removeAfter ? parseRemoveAfter(deprecated.removeAfter) : void 0;
    const isExpired = parsedDeadline ? now.getTime() > parsedDeadline.getTime() : false;
    if (isExpired) {
      issues.push({
        key: String(key),
        target: options.target,
        scope: definition.scope,
        severity: strict ? "error" : "warning",
        code: "DEPRECATED_EXPIRED",
        message: `Deprecated key is past removeAfter (${deprecated.removeAfter}).`,
        fix: replacementKey ? `Stop using "${String(key)}" and set "${replacementKey}" instead.` : `Remove "${String(key)}" from your environment.`
      });
      continue;
    }
    // Non-strict mode: warn once per deprecated key still in use.
    if (!strict && !warnedDeprecatedKeys.has(String(key))) {
      issues.push({
        key: String(key),
        target: options.target,
        scope: definition.scope,
        severity: "warning",
        code: "DEPRECATED_KEY_USED",
        message: replacementKey ? `Deprecated key is used. Prefer "${replacementKey}".` : "Deprecated key is used.",
        fix: replacementKey ? `Rename "${String(key)}" to "${replacementKey}".` : `Remove "${String(key)}" from your environment when possible.`
      });
      warnedDeprecatedKeys.add(String(key));
    }
  }
  // Pass 2: prefix policy, required-ness, and typed parsing per key.
  for (const [key, definition] of Object.entries(schema)) {
    const scope = definition.scope;
    const prefixed = hasPublicPrefix(String(key), prefixes);
    if (prefixes.length > 0) {
      // Client vars must carry a public prefix; server vars must not.
      if (scope === "client" && !prefixed) {
        issues.push(
          createScopeIssue(
            String(key),
            options.target,
            scope,
            strict,
            "PREFIX_CLIENT_MISMATCH",
            `Client variable must use one of prefixes: ${prefixes.join(", ")}.`,
            `Rename "${String(key)}" with an approved public prefix or mark it as scope: "server".`
          )
        );
      } else if (scope === "server" && prefixed) {
        issues.push(
          createScopeIssue(
            String(key),
            options.target,
            scope,
            strict,
            "PREFIX_SERVER_PUBLIC",
            `Server variable is using a public prefix (${prefixes.join(", ")}).`,
            `Rename "${String(key)}" to remove a public prefix or mark it as scope: "client".`
          )
        );
      }
    }
    // Custom policy hook: issues it returns are normalized onto this key.
    if (options.prefixPolicy?.validate) {
      const additionalIssues = options.prefixPolicy.validate({
        key: String(key),
        target: options.target,
        scope,
        hasPublicPrefix: prefixed,
        publicPrefixes: prefixes,
        strict
      });
      if (additionalIssues) {
        for (const additionalIssue of additionalIssues) {
          issues.push({
            ...additionalIssue,
            key: String(key),
            scope,
            target: options.target
          });
        }
      }
    }
    // A deprecated alias (replacement exists in schema) is never itself
    // required: the replacement key carries the requirement.
    const replacementKey = definition.deprecated?.replacedBy;
    const isDeprecatedAlias = Boolean(replacementKey && replacementKey in schema);
    const requiredForTarget = definition.requiredIn.includes(options.target) && !isDeprecatedAlias;
    const rawValue = rawValues[String(key)];
    if (isUnsetRawValue(rawValue)) {
      if (definition.defaultValue !== void 0) {
        values[key] = definition.defaultValue;
        continue;
      }
      values[key] = void 0;
      if (requiredForTarget) {
        issues.push({
          key: String(key),
          target: options.target,
          scope,
          severity: "error",
          code: "MISSING_REQUIRED",
          message: `Missing required variable for target "${options.target}".`,
          fix: `Set "${String(key)}" for ${options.target}.`
        });
      }
      continue;
    }
    const presentRawValue = rawValue;
    try {
      values[key] = parseVariableValue(definition, presentRawValue);
    } catch (error) {
      const reason = error instanceof Error ? error.message : "invalid value";
      issues.push({
        key: String(key),
        target: options.target,
        scope,
        severity: "error",
        code: "INVALID_VALUE",
        message: `Invalid value: ${reason}.`,
        fix: `Update "${String(key)}" to match type "${definition.kind}".`
      });
      values[key] = void 0;
    }
  }
  // Warnings are delivered via callback even when validation succeeds.
  const warnings = issues.filter((issue) => issue.severity === "warning");
  for (const warning of warnings) {
    options.onWarning?.(warning);
  }
  const hasErrors = issues.some((issue) => issue.severity === "error");
  // Throws EnvValidationError when any error-severity issue was collected.
  function assert() {
    if (hasErrors) {
      throw new EnvValidationError(issues);
    }
  }
  if (throwOnError) {
    assert();
  }
  // Strict accessor: throws on undefined values (missing and defaultless,
  // or failed to parse).
  function get(key) {
    const value = values[key];
    if (value === void 0) {
      throw new Error(
        `Variable "${String(key)}" is undefined. Use optional("${String(key)}") if this key is optional for target "${options.target}".`
      );
    }
    return value;
  }
  // Lenient accessor: undefined when unset.
  function optional(key) {
    return values[key];
  }
  // Select a subset of parsed values by key.
  function pick(keys) {
    const selected = {};
    for (const key of keys) {
      selected[key] = values[key];
    }
    return selected;
  }
  // Shallow copy of every parsed value.
  function all() {
    return { ...values };
  }
  // Schema metadata for one key, or for all keys when called without one;
  // definitions are shallow-copied.
  function meta(key) {
    if (key !== void 0) {
      return { ...schema[key] };
    }
    const allMetadata = {};
    for (const [schemaKey, definition] of Object.entries(schema)) {
      allMetadata[schemaKey] = { ...definition };
    }
    return allMetadata;
  }
  return {
    get,
    optional,
    assert,
    pick,
    all,
    meta
  };
}
|
|
593
|
+
|
|
594
|
+
// src/drift.ts
|
|
595
|
+
// Return a new array of issues ordered by severity (errors first), then
// target, scope, key, and code; the input array is left untouched.
function sortIssues(issues) {
  const severityRank = {
    error: 0,
    warning: 1
  };
  const compareIssues = (left, right) =>
    severityRank[left.severity] - severityRank[right.severity] ||
    left.target.localeCompare(right.target) ||
    left.scope.localeCompare(right.scope) ||
    left.key.localeCompare(right.key) ||
    left.code.localeCompare(right.code);
  return [...issues].sort(compareIssues);
}
|
|
604
|
+
// Flag every snapshot key that has no schema definition as an EXTRA_VARIABLE
// error; scope defaults to "server" since unknown keys have no declared scope.
function collectUnknownIssues(schema, values, target, source) {
  const knownKeys = new Set(Object.keys(schema));
  const issues = [];
  for (const key of Object.keys(values)) {
    if (!knownKeys.has(key)) {
      issues.push({
        key,
        target,
        scope: "server",
        severity: "error",
        code: "EXTRA_VARIABLE",
        message: "Variable is present in snapshot but not defined in schema.",
        fix: `Remove "${key}" from snapshot or add it to schema.`,
        source
      });
    }
  }
  return issues;
}
|
|
624
|
+
// Validate a concrete snapshot of values against the schema for one target
// and produce a drift report: sorted issues plus error/warning counts. Unless
// options.allowUnknown is set, snapshot keys absent from the schema are also
// reported. Each issue is tagged with options.source (the snapshot origin).
function detectDrift(schema, options) {
  const issues = [];
  // Run full validation without throwing so we can collect issues.
  const runtime = createEnv(schema, {
    target: options.target,
    values: options.values,
    strict: true,
    throwOnError: false
  });
  try {
    runtime.assert();
  } catch (error) {
    // Only EnvValidationError carries collected issues; anything else is a bug.
    if (!(error instanceof EnvValidationError)) {
      throw error;
    }
    issues.push(
      ...error.issues.map((issue) => ({
        ...issue,
        source: options.source
      }))
    );
  }
  if (!options.allowUnknown) {
    issues.push(
      ...collectUnknownIssues(schema, options.values, options.target, options.source)
    );
  }
  const sortedIssues = sortIssues(issues);
  const errors = sortedIssues.filter((issue) => issue.severity === "error").length;
  const warnings = sortedIssues.filter((issue) => issue.severity === "warning").length;
  return {
    target: options.target,
    summary: {
      total: sortedIssues.length,
      errors,
      warnings
    },
    issues: sortedIssues
  };
}
|
|
663
|
+
|
|
664
|
+
// src/schema-loader.ts
|
|
665
|
+
import { access as access3, readFile as readFile3 } from "fs/promises";
|
|
666
|
+
import path3 from "path";
|
|
667
|
+
import { pathToFileURL as pathToFileURL2 } from "url";
|
|
668
|
+
|
|
669
|
+
// src/migrations.ts
|
|
670
|
+
import { createHash } from "crypto";
|
|
671
|
+
import { access as access2, mkdir as mkdir2, readFile as readFile2, readdir, unlink, writeFile as writeFile2 } from "fs/promises";
|
|
672
|
+
import path2 from "path";
|
|
673
|
+
import { pathToFileURL } from "url";
|
|
674
|
+
|
|
675
|
+
// src/generators.ts
|
|
676
|
+
// Targets that get their own section in the generated markdown docs.
var DOC_TARGETS = ["dev", "preview", "prod"];
// Marker comment identifying generated files.
var AIRLOCK_GENERATED_BY = "# Generated by airlock";
// Header line written at the top of `generate` outputs.
var AIRLOCK_GENERATE = `${AIRLOCK_GENERATED_BY} generate`;
|
|
679
|
+
// Thin wrapper around Object.entries; exists so the TypeScript source can
// give the entries a precise tuple type.
function schemaEntries(schema) {
  return Object.entries(schema);
}
|
|
682
|
+
// Serialize a typed default value back to its dotenv string form.
// null/undefined become the empty string; json values that are already
// strings pass through unchanged, otherwise they are JSON-stringified.
function toEnvString(value, kind) {
  if (value === void 0 || value === null) {
    return "";
  }
  switch (kind) {
    case "boolean":
      if (typeof value === "boolean") {
        return value ? "true" : "false";
      }
      break;
    case "number":
      if (typeof value === "number") {
        return `${value}`;
      }
      break;
    case "json":
      return typeof value === "string" ? value : JSON.stringify(value);
    default:
      break;
  }
  return String(value);
}
|
|
700
|
+
// Pick the placeholder written into .env.example for one variable: blank for
// sensitive values, the declared default when present, otherwise a
// kind-appropriate example.
function placeholderForKey(key, definition) {
  if (definition.sensitive) {
    return "";
  }
  if (definition.defaultValue !== void 0) {
    return toEnvString(definition.defaultValue, definition.kind);
  }
  if (definition.kind === "enum") {
    return definition.values[0] ?? "";
  }
  const examplesByKind = {
    url: "https://example.com",
    number: "0",
    boolean: "false",
    json: "{}"
  };
  // "string" and any unknown kind share the generic example.
  return examplesByKind[definition.kind] ?? `example_${key.toLowerCase()}`;
}
|
|
723
|
+
// Produce the two comment lines written above each .env.example entry:
// a description line and a compact attribute line.
function renderMetadataComment(key, definition) {
  const summary = `# ${key}: ${definition.description ?? "No description"}`;
  const requiredList = definition.requiredIn.join(",");
  const sensitivity = definition.sensitive ? "yes" : "no";
  const attributes = `# type=${definition.kind} scope=${definition.scope} requiredIn=${requiredList} sensitive=${sensitivity}`;
  return [summary, attributes];
}
|
|
732
|
+
// Render the .env.example file: the generated-by header, an optional app
// marker, then one blank-line-separated block per variable (metadata comments
// followed by a placeholder assignment).
function renderEnvExample(entries, app) {
  const lines = [AIRLOCK_GENERATE];
  if (app) {
    lines.push(`# app=${app}`);
  }
  for (const [key, definition] of entries) {
    lines.push("");
    lines.push(...renderMetadataComment(key, definition));
    lines.push(`${key}=${formatDotenvValue(placeholderForKey(key, definition))}`);
  }
  // The template literal spans two lines to embed a real trailing newline.
  return `${lines.join("\n")}
`;
}
|
|
745
|
+
// Render a defaults-only dotenv file: only variables that have a declared
// defaultValue and are not sensitive are emitted.
function renderEnvDefaults(entries, app) {
  const lines = [AIRLOCK_GENERATE];
  if (app) {
    lines.push(`# app=${app}`);
  }
  lines.push("# Non-sensitive defaults only");
  for (const [key, definition] of entries) {
    if (definition.sensitive || definition.defaultValue === void 0) {
      continue;
    }
    lines.push(`${key}=${formatDotenvValue(toEnvString(definition.defaultValue, definition.kind))}`);
  }
  // The template literal spans two lines to embed a real trailing newline.
  return `${lines.join("\n")}
`;
}
|
|
760
|
+
// Make a value safe for a markdown table cell: escape pipes, and render the
// empty string as "-" so the cell is visibly present.
function escapeMarkdownCell(value) {
  if (value === "") {
    return "-";
  }
  return value.replaceAll("|", "\\|");
}
|
|
766
|
+
// Summarize a definition's provider key mapping as "provider=KEY" pairs,
// omitting entries whose mapped key is empty.
function renderProviderMapping(definition) {
  if (!definition.provider) {
    return "";
  }
  const mappings = [];
  for (const [provider, providerKey] of Object.entries(definition.provider)) {
    if (providerKey) {
      mappings.push(`${provider}=${providerKey}`);
    }
  }
  return mappings.join(", ");
}
|
|
772
|
+
// Summarize a definition's deprecation info ("replacedBy=…; removeAfter=…;
// message") or return the empty string when not deprecated.
function renderDeprecated(definition) {
  const deprecated = definition.deprecated;
  if (!deprecated) {
    return "";
  }
  const parts = [];
  if (deprecated.replacedBy) {
    parts.push(`replacedBy=${deprecated.replacedBy}`);
  }
  if (deprecated.removeAfter) {
    parts.push(`removeAfter=${deprecated.removeAfter}`);
  }
  if (deprecated.message) {
    parts.push(deprecated.message);
  }
  return parts.join("; ");
}
|
|
788
|
+
// Default-value cell for docs: hidden for sensitive variables and for
// variables with no declared default.
function renderDefaultValue(definition) {
  const hidden = Boolean(definition.sensitive) || definition.defaultValue === void 0;
  return hidden ? "" : toEnvString(definition.defaultValue, definition.kind);
}
|
|
794
|
+
// Render schema entries as a markdown table. The "Required In" column is
// only present in the catalog view (options.includeRequiredIn); per-target
// sections omit it. Returns the table as an array of lines.
function renderMarkdownTable(entries, options) {
  const lines = [];
  const columns = options.includeRequiredIn ? [
    "Key",
    "Type",
    "Scope",
    "Required In",
    "Sensitive",
    "Default",
    "Description",
    "Owner",
    "Rotation Days",
    "Provider",
    "Deprecated",
    "Tags"
  ] : [
    "Key",
    "Type",
    "Scope",
    "Sensitive",
    "Default",
    "Description",
    "Owner",
    "Rotation Days",
    "Provider",
    "Deprecated",
    "Tags"
  ];
  lines.push(`| ${columns.join(" | ")} |`);
  lines.push(`| ${columns.map(() => "---").join(" | ")} |`);
  for (const [key, definition] of entries) {
    const tags = definition.tags?.join(", ") ?? "";
    // Cells shared by both layouts, in the no-Required-In column order.
    const commonCells = [
      escapeMarkdownCell(key),
      escapeMarkdownCell(definition.kind),
      escapeMarkdownCell(definition.scope),
      escapeMarkdownCell(definition.sensitive ? "yes" : "no"),
      escapeMarkdownCell(renderDefaultValue(definition)),
      escapeMarkdownCell(definition.description ?? ""),
      escapeMarkdownCell(definition.owner ?? ""),
      escapeMarkdownCell(definition.rotationDays === void 0 ? "" : `${definition.rotationDays}`),
      escapeMarkdownCell(renderProviderMapping(definition)),
      escapeMarkdownCell(renderDeprecated(definition)),
      escapeMarkdownCell(tags)
    ];
    if (options.includeRequiredIn) {
      // Splice the "Required In" cell between Scope and Sensitive.
      const [keyCell, typeCell, scopeCell, sensitiveCell, ...restCells] = commonCells;
      lines.push(
        `| ${keyCell} | ${typeCell} | ${scopeCell} | ${escapeMarkdownCell(definition.requiredIn.join(", "))} | ${sensitiveCell} | ${restCells.join(" | ")} |`
      );
      continue;
    }
    lines.push(`| ${commonCells.join(" | ")} |`);
  }
  return lines;
}
|
|
852
|
+
// Render the full ENV.md document: an optional app header, a catalog table
// covering every entry (with the "Required In" column), then one section per
// deployment target in DOC_TARGETS listing only that target's variables.
// Returns the markdown text with a trailing newline.
function renderEnvMarkdown(entries, app) {
  const lines = ["# ENV", ""];
  if (app) {
    lines.push(`App: \`${escapeMarkdownCell(app)}\``);
    lines.push("");
  }
  lines.push("## Catalog", "");
  lines.push(...renderMarkdownTable(entries, { includeRequiredIn: true }));
  lines.push("");
  for (const target of DOC_TARGETS) {
    // Keep only entries required in this specific environment.
    const targetEntries = entries.filter(([, definition]) => definition.requiredIn.includes(target));
    lines.push(`## Environment: ${target}`, "");
    if (targetEntries.length === 0) {
      lines.push("No variables are required in this environment.", "");
      continue;
    }
    lines.push(...renderMarkdownTable(targetEntries, { includeRequiredIn: false }));
    lines.push("");
  }
  return `${lines.join("\n")}
`;
}
|
|
874
|
+
/**
 * Build the env.audit.json payload (summary totals plus per-variable
 * metadata) and return it as pretty-printed JSON with a trailing newline.
 * Default values themselves are never emitted (only a hasDefault flag),
 * and sensitive keys are excluded from the "defaults" total.
 */
function renderAuditJson(entries, app) {
  let sensitiveCount = 0;
  let deprecatedCount = 0;
  let defaultsCount = 0;
  for (const [, definition] of entries) {
    if (definition.sensitive) {
      sensitiveCount += 1;
    }
    if (definition.deprecated) {
      deprecatedCount += 1;
    }
    if (definition.defaultValue !== void 0 && !definition.sensitive) {
      defaultsCount += 1;
    }
  }
  const variables = entries.map(([key, definition]) => ({
    key,
    type: definition.kind,
    scope: definition.scope,
    requiredIn: [...definition.requiredIn],
    sensitive: Boolean(definition.sensitive),
    owner: definition.owner ?? null,
    rotationDays: definition.rotationDays ?? null,
    tags: definition.tags ?? [],
    provider: definition.provider ?? {},
    deprecated: definition.deprecated ?? null,
    hasDefault: definition.defaultValue !== void 0
  }));
  const payload = {
    app: app ?? null,
    totals: {
      variables: entries.length,
      sensitive: sensitiveCount,
      deprecated: deprecatedCount,
      defaults: defaultsCount
    },
    variables
  };
  return `${JSON.stringify(payload, null, 2)}\n`;
}
|
|
905
|
+
/**
 * Render a .env.local scaffold: a generator header comment, an optional
 * `# app=` comment, then one KEY=value line per entry. Sensitive keys
 * always get a blank value; others use their default (if any).
 */
function renderEnvLocal(entries, app) {
  const lines = [`${AIRLOCK_GENERATE} --write-local`];
  if (app) {
    lines.push(`# app=${app}`);
  }
  for (const [key, definition] of entries) {
    let value = "";
    if (!definition.sensitive && definition.defaultValue !== void 0) {
      value = toEnvString(definition.defaultValue, definition.kind);
    }
    lines.push(`${key}=${formatDotenvValue(value)}`);
  }
  return `${lines.join("\n")}\n`;
}
|
|
917
|
+
// Render every generated artifact (example/defaults env files, ENV.md
// markdown, audit JSON, and the .env.local scaffold) from one schema
// snapshot. Pure: returns the file bodies; nothing is written to disk here.
function generateArtifacts(schema, options = {}) {
  const entries = schemaEntries(schema);
  return {
    envExample: renderEnvExample(entries, options.app),
    envDefaults: renderEnvDefaults(entries, options.app),
    envMarkdown: renderEnvMarkdown(entries, options.app),
    envAuditJson: renderAuditJson(entries, options.app),
    envLocal: renderEnvLocal(entries, options.app)
  };
}
|
|
927
|
+
/**
 * Render the env file for one deployment target, keeping only the keys
 * whose requiredIn list includes that target. Sensitive keys are emitted
 * with a blank value; others use their default when present.
 */
function generateTargetEnvFile(schema, target, options = {}) {
  const entries = schemaEntries(schema);
  const lines = [`${AIRLOCK_GENERATE} --write-target-files`, `# target=${target}`];
  if (options.app) {
    lines.push(`# app=${options.app}`);
  }
  for (const [key, definition] of entries) {
    if (!definition.requiredIn.includes(target)) {
      continue;
    }
    let value = "";
    if (!definition.sensitive && definition.defaultValue !== void 0) {
      value = toEnvString(definition.defaultValue, definition.kind);
    }
    lines.push(`${key}=${formatDotenvValue(value)}`);
  }
  return `${lines.join("\n")}\n`;
}
|
|
943
|
+
|
|
944
|
+
// src/workspace-layout.ts
|
|
945
|
+
import { access, mkdir, readFile, writeFile } from "fs/promises";
|
|
946
|
+
import path from "path";
|
|
947
|
+
// Layout of the generated `env/` workspace directory: all generated state
// and docs live under this one folder at the repo root.
var WRKSPACE_ENV_DIR = "env";
var DEFAULT_MIGRATIONS_DIR = `${WRKSPACE_ENV_DIR}/migrations`;
var DEFAULT_MIGRATION_STATE_FILE = `${WRKSPACE_ENV_DIR}/migrations.state.json`;
var DEFAULT_GENERATED_SCHEMA_FILE = `${WRKSPACE_ENV_DIR}/schema.generated.json`;
var DEFAULT_ENV_DOC_FILE = `${WRKSPACE_ENV_DIR}/ENV.md`;
var DEFAULT_ENV_AUDIT_FILE = `${WRKSPACE_ENV_DIR}/env.audit.json`;
var DEFAULT_WRKSPACE_ENV_GITIGNORE_FILE = `${WRKSPACE_ENV_DIR}/.gitignore`;
// Generated artifacts that should never be committed; seeded into the
// env/.gitignore by ensureWrkspaceEnvGitignore.
var DEFAULT_WRKSPACE_ENV_GITIGNORE_ENTRIES = [
  "schema.generated.json",
  "migrations.state.json",
  "ENV.md",
  "env.audit.json"
];
|
|
960
|
+
/** True when `filepath` is accessible on disk; false on any access error (never throws). */
async function fileExists(filepath) {
  return access(filepath).then(
    () => true,
    () => false
  );
}
|
|
968
|
+
/** Canonical form of a gitignore line: surrounding whitespace stripped. */
function normalizeLine(line) {
  const normalized = line.trim();
  return normalized;
}
|
|
971
|
+
/** Collect the distinct non-empty trimmed lines into a Set (insertion order preserved). */
function uniqueNormalized(lines) {
  const seen = new Set();
  for (const line of lines) {
    const normalized = line.trim();
    if (normalized.length > 0) {
      seen.add(normalized);
    }
  }
  return seen;
}
|
|
974
|
+
// Absolute path of the workspace `env/` directory under `cwd`.
function resolveWrkspaceEnvDir(cwd) {
  return path.resolve(cwd, WRKSPACE_ENV_DIR);
}
|
|
977
|
+
/**
 * Convert an absolute path into a POSIX-style .gitignore entry relative to
 * the workspace `env/` directory. Returns undefined when the path is the
 * directory itself or falls outside it (so it cannot be ignored from there).
 */
function toWrkspaceEnvIgnoreEntry(cwd, absolutePath) {
  const relativePath = path.relative(resolveWrkspaceEnvDir(cwd), absolutePath);
  const outsideDir = relativePath.length === 0 || relativePath.startsWith("..") || path.isAbsolute(relativePath);
  if (outsideDir) {
    return void 0;
  }
  // Normalize Windows separators to forward slashes for gitignore syntax.
  return relativePath.split(path.sep).join("/");
}
|
|
985
|
+
/**
 * Ensure env/.gitignore exists and contains every default generated-file
 * entry plus any caller-supplied extras, preserving existing lines and
 * only writing when the content actually changes.
 *
 * Fix over the original: the file's existence was checked twice and its
 * content read twice (once to parse lines, once to compare), doing
 * redundant I/O and leaving a race window in which the parsed lines and
 * the compared content could come from different file states. Now the
 * file is read exactly once and both uses share that snapshot.
 *
 * @param cwd - repository root.
 * @param additionalEntries - extra ignore entries; blank ones are dropped.
 * @returns absolute path of the gitignore file.
 */
async function ensureWrkspaceEnvGitignore(cwd, additionalEntries = []) {
  const gitignorePath = path.resolve(cwd, DEFAULT_WRKSPACE_ENV_GITIGNORE_FILE);
  await mkdir(path.dirname(gitignorePath), { recursive: true });
  // Single read: undefined means the file does not exist yet.
  const existingContent = await fileExists(gitignorePath) ? await readFile(gitignorePath, "utf8") : void 0;
  const existingLines = existingContent !== void 0 ? existingContent.split(/\r?\n/) : ["# Generated by airlock"];
  const requiredEntries = [
    ...DEFAULT_WRKSPACE_ENV_GITIGNORE_ENTRIES,
    ...additionalEntries.filter((entry) => entry.trim().length > 0)
  ];
  const seenLines = uniqueNormalized(existingLines);
  const mergedLines = [...existingLines];
  for (const entry of requiredEntries) {
    if (seenLines.has(entry)) {
      continue;
    }
    mergedLines.push(entry);
    seenLines.add(entry);
  }
  // Drop blank lines and guarantee a trailing newline.
  const content = `${mergedLines.filter((line) => line.length > 0).join("\n")}\n`;
  if (content !== (existingContent ?? "")) {
    await writeFile(gitignorePath, content, "utf8");
  }
  return gitignorePath;
}
|
|
1011
|
+
|
|
1012
|
+
// src/migrations.ts
// Layout constants re-bound for this module (the `2` suffix presumably
// comes from the bundler deduplicating identically named vars — verify
// against the TypeScript sources).
var DEFAULT_MIGRATIONS_DIR2 = DEFAULT_MIGRATIONS_DIR;
var DEFAULT_MIGRATION_STATE_FILE2 = DEFAULT_MIGRATION_STATE_FILE;
var DEFAULT_GENERATED_SCHEMA_FILE2 = DEFAULT_GENERATED_SCHEMA_FILE;
// The only dotenv files a migration is allowed to touch.
var DEFAULT_MIGRATION_TARGET_FILES = [
  ".env.local",
  ".env",
  ".env.example",
  ".env.defaults"
];
// POSIX-style environment variable identifier.
var ENV_KEY_PATTERN = /^[A-Za-z_][A-Za-z0-9_]*$/;
// File extensions recognized as migration scripts.
var MIGRATION_FILE_PATTERN = /\.(?:mjs|cjs|js|mts|cts|ts)$/;
|
|
1024
|
+
/** True when `filepath` is accessible; false otherwise (never throws). */
async function pathExists(filepath) {
  return access2(filepath).then(
    () => true,
    () => false
  );
}
|
|
1032
|
+
/** Throw unless `key` is a valid env identifier per ENV_KEY_PATTERN. */
function assertEnvKey(key) {
  if (ENV_KEY_PATTERN.test(key)) {
    return;
  }
  throw new Error(
    `Invalid env key "${key}". Fix: use a key matching ${ENV_KEY_PATTERN.toString()}.`
  );
}
|
|
1039
|
+
/**
 * Defensive copy of a schema definition: all nested arrays/objects are
 * copied so mutating the clone never aliases the source.
 *
 * Fix over the original: the spread-then-override pattern
 * (`{ ...definition, values: enumValues }`) stamped a `values: undefined`
 * own-property onto EVERY non-enum clone, polluting `"values" in obj` /
 * Object.keys checks. Enum clones still get a copied values array
 * (or undefined when the source's values is not an array, matching the
 * original's override behavior).
 */
function cloneSchemaDefinition(definition) {
  const clone = {
    ...definition,
    requiredIn: [...definition.requiredIn],
    deprecated: definition.deprecated ? { ...definition.deprecated } : void 0,
    provider: definition.provider ? { ...definition.provider } : void 0,
    tags: definition.tags ? [...definition.tags] : void 0
  };
  if (definition.kind === "enum" && Array.isArray(definition.values)) {
    clone.values = [...definition.values];
  } else {
    // Avoid leaking a `values: undefined` key onto non-enum clones.
    delete clone.values;
  }
  return clone;
}
|
|
1050
|
+
// Validate and defensively copy a raw schema definition for `key`.
// Fills in defaults (scope "server", required in every environment),
// validates kind/scope/requiredIn against their allowed values, and
// enforces that enum kinds carry a non-empty list of string values.
// Throws an Error with a "Fix:" hint on any violation; never mutates
// the caller's object.
function normalizeSchemaDefinition(key, definition) {
  if (!definition || typeof definition !== "object" || Array.isArray(definition)) {
    throw new Error(
      `Invalid schema definition for "${key}". Fix: provide an object with at least a "kind" field.`
    );
  }
  // Work on a copy so validation/defaulting never touches the input.
  const normalized = cloneSchemaDefinition(definition);
  const kind = normalized.kind;
  if (!["string", "url", "number", "boolean", "enum", "json"].includes(kind)) {
    throw new Error(
      `Invalid schema kind "${String(kind)}" for "${key}". Fix: use one of string|url|number|boolean|enum|json.`
    );
  }
  const scope = normalized.scope ?? "server";
  if (scope !== "server" && scope !== "client") {
    throw new Error(
      `Invalid schema scope "${String(scope)}" for "${key}". Fix: use "server" or "client".`
    );
  }
  normalized.scope = scope;
  // Default: required everywhere; dedupe while preserving first-seen order.
  const requiredIn = normalized.requiredIn ?? ["dev", "preview", "prod"];
  const uniqueTargets = Array.from(new Set(requiredIn));
  for (const target of uniqueTargets) {
    if (target !== "dev" && target !== "preview" && target !== "prod") {
      throw new Error(
        `Invalid requiredIn target "${String(target)}" for "${key}". Fix: use dev|preview|prod.`
      );
    }
  }
  normalized.requiredIn = uniqueTargets;
  if (kind === "enum") {
    const enumValues = normalized.values;
    if (!Array.isArray(enumValues) || enumValues.length === 0) {
      throw new Error(
        `Invalid enum schema for "${key}". Fix: provide a non-empty values array.`
      );
    }
    if (enumValues.some((value) => typeof value !== "string")) {
      throw new Error(`Invalid enum schema for "${key}". Fix: all enum values must be strings.`);
    }
    normalized.values = [...enumValues];
  }
  return normalized;
}
|
|
1094
|
+
/**
 * Resolve a prefix-policy input to a concrete PrefixPolicy object.
 * Accepts the shorthand strings "nextjs"/"vite", a custom policy object
 * with a string[] publicPrefixes, or a falsy value (returned as undefined).
 * Throws on anything else.
 */
function normalizePrefixPolicy(policy) {
  if (!policy) {
    return void 0;
  }
  if (policy === "nextjs" || policy === "vite") {
    return prefixPolicies[policy];
  }
  const prefixes = policy.publicPrefixes;
  if (!Array.isArray(prefixes) || prefixes.some((prefix) => typeof prefix !== "string")) {
    throw new Error(
      'Invalid prefix policy. Fix: provide "nextjs", "vite", or a PrefixPolicy with publicPrefixes.'
    );
  }
  return {
    ...policy,
    publicPrefixes: [...prefixes]
  };
}
|
|
1119
|
+
/**
 * Serialize a PrefixPolicy for the generated schema file. Builtin policies
 * (matched by identity or by name, "nextjs" before "vite") collapse to
 * their shorthand string; custom policies become a plain copied object.
 */
function serializePrefixPolicy(policy) {
  if (!policy) {
    return void 0;
  }
  for (const builtin of ["nextjs", "vite"]) {
    if (policy === prefixPolicies[builtin] || policy.name === builtin) {
      return builtin;
    }
  }
  return {
    name: policy.name,
    publicPrefixes: [...policy.publicPrefixes]
  };
}
|
|
1134
|
+
/**
 * Inverse of serializePrefixPolicy: "nextjs"/"vite" map back to the builtin
 * policies, objects are validated (publicPrefixes must be a string array)
 * and defensively copied. Throws on a malformed generated schema file.
 */
function deserializePrefixPolicy(value) {
  if (!value) {
    return void 0;
  }
  if (value === "nextjs" || value === "vite") {
    return prefixPolicies[value];
  }
  const prefixes = value.publicPrefixes;
  const wellFormed = Array.isArray(prefixes) && prefixes.every((prefix) => typeof prefix === "string");
  if (!wellFormed) {
    throw new Error(
      "Invalid generated schema file prefix policy. Fix: delete the generated schema file and rebuild it from migrations."
    );
  }
  return {
    name: value.name,
    publicPrefixes: [...prefixes]
  };
}
|
|
1154
|
+
/**
 * Stringify a schema value for writing into a dotenv file.
 * null/undefined -> "", booleans and numbers use their canonical text form
 * (when the runtime type matches the declared kind), json values are
 * JSON-encoded unless already a string; everything else falls back to
 * String(value).
 */
function toEnvString2(value, kind) {
  if (value === void 0 || value === null) {
    return "";
  }
  switch (kind) {
    case "boolean":
      if (typeof value === "boolean") {
        return value ? "true" : "false";
      }
      break;
    case "number":
      if (typeof value === "number") {
        return `${value}`;
      }
      break;
    case "json":
      return typeof value === "string" ? value : JSON.stringify(value);
    default:
      break;
  }
  return String(value);
}
|
|
1172
|
+
/** Default dotenv value for a definition: blank for sensitive or default-less keys. */
function defaultEnvValueForDefinition(definition) {
  const blank = definition.sensitive || definition.defaultValue === void 0;
  return blank ? "" : toEnvString2(definition.defaultValue, definition.kind);
}
|
|
1181
|
+
/**
 * Validate and dedupe a migration target-file list. A missing/empty list
 * defaults to every supported target; an unsupported entry (first in
 * deduped order) throws with a "Fix:" hint.
 */
function normalizeTargets2(targets) {
  if (!targets || targets.length === 0) {
    return [...DEFAULT_MIGRATION_TARGET_FILES];
  }
  const unique = [...new Set(targets)];
  const unsupported = unique.find((target) => !DEFAULT_MIGRATION_TARGET_FILES.includes(target));
  if (unsupported !== void 0) {
    throw new Error(
      `Unsupported migration target "${unsupported}". Fix: use one of ${DEFAULT_MIGRATION_TARGET_FILES.join(
        ", "
      )}.`
    );
  }
  return unique;
}
|
|
1197
|
+
/** Indexes of every pair line in `document.lines` whose key matches, in order. */
function findPairIndexes(document, key) {
  const indexes = [];
  document.lines.forEach((line, index) => {
    if (line.type === "pair" && line.key === key) {
      indexes.push(index);
    }
  });
  return indexes;
}
|
|
1207
|
+
/** Index of the last pair line carrying `key`, or -1 when absent. */
function findLastPairIndex(document, key) {
  return document.lines.findLastIndex(
    (line) => line.type === "pair" && line.key === key
  );
}
|
|
1216
|
+
/** Re-render a parsed pair line back to its `export KEY=value # comment` text. */
function renderPairLine(line) {
  const exportPrefix = line.exported ? "export " : "";
  const commentSuffix = line.inlineComment ? ` ${line.inlineComment}` : "";
  return `${exportPrefix}${line.key}=${formatDotenvValue(line.value, line.quote)}${commentSuffix}`;
}
|
|
1222
|
+
/** Build a fresh, unquoted, non-exported pair line object for `key=value`. */
function createPairLine(key, value) {
  const raw = `${key}=${formatDotenvValue(value)}`;
  return {
    type: "pair",
    raw,
    key,
    value,
    quote: "none",
    exported: false
  };
}
|
|
1232
|
+
/**
 * Normalize a comment input (string, string[], or undefined) into comment
 * line objects: blank chunks are dropped and a leading "# " is added to
 * chunks that do not already start with "#".
 */
function normalizeCommentLines(comment) {
  if (comment === void 0) {
    return [];
  }
  const chunks = typeof comment === "string" ? comment.split(/\r?\n/) : [...comment];
  return chunks
    .map((chunk) => chunk.trim())
    .filter((trimmed) => trimmed.length > 0)
    .map((trimmed) => ({
      type: "comment",
      raw: trimmed.startsWith("#") ? trimmed : `# ${trimmed}`
    }));
}
|
|
1251
|
+
/**
 * Replace the contiguous run of comment lines immediately above the last
 * occurrence of `key` with a freshly normalized comment block.
 * No-op when the key is absent from the document.
 */
function setCommentBlockForKey(document, key, comment) {
  const pairIndex = findLastPairIndex(document, key);
  if (pairIndex < 0) {
    return;
  }
  // Walk upward over the existing comment block directly above the pair.
  let blockStart = pairIndex;
  while (blockStart > 0 && document.lines[blockStart - 1]?.type === "comment") {
    blockStart -= 1;
  }
  document.lines.splice(blockStart, pairIndex - blockStart, ...normalizeCommentLines(comment));
}
|
|
1263
|
+
// Insert or update `key` in a parsed dotenv document.
// - Key already present: overwrite the last occurrence's value (unless
//   options.overwrite is falsy) and, if a comment was supplied, replace
//   the comment block above it.
// - Key missing: append comment lines plus a new pair line, unless
//   options.createIfMissing is falsy.
function upsertValue(document, key, value, options) {
  const existingIndex = findLastPairIndex(document, key);
  if (existingIndex >= 0) {
    if (options.overwrite) {
      const existingLine = document.lines[existingIndex];
      if (existingLine.type === "pair") {
        existingLine.value = value;
        // Keep the rendered raw text in sync with the mutated value.
        existingLine.raw = renderPairLine(existingLine);
      }
    }
    if (options.comment !== void 0) {
      setCommentBlockForKey(document, key, options.comment);
    }
    return;
  }
  if (!options.createIfMissing) {
    return;
  }
  const commentLines = normalizeCommentLines(options.comment);
  document.lines.push(...commentLines, createPairLine(key, value));
}
|
|
1284
|
+
/** Delete every pair line with `key`, mutating `document.lines` in place. */
function removeKey(document, key) {
  const survivors = document.lines.filter(
    (line) => !(line.type === "pair" && line.key === key)
  );
  // Splice into the existing array so aliased references stay valid.
  document.lines.splice(0, document.lines.length, ...survivors);
}
|
|
1292
|
+
// Rename `fromKey` to `toKey` across the document. Three cases:
// 1. Same key: only refresh the comment block when a comment was supplied.
// 2. Target key already present: bail unless options.overwrite; otherwise
//    copy the LAST source occurrence's value onto the target line, remove
//    every source line, and update the target's comment block.
// 3. Target absent: rewrite every occurrence of the source key in place,
//    keeping each line's value/quote/export form.
function renameKey(document, fromKey, toKey, options) {
  if (fromKey === toKey) {
    if (options.comment !== void 0) {
      setCommentBlockForKey(document, toKey, options.comment);
    }
    return;
  }
  const fromIndexes = findPairIndexes(document, fromKey);
  if (fromIndexes.length === 0) {
    // Nothing to rename.
    return;
  }
  const toIndex = findLastPairIndex(document, toKey);
  if (toIndex >= 0) {
    if (!options.overwrite) {
      return;
    }
    const sourceLine = document.lines[fromIndexes[fromIndexes.length - 1]];
    const targetLine = document.lines[toIndex];
    if (sourceLine.type === "pair" && targetLine.type === "pair") {
      targetLine.value = sourceLine.value;
      // Keep the rendered raw text in sync with the copied value.
      targetLine.raw = renderPairLine(targetLine);
    }
    removeKey(document, fromKey);
    if (options.comment !== void 0) {
      setCommentBlockForKey(document, toKey, options.comment);
    }
    return;
  }
  for (const index of fromIndexes) {
    const line = document.lines[index];
    if (line.type !== "pair") {
      continue;
    }
    line.key = toKey;
    line.raw = renderPairLine(line);
  }
  if (options.comment !== void 0) {
    setCommentBlockForKey(document, toKey, options.comment);
  }
}
|
|
1332
|
+
/**
 * Serialize parsed dotenv lines back to text, honoring the document's
 * original newline style and trailing-newline flag. An empty document
 * renders as the empty string regardless of the trailing-newline flag.
 */
function renderDocument(document) {
  if (document.lines.length === 0) {
    return "";
  }
  const body = document.lines.map((line) => line.raw).join(document.newline);
  return document.hasTrailingNewline ? body + document.newline : body;
}
|
|
1342
|
+
/** Fresh empty in-memory dotenv document, used when a target file does not exist yet. */
function newDocumentForMissingFile() {
  const document = {
    lines: [],
    newline: "\n",
    hasTrailingNewline: true
  };
  return document;
}
|
|
1349
|
+
// Load the requested dotenv target files from disk into parsed documents.
// options.cwd defaults to process.cwd(); options.targets is validated and
// deduped by normalizeTargets2 (defaults to all supported target files).
// Returns a Map of target name -> { target, filePath, document, source }.
// Missing files yield an empty document so later edits can create them.
async function loadEnvTargetDocuments(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const targets = normalizeTargets2(options.targets);
  const output = /* @__PURE__ */ new Map();
  for (const target of targets) {
    const filePath = path2.resolve(cwd, target);
    const exists2 = await pathExists(filePath);
    if (!exists2) {
      // Absent file: seed an empty document instead of failing.
      output.set(target, {
        target,
        filePath,
        document: newDocumentForMissingFile(),
        source: ""
      });
      continue;
    }
    const source = await readFile2(filePath, "utf8");
    output.set(target, {
      target,
      filePath,
      document: parseDotenv(source),
      source
    });
  }
  return output;
}
|
|
1375
|
+
/**
 * In-memory stand-ins for every default target file (empty documents with
 * the target name doubling as the file path), used for dry runs where
 * nothing is read from or written to disk.
 */
function createNoopTargetDocuments() {
  const entries = DEFAULT_MIGRATION_TARGET_FILES.map((target) => [
    target,
    {
      target,
      filePath: target,
      source: "",
      document: newDocumentForMissingFile()
    }
  ]);
  return new Map(entries);
}
|
|
1387
|
+
/** Whether `key` is present in the loaded document for `target` (false when the target has no document). */
function hasKeyInTargetDocuments(documents, target, key) {
  const document = documents.get(target)?.document;
  return Boolean(document) && findLastPairIndex(document, key) >= 0;
}
|
|
1394
|
+
// Hidden property name under which a migration context installs its
// "flush pending fluent definitions" hook (see finalizeFluentDefinitions).
var finalizeFluentDefinitionsSymbol = /* @__PURE__ */ Symbol("@wrkspace-co/env/finalizeFluentDefinitions");
|
|
1395
|
+
/**
 * Seed definition for a fluent chain: server scope, required in every
 * environment. Enum kinds additionally start with an empty values list,
 * to be filled in via .values(...).
 */
function createInitialFluentDefinition(kind) {
  const definition = {
    kind,
    scope: "server",
    requiredIn: ["dev", "preview", "prod"]
  };
  if (kind === "enum") {
    definition.values = [];
  }
  return definition;
}
|
|
1410
|
+
/** Accept a single target or a list of targets; always return a fresh array. */
function normalizeRequiredInInput(requiredIn) {
  return Array.isArray(requiredIn) ? [...requiredIn] : [requiredIn];
}
|
|
1416
|
+
/** Invoke the context's hidden finalizer hook (if installed) to flush pending fluent chains. */
function finalizeFluentDefinitions(context) {
  const finalizer = context[finalizeFluentDefinitionsSymbol];
  if (typeof finalizer === "function") {
    finalizer.call(context);
  }
}
|
|
1422
|
+
function createEnvMigrationContext(documents, operationLog = [], schemaState) {
|
|
1423
|
+
const variableChains = [];
|
|
1424
|
+
let context;
|
|
1425
|
+
const resolveTargets = (options) => normalizeTargets2(options?.targets);
|
|
1426
|
+
const forTargetFiles = (targetFiles) => targetFiles.map((target) => {
|
|
1427
|
+
const targetDocument = documents.get(target);
|
|
1428
|
+
if (!targetDocument) {
|
|
1429
|
+
throw new Error(`Missing in-memory target "${target}".`);
|
|
1430
|
+
}
|
|
1431
|
+
return targetDocument;
|
|
1432
|
+
});
|
|
1433
|
+
const setSchemaDefinition = (key, definition) => {
|
|
1434
|
+
const normalizedDefinition = normalizeSchemaDefinition(key, definition);
|
|
1435
|
+
if (schemaState) {
|
|
1436
|
+
schemaState.schema[key] = normalizedDefinition;
|
|
1437
|
+
}
|
|
1438
|
+
return normalizedDefinition;
|
|
1439
|
+
};
|
|
1440
|
+
const removeSchemaDefinition = (key) => {
|
|
1441
|
+
if (!schemaState) {
|
|
1442
|
+
return;
|
|
1443
|
+
}
|
|
1444
|
+
delete schemaState.schema[key];
|
|
1445
|
+
};
|
|
1446
|
+
const renameSchemaDefinition = (fromKey, toKey, options) => {
|
|
1447
|
+
if (!schemaState || fromKey === toKey) {
|
|
1448
|
+
return;
|
|
1449
|
+
}
|
|
1450
|
+
if (!(fromKey in schemaState.schema)) {
|
|
1451
|
+
return;
|
|
1452
|
+
}
|
|
1453
|
+
if (toKey in schemaState.schema && !options?.overwrite) {
|
|
1454
|
+
return;
|
|
1455
|
+
}
|
|
1456
|
+
const current = schemaState.schema[fromKey];
|
|
1457
|
+
schemaState.schema[toKey] = normalizeSchemaDefinition(toKey, current);
|
|
1458
|
+
delete schemaState.schema[fromKey];
|
|
1459
|
+
};
|
|
1460
|
+
const upsertKeyInTargets = (key, value, options) => {
|
|
1461
|
+
const targetFiles = resolveTargets(options);
|
|
1462
|
+
for (const targetDocument of forTargetFiles(targetFiles)) {
|
|
1463
|
+
upsertValue(targetDocument.document, key, value, {
|
|
1464
|
+
overwrite: options?.overwrite !== false,
|
|
1465
|
+
createIfMissing: options?.createIfMissing !== false,
|
|
1466
|
+
comment: options?.comment
|
|
1467
|
+
});
|
|
1468
|
+
}
|
|
1469
|
+
return targetFiles;
|
|
1470
|
+
};
|
|
1471
|
+
const removeKeyInTargets = (key, options) => {
|
|
1472
|
+
const targetFiles = resolveTargets(options);
|
|
1473
|
+
for (const targetDocument of forTargetFiles(targetFiles)) {
|
|
1474
|
+
removeKey(targetDocument.document, key);
|
|
1475
|
+
}
|
|
1476
|
+
return targetFiles;
|
|
1477
|
+
};
|
|
1478
|
+
const renameKeyInTargets = (fromKey, toKey, options) => {
|
|
1479
|
+
const targetFiles = resolveTargets(options);
|
|
1480
|
+
for (const targetDocument of forTargetFiles(targetFiles)) {
|
|
1481
|
+
renameKey(targetDocument.document, fromKey, toKey, {
|
|
1482
|
+
overwrite: Boolean(options?.overwrite),
|
|
1483
|
+
comment: options?.comment
|
|
1484
|
+
});
|
|
1485
|
+
}
|
|
1486
|
+
return targetFiles;
|
|
1487
|
+
};
|
|
1488
|
+
const applyUpdateVariable = (key, definition, value, options) => {
|
|
1489
|
+
assertEnvKey(key);
|
|
1490
|
+
const normalizedDefinition = setSchemaDefinition(key, definition);
|
|
1491
|
+
const targetFiles = upsertKeyInTargets(
|
|
1492
|
+
key,
|
|
1493
|
+
value ?? defaultEnvValueForDefinition(normalizedDefinition),
|
|
1494
|
+
options
|
|
1495
|
+
);
|
|
1496
|
+
operationLog.push({
|
|
1497
|
+
kind: "updateVariable",
|
|
1498
|
+
key,
|
|
1499
|
+
targets: targetFiles
|
|
1500
|
+
});
|
|
1501
|
+
};
|
|
1502
|
+
const applyRemoveVariable = (key, options) => {
|
|
1503
|
+
assertEnvKey(key);
|
|
1504
|
+
removeSchemaDefinition(key);
|
|
1505
|
+
const targetFiles = removeKeyInTargets(key, options);
|
|
1506
|
+
operationLog.push({
|
|
1507
|
+
kind: "removeVariable",
|
|
1508
|
+
key,
|
|
1509
|
+
targets: targetFiles
|
|
1510
|
+
});
|
|
1511
|
+
};
|
|
1512
|
+
const applyRenameVariable = (fromKey, toKey, options) => {
|
|
1513
|
+
assertEnvKey(fromKey);
|
|
1514
|
+
assertEnvKey(toKey);
|
|
1515
|
+
renameSchemaDefinition(fromKey, toKey, options);
|
|
1516
|
+
const targetFiles = renameKeyInTargets(fromKey, toKey, options);
|
|
1517
|
+
operationLog.push({
|
|
1518
|
+
kind: "renameVariable",
|
|
1519
|
+
fromKey,
|
|
1520
|
+
toKey,
|
|
1521
|
+
targets: targetFiles
|
|
1522
|
+
});
|
|
1523
|
+
};
|
|
1524
|
+
const existingSchemaDefinitionForKey = (key) => {
|
|
1525
|
+
if (!schemaState) {
|
|
1526
|
+
return void 0;
|
|
1527
|
+
}
|
|
1528
|
+
const existing = schemaState.schema[key];
|
|
1529
|
+
if (!existing) {
|
|
1530
|
+
return void 0;
|
|
1531
|
+
}
|
|
1532
|
+
return cloneSchemaDefinition(existing);
|
|
1533
|
+
};
|
|
1534
|
+
const ensureChainDefinition = (state, methodName) => {
|
|
1535
|
+
const definition = state.definition ?? existingSchemaDefinitionForKey(state.key);
|
|
1536
|
+
if (!definition) {
|
|
1537
|
+
throw new Error(
|
|
1538
|
+
`Cannot call .${methodName}(...) for "${state.key}" before setting its kind. Fix: call .kind(...), .define(...), or start with ctx.string(...)/ctx.url(...)/ctx.number(...)/ctx.boolean(...)/ctx.enum(...)/ctx.json(...).`
|
|
1539
|
+
);
|
|
1540
|
+
}
|
|
1541
|
+
state.definition = definition;
|
|
1542
|
+
return definition;
|
|
1543
|
+
};
|
|
1544
|
+
const markPendingUpdate = (state) => {
|
|
1545
|
+
state.pendingUpdate = true;
|
|
1546
|
+
};
|
|
1547
|
+
const flushChain = (state) => {
|
|
1548
|
+
if (!state.pendingUpdate) {
|
|
1549
|
+
return;
|
|
1550
|
+
}
|
|
1551
|
+
if (!state.definition) {
|
|
1552
|
+
throw new Error(
|
|
1553
|
+
`Incomplete fluent schema definition for "${state.key}". Fix: call .kind(...) or .define(...) before applying variable metadata/value.`
|
|
1554
|
+
);
|
|
1555
|
+
}
|
|
1556
|
+
if (state.definition.kind === "enum" && state.definition.values.length === 0) {
|
|
1557
|
+
throw new Error(
|
|
1558
|
+
`Incomplete fluent schema definition for "${state.key}". Fix: enum keys require .values(...).`
|
|
1559
|
+
);
|
|
1560
|
+
}
|
|
1561
|
+
applyUpdateVariable(
|
|
1562
|
+
state.key,
|
|
1563
|
+
state.definition,
|
|
1564
|
+
state.explicitValue ?? defaultEnvValueForDefinition(state.definition),
|
|
1565
|
+
state.options
|
|
1566
|
+
);
|
|
1567
|
+
state.pendingUpdate = false;
|
|
1568
|
+
};
|
|
1569
|
+
const mergeRenameOptions = (state, options) => {
|
|
1570
|
+
const merged = {};
|
|
1571
|
+
if (options?.targets !== void 0) {
|
|
1572
|
+
merged.targets = options.targets;
|
|
1573
|
+
} else if (state.options.targets !== void 0) {
|
|
1574
|
+
merged.targets = state.options.targets;
|
|
1575
|
+
}
|
|
1576
|
+
if (options?.overwrite !== void 0) {
|
|
1577
|
+
merged.overwrite = options.overwrite;
|
|
1578
|
+
} else if (state.options.overwrite !== void 0) {
|
|
1579
|
+
merged.overwrite = state.options.overwrite;
|
|
1580
|
+
}
|
|
1581
|
+
if (options?.comment !== void 0) {
|
|
1582
|
+
merged.comment = options.comment;
|
|
1583
|
+
} else if (state.options.comment !== void 0) {
|
|
1584
|
+
merged.comment = state.options.comment;
|
|
1585
|
+
}
|
|
1586
|
+
if (merged.targets === void 0 && merged.overwrite === void 0 && merged.comment === void 0) {
|
|
1587
|
+
return void 0;
|
|
1588
|
+
}
|
|
1589
|
+
return merged;
|
|
1590
|
+
};
|
|
1591
|
+
const mergeRemoveOptions = (state, options) => {
|
|
1592
|
+
const merged = {};
|
|
1593
|
+
if (options?.targets !== void 0) {
|
|
1594
|
+
merged.targets = options.targets;
|
|
1595
|
+
} else if (state.options.targets !== void 0) {
|
|
1596
|
+
merged.targets = state.options.targets;
|
|
1597
|
+
}
|
|
1598
|
+
if (merged.targets === void 0) {
|
|
1599
|
+
return void 0;
|
|
1600
|
+
}
|
|
1601
|
+
return merged;
|
|
1602
|
+
};
|
|
1603
|
+
// Build the fluent per-variable builder returned by ctx.key()/ctx.string()/etc.
// All chain methods mutate a single shared `state` record; the state is pushed
// onto `variableChains` so the enclosing context can flush every pending
// definition later (see the [finalizeFluentDefinitionsSymbol] handler).
// Schema-shaping methods call markPendingUpdate; file-option methods
// (targets/overwrite/createIfMissing/comment) only stash options and do not.
const createVariableChain = (key, initialKind) => {
  assertEnvKey(key);
  const existingDefinition = existingSchemaDefinitionForKey(key);
  const state = {
    key,
    // An explicit initial kind starts a fresh definition; otherwise reuse
    // whatever the current schema already knows about this key.
    definition: initialKind ? createInitialFluentDefinition(initialKind) : existingDefinition,
    options: {},
    pendingUpdate: Boolean(initialKind)
  };
  variableChains.push(state);
  const chain = {
    // Replace the definition with a fresh one of the given kind.
    kind(kind) {
      state.definition = createInitialFluentDefinition(kind);
      markPendingUpdate(state);
      return chain;
    },
    // Replace the definition wholesale with a normalized user-supplied one.
    define(definition) {
      state.definition = normalizeSchemaDefinition(state.key, definition);
      markPendingUpdate(state);
      return chain;
    },
    // Kind shorthands — all delegate to chain.kind(...).
    string() {
      return chain.kind("string");
    },
    url() {
      return chain.kind("url");
    },
    number() {
      return chain.kind("number");
    },
    boolean() {
      return chain.kind("boolean");
    },
    enum() {
      return chain.kind("enum");
    },
    json() {
      return chain.kind("json");
    },
    // Field setters: each copies the current definition (ensured to exist by
    // ensureChainDefinition) and overlays one property.
    scope(scope) {
      const definition = ensureChainDefinition(state, "scope");
      state.definition = {
        ...definition,
        scope
      };
      markPendingUpdate(state);
      return chain;
    },
    requiredIn(requiredIn) {
      const definition = ensureChainDefinition(state, "requiredIn");
      state.definition = {
        ...definition,
        requiredIn: normalizeRequiredInInput(requiredIn)
      };
      markPendingUpdate(state);
      return chain;
    },
    defaultValue(value) {
      const definition = ensureChainDefinition(state, "defaultValue");
      state.definition = {
        ...definition,
        defaultValue: value
      };
      markPendingUpdate(state);
      return chain;
    },
    sensitive(value = true) {
      const definition = ensureChainDefinition(state, "sensitive");
      state.definition = {
        ...definition,
        sensitive: value
      };
      markPendingUpdate(state);
      return chain;
    },
    // Enum value list — only valid once the definition's kind is "enum".
    values(values) {
      const definition = ensureChainDefinition(state, "values");
      if (definition.kind !== "enum") {
        throw new Error(
          `Cannot call .values(...) for "${state.key}" because it is "${definition.kind}". Fix: use .enum() or ctx.enum("${state.key}") before .values(...).`
        );
      }
      const normalizedValues = Array.isArray(values) ? [...values] : [values];
      state.definition = {
        ...definition,
        values: normalizedValues
      };
      markPendingUpdate(state);
      return chain;
    },
    description(value) {
      const definition = ensureChainDefinition(state, "description");
      state.definition = {
        ...definition,
        description: value
      };
      markPendingUpdate(state);
      return chain;
    },
    example(value) {
      const definition = ensureChainDefinition(state, "example");
      state.definition = {
        ...definition,
        example: value
      };
      markPendingUpdate(state);
      return chain;
    },
    owner(value) {
      const definition = ensureChainDefinition(state, "owner");
      state.definition = {
        ...definition,
        owner: value
      };
      markPendingUpdate(state);
      return chain;
    },
    rotationDays(value) {
      const definition = ensureChainDefinition(state, "rotationDays");
      state.definition = {
        ...definition,
        rotationDays: value
      };
      markPendingUpdate(state);
      return chain;
    },
    // Deprecation metadata is shallow-copied so callers cannot mutate it later.
    deprecated(value) {
      const definition = ensureChainDefinition(state, "deprecated");
      state.definition = {
        ...definition,
        deprecated: value ? { ...value } : void 0
      };
      markPendingUpdate(state);
      return chain;
    },
    // Convenience wrapper over .deprecated(...) with positional arguments.
    deprecate(replacedBy, removeAfter, message) {
      return chain.deprecated({
        replacedBy,
        removeAfter,
        message
      });
    },
    // Replace the whole provider alias map (shallow-copied).
    provider(value) {
      const definition = ensureChainDefinition(state, "provider");
      state.definition = {
        ...definition,
        provider: value ? { ...value } : void 0
      };
      markPendingUpdate(state);
      return chain;
    },
    // Add/remove one provider alias; passing key2 === undefined deletes the
    // alias, and an emptied map collapses back to undefined.
    providerAlias(provider, key2) {
      const definition = ensureChainDefinition(state, "providerAlias");
      const nextProvider = {
        ...definition.provider ?? {}
      };
      if (key2 === void 0) {
        delete nextProvider[provider];
      } else {
        nextProvider[provider] = key2;
      }
      state.definition = {
        ...definition,
        provider: Object.keys(nextProvider).length > 0 ? nextProvider : void 0
      };
      markPendingUpdate(state);
      return chain;
    },
    // Replace the tag list (copied); undefined clears it.
    tags(values) {
      const definition = ensureChainDefinition(state, "tags");
      const normalized = values === void 0 ? void 0 : Array.isArray(values) ? [...values] : [values];
      state.definition = {
        ...definition,
        tags: normalized
      };
      markPendingUpdate(state);
      return chain;
    },
    // Append one tag, deduplicating via a Set.
    tag(value) {
      const definition = ensureChainDefinition(state, "tag");
      const nextTags = new Set(definition.tags ?? []);
      nextTags.add(value);
      state.definition = {
        ...definition,
        tags: [...nextTags]
      };
      markPendingUpdate(state);
      return chain;
    },
    // Record an explicit literal value to write to env files on flush.
    value(value) {
      ensureChainDefinition(state, "value");
      state.explicitValue = value;
      markPendingUpdate(state);
      return chain;
    },
    // File-targeting options: stored on state.options, no pending update.
    targets(targets) {
      state.options.targets = normalizeTargets2(
        Array.isArray(targets) ? targets : [targets]
      );
      return chain;
    },
    overwrite(value = true) {
      state.options.overwrite = value;
      return chain;
    },
    createIfMissing(value = true) {
      state.options.createIfMissing = value;
      return chain;
    },
    comment(comment) {
      state.options.comment = comment;
      return chain;
    },
    // Flush any pending definition first, then rename the variable and keep
    // chaining under the new key.
    rename(toKey, options) {
      flushChain(state);
      const fromKey = state.key;
      const mergedOptions = mergeRenameOptions(state, options);
      applyRenameVariable(fromKey, toKey, mergedOptions);
      state.key = toKey;
      return chain;
    },
    // Flush, remove the variable, and reset the chain so a later flush of
    // this state is a no-op.
    remove(options) {
      flushChain(state);
      const mergedOptions = mergeRemoveOptions(state, options);
      applyRemoveVariable(state.key, mergedOptions);
      state.definition = void 0;
      state.explicitValue = void 0;
      state.pendingUpdate = false;
      return chain;
    }
  };
  return chain;
};
|
|
1836
|
+
// The migration context object handed to migration up()/down() functions.
// Each typed entry point creates a fresh fluent chain for one variable key;
// setPrefixPolicy records the operation and (when schema tracking is active)
// updates the in-memory prefix policy. The symbol-keyed finalizer flushes
// every chain so pending definitions are committed after the migration runs.
context = {
  // Start a chain without forcing a kind (reuses any existing definition).
  key(key) {
    return createVariableChain(key);
  },
  string(key) {
    return createVariableChain(key, "string");
  },
  url(key) {
    return createVariableChain(key, "url");
  },
  number(key) {
    return createVariableChain(key, "number");
  },
  boolean(key) {
    return createVariableChain(key, "boolean");
  },
  enum(key) {
    return createVariableChain(key, "enum");
  },
  json(key) {
    return createVariableChain(key, "json");
  },
  setPrefixPolicy(policy) {
    // Log the operation even when schema tracking is disabled, so the
    // migration's effects stay auditable.
    operationLog.push({
      kind: "setPrefixPolicy",
      targets: [...DEFAULT_MIGRATION_TARGET_FILES]
    });
    if (!schemaState) {
      return;
    }
    schemaState.prefixPolicy = normalizePrefixPolicy(policy);
  },
  // Internal hook (symbol-keyed so user migrations cannot collide with it):
  // flush every variable chain created during this migration.
  [finalizeFluentDefinitionsSymbol]() {
    for (const state of variableChains) {
      flushChain(state);
    }
  }
};
|
|
1874
|
+
return context;
|
|
1875
|
+
}
|
|
1876
|
+
/**
 * Persist every tracked env target document whose rendered content differs
 * from the source last read/written from disk. Documents are processed in
 * deterministic (path-sorted) order; each written document has its cached
 * `source` updated so a second call is a no-op.
 *
 * @param documents Map-like collection of target documents ({ filePath, document, source, target }).
 * @returns Array of the `target` identifiers that were actually rewritten.
 */
async function writeEnvTargetDocuments(documents) {
  const changed = [];
  const byPath = [...documents.values()];
  byPath.sort((a, b) => a.filePath.localeCompare(b.filePath));
  for (const entry of byPath) {
    const output = renderDocument(entry.document);
    if (output !== entry.source) {
      // Ensure the parent directory exists before writing.
      await mkdir2(path2.dirname(entry.filePath), { recursive: true });
      await writeFile2(entry.filePath, output, "utf8");
      entry.source = output;
      changed.push(entry.target);
    }
  }
  return changed;
}
|
|
1891
|
+
// Absolute path of the env migrations directory, defaulting when unset.
function resolveMigrationsDir(cwd, migrationsDir) {
  const dir = migrationsDir ?? DEFAULT_MIGRATIONS_DIR2;
  return path2.resolve(cwd, dir);
}
|
|
1894
|
+
// Absolute path of the migration state file, defaulting when unset.
function resolveStatePath(cwd, stateFile) {
  const file = stateFile ?? DEFAULT_MIGRATION_STATE_FILE2;
  return path2.resolve(cwd, file);
}
|
|
1897
|
+
// Absolute path of the generated schema snapshot file, defaulting when unset.
function resolveGeneratedSchemaPath(cwd, generatedSchemaFile) {
  const file = generatedSchemaFile ?? DEFAULT_GENERATED_SCHEMA_FILE2;
  return path2.resolve(cwd, file);
}
|
|
1900
|
+
// True only for plain object-like values: non-null, typeof "object",
// and not an array. Used as the JSON-shape guard throughout this module.
function isRecord(value) {
  if (value === null || value === void 0) {
    return false;
  }
  return typeof value === "object" && Array.isArray(value) === false;
}
|
|
1903
|
+
/**
 * Load and strictly validate the migration state file.
 *
 * Returns `{ statePath, state }` where `state` is always shaped as
 * `{ version: 1, applied: [{ id, appliedAt, checksum }, ...] }`. A missing
 * file is not an error: it yields an empty applied list. Any malformed
 * content throws with an actionable "Fix:" message naming the state path.
 *
 * @param options Optional `{ cwd, stateFile }` overrides.
 */
async function loadMigrationState(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const statePath = resolveStatePath(cwd, options.stateFile);
  // First run: no state file yet — start from an empty applied list.
  if (!await pathExists(statePath)) {
    return {
      statePath,
      state: {
        version: 1,
        applied: []
      }
    };
  }
  const raw = await readFile2(statePath, "utf8");
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    throw new Error(
      `Invalid migration state file at "${statePath}". Fix: ensure it contains valid JSON.`
    );
  }
  // Shape validation: top-level object with an "applied" array.
  if (!isRecord(parsed)) {
    throw new Error(
      `Invalid migration state file at "${statePath}". Fix: expected an object with an "applied" array.`
    );
  }
  if (!Array.isArray(parsed.applied)) {
    throw new Error(
      `Invalid migration state file at "${statePath}". Fix: "applied" must be an array.`
    );
  }
  // Rebuild each applied entry from scratch so only the three known
  // fields survive, each verified to be a non-empty string.
  const applied = [];
  for (const item of parsed.applied) {
    if (!isRecord(item)) {
      throw new Error(
        `Invalid migration state file at "${statePath}". Fix: each applied entry must be an object.`
      );
    }
    const id = item.id;
    const appliedAt = item.appliedAt;
    const checksum = item.checksum;
    if (typeof id !== "string" || id.length === 0) {
      throw new Error(
        `Invalid migration state file at "${statePath}". Fix: each applied entry must include a non-empty "id".`
      );
    }
    if (typeof appliedAt !== "string" || appliedAt.length === 0) {
      throw new Error(
        `Invalid migration state file at "${statePath}". Fix: each applied entry must include "appliedAt".`
      );
    }
    if (typeof checksum !== "string" || checksum.length === 0) {
      throw new Error(
        `Invalid migration state file at "${statePath}". Fix: each applied entry must include "checksum".`
      );
    }
    applied.push({
      id,
      appliedAt,
      checksum
    });
  }
  // Note: "version" is normalized to 1 regardless of what the file said.
  return {
    statePath,
    state: {
      version: 1,
      applied
    }
  };
}
|
|
1973
|
+
/**
 * Write the migration state as pretty-printed JSON (with a trailing
 * newline), creating the parent directory and registering the file in the
 * workspace env .gitignore first.
 */
async function saveMigrationState(cwd, statePath, state) {
  await mkdir2(path2.dirname(statePath), { recursive: true });
  const entry = toWrkspaceEnvIgnoreEntry(cwd, statePath);
  await ensureWrkspaceEnvGitignore(cwd, entry ? [entry] : []);
  const serialized = JSON.stringify(state, null, 2) + "\n";
  await writeFile2(statePath, serialized, "utf8");
}
|
|
1980
|
+
/**
 * Load and validate the persisted generated-schema snapshot.
 *
 * Returns undefined when the file does not exist. Otherwise enforces:
 * valid JSON, an object payload, version 1 / mode "migration-first",
 * a string-array "appliedMigrationIds", and an object "schema" map whose
 * definitions are re-normalized via normalizeSchemaDefinition. Throws with
 * an actionable "Fix:" message on any violation.
 */
async function loadPersistedGeneratedSchema(schemaPath) {
  if (!await pathExists(schemaPath)) {
    return void 0;
  }
  const raw = await readFile2(schemaPath, "utf8");
  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch {
    throw new Error(
      `Invalid generated schema file at "${schemaPath}". Fix: delete it and re-run migrations, or write valid JSON.`
    );
  }
  if (!isRecord(parsed)) {
    throw new Error(
      `Invalid generated schema file at "${schemaPath}". Fix: expected an object with schema metadata.`
    );
  }
  const version = parsed.version;
  const mode = parsed.mode;
  const schema = parsed.schema;
  const appliedMigrationIds = parsed.appliedMigrationIds;
  // Only the exact version/mode combination written by
  // savePersistedGeneratedSchema is accepted.
  if (version !== 1 || mode !== "migration-first") {
    throw new Error(
      `Unsupported generated schema version at "${schemaPath}". Fix: delete it and rebuild from migrations.`
    );
  }
  if (!Array.isArray(appliedMigrationIds) || appliedMigrationIds.some((item) => typeof item !== "string")) {
    throw new Error(
      `Invalid generated schema file at "${schemaPath}". Fix: "appliedMigrationIds" must be a string array.`
    );
  }
  if (!isRecord(schema)) {
    throw new Error(
      `Invalid generated schema file at "${schemaPath}". Fix: "schema" must be an object map.`
    );
  }
  // Re-normalize every definition so downstream code never sees raw JSON.
  const normalizedSchema = {};
  for (const [key, definition] of Object.entries(schema)) {
    normalizedSchema[key] = normalizeSchemaDefinition(key, definition);
  }
  // prefixPolicy is passed through as-is here; callers deserialize it
  // (see createInMemorySchemaStateFromPersisted).
  const parsedPrefixPolicy = parsed.prefixPolicy;
  return {
    version: 1,
    mode: "migration-first",
    appliedMigrationIds: [...appliedMigrationIds],
    schema: normalizedSchema,
    prefixPolicy: parsedPrefixPolicy
  };
}
|
|
2030
|
+
/**
 * Persist the derived ("generated") schema snapshot together with the
 * applied migration ids that produced it, so later runs can detect when a
 * rebuild is unnecessary. Definitions are deep-cloned before serialization;
 * the file is gitignored and written with a trailing newline.
 */
async function savePersistedGeneratedSchema(cwd, schemaPath, state, appliedMigrationIds) {
  const schema = {};
  for (const [key, definition] of Object.entries(state.schema)) {
    schema[key] = cloneSchemaDefinition(definition);
  }
  const payload = {
    version: 1,
    mode: "migration-first",
    appliedMigrationIds: [...appliedMigrationIds],
    schema,
    prefixPolicy: serializePrefixPolicy(state.prefixPolicy)
  };
  await mkdir2(path2.dirname(schemaPath), { recursive: true });
  const entry = toWrkspaceEnvIgnoreEntry(cwd, schemaPath);
  await ensureWrkspaceEnvGitignore(cwd, entry ? [entry] : []);
  await writeFile2(schemaPath, JSON.stringify(payload, null, 2) + "\n", "utf8");
}
|
|
2046
|
+
/**
 * Generate the env markdown doc and audit JSON from the schema and write
 * both to their default locations under cwd, creating parent directories
 * and registering both files in the workspace env .gitignore first.
 */
async function writeEnvDocsFromSchema(cwd, schema) {
  const { envMarkdown, envAuditJson } = generateArtifacts(schema);
  const docPath = path2.resolve(cwd, DEFAULT_ENV_DOC_FILE);
  const auditPath = path2.resolve(cwd, DEFAULT_ENV_AUDIT_FILE);
  await Promise.all(
    [docPath, auditPath].map((filePath) => mkdir2(path2.dirname(filePath), { recursive: true }))
  );
  await ensureWrkspaceEnvGitignore(cwd, [
    toWrkspaceEnvIgnoreEntry(cwd, docPath) ?? "",
    toWrkspaceEnvIgnoreEntry(cwd, auditPath) ?? ""
  ]);
  await Promise.all([
    writeFile2(docPath, envMarkdown, "utf8"),
    writeFile2(auditPath, envAuditJson, "utf8")
  ]);
}
|
|
2063
|
+
// Fresh in-memory schema state: no definitions and no prefix policy yet.
function createEmptyInMemorySchemaState() {
  const schema = {};
  return { schema };
}
|
|
2068
|
+
/**
 * Hydrate an in-memory schema state from a persisted snapshot: each
 * definition is re-normalized and the prefix policy is deserialized.
 * A missing snapshot yields a fresh empty state.
 */
function createInMemorySchemaStateFromPersisted(persisted) {
  if (!persisted) {
    return createEmptyInMemorySchemaState();
  }
  const schema = {};
  for (const [key, definition] of Object.entries(persisted.schema)) {
    schema[key] = normalizeSchemaDefinition(key, definition);
  }
  return {
    schema,
    prefixPolicy: deserializePrefixPolicy(persisted.prefixPolicy)
  };
}
|
|
2082
|
+
// Hex-encoded SHA-256 of the given UTF-8 text; used for migration checksums.
function computeChecksum(content) {
  const hash = createHash("sha256");
  hash.update(content, "utf8");
  return hash.digest("hex");
}
|
|
2085
|
+
var cachedTypeScriptModule;
|
|
2086
|
+
/**
 * Narrow an arbitrary import result down to the minimal TypeScript API
 * surface this module needs (transpileModule plus the two enum members
 * used as compiler options). Returns undefined when the shape is unusable.
 */
function resolveTypeScriptModule(source) {
  if (!isRecord(source)) {
    return void 0;
  }
  const { transpileModule, ModuleKind: moduleKind, ScriptTarget: scriptTarget } = source;
  const hasTranspile = typeof transpileModule === "function";
  const hasModuleKind = isRecord(moduleKind) && typeof moduleKind.ESNext === "number";
  const hasScriptTarget = isRecord(scriptTarget) && typeof scriptTarget.ES2022 === "number";
  if (!hasTranspile || !hasModuleKind || !hasScriptTarget) {
    return void 0;
  }
  // Return a frozen-shape copy exposing only what callers rely on.
  return {
    transpileModule,
    ModuleKind: { ESNext: moduleKind.ESNext },
    ScriptTarget: { ES2022: scriptTarget.ES2022 }
  };
}
|
|
2106
|
+
/**
 * Lazily import the host project's "typescript" package and memoize the
 * resolved module (the cache holds the in-flight promise, so concurrent
 * callers share one import).
 *
 * Fix over the previous version: a failed import no longer stays cached.
 * Previously the rejected promise was memoized forever, so installing
 * typescript mid-session could never recover without a process restart;
 * now the cache is cleared on rejection and the next call retries.
 *
 * @returns The narrowed TypeScript API (transpileModule/ModuleKind/ScriptTarget).
 * @throws When typescript cannot be imported or has an unsupported shape.
 */
async function loadTypeScriptModule() {
  if (!cachedTypeScriptModule) {
    const pending = (async () => {
      let imported;
      try {
        imported = await import("typescript");
      } catch (error) {
        const reason = error instanceof Error ? error.message : String(error);
        throw new Error(
          `Cannot load "typescript" required for .ts env migrations. Fix: install it in your app (pnpm add -D typescript) or use .mjs migrations. Error: ${reason}`
        );
      }
      // Accept either a directly-usable module or one hidden behind a
      // "default" export (CJS/ESM interop).
      const direct = resolveTypeScriptModule(imported);
      if (direct) {
        return direct;
      }
      const fallbackDefault = isRecord(imported) ? resolveTypeScriptModule(imported.default) : void 0;
      if (fallbackDefault) {
        return fallbackDefault;
      }
      throw new Error(
        'Loaded "typescript" but module shape is unsupported. Fix: use a current TypeScript version that exports transpileModule/ModuleKind/ScriptTarget.'
      );
    })();
    cachedTypeScriptModule = pending;
    // Do not memoize failures: clear the cache so a later call can retry
    // (e.g. after the user installs typescript). The guard avoids clobbering
    // a newer attempt. This also marks the rejection as handled.
    pending.catch(() => {
      if (cachedTypeScriptModule === pending) {
        cachedTypeScriptModule = void 0;
      }
    });
  }
  return await cachedTypeScriptModule;
}
|
|
2133
|
+
/**
 * Dynamically import a migration module from disk.
 *
 * TypeScript sources (.ts/.mts/.cts) are transpiled in-memory with the
 * host project's typescript, written to a uniquely-named transient .mjs
 * file alongside the original, imported, and the transient file is removed
 * again (best-effort) in the finally block. All imports append a
 * `?v=Date.now()` query so Node's ESM cache never serves a stale module.
 *
 * @param filePath Absolute path of the migration file.
 * @returns The imported module namespace object.
 */
async function importModuleFromFile(filePath) {
  const extension = path2.extname(filePath).toLowerCase();
  if (extension === ".ts" || extension === ".mts" || extension === ".cts") {
    const source = await readFile2(filePath, "utf8");
    const typescript = await loadTypeScriptModule();
    const transpiled = typescript.transpileModule(source, {
      fileName: filePath,
      compilerOptions: {
        target: typescript.ScriptTarget.ES2022,
        module: typescript.ModuleKind.ESNext
      }
    }).outputText;
    // Unique per-process/per-call name so parallel loads cannot collide;
    // placed next to the source so relative imports keep resolving.
    const transientFilePath = path2.join(
      path2.dirname(filePath),
      `.__wrkspace_env_migration_${path2.basename(filePath)}_${process.pid}_${Date.now()}.mjs`
    );
    await writeFile2(transientFilePath, transpiled, "utf8");
    try {
      return await import(`${pathToFileURL(transientFilePath).href}?v=${Date.now()}`);
    } finally {
      // Best-effort cleanup: ignore unlink failures (e.g. already removed).
      await unlink(transientFilePath).catch(() => void 0);
    }
  }
  // Plain .js/.mjs/.cjs files are imported directly (cache-busted).
  return await import(`${pathToFileURL(filePath).href}?v=${Date.now()}`);
}
|
|
2158
|
+
/**
 * Flatten a dynamic-import result into one export record: named exports
 * first, then the properties of an object "default" export, which win on
 * conflict (CJS/ESM interop). Non-object inputs produce an empty record.
 */
function normalizeMigrationExports(imported) {
  if (!isRecord(imported)) {
    return {};
  }
  const output = { ...imported };
  if (isRecord(imported.default)) {
    Object.assign(output, imported.default);
  }
  return output;
}
|
|
2169
|
+
/**
 * Load one migration module and validate its contract.
 *
 * The migration id is always derived from the filename stem; an exported
 * "id" is only accepted when it matches that stem exactly. Both "up" and
 * "down" must be exported functions. The file's checksum (SHA-256 of the
 * raw source) is computed before import so state comparisons reflect the
 * on-disk bytes.
 *
 * @param filePath Absolute path of the migration file.
 * @returns `{ id, up, down, filePath, checksum }`.
 * @throws On any contract violation, with an actionable "Fix:" message.
 */
async function loadMigrationFromFile(filePath) {
  const source = await readFile2(filePath, "utf8");
  const checksum = computeChecksum(source);
  const imported = await importModuleFromFile(filePath);
  const normalized = normalizeMigrationExports(imported);
  const exportedId = normalized.id;
  // Id is the filename without its extension, e.g. "20240101_add_key".
  const derivedId = path2.basename(filePath, path2.extname(filePath));
  const up = normalized.up;
  const down = normalized.down;
  if (exportedId !== void 0 && (typeof exportedId !== "string" || exportedId.trim().length === 0)) {
    throw new Error(
      `Invalid migration "${filePath}". Fix: if you export "id", it must be a non-empty string matching the filename stem "${derivedId}".`
    );
  }
  if (typeof exportedId === "string" && exportedId !== derivedId) {
    throw new Error(
      `Invalid migration "${filePath}". Fix: migration id is derived from filename ("${derivedId}"). Remove exported id or rename it to match.`
    );
  }
  if (typeof up !== "function") {
    throw new Error(
      `Invalid migration "${filePath}". Fix: export an "up(ctx)" function.`
    );
  }
  if (typeof down !== "function") {
    throw new Error(
      `Invalid migration "${filePath}". Fix: export a "down(ctx)" function.`
    );
  }
  return {
    id: derivedId,
    up,
    down,
    filePath,
    checksum
  };
}
|
|
2206
|
+
/**
 * Find, load and order all migration files in the migrations directory.
 *
 * A missing directory is not an error (yields an empty list). Candidate
 * filenames must match MIGRATION_FILE_PATTERN (declared elsewhere in this
 * module) and TypeScript declaration files (.d.ts) are excluded. Files are
 * loaded in name order, duplicate ids are rejected, and the final list is
 * sorted by id (then path) for deterministic execution order.
 *
 * @param options Optional `{ cwd, migrationsDir }` overrides.
 * @returns `{ migrationsDir, migrations }`.
 */
async function discoverMigrations(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const migrationsDir = resolveMigrationsDir(cwd, options.migrationsDir);
  if (!await pathExists(migrationsDir)) {
    return {
      migrationsDir,
      migrations: []
    };
  }
  const directoryEntries = await readdir(migrationsDir, {
    withFileTypes: true
  });
  // Regular files only, pattern-matched, sorted by filename, resolved to
  // absolute paths.
  const migrationPaths = directoryEntries.filter((entry) => entry.isFile()).map((entry) => entry.name).filter((filename) => MIGRATION_FILE_PATTERN.test(filename) && !filename.endsWith(".d.ts")).sort((left, right) => left.localeCompare(right)).map((filename) => path2.resolve(migrationsDir, filename));
  const migrations = [];
  const seenIds = /* @__PURE__ */ new Set();
  // Loaded sequentially: each import may transpile and write transient files.
  for (const migrationPath of migrationPaths) {
    const migration = await loadMigrationFromFile(migrationPath);
    if (seenIds.has(migration.id)) {
      throw new Error(
        `Duplicate migration id "${migration.id}" in "${migrationsDir}". Fix: ensure migration filenames produce unique ids.`
      );
    }
    seenIds.add(migration.id);
    migrations.push(migration);
  }
  migrations.sort((left, right) => left.id.localeCompare(right.id) || left.filePath.localeCompare(right.filePath));
  return {
    migrationsDir,
    migrations
  };
}
|
|
2237
|
+
/**
 * Diff discovered migration files against the recorded state and build a
 * status report. Each entry is classified as:
 *  - "pending": file exists, never applied
 *  - "applied": applied and checksum still matches
 *  - "changed": applied but the file's checksum drifted
 *  - "missing": recorded as applied but the file is gone
 * File-backed entries come first (in migration order), then missing ones
 * (in recorded order). The summary counts each status plus the total.
 */
function migrationStatusFromRuntime(migrationsDir, statePath, migrations, state) {
  const appliedById = new Map(state.applied.map((entry) => [entry.id, entry]));
  const knownIds = new Set(migrations.map((migration) => migration.id));
  const entries = [];
  for (const migration of migrations) {
    const record = appliedById.get(migration.id);
    if (record === void 0) {
      entries.push({
        id: migration.id,
        status: "pending",
        filePath: migration.filePath,
        checksum: migration.checksum
      });
    } else {
      entries.push({
        id: migration.id,
        status: record.checksum === migration.checksum ? "applied" : "changed",
        filePath: migration.filePath,
        appliedAt: record.appliedAt,
        checksum: record.checksum,
        currentChecksum: migration.checksum
      });
    }
  }
  for (const record of state.applied) {
    if (!knownIds.has(record.id)) {
      entries.push({
        id: record.id,
        status: "missing",
        appliedAt: record.appliedAt,
        checksum: record.checksum
      });
    }
  }
  const summary = { total: entries.length, applied: 0, pending: 0, changed: 0, missing: 0 };
  for (const entry of entries) {
    summary[entry.status] += 1;
  }
  return {
    migrationsDir,
    statePath,
    entries,
    summary
  };
}
|
|
2293
|
+
/**
 * Discover migration files and load the recorded state in parallel, then
 * compute the combined status report.
 *
 * @param options Optional `{ cwd, migrationsDir, stateFile }` overrides.
 */
async function getMigrationStatus(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const [discovered, loaded] = await Promise.all([
    discoverMigrations({ cwd, migrationsDir: options.migrationsDir }),
    loadMigrationState({ cwd, stateFile: options.stateFile })
  ]);
  return migrationStatusFromRuntime(
    discovered.migrationsDir,
    loaded.statePath,
    discovered.migrations,
    loaded.state
  );
}
|
|
2307
|
+
/**
 * Rebuild the generated schema by replaying every applied migration's up()
 * against a fresh in-memory state (using a no-op target-document set, so
 * only the schema is affected — no env files are touched), then persist
 * the snapshot.
 *
 * @param options `{ cwd, generatedSchemaPath, migrations, state }`.
 * @returns `{ schemaPath, schema, prefixPolicy, appliedMigrationIds }`.
 * @throws If an applied migration's file is missing, or its up() fails
 *         (wrapped via formatMigrationExecutionError).
 */
async function rebuildGeneratedSchemaFromState(options) {
  const migrationById = new Map(options.migrations.map((migration) => [migration.id, migration]));
  const schemaState = createEmptyInMemorySchemaState();
  // Replay strictly in recorded (applied) order.
  for (const applied of options.state.applied) {
    const migration = migrationById.get(applied.id);
    if (!migration) {
      throw new Error(
        `Cannot rebuild generated schema because migration "${applied.id}" is missing. Fix: restore migration files and rerun.`
      );
    }
    // A fresh context per migration, all mutating the shared schemaState.
    const context = createEnvMigrationContext(createNoopTargetDocuments(), [], schemaState);
    try {
      await migration.up(context);
      // Flush fluent chains so pending definitions land in schemaState.
      finalizeFluentDefinitions(context);
    } catch (error) {
      throw formatMigrationExecutionError(migration, "up", error);
    }
  }
  const appliedMigrationIds = options.state.applied.map((entry) => entry.id);
  await savePersistedGeneratedSchema(
    options.cwd,
    options.generatedSchemaPath,
    schemaState,
    appliedMigrationIds
  );
  return {
    schemaPath: options.generatedSchemaPath,
    schema: schemaState.schema,
    prefixPolicy: schemaState.prefixPolicy,
    appliedMigrationIds
  };
}
|
|
2339
|
+
/**
 * Return the effective migration-derived schema, using the persisted
 * snapshot as a cache.
 *
 * Flow: discover migrations + load state + load snapshot (in parallel),
 * verify integrity (no changed/missing migrations), then:
 *  - nothing applied and no snapshot: error, unless allowEmpty writes and
 *    returns an empty schema;
 *  - snapshot's applied-id list matches the state exactly (and no
 *    forceRebuild): reuse the snapshot;
 *  - otherwise: rebuild by replaying applied migrations and re-persist.
 *
 * @param options Optional `{ cwd, migrationsDir, stateFile,
 *   generatedSchemaFile, allowEmpty, forceRebuild }`.
 * @returns `{ schemaPath, schema, prefixPolicy?, appliedMigrationIds }`
 *   (the allowEmpty branch omits prefixPolicy).
 */
async function loadOrBuildMigrationSchema(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const generatedSchemaPath = resolveGeneratedSchemaPath(cwd, options.generatedSchemaFile);
  const [{ migrationsDir, migrations }, { statePath, state }, persistedSchema] = await Promise.all([
    discoverMigrations({
      cwd,
      migrationsDir: options.migrationsDir
    }),
    loadMigrationState({
      cwd,
      stateFile: options.stateFile
    }),
    loadPersistedGeneratedSchema(generatedSchemaPath)
  ]);
  // Refuse to proceed when recorded migrations drifted or disappeared.
  const status = migrationStatusFromRuntime(migrationsDir, statePath, migrations, state);
  assertMigrationIntegrity(status);
  if (state.applied.length === 0 && !persistedSchema) {
    if (options.allowEmpty) {
      const emptyState = createEmptyInMemorySchemaState();
      await savePersistedGeneratedSchema(cwd, generatedSchemaPath, emptyState, []);
      return {
        schemaPath: generatedSchemaPath,
        schema: {},
        appliedMigrationIds: []
      };
    }
    throw new Error(
      `No schema file found and no applied migrations to derive one in "${cwd}". Fix: apply migrations that define schema (for example via ctx.url(...)/ctx.string(...)) or provide --schema <path>.`
    );
  }
  const appliedMigrationIds = state.applied.map((entry) => entry.id);
  // Cache hit: the snapshot was built from exactly this ordered id list.
  if (!options.forceRebuild && persistedSchema && persistedSchema.appliedMigrationIds.length === appliedMigrationIds.length && persistedSchema.appliedMigrationIds.every((id, index) => id === appliedMigrationIds[index])) {
    const normalized = createInMemorySchemaStateFromPersisted(persistedSchema);
    return {
      schemaPath: generatedSchemaPath,
      schema: normalized.schema,
      prefixPolicy: normalized.prefixPolicy,
      appliedMigrationIds
    };
  }
  return await rebuildGeneratedSchemaFromState({
    cwd,
    generatedSchemaPath,
    migrations,
    state
  });
}
|
|
2386
|
+
/**
 * Wrap an error thrown inside a migration's up()/down() into a single
 * actionable Error naming the migration id, direction and file path.
 * Non-Error values are stringified.
 */
function formatMigrationExecutionError(migration, direction, error) {
  const reason = error instanceof Error ? error.message : String(error);
  const message = `Migration "${migration.id}" failed during ${direction}() at "${migration.filePath}". Fix: check migration logic and retry. Error: ${reason}`;
  return new Error(message);
}
|
|
2392
|
+
/**
 * Throw when on-disk migration files disagree with the recorded state
 * (checksum changed or file missing); no-op when everything matches.
 * @param status - result of migrationStatusFromRuntime (summary + per-entry status)
 * @throws Error listing the changed/missing migration ids and the state file to reconcile
 */
function assertMigrationIntegrity(status) {
  const { changed, missing } = status.summary;
  if (changed === 0 && missing === 0) {
    return;
  }
  const idsWithStatus = (wanted) =>
    status.entries.filter((entry) => entry.status === wanted).map((entry) => entry.id);
  const changedIds = idsWithStatus("changed");
  const missingIds = idsWithStatus("missing");
  const details = [];
  if (changedIds.length > 0) {
    details.push(`changed: ${changedIds.join(", ")}`);
  }
  if (missingIds.length > 0) {
    details.push(`missing: ${missingIds.join(", ")}`);
  }
  throw new Error(
    `Migration integrity check failed (${details.join(" | ")}). Fix: restore migration files/checksums to match ${status.statePath}.`
  );
}
|
|
2409
|
+
/** Serialize a Date as an ISO-8601 UTC timestamp (e.g. "2024-01-02T03:04:05.000Z"). */
function formatIsoTimestamp(date) {
  // Built-in serializer is always UTC with millisecond precision.
  const iso = date.toISOString();
  return iso;
}
|
|
2412
|
+
/**
 * Render a Date as the compact UTC timestamp used in migration ids:
 * YYYYMMDDHHMMSS (all components zero-padded to two digits except the year).
 */
function formatMigrationIdTimestamp(date) {
  const pad2 = (value) => String(value).padStart(2, "0");
  const parts = [
    date.getUTCFullYear(),
    pad2(date.getUTCMonth() + 1), // getUTCMonth is 0-indexed
    pad2(date.getUTCDate()),
    pad2(date.getUTCHours()),
    pad2(date.getUTCMinutes()),
    pad2(date.getUTCSeconds())
  ];
  return parts.join("");
}
|
|
2421
|
+
/**
 * Normalize a human-entered name into a migration slug: lowercase,
 * non-alphanumerics collapsed to single dashes, dashes trimmed from both ends.
 * Falls back to "migration" when nothing survives normalization.
 */
function slugify(input) {
  const collapsed = input
    .trim()
    .toLowerCase()
    .replace(/[^a-z0-9]+/g, "-")
    .replace(/^-+|-+$/g, "");
  // Only the empty string is falsy here, so `||` is a safe fallback.
  return collapsed || "migration";
}
|
|
2425
|
+
/**
 * Render the default TypeScript source for a freshly scaffolded migration.
 * Contains commented fluent-API examples for up() and a matching down().
 * @param id - migration id (filename without extension), embedded in a comment
 * @returns the full file content, newline-joined with a trailing newline
 */
function renderDefaultMigrationTemplate(id) {
  const _migrationId = id;
  return [
    'import type { EnvMigrationContext } from "@wrkspace-co/env";',
    "",
    `// Migration id is derived from this filename: ${_migrationId}.ts`,
    "",
    "export async function up(ctx: EnvMigrationContext) {",
    " // Fluent schema + env definition:",
    ' // ctx.url("DATABASE_URL").scope("server").requiredIn("prod");',
    ' // ctx.enum("LOG_LEVEL").values(["debug", "info", "warn", "error"]).defaultValue("info");',
    "}",
    "",
    "export async function down(ctx: EnvMigrationContext) {",
    ' // ctx.key("DATABASE_URL").remove();',
    ' // ctx.key("LOG_LEVEL").remove();',
    "}",
    "" // trailing empty element yields a final newline after join
  ].join("\n");
}
|
|
2445
|
+
/**
 * Scaffold a new migration file on disk.
 * The id is `<UTC timestamp>_<slug of options.name>`; the file is `<id>.ts`
 * inside the resolved migrations directory (created if absent).
 * @param options - { cwd?, migrationsDir?, now?, name, template? }
 * @returns { id, filePath, migrationsDir }
 * @throws Error when a file with the derived name already exists
 */
async function createMigrationFile(options) {
  const cwd = options.cwd ?? process.cwd();
  const migrationsDir = resolveMigrationsDir(cwd, options.migrationsDir);
  // Injectable clock for deterministic ids in tests.
  const now = options.now ?? /* @__PURE__ */ new Date();
  const id = `${formatMigrationIdTimestamp(now)}_${slugify(options.name)}`;
  const filename = `${id}.ts`;
  const filePath = path2.resolve(migrationsDir, filename);
  await mkdir2(migrationsDir, { recursive: true });
  // Never clobber an existing migration; the user must pick a new name/time.
  if (await pathExists(filePath)) {
    throw new Error(
      `Migration file already exists at "${filePath}". Fix: choose a different migration name or timestamp.`
    );
  }
  // Custom template callback wins over the built-in default scaffold.
  const content = options.template ? options.template(id) : renderDefaultMigrationTemplate(id);
  await writeFile2(filePath, content, "utf8");
  return {
    id,
    filePath,
    migrationsDir
  };
}
|
|
2466
|
+
/**
 * Apply every not-yet-applied migration in discovery order.
 * For each pending migration: run up(), write touched env target files,
 * record the applied entry (id/appliedAt/checksum), and persist both the
 * migration state and the generated schema after EVERY migration, so a
 * mid-run failure leaves a consistent partial state on disk.
 * Finally regenerates env docs from the resulting schema.
 * @param options - { cwd?, now?, migrationsDir?, stateFile?, generatedSchemaFile? }
 * @returns { migrationsDir, statePath, applied, changedFiles, pendingCount }
 * @throws from assertMigrationIntegrity, or a wrapped error when a migration's up() fails
 */
async function applyPendingMigrations(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  // Injectable clock: every migration applied in this run shares one timestamp.
  const now = options.now ?? /* @__PURE__ */ new Date();
  const generatedSchemaPath = resolveGeneratedSchemaPath(cwd, options.generatedSchemaFile);
  // Discovery and state loading are independent; run them concurrently.
  const [{ migrationsDir, migrations }, { statePath, state }] = await Promise.all([
    discoverMigrations({
      cwd,
      migrationsDir: options.migrationsDir
    }),
    loadMigrationState({
      cwd,
      stateFile: options.stateFile
    })
  ]);
  // Refuse to apply on top of changed/missing migration files.
  const status = migrationStatusFromRuntime(migrationsDir, statePath, migrations, state);
  assertMigrationIntegrity(status);
  const appliedIds = new Set(state.applied.map((item) => item.id));
  const pending = migrations.filter((migration) => !appliedIds.has(migration.id));
  if (pending.length === 0) {
    // Nothing to apply: still refresh docs from the (possibly empty) schema.
    const derivedSchema2 = await loadOrBuildMigrationSchema({
      cwd,
      migrationsDir: options.migrationsDir,
      stateFile: options.stateFile,
      generatedSchemaFile: options.generatedSchemaFile,
      allowEmpty: true
    });
    await writeEnvDocsFromSchema(cwd, derivedSchema2.schema);
    return {
      migrationsDir,
      statePath,
      applied: [],
      changedFiles: [],
      pendingCount: 0
    };
  }
  const targetDocuments = await loadEnvTargetDocuments({ cwd });
  const derivedSchema = await loadOrBuildMigrationSchema({
    cwd,
    migrationsDir: options.migrationsDir,
    stateFile: options.stateFile,
    generatedSchemaFile: options.generatedSchemaFile,
    allowEmpty: true
  });
  // Mutable in-memory schema the migration contexts write into; seeded from
  // the persisted schema with every definition re-normalized.
  const schemaState = {
    schema: Object.fromEntries(
      Object.entries(derivedSchema.schema).map(([key, definition]) => [
        key,
        normalizeSchemaDefinition(key, definition)
      ])
    ),
    prefixPolicy: derivedSchema.prefixPolicy
  };
  const changedFiles = /* @__PURE__ */ new Set();
  const applied = [];
  for (const migration of pending) {
    // Fresh context per migration, all sharing the same documents/schema state.
    const context = createEnvMigrationContext(targetDocuments, [], schemaState);
    try {
      await migration.up(context);
      finalizeFluentDefinitions(context);
    } catch (error) {
      throw formatMigrationExecutionError(migration, "up", error);
    }
    const changedFromMigration = await writeEnvTargetDocuments(targetDocuments);
    for (const changedFile of changedFromMigration) {
      changedFiles.add(changedFile);
    }
    state.applied.push({
      id: migration.id,
      appliedAt: formatIsoTimestamp(now),
      checksum: migration.checksum
    });
    // Persist state and schema after each migration so partial progress survives a crash.
    await Promise.all([
      saveMigrationState(cwd, statePath, state),
      savePersistedGeneratedSchema(
        cwd,
        generatedSchemaPath,
        schemaState,
        state.applied.map((entry) => entry.id)
      )
    ]);
    applied.push(migration.id);
  }
  await writeEnvDocsFromSchema(cwd, schemaState.schema);
  return {
    migrationsDir,
    statePath,
    applied,
    changedFiles: [...changedFiles].sort((left, right) => left.localeCompare(right)),
    pendingCount: 0
  };
}
|
|
2557
|
+
/**
 * Roll back the last `options.steps` applied migrations, newest first.
 * For each: run down(), write touched env target files, remove the entry from
 * state, and persist state + generated schema after EVERY rollback so a
 * mid-run failure leaves a consistent partial state on disk.
 * Finally regenerates env docs from the resulting schema.
 * @param options - { steps, cwd?, migrationsDir?, stateFile?, generatedSchemaFile? }
 * @returns { migrationsDir, statePath, rolledBack, changedFiles, remainingApplied }
 * @throws on invalid steps, integrity failure, more steps than applied,
 *         a missing migration file, or a wrapped down() failure
 */
async function rollbackMigrations(options) {
  const cwd = options.cwd ?? process.cwd();
  const generatedSchemaPath = resolveGeneratedSchemaPath(cwd, options.generatedSchemaFile);
  // steps must be a positive integer; reject before touching disk.
  if (!Number.isInteger(options.steps) || options.steps <= 0) {
    throw new Error(`Invalid rollback steps "${options.steps}". Fix: pass a positive integer.`);
  }
  const [{ migrationsDir, migrations }, { statePath, state }] = await Promise.all([
    discoverMigrations({
      cwd,
      migrationsDir: options.migrationsDir
    }),
    loadMigrationState({
      cwd,
      stateFile: options.stateFile
    })
  ]);
  const status = migrationStatusFromRuntime(migrationsDir, statePath, migrations, state);
  assertMigrationIntegrity(status);
  if (state.applied.length === 0) {
    // Nothing applied: rollback is a no-op, not an error.
    return {
      migrationsDir,
      statePath,
      rolledBack: [],
      changedFiles: [],
      remainingApplied: 0
    };
  }
  if (options.steps > state.applied.length) {
    throw new Error(
      `Cannot rollback ${options.steps} step(s): only ${state.applied.length} migration(s) are applied. Fix: lower --steps.`
    );
  }
  const migrationById = /* @__PURE__ */ new Map();
  for (const migration of migrations) {
    migrationById.set(migration.id, migration);
  }
  // Take the newest `steps` entries and roll them back newest-first.
  const selected = state.applied.slice(-options.steps).reverse();
  const targetDocuments = await loadEnvTargetDocuments({ cwd });
  const derivedSchema = await loadOrBuildMigrationSchema({
    cwd,
    migrationsDir: options.migrationsDir,
    stateFile: options.stateFile,
    generatedSchemaFile: options.generatedSchemaFile,
    allowEmpty: true
  });
  // Mutable in-memory schema the migration contexts write into.
  const schemaState = {
    schema: Object.fromEntries(
      Object.entries(derivedSchema.schema).map(([key, definition]) => [
        key,
        normalizeSchemaDefinition(key, definition)
      ])
    ),
    prefixPolicy: derivedSchema.prefixPolicy
  };
  const changedFiles = /* @__PURE__ */ new Set();
  const rolledBack = [];
  for (const applied of selected) {
    const migration = migrationById.get(applied.id);
    // down() lives in the migration file, so the file must still exist.
    if (!migration) {
      throw new Error(
        `Cannot rollback migration "${applied.id}" because its file is missing. Fix: restore the migration file in ${migrationsDir}.`
      );
    }
    const context = createEnvMigrationContext(targetDocuments, [], schemaState);
    try {
      await migration.down(context);
      finalizeFluentDefinitions(context);
    } catch (error) {
      throw formatMigrationExecutionError(migration, "down", error);
    }
    const changedFromMigration = await writeEnvTargetDocuments(targetDocuments);
    for (const changedFile of changedFromMigration) {
      changedFiles.add(changedFile);
    }
    state.applied = state.applied.filter((entry) => entry.id !== applied.id);
    // Persist after each rollback so partial progress survives a crash.
    await Promise.all([
      saveMigrationState(cwd, statePath, state),
      savePersistedGeneratedSchema(
        cwd,
        generatedSchemaPath,
        schemaState,
        state.applied.map((entry) => entry.id)
      )
    ]);
    rolledBack.push(applied.id);
  }
  await writeEnvDocsFromSchema(cwd, schemaState.schema);
  return {
    migrationsDir,
    statePath,
    rolledBack,
    changedFiles: [...changedFiles].sort((left, right) => left.localeCompare(right)),
    remainingApplied: state.applied.length
  };
}
|
|
2652
|
+
|
|
2653
|
+
// src/schema-loader.ts
|
|
2654
|
+
// Candidate schema filenames, in probe order: ESM first, then CJS, then JSON.
var DEFAULT_SCHEMA_FILES = [
  "env.schema.mjs",
  "env.schema.js",
  "env.schema.cjs",
  "env.schema.json"
];
|
|
2660
|
+
/** True when `filepath` is accessible on disk; false otherwise (never throws). */
async function exists(filepath) {
  let accessible = true;
  try {
    await access3(filepath);
  } catch {
    // access() rejects for missing paths and permission errors alike.
    accessible = false;
  }
  return accessible;
}
|
|
2668
|
+
/**
 * Resolve the schema file path to load.
 * An explicit `schemaPath` wins and must exist on disk. Otherwise the schema
 * is derived migration-first via loadOrBuildMigrationSchema and the generated
 * schema file path is returned; any failure there is wrapped with an
 * actionable hint pointing at migrations or the --schema escape hatch.
 * @param schemaPath - optional user-supplied path (relative to cwd)
 * @param cwd - project root
 * @param options - { migrationsDir?, stateFile?, generatedSchemaFile?, forceRebuildMigrationSchema? }
 * @returns absolute path of the schema file to load
 */
async function resolveSchemaPath(schemaPath, cwd, options = {}) {
  if (schemaPath) {
    const resolved = path3.resolve(cwd, schemaPath);
    if (!await exists(resolved)) {
      throw new Error(
        `Schema file not found at "${resolved}". Fix: provide a valid path via --schema <path>.`
      );
    }
    return resolved;
  }
  try {
    // Migration-first: build (or reuse) the generated schema from applied migrations.
    const derived = await loadOrBuildMigrationSchema({
      cwd,
      migrationsDir: options.migrationsDir,
      stateFile: options.stateFile,
      generatedSchemaFile: options.generatedSchemaFile,
      forceRebuild: Boolean(options.forceRebuildMigrationSchema),
      allowEmpty: false
    });
    return derived.schemaPath;
  } catch (error) {
    // Recompute the path only for the error message (derivation failed before it existed).
    const generatedSchemaPath = path3.resolve(
      cwd,
      options.generatedSchemaFile ?? DEFAULT_GENERATED_SCHEMA_FILE2
    );
    const reason = error instanceof Error ? error.message : String(error);
    throw new Error(
      `Migration-first schema resolution failed in "${cwd}" for "${generatedSchemaPath}". Fix: run env migrations that define schema (for example via ctx.url(...)/ctx.string(...)), or pass --schema <path> for manual debug/view mode. Error: ${reason}`
    );
  }
}
|
|
2699
|
+
/**
 * Validate a raw schema export: it must be a plain object whose values are
 * plain-object definitions, each carrying a string `kind`.
 * Returns the same object (no copy), so callers keep reference identity.
 * @throws Error naming the offending key with an actionable fix
 */
function validateLoadedSchema(rawSchema, schemaPath) {
  const isPlainObject = (value) =>
    Boolean(value) && typeof value === "object" && !Array.isArray(value);
  if (!isPlainObject(rawSchema)) {
    throw new Error(
      `Invalid schema export from "${schemaPath}". Fix: export an object map of env keys to variable definitions.`
    );
  }
  for (const [key, definition] of Object.entries(rawSchema)) {
    if (!isPlainObject(definition)) {
      throw new Error(
        `Invalid schema definition for key "${key}" in "${schemaPath}". Fix: each key must map to an object definition.`
      );
    }
    if (typeof definition.kind !== "string") {
      throw new Error(
        `Invalid schema definition for key "${key}" in "${schemaPath}". Fix: include a "kind" field (string/url/number/boolean/enum/json).`
      );
    }
  }
  return rawSchema;
}
|
|
2721
|
+
/**
 * Normalize a raw prefix-policy value from a schema file.
 * Accepts the preset names "nextjs"/"vite" (resolved from prefixPolicies),
 * a PrefixPolicy object (must carry publicPrefixes: string[]), or a
 * nullish value (returns undefined). Anything else throws.
 * @param fieldName - dotted path used in error messages (defaults to "prefixPolicy")
 */
function resolvePrefixPolicy(rawPolicy, schemaPath, fieldName = "prefixPolicy") {
  if (!rawPolicy) {
    return void 0;
  }
  if (typeof rawPolicy === "string") {
    switch (rawPolicy) {
      case "nextjs":
        return prefixPolicies.nextjs;
      case "vite":
        return prefixPolicies.vite;
      default:
        throw new Error(
          `Unsupported ${fieldName} "${rawPolicy}" in "${schemaPath}". Fix: use "nextjs", "vite", or a PrefixPolicy object.`
        );
    }
  }
  if (typeof rawPolicy === "object" && !Array.isArray(rawPolicy)) {
    if (!Array.isArray(rawPolicy.publicPrefixes)) {
      throw new Error(
        `Invalid ${fieldName} in "${schemaPath}". Fix: include publicPrefixes: string[].`
      );
    }
    return rawPolicy;
  }
  throw new Error(
    `Invalid ${fieldName} in "${schemaPath}". Fix: use "nextjs", "vite", or a PrefixPolicy object.`
  );
}
|
|
2749
|
+
/** True for plain-object-like values: non-null objects that are not arrays. */
function isRecord2(candidate) {
  if (!candidate) {
    return false;
  }
  return typeof candidate === "object" && Array.isArray(candidate) === false;
}
|
|
2752
|
+
/**
 * Normalize one per-app schema config entry.
 * Accepts either a bare schema-override map, or a "container" shape carrying
 * any of `schema`/`overrides`, `only`, `prefixPolicy`.
 * @returns { overrides, only, prefixPolicy } with `only` copied defensively
 */
function parseAppSchemaConfig(appName, rawAppConfig, schemaPath) {
  if (!isRecord2(rawAppConfig)) {
    throw new Error(
      `Invalid app config for "${appName}" in "${schemaPath}". Fix: app configs must be objects.`
    );
  }
  // Container-shaped when any reserved field is present; otherwise the whole
  // object is itself the override map.
  const containerFields = ["schema", "overrides", "only", "prefixPolicy"];
  const hasContainerShape = containerFields.some((field) => Object.hasOwn(rawAppConfig, field));
  const config = rawAppConfig;
  const overridesCandidate = hasContainerShape ? config.schema ?? config.overrides ?? {} : rawAppConfig;
  const overrides = validateLoadedSchema(overridesCandidate, schemaPath);
  let only;
  if (hasContainerShape && config.only !== void 0) {
    const isStringArray =
      Array.isArray(config.only) && config.only.every((value) => typeof value === "string");
    if (!isStringArray) {
      throw new Error(
        `Invalid apps.${appName}.only in "${schemaPath}". Fix: provide a string[] of schema keys.`
      );
    }
    only = [...config.only];
  }
  const prefixPolicy = hasContainerShape
    ? resolvePrefixPolicy(config.prefixPolicy, schemaPath, `apps.${appName}.prefixPolicy`)
    : void 0;
  return {
    overrides,
    only,
    prefixPolicy
  };
}
|
|
2780
|
+
/**
 * Parse the optional per-app schema map. `undefined` yields an empty map;
 * any non-record value throws; each entry is normalized via parseAppSchemaConfig.
 */
function parseApps(rawApps, schemaPath) {
  if (rawApps === void 0) {
    return {};
  }
  if (!isRecord2(rawApps)) {
    throw new Error(
      `Invalid apps config in "${schemaPath}". Fix: export apps as an object map keyed by app name.`
    );
  }
  return Object.fromEntries(
    Object.entries(rawApps).map(([appName, appConfig]) => [
      appName,
      parseAppSchemaConfig(appName, appConfig, schemaPath)
    ])
  );
}
|
|
2795
|
+
/**
 * Build the effective schema for one app: base schema shallow-merged with the
 * app's overrides (overrides win), optionally restricted to the `only` keys.
 * @throws Error when an `only` key is absent from the merged schema
 */
function selectSchemaForApp(baseSchema, appName, appConfig, schemaPath) {
  const mergedSchema = { ...baseSchema, ...appConfig.overrides };
  const { only } = appConfig;
  if (!only) {
    return mergedSchema;
  }
  const subsetSchema = {};
  for (const key of only) {
    if (!(key in mergedSchema)) {
      throw new Error(
        `Unknown key "${key}" in apps.${appName}.only for "${schemaPath}". Fix: add this key to base/schema overrides or remove it from only.`
      );
    }
    subsetSchema[key] = mergedSchema[key];
  }
  return subsetSchema;
}
|
|
2814
|
+
/**
 * Normalize a loaded schema module/JSON into { schema, prefixPolicy, schemaPath, app? }.
 * The base schema may be exported as `baseSchema`, `default`, `schema`, or the
 * module itself (first match wins). When `options.app` is set, the app's
 * overrides/only/prefixPolicy (from `apps` or `appSchemas`) are applied.
 * @throws when validation fails or the requested app is not defined
 */
function normalizeLoadedSchema(rawSource, schemaPath, options) {
  // Non-record exports fall through to validateLoadedSchema via rawSource below.
  const source = isRecord2(rawSource) ? rawSource : {};
  const baseCandidate = source.baseSchema ?? source.default ?? source.schema ?? rawSource;
  const baseSchema = validateLoadedSchema(baseCandidate, schemaPath);
  const globalPrefixPolicy = resolvePrefixPolicy(source.prefixPolicy, schemaPath, "prefixPolicy");
  // `appSchemas` is accepted as a legacy/alternate field name for `apps`.
  const apps = parseApps(source.apps ?? source.appSchemas, schemaPath);
  if (!options.app) {
    return {
      schema: baseSchema,
      prefixPolicy: globalPrefixPolicy,
      schemaPath
    };
  }
  const appConfig = apps[options.app];
  if (!appConfig) {
    const knownApps = Object.keys(apps);
    const knownAppsText = knownApps.length > 0 ? knownApps.join(", ") : "none";
    throw new Error(
      `Unknown app "${options.app}" for "${schemaPath}". Fix: use one of: ${knownAppsText}.`
    );
  }
  return {
    schema: selectSchemaForApp(baseSchema, options.app, appConfig, schemaPath),
    // App-level policy wins over the global one.
    prefixPolicy: appConfig.prefixPolicy ?? globalPrefixPolicy,
    schemaPath,
    app: options.app
  };
}
|
|
2842
|
+
/**
 * Load and normalize a schema file.
 * `.json` files are read and parsed directly; anything else is dynamically
 * imported as an ES module via a file:// URL.
 * @param schemaPath - absolute path to the schema file
 * @param options - forwarded to normalizeLoadedSchema (e.g. { app })
 */
async function loadSchema(schemaPath, options = {}) {
  if (schemaPath.endsWith(".json")) {
    const jsonContent = await readFile3(schemaPath, "utf8");
    return normalizeLoadedSchema(JSON.parse(jsonContent), schemaPath, options);
  }
  // file:// URL is required for dynamic import of absolute paths (esp. on Windows).
  const imported = await import(pathToFileURL2(schemaPath).href);
  return normalizeLoadedSchema(imported, schemaPath, options);
}
|
|
2850
|
+
|
|
2851
|
+
// src/audit-command.ts
|
|
2852
|
+
// Targets evaluated by `env audit` — every run checks all three.
var TARGETS2 = ["dev", "preview", "prod"];
// Dotenv files merged per target, in precedence order (later files override
// earlier ones). Unlike `env check`, audit also reads pulled Vercel
// snapshots (.env.vercel.*).
var AUDIT_FILES_BY_TARGET = {
  dev: [".env.defaults", ".env", ".env.dev", ".env.local", ".env.dev.local", ".env.vercel.dev"],
  preview: [
    ".env.defaults",
    ".env",
    ".env.preview",
    ".env.local",
    ".env.preview.local",
    ".env.vercel.preview"
  ],
  prod: [".env.defaults", ".env", ".env.prod", ".env.local", ".env.prod.local", ".env.vercel.prod"]
};
|
|
2865
|
+
/** True when `filepath` is accessible on disk; false otherwise (never throws). */
async function fileExists2(filepath) {
  let present = true;
  try {
    await access4(filepath);
  } catch {
    // access() rejects for missing paths and permission errors alike.
    present = false;
  }
  return present;
}
|
|
2873
|
+
/**
 * Merge env values for one audit target.
 * Files from AUDIT_FILES_BY_TARGET[target] are merged in order (later files
 * override earlier ones); process.env then overrides files, but only for keys
 * that exist in the schema.
 * @returns { values, inspectedFiles } — inspectedFiles lists only candidates
 *          that actually existed (relative names, in merge order)
 */
async function loadValuesForTarget(cwd, target, schema) {
  const values = {};
  const inspectedFiles = [];
  for (const candidate of AUDIT_FILES_BY_TARGET[target]) {
    const filepath = path4.resolve(cwd, candidate);
    // Missing candidates are skipped silently — the list is a superset.
    if (!await fileExists2(filepath)) {
      continue;
    }
    const content = await readFile4(filepath, "utf8");
    const parsed = parseDotenv(content);
    // Object.assign implements last-file-wins precedence.
    Object.assign(values, dotenvToObject(parsed));
    inspectedFiles.push(candidate);
  }
  // process.env is the highest-precedence source, restricted to schema keys.
  for (const key of Object.keys(schema)) {
    if (process.env[key] !== void 0) {
      values[key] = process.env[key];
    }
  }
  return { values, inspectedFiles };
}
|
|
2893
|
+
/** Sum the per-target issue summaries into one { total, errors, warnings }. */
function aggregateSummary(targets) {
  const totals = { total: 0, errors: 0, warnings: 0 };
  for (const targetResult of targets) {
    totals.total += targetResult.summary.total;
    totals.errors += targetResult.summary.errors;
    totals.warnings += targetResult.summary.warnings;
  }
  return totals;
}
|
|
2903
|
+
/** Concatenate every target's issue list into one flat array, preserving order. */
function flattenIssues(targets) {
  const all = [];
  for (const targetResult of targets) {
    all.push(...targetResult.issues);
  }
  return all;
}
|
|
2906
|
+
/**
 * `env audit`: validate drift for every target (dev/preview/prod) against the
 * resolved schema, aggregate the per-target summaries, and report.
 * Writes JSON to stdout with --json; otherwise a success line to stdout or
 * formatted issues to stderr. exitCode is 1 when any error-severity issue exists.
 * @param options - { cwd?, schema?, json?, stdout?, stderr? }
 * @returns the full result object (ok, exitCode, schemaPath, summary, targets, issues)
 */
async function runAuditCommand(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const schemaPath = await resolveSchemaPath(options.schema, cwd);
  // Note: no per-app selection here — audit always uses the base schema.
  const loaded = await loadSchema(schemaPath);
  const targetResults = [];
  for (const target of TARGETS2) {
    const { values, inspectedFiles } = await loadValuesForTarget(cwd, target, loaded.schema);
    const drift = detectDrift(loaded.schema, {
      target,
      values,
      source: inspectedFiles.join(", ")
    });
    targetResults.push({
      target,
      inspectedFiles,
      summary: drift.summary,
      issues: drift.issues
    });
  }
  const summary = aggregateSummary(targetResults);
  const issues = flattenIssues(targetResults);
  const result = {
    // Warnings do not fail the audit; only errors do.
    ok: summary.errors === 0,
    exitCode: summary.errors === 0 ? 0 : 1,
    schemaPath,
    summary,
    targets: targetResults,
    issues
  };
  // Streams are injectable for tests.
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  if (options.json) {
    stdout.write(`${JSON.stringify(result, null, 2)}
`);
  } else if (result.ok) {
    stdout.write("env audit passed. No drift detected across dev/preview/prod snapshots.\n");
  } else {
    stderr.write(`${formatValidationIssues(result.issues)}
`);
  }
  return result;
}
|
|
2948
|
+
|
|
2949
|
+
// src/check-command.ts
|
|
2950
|
+
import { access as access5, readFile as readFile5 } from "fs/promises";
|
|
2951
|
+
import path5 from "path";
|
|
2952
|
+
// Dotenv files merged per target by `env check`, in precedence order
// (later files override earlier ones). Unlike `env audit`, this list has
// no .env.vercel.* snapshots.
var CHECK_ENV_FILES_BY_TARGET = {
  dev: [".env.defaults", ".env", ".env.dev", ".env.local", ".env.dev.local"],
  preview: [".env.defaults", ".env", ".env.preview", ".env.local", ".env.preview.local"],
  prod: [".env.defaults", ".env", ".env.prod", ".env.local", ".env.prod.local"]
};
|
|
2957
|
+
/** Validate a CLI target value; throws unless it is dev, preview, or prod. */
function assertTarget(target) {
  const validTargets = ["dev", "preview", "prod"];
  if (validTargets.includes(target)) {
    return;
  }
  throw new Error(`Invalid target "${String(target)}". Expected one of: dev, preview, prod.`);
}
|
|
2963
|
+
/**
 * Return a sorted copy of the issues (input untouched): errors before
 * warnings, then lexicographically by target, scope, key, and code.
 */
function sortIssues2(issues) {
  const severityOrder = {
    error: 0,
    warning: 1
  };
  const byPriority = (left, right) =>
    severityOrder[left.severity] - severityOrder[right.severity] ||
    left.target.localeCompare(right.target) ||
    left.scope.localeCompare(right.scope) ||
    left.key.localeCompare(right.key) ||
    left.code.localeCompare(right.code);
  return [...issues].sort(byPriority);
}
|
|
2972
|
+
/**
 * Produce one error issue per value key that is absent from the schema.
 * Unknown variables are reported as server-scoped errors with code
 * UNKNOWN_VARIABLE; callers skip this entirely when --allow-unknown is set.
 */
function collectUnknownIssues2(schema, values, target) {
  const schemaKeys = new Set(Object.keys(schema));
  return Object.keys(values)
    .filter((key) => !schemaKeys.has(key))
    .map((key) => ({
      key,
      target,
      scope: "server",
      severity: "error",
      code: "UNKNOWN_VARIABLE",
      message: "Variable is not defined in schema and `--allow-unknown` was not set.",
      fix: `Add "${key}" to your schema or re-run check with --allow-unknown.`
    }));
}
|
|
2991
|
+
/** True when `filepath` is accessible on disk; false otherwise (never throws). */
async function fileExists3(filepath) {
  let present = true;
  try {
    await access5(filepath);
  } catch {
    // access() rejects for missing paths and permission errors alike.
    present = false;
  }
  return present;
}
|
|
2999
|
+
/**
 * Merge env values for `env check` from the target's dotenv file cascade.
 * Files are merged in CHECK_ENV_FILES_BY_TARGET[target] order (later files
 * override earlier ones); process.env then overrides files, but only for
 * keys present in the schema.
 */
async function loadValuesFromFiles(cwd, target, schema) {
  const merged = {};
  const fileOrder = CHECK_ENV_FILES_BY_TARGET[target];
  for (const filename of fileOrder) {
    const absolutePath = path5.resolve(cwd, filename);
    // Missing files in the cascade are skipped silently.
    if (!await fileExists3(absolutePath)) {
      continue;
    }
    const fileContent = await readFile5(absolutePath, "utf8");
    const parsed = parseDotenv(fileContent);
    // Object.assign implements last-file-wins precedence.
    Object.assign(merged, dotenvToObject(parsed));
  }
  // process.env is the highest-precedence source, restricted to schema keys.
  for (const key of Object.keys(schema)) {
    if (process.env[key] !== void 0) {
      merged[key] = process.env[key];
    }
  }
  return merged;
}
|
|
3018
|
+
/**
 * `env check`: validate one target's env values against the resolved schema.
 * Validation issues come from createEnv(...).assert() (collected, not thrown,
 * except for non-EnvValidationError failures which propagate); unknown-variable
 * errors are appended unless --allow-unknown. exitCode is 1 when any
 * error-severity issue exists; warnings alone still pass.
 * @param options - { target, cwd?, strict?, allowUnknown?, schema?, app?,
 *                    prefixPolicy?, values?, now?, json?, stdout?, stderr? }
 * @returns the full result object (ok, exitCode, summary, issues, ...)
 */
async function runCheckCommand(options) {
  assertTarget(options.target);
  const cwd = options.cwd ?? process.cwd();
  // strict defaults ON; unknown variables default to being reported.
  const strict = options.strict ?? true;
  const allowUnknown = options.allowUnknown ?? false;
  const schemaPath = await resolveSchemaPath(options.schema, cwd);
  const loaded = await loadSchema(schemaPath, { app: options.app });
  const schema = loaded.schema;
  // CLI-supplied prefix policy overrides the one from the schema file.
  const prefixPolicy = options.prefixPolicy ?? loaded.prefixPolicy;
  // Injected values (tests) win over the dotenv cascade.
  const values = options.values ?? await loadValuesFromFiles(cwd, options.target, schema);
  const issues = [];
  const runtime = createEnv(schema, {
    target: options.target,
    values,
    strict,
    throwOnError: false,
    prefixPolicy,
    now: options.now
  });
  try {
    runtime.assert();
  } catch (error) {
    // Only validation failures are collected; anything else is a real bug.
    if (!(error instanceof EnvValidationError)) {
      throw error;
    }
    issues.push(...error.issues);
  }
  if (!allowUnknown) {
    issues.push(...collectUnknownIssues2(schema, values, options.target));
  }
  const sortedIssues = sortIssues2(issues);
  const errors = sortedIssues.filter((issue) => issue.severity === "error").length;
  const warnings = sortedIssues.filter((issue) => issue.severity === "warning").length;
  const ok = errors === 0;
  const result = {
    ok,
    exitCode: ok ? 0 : 1,
    target: options.target,
    allowUnknown,
    strict,
    schemaPath,
    summary: {
      total: sortedIssues.length,
      errors,
      warnings
    },
    issues: sortedIssues
  };
  // Streams are injectable for tests.
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  if (options.json) {
    stdout.write(`${JSON.stringify(result, null, 2)}
`);
  } else if (!result.ok) {
    stderr.write(`${formatValidationIssues(sortedIssues)}
`);
  } else {
    stdout.write(
      `env check passed for target "${options.target}" with ${Object.keys(schema).length} schema variable(s).
`
    );
  }
  return result;
}
|
|
3082
|
+
|
|
3083
|
+
// src/deprecations-command.ts
|
|
3084
|
+
/**
 * Parse a deprecation deadline string into a Date.
 * Bare YYYY-MM-DD dates are inclusive: they expand to the last millisecond of
 * that UTC day. Any other string is parsed by the Date constructor; unparsable
 * input yields undefined.
 */
function parseRemoveAfter2(removeAfter) {
  const dateOnlyPattern = /^\d{4}-\d{2}-\d{2}$/;
  if (dateOnlyPattern.test(removeAfter)) {
    return /* @__PURE__ */ new Date(`${removeAfter}T23:59:59.999Z`);
  }
  const parsed = new Date(removeAfter);
  return Number.isNaN(parsed.getTime()) ? void 0 : parsed;
}
|
|
3094
|
+
/**
 * Classify a deprecation: "no-deadline" when removeAfter is absent or
 * unparsable, "expired" once `now` is strictly past the deadline, else "active".
 */
function getStatus(removeAfter, now) {
  if (!removeAfter) {
    return "no-deadline";
  }
  const deadline = parseRemoveAfter2(removeAfter);
  if (!deadline) {
    // Unparsable deadlines degrade gracefully instead of throwing.
    return "no-deadline";
  }
  return now.getTime() > deadline.getTime() ? "expired" : "active";
}
|
|
3107
|
+
/**
 * Render one deprecation entry as a single report line:
 * "- KEY scope=... status=..." plus replacedBy/removeAfter/message when present.
 */
function formatItem(item) {
  const parts = [`- ${item.key}`, `scope=${item.scope}`, `status=${item.status}`];
  const optionalFields = [
    ["replacedBy", item.replacedBy],
    ["removeAfter", item.removeAfter],
    ["message", item.message]
  ];
  for (const [label, value] of optionalFields) {
    if (value) {
      parts.push(`${label}=${value}`);
    }
  }
  return parts.join(" ");
}
|
|
3120
|
+
/**
 * `env deprecations`: list schema variables marked deprecated, sorted by key,
 * each with its scope, replacement, deadline, message, and computed status
 * (active/expired/no-deadline relative to `now`).
 * Always succeeds (ok: true, exitCode: 0) — this is a report, not a gate.
 * Writes JSON with --json, otherwise a human-readable list to stdout.
 * @param options - { cwd?, now?, schema?, app?, json?, stdout? }
 */
async function runDeprecationsCommand(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  // Injectable clock so expiry classification is testable.
  const now = options.now ?? /* @__PURE__ */ new Date();
  const schemaPath = await resolveSchemaPath(options.schema, cwd);
  const loaded = await loadSchema(schemaPath, { app: options.app });
  const stdout = options.stdout ?? process.stdout;
  const items = Object.entries(loaded.schema).filter(([, definition]) => Boolean(definition.deprecated)).map(([key, definition]) => ({
    key,
    scope: definition.scope,
    replacedBy: definition.deprecated?.replacedBy,
    removeAfter: definition.deprecated?.removeAfter,
    message: definition.deprecated?.message,
    status: getStatus(definition.deprecated?.removeAfter, now)
  })).sort((left, right) => left.key.localeCompare(right.key));
  const result = {
    ok: true,
    exitCode: 0,
    schemaPath,
    app: options.app,
    count: items.length,
    items
  };
  if (options.json) {
    stdout.write(`${JSON.stringify(result, null, 2)}
`);
    return result;
  }
  if (items.length === 0) {
    stdout.write("No deprecated variables found in schema.\n");
    return result;
  }
  stdout.write("Deprecated variables:\n");
  for (const item of items) {
    stdout.write(`${formatItem(item)}
`);
  }
  return result;
}
|
|
3158
|
+
|
|
3159
|
+
// src/export-command.ts
|
|
3160
|
+
import { access as access6, readFile as readFile6 } from "fs/promises";
|
|
3161
|
+
import path6 from "path";
|
|
3162
|
+
// Quote a scalar for YAML output. A JSON string is also a valid YAML
// double-quoted scalar, so JSON.stringify handles all escaping for us.
function yamlString(value) {
  const quoted = JSON.stringify(value);
  return quoted;
}
|
|
3165
|
+
// github-actions exports every variable; all other formats export only
// server-scoped entries.
function includeEntryForFormat(format, definition) {
  return format === "github-actions" || definition.scope === "server";
}
|
|
3171
|
+
// Build the sorted list of export entries from the schema, filtered per
// output format. Values are populated only when options.withValues is set;
// keys absent from options.sourceValues fall back to "".
function buildEntries(schema, format, options) {
  const sortedPairs = Object.entries(schema).sort(([a], [b]) => a.localeCompare(b));
  const entries = [];
  for (const [key, definition] of sortedPairs) {
    if (!includeEntryForFormat(format, definition)) {
      continue;
    }
    entries.push({
      key,
      scope: definition.scope,
      sensitive: Boolean(definition.sensitive),
      value: options.withValues ? options.sourceValues[key] ?? "" : ""
    });
  }
  return entries;
}
|
|
3179
|
+
// Render entries as a docker-compose file with an `environment:` mapping
// under a single `app` service; empty entry lists emit an empty map.
function renderDockerCompose(entries) {
  const lines = ['version: "3.9"', "services:", " app:", " environment:"];
  if (entries.length === 0) {
    lines.push(" {}");
  } else {
    for (const entry of entries) {
      lines.push(` ${entry.key}: ${yamlString(entry.value)}`);
    }
  }
  return `${lines.join("\n")}\n`;
}
|
|
3192
|
+
// Render entries as a Kubernetes ConfigMap (non-sensitive values) plus an
// Opaque Secret (sensitive values), separated by a YAML document divider.
function renderK8s(entries) {
  const configEntries = [];
  const secretEntries = [];
  for (const entry of entries) {
    (entry.sensitive ? secretEntries : configEntries).push(entry);
  }
  const lines = ["apiVersion: v1", "kind: ConfigMap", "metadata:", " name: wrkspace-env-config"];
  if (configEntries.length === 0) {
    lines.push("data: {}");
  } else {
    lines.push("data:");
    for (const entry of configEntries) {
      lines.push(` ${entry.key}: ${yamlString(entry.value)}`);
    }
  }
  lines.push("---", "apiVersion: v1", "kind: Secret", "metadata:", " name: wrkspace-env-secret", "type: Opaque");
  if (secretEntries.length === 0) {
    lines.push("stringData: {}");
  } else {
    lines.push("stringData:");
    for (const entry of secretEntries) {
      lines.push(` ${entry.key}: ${yamlString(entry.value)}`);
    }
  }
  return `${lines.join("\n")}\n`;
}
|
|
3221
|
+
// Render entries as a GitHub Actions workflow `env:` block, grouped by
// scope with a comment header per group; empty input emits an empty map.
function renderGithubActions(entries) {
  const serverEntries = entries.filter((entry) => entry.scope === "server");
  const clientEntries = entries.filter((entry) => entry.scope === "client");
  const lines = ["env:"];
  if (serverEntries.length === 0 && clientEntries.length === 0) {
    lines.push(" {}");
    return `${lines.join("\n")}\n`;
  }
  const emitGroup = (label, group) => {
    if (group.length === 0) {
      return;
    }
    lines.push(` # ${label}`);
    for (const entry of group) {
      lines.push(` ${entry.key}: ${yamlString(entry.value)}`);
    }
  };
  emitGroup("server", serverEntries);
  emitGroup("client", clientEntries);
  return `${lines.join("\n")}\n`;
}
|
|
3245
|
+
// Dispatch to the renderer for the requested export format.
// Any unrecognized format falls through to the github-actions renderer.
function renderExport(schema, format, options) {
  const entries = buildEntries(schema, format, options);
  switch (format) {
    case "docker-compose":
      return renderDockerCompose(entries);
    case "k8s":
      return renderK8s(entries);
    default:
      return renderGithubActions(entries);
  }
}
|
|
3255
|
+
// True when `filepath` is accessible on disk; any access error maps to false.
async function fileExists4(filepath) {
  return access6(filepath).then(
    () => true,
    () => false
  );
}
|
|
3263
|
+
// Read and parse a dotenv file, returning its key/value map.
// The original rebuilt the object via an identity entries→map→fromEntries
// round-trip; a spread copy produces the same shallow copy directly.
async function readValuesFromFile(filepath) {
  const content = await readFile6(filepath, "utf8");
  const parsed = parseDotenv(content);
  // Shallow-copy so callers may mutate the result without touching parser state.
  return { ...dotenvToObject(parsed) };
}
|
|
3269
|
+
// Render the schema in the requested export format and write it to stdout.
// Values are only included when --with-values AND --from <file> are given;
// --with-values without --from is a usage error (exitCode 1).
async function runExportCommand(options) {
  const cwd = options.cwd ?? process.cwd();
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  const withValues = Boolean(options.withValues);
  const schemaPath = await resolveSchemaPath(options.schema, cwd);
  const loaded = await loadSchema(schemaPath);
  // Guard: values were requested but no source file to read them from.
  if (withValues && !options.from) {
    stderr.write(
      'Cannot export with values because "--from <file>" was not provided. Fix: pass both --with-values and --from <file>.\n'
    );
    return {
      ok: false,
      exitCode: 1,
      format: options.format,
      schemaPath
    };
  }
  let sourcePath;
  let sourceValues = {};
  if (withValues && options.from) {
    sourcePath = path6.resolve(cwd, options.from);
    if (!await fileExists4(sourcePath)) {
      stderr.write(
        `Source file not found at "${sourcePath}". Fix: pass a valid --from <file> path.
`
      );
      return {
        ok: false,
        exitCode: 1,
        format: options.format,
        schemaPath,
        sourcePath
      };
    }
    sourceValues = await readValuesFromFile(sourcePath);
  }
  const output = renderExport(loaded.schema, options.format, {
    withValues,
    sourceValues
  });
  stdout.write(output);
  // The rendered text is returned too so callers can capture it without
  // intercepting the stream.
  return {
    ok: true,
    exitCode: 0,
    format: options.format,
    schemaPath,
    sourcePath,
    output
  };
}
|
|
3320
|
+
|
|
3321
|
+
// src/generate-command.ts
|
|
3322
|
+
import { mkdir as mkdir3, writeFile as writeFile3 } from "fs/promises";
|
|
3323
|
+
import path7 from "path";
|
|
3324
|
+
// Generate all env artifacts (.env.example, .env.defaults, docs, audit JSON;
// optionally .env.local and per-target files) from the schema, register them
// in .gitignore, and write them to disk relative to cwd.
async function runGenerateCommand(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const schemaPath = await resolveSchemaPath(options.schema, cwd);
  const loaded = await loadSchema(schemaPath, { app: options.app });
  const schema = loaded.schema;
  const artifacts = generateArtifacts(schema, { app: options.app });
  // [relativePath, content] pairs; the base set is always written.
  const filesToWrite = [
    [".env.example", artifacts.envExample],
    [".env.defaults", artifacts.envDefaults],
    [DEFAULT_ENV_DOC_FILE, artifacts.envMarkdown],
    [DEFAULT_ENV_AUDIT_FILE, artifacts.envAuditJson]
  ];
  if (options.writeLocal) {
    filesToWrite.push([".env.local", artifacts.envLocal]);
  }
  if (options.writeTargetFiles) {
    filesToWrite.push(
      [".env.dev", generateTargetEnvFile(schema, "dev", { app: options.app })],
      [".env.preview", generateTargetEnvFile(schema, "preview", { app: options.app })],
      [".env.prod", generateTargetEnvFile(schema, "prod", { app: options.app })]
    );
  }
  // Make sure every generated file is gitignored before writing it, so a
  // partial failure never leaves secrets trackable.
  const autoGeneratedIgnoreEntries = filesToWrite.map(([relativePath]) => toWrkspaceEnvIgnoreEntry(cwd, path7.resolve(cwd, relativePath))).filter((entry) => Boolean(entry));
  await ensureWrkspaceEnvGitignore(cwd, autoGeneratedIgnoreEntries);
  for (const [relativePath, content] of filesToWrite) {
    const absolutePath = path7.resolve(cwd, relativePath);
    // Create parent directories on demand; writes are sequential by design.
    await mkdir3(path7.dirname(absolutePath), { recursive: true });
    await writeFile3(absolutePath, content, "utf8");
  }
  const output = options.stdout ?? process.stdout;
  output.write(`Generated ${filesToWrite.length} env artifact(s) from ${path7.basename(schemaPath)}.
`);
  return {
    schemaPath,
    writtenFiles: filesToWrite.map(([relativePath]) => relativePath),
    artifacts
  };
}
|
|
3362
|
+
|
|
3363
|
+
// src/migrate-command.ts
|
|
3364
|
+
// Normalize any thrown value into a printable message string.
function toErrorMessage(error) {
  if (error instanceof Error) {
    return error.message;
  }
  return String(error);
}
|
|
3367
|
+
// Scaffold a new timestamped env migration file. Errors from the helper are
// reported on stderr and mapped to a non-throwing { ok: false } result.
async function runMigrateMakeCommand(options) {
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  try {
    const created = await createMigrationFile({
      name: options.name,
      cwd: options.cwd,
      migrationsDir: options.migrationsDir,
      now: options.now
    });
    stdout.write(`Created migration "${created.id}" at ${created.filePath}.
`);
    return {
      ok: true,
      exitCode: 0,
      id: created.id,
      filePath: created.filePath,
      migrationsDir: created.migrationsDir
    };
  } catch (error) {
    stderr.write(`${toErrorMessage(error)}
`);
    return {
      ok: false,
      exitCode: 1
    };
  }
}
|
|
3395
|
+
// Apply all pending env migrations, reporting which migrations ran, which
// files changed, and where the state file lives. Never throws: failures are
// written to stderr and returned as { ok: false, exitCode: 1 }.
async function runMigrateCommand(options = {}) {
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  try {
    const result = await applyPendingMigrations({
      cwd: options.cwd,
      migrationsDir: options.migrationsDir,
      stateFile: options.stateFile,
      generatedSchemaFile: options.generatedSchemaFile,
      now: options.now
    });
    if (result.applied.length === 0) {
      stdout.write("No pending env migrations.\n");
    } else {
      stdout.write(`Applied ${result.applied.length} env migration(s): ${result.applied.join(", ")}.
`);
      if (result.changedFiles.length > 0) {
        stdout.write(`Updated files: ${result.changedFiles.join(", ")}.
`);
      }
      stdout.write(`State updated at ${result.statePath}.
`);
    }
    return {
      ok: true,
      exitCode: 0,
      migrationsDir: result.migrationsDir,
      statePath: result.statePath,
      applied: result.applied,
      changedFiles: result.changedFiles
    };
  } catch (error) {
    stderr.write(`${toErrorMessage(error)}
`);
    return {
      ok: false,
      exitCode: 1,
      applied: [],
      changedFiles: []
    };
  }
}
|
|
3437
|
+
// Report migration status: applied/pending counts plus integrity problems
// (changed or missing migration files). Exit code is 0 only when no
// integrity issues exist. With --json the full result object is printed;
// otherwise a human-readable summary and per-entry list are written.
async function runMigrateStatusCommand(options = {}) {
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  try {
    const status = await getMigrationStatus({
      cwd: options.cwd,
      migrationsDir: options.migrationsDir,
      stateFile: options.stateFile
    });
    let schemaPath;
    // Only derive/report the generated schema when state is healthy;
    // building it on top of changed/missing migrations would be misleading.
    if (status.summary.changed === 0 && status.summary.missing === 0) {
      const derivedSchema = await loadOrBuildMigrationSchema({
        cwd: options.cwd,
        migrationsDir: options.migrationsDir,
        stateFile: options.stateFile,
        generatedSchemaFile: options.generatedSchemaFile,
        allowEmpty: true
      });
      schemaPath = derivedSchema.schemaPath;
    }
    const ok = status.summary.changed === 0 && status.summary.missing === 0;
    const resultBase = {
      ok,
      exitCode: ok ? 0 : 1,
      migrationsDir: status.migrationsDir,
      statePath: status.statePath,
      summary: status.summary,
      entries: status.entries
    };
    // schemaPath is attached only when it was actually derived, keeping the
    // JSON shape free of an `undefined` field.
    const result = schemaPath ? { ...resultBase, schemaPath } : resultBase;
    if (options.json) {
      stdout.write(`${JSON.stringify(result, null, 2)}
`);
      return result;
    }
    stdout.write(`Migrations dir: ${status.migrationsDir}
`);
    stdout.write(`State file: ${status.statePath}
`);
    if (result.schemaPath) {
      stdout.write(`Schema file: ${result.schemaPath}
`);
    }
    stdout.write(
      `Summary: applied=${status.summary.applied} pending=${status.summary.pending} changed=${status.summary.changed} missing=${status.summary.missing}
`
    );
    for (const entry of status.entries) {
      const appliedAt = entry.appliedAt ? ` appliedAt=${entry.appliedAt}` : "";
      stdout.write(`- [${entry.status}] ${entry.id}${appliedAt}
`);
    }
    if (!ok) {
      stderr.write(
        "Migration integrity issues detected. Fix: resolve changed/missing migrations before apply or rollback.\n"
      );
    }
    return result;
  } catch (error) {
    const errorMessage = toErrorMessage(error);
    // Failure shape mirrors the success shape (empty summary/entries) so
    // JSON consumers can parse it uniformly.
    const failureResult = {
      ok: false,
      exitCode: 1,
      migrationsDir: "",
      statePath: "",
      error: errorMessage,
      summary: {
        total: 0,
        applied: 0,
        pending: 0,
        changed: 0,
        missing: 0
      },
      entries: []
    };
    if (options.json) {
      stdout.write(`${JSON.stringify(failureResult, null, 2)}
`);
      return failureResult;
    }
    stderr.write(`${errorMessage}
`);
    return failureResult;
  }
}
|
|
3522
|
+
// Roll back the most recent `options.steps` applied env migrations,
// reporting which ids were undone and which files changed. Never throws:
// helper errors are written to stderr and returned as { ok: false }.
async function runMigrateRollbackCommand(options) {
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  try {
    const rolledBack = await rollbackMigrations({
      steps: options.steps,
      cwd: options.cwd,
      migrationsDir: options.migrationsDir,
      stateFile: options.stateFile,
      generatedSchemaFile: options.generatedSchemaFile
    });
    if (rolledBack.rolledBack.length === 0) {
      stdout.write("No applied env migrations to rollback.\n");
    } else {
      stdout.write(
        `Rolled back ${rolledBack.rolledBack.length} env migration(s): ${rolledBack.rolledBack.join(", ")}.
`
      );
      if (rolledBack.changedFiles.length > 0) {
        stdout.write(`Updated files: ${rolledBack.changedFiles.join(", ")}.
`);
      }
      stdout.write(`State updated at ${rolledBack.statePath}.
`);
    }
    return {
      ok: true,
      exitCode: 0,
      migrationsDir: rolledBack.migrationsDir,
      statePath: rolledBack.statePath,
      rolledBack: rolledBack.rolledBack,
      changedFiles: rolledBack.changedFiles,
      remainingApplied: rolledBack.remainingApplied
    };
  } catch (error) {
    stderr.write(`${toErrorMessage(error)}
`);
    return {
      ok: false,
      exitCode: 1,
      rolledBack: [],
      changedFiles: []
    };
  }
}
|
|
3567
|
+
// (Re)build the migration-derived schema file from the applied migrations.
// `--force` rebuilds even when a cached file exists; empty migration sets
// are allowed. Errors are reported on stderr, never thrown.
async function runSchemaBuildCommand(options = {}) {
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  try {
    const result = await loadOrBuildMigrationSchema({
      cwd: options.cwd,
      migrationsDir: options.migrationsDir,
      stateFile: options.stateFile,
      generatedSchemaFile: options.generatedSchemaFile,
      forceRebuild: Boolean(options.force),
      allowEmpty: true
    });
    stdout.write(
      `Built migration-derived schema at ${result.schemaPath} from ${result.appliedMigrationIds.length} applied migration(s).
`
    );
    return {
      ok: true,
      exitCode: 0,
      schemaPath: result.schemaPath,
      appliedMigrationIds: result.appliedMigrationIds
    };
  } catch (error) {
    stderr.write(`${toErrorMessage(error)}
`);
    return {
      ok: false,
      exitCode: 1
    };
  }
}
|
|
3598
|
+
|
|
3599
|
+
// src/provider-command.ts
|
|
3600
|
+
import { spawn } from "child_process";
|
|
3601
|
+
import { access as access7, readFile as readFile7 } from "fs/promises";
|
|
3602
|
+
import path8 from "path";
|
|
3603
|
+
// Run an external CLI and capture its output. Resolves (never rejects) with
// { exitCode, stdout, stderr, missingBinary }. A missing binary (ENOENT) is
// mapped to exit code 127 with missingBinary=true, matching shell convention.
// This wraps the callback-style child_process API, so `new Promise` is the
// appropriate adapter here.
async function runExternalCommand(command, args, options = {}) {
  return await new Promise((resolve) => {
    // Copy args defensively; stdin is ignored, stdout/stderr are captured.
    const subprocess = spawn(command, [...args], {
      cwd: options.cwd,
      stdio: ["ignore", "pipe", "pipe"]
    });
    let stdout = "";
    let stderr = "";
    let missingBinary = false;
    subprocess.stdout?.on("data", (chunk) => {
      stdout += chunk.toString();
    });
    subprocess.stderr?.on("data", (chunk) => {
      stderr += chunk.toString();
    });
    subprocess.on("error", (error) => {
      if (error.code === "ENOENT") {
        missingBinary = true;
        resolve({
          exitCode: 127,
          stdout,
          stderr: `Command not found: ${command}`,
          missingBinary: true
        });
        return;
      }
      // Non-ENOENT spawn error: surface the error message as stderr.
      // A later "close" event may also fire; its resolve() is a harmless
      // no-op since a promise settles only once.
      resolve({
        exitCode: 1,
        stdout,
        stderr: error.message,
        missingBinary: false
      });
    });
    subprocess.on("close", (code) => {
      // "close" fires after an ENOENT "error" too; skip the duplicate result.
      if (missingBinary) {
        return;
      }
      resolve({
        exitCode: code ?? 1,
        stdout,
        stderr,
        missingBinary: false
      });
    });
  });
}
|
|
3649
|
+
// Validate a deployment target string; throws for anything other than the
// three supported targets.
function parseTarget(value) {
  switch (value) {
    case "dev":
    case "preview":
    case "prod":
      return value;
    default:
      throw new Error(`Invalid target "${value}". Expected one of: dev, preview, prod.`);
  }
}
|
|
3655
|
+
// True when `filepath` is accessible on disk; access errors map to false.
async function pathExists2(filepath) {
  try {
    await access7(filepath);
  } catch {
    return false;
  }
  return true;
}
|
|
3663
|
+
// Parse a provider snapshot (dotenv format) into a plain key/value object.
// The original rebuilt the object via an identity entries→map→fromEntries
// round-trip; a spread copy produces the same shallow copy directly.
function mergeValuesFromSnapshot(snapshotContent) {
  const parsed = parseDotenv(snapshotContent);
  // Shallow-copy so callers may mutate the result safely.
  return { ...dotenvToObject(parsed) };
}
|
|
3668
|
+
// Human-readable rendering of drift issues; returns a friendly message when
// there is nothing to report.
function formatDiffIssues(issues) {
  if (issues.length === 0) {
    return "No schema drift detected.";
  }
  const normalized = issues.map(({ key, target, scope, severity, code, message, fix }) => ({
    key,
    target,
    scope,
    severity,
    code,
    message,
    fix
  }));
  return formatValidationIssues(normalized);
}
|
|
3684
|
+
// Install hints per supported provider CLI, shown in doctor/pull/push errors.
var PROVIDER_INSTALL_GUIDE = {
  vercel: "Install Vercel CLI: npm install -g vercel",
  heroku: "Install Heroku CLI: https://devcenter.heroku.com/articles/heroku-cli"
};
|
|
3688
|
+
// Environment setup health check: verifies the schema file resolves and that
// the vercel/heroku CLIs are installed and runnable. Each probe is recorded
// as a { name, ok, message, fix? } check; exitCode 0 only if all pass.
async function runDoctorCommand(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  // Injectable runner so tests can stub out the external CLI calls.
  const runner = options.runner ?? runExternalCommand;
  const checks = [];
  let schemaPath;
  try {
    schemaPath = await resolveSchemaPath(options.schema, cwd);
    checks.push({
      name: "schema",
      ok: true,
      message: `Schema found at ${schemaPath}.`
    });
  } catch (error) {
    checks.push({
      name: "schema",
      ok: false,
      message: error instanceof Error ? error.message : "Schema file not found.",
      fix: "Add env.schema.mjs (or pass --schema <path>)."
    });
  }
  for (const provider of ["vercel", "heroku"]) {
    // `<cli> --version` is a cheap availability + sanity probe.
    const result2 = await runner(provider, ["--version"], { cwd });
    if (result2.missingBinary) {
      checks.push({
        name: `${provider}-cli`,
        ok: false,
        message: `${provider} CLI is not installed.`,
        fix: PROVIDER_INSTALL_GUIDE[provider]
      });
      continue;
    }
    if (result2.exitCode !== 0) {
      checks.push({
        name: `${provider}-cli`,
        ok: false,
        message: `${provider} CLI returned non-zero exit code (${result2.exitCode}).`,
        fix: `Run "${provider} --version" manually and fix local setup.`
      });
      continue;
    }
    checks.push({
      name: `${provider}-cli`,
      ok: true,
      message: `${provider} CLI is available.`
    });
  }
  const ok = checks.every((check) => check.ok);
  const result = {
    ok,
    exitCode: ok ? 0 : 1,
    schemaPath,
    checks
  };
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  if (ok) {
    stdout.write("env doctor passed.\n");
    return result;
  }
  // Only the failing checks are listed, each with its fix hint when present.
  stderr.write("env doctor found setup issues:\n");
  for (const check of checks.filter((item) => !item.ok)) {
    stderr.write(`- ${check.name}: ${check.message}
`);
    if (check.fix) {
      stderr.write(` Fix: ${check.fix}
`);
    }
  }
  return result;
}
|
|
3758
|
+
// Pull a Vercel env snapshot into .env.vercel.<env> under cwd via the
// vercel CLI. Missing binary and non-zero exits both map to { ok: false }.
// NOTE(review): the environment is passed as a positional argument after the
// file; the vercel CLI documents `--environment <env>` for this — confirm
// this invocation works against the targeted CLI version.
async function runPullVercelCommand(options) {
  const cwd = options.cwd ?? process.cwd();
  // Injectable runner so tests can stub the external CLI call.
  const runner = options.runner ?? runExternalCommand;
  const outputPath = path8.resolve(cwd, `.env.vercel.${options.env}`);
  const result = await runner("vercel", ["env", "pull", outputPath, options.env], { cwd });
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  if (result.missingBinary) {
    stderr.write(
      `vercel CLI is not installed. Fix: ${PROVIDER_INSTALL_GUIDE.vercel}.
`
    );
    return { ok: false, exitCode: 1, outputPath };
  }
  if (result.exitCode !== 0) {
    stderr.write(
      `vercel env pull failed for target "${options.env}". ${result.stderr || "No stderr output."}
`
    );
    return { ok: false, exitCode: 1, outputPath };
  }
  stdout.write(`Pulled Vercel env snapshot to ${outputPath}.
`);
  return { ok: true, exitCode: 0, outputPath };
}
|
|
3783
|
+
// Resolve the snapshot file a diff should compare against: an explicit
// --snapshot path wins, otherwise a provider-specific default
// (.env.vercel.<env> or .env.heroku.<app>). Throws when the default cannot
// be derived because the required option is missing.
function getSnapshotPath(options, cwd) {
  if (options.snapshot) {
    return path8.resolve(cwd, options.snapshot);
  }
  let defaultName;
  if (options.provider === "vercel") {
    if (!options.env) {
      throw new Error('Missing required option "--env <target>" for vercel diff.');
    }
    defaultName = `.env.vercel.${options.env}`;
  } else {
    if (!options.app) {
      throw new Error('Missing required option "--app <name>" for heroku diff.');
    }
    defaultName = `.env.heroku.${options.app}`;
  }
  return path8.resolve(cwd, defaultName);
}
|
|
3800
|
+
// Pick the schema target a diff validates against. Vercel requires an
// explicit, validated --env; other providers use --target, defaulting to prod.
function resolveDiffTarget(options) {
  if (options.provider !== "vercel") {
    return options.target ?? "prod";
  }
  if (!options.env) {
    throw new Error('Missing required option "--env <target>" for vercel diff.');
  }
  return parseTarget(options.env);
}
|
|
3809
|
+
// Diff a provider env snapshot against the schema for the resolved target.
// A missing snapshot yields a synthetic SNAPSHOT_MISSING error result;
// otherwise the snapshot is parsed and checked with detectDrift.
// Fix over original: the missing-snapshot payload was written out twice
// (once inline for --json, once as the return value); it is now built once
// and used for both, so the printed JSON and the returned object can never
// drift apart.
async function runDiffCommand(options) {
  const cwd = options.cwd ?? process.cwd();
  const schemaPath = await resolveSchemaPath(options.schema, cwd);
  const loaded = await loadSchema(schemaPath);
  const target = resolveDiffTarget(options);
  const snapshotPath = getSnapshotPath(options, cwd);
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  if (!await pathExists2(snapshotPath)) {
    const message = `Snapshot file not found at "${snapshotPath}". Fix: pull provider env first, then retry diff.`;
    // Single failure payload: printed verbatim with --json and returned.
    const failure = {
      provider: options.provider,
      ok: false,
      exitCode: 1,
      schemaPath,
      snapshotPath,
      target,
      summary: { total: 1, errors: 1, warnings: 0 },
      issues: [
        {
          key: "__snapshot__",
          target,
          scope: "server",
          severity: "error",
          code: "SNAPSHOT_MISSING",
          message,
          fix: "Create the snapshot file and re-run diff.",
          source: snapshotPath
        }
      ]
    };
    if (options.json) {
      stdout.write(`${JSON.stringify(failure, null, 2)}\n`);
    } else {
      stderr.write(`${message}\n`);
    }
    return failure;
  }
  const snapshotContent = await readFile7(snapshotPath, "utf8");
  const snapshotValues = mergeValuesFromSnapshot(snapshotContent);
  const drift = detectDrift(loaded.schema, {
    target,
    values: snapshotValues,
    source: snapshotPath,
    allowUnknown: options.allowUnknown ?? false
  });
  // Warnings alone do not fail the diff; only errors do.
  const ok = drift.summary.errors === 0;
  const result = {
    provider: options.provider,
    ok,
    exitCode: ok ? 0 : 1,
    schemaPath,
    snapshotPath,
    target,
    summary: drift.summary,
    issues: drift.issues
  };
  if (options.json) {
    stdout.write(`${JSON.stringify(result, null, 2)}\n`);
  } else if (ok) {
    stdout.write(`No drift detected for ${options.provider} snapshot (${path8.basename(snapshotPath)}).\n`);
  } else {
    stderr.write(`${formatDiffIssues(result.issues)}\n`);
  }
  return result;
}
|
|
3909
|
+
// Detect scaffold placeholder values that must never be pushed to a
// provider: blank values, any case-insensitive "example_*" stand-in
// (which covers the generated `example_<key>` form), the example URL,
// and empty JSON. `key` is kept for interface compatibility.
function isGeneratedPlaceholder(key, value) {
  const trimmed = value.trim();
  if (trimmed === "") {
    return true;
  }
  const lower = trimmed.toLowerCase();
  return (
    lower.startsWith("example_") ||
    trimmed === "https://example.com" ||
    trimmed === "{}"
  );
}
|
|
3920
|
+
// Push env values from a local dotenv file to a Heroku app via
// `heroku config:set`. Only keys present in the schema with real
// (non-placeholder) values are pushed; everything else is reported as
// skipped. Fix over original: schema membership used `key in loaded.schema`,
// which also matches inherited prototype properties (e.g. a key named
// "constructor"); this now checks own properties only.
async function runPushHerokuCommand(options) {
  const cwd = options.cwd ?? process.cwd();
  // Injectable runner so tests can stub the external CLI call.
  const runner = options.runner ?? runExternalCommand;
  const schemaPath = await resolveSchemaPath(options.schema, cwd);
  const loaded = await loadSchema(schemaPath);
  const inputFile = path8.resolve(cwd, options.from);
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  if (!await pathExists2(inputFile)) {
    stderr.write(`Source file not found at "${inputFile}". Fix: pass a valid --from <file> path.
`);
    return { ok: false, exitCode: 1, pushedKeys: [], skippedKeys: [] };
  }
  const content = await readFile7(inputFile, "utf8");
  const values = mergeValuesFromSnapshot(content);
  const pushedEntries = [];
  const skippedKeys = [];
  for (const [key, value] of Object.entries(values)) {
    // Own-property check: inherited names must not count as schema keys.
    if (!Object.prototype.hasOwnProperty.call(loaded.schema, key)) {
      skippedKeys.push(key);
      continue;
    }
    // Defensive undefined check, plus placeholder filtering so scaffold
    // values never overwrite real provider config.
    if (value === void 0 || isGeneratedPlaceholder(key, value)) {
      skippedKeys.push(key);
      continue;
    }
    pushedEntries.push([key, value]);
  }
  if (pushedEntries.length === 0) {
    stdout.write(
      "No eligible variables to push. Only schema-approved non-placeholder values are pushed to Heroku.\n"
    );
    return { ok: true, exitCode: 0, pushedKeys: [], skippedKeys };
  }
  // KEY=value pairs are passed as discrete argv entries (no shell), so no
  // extra quoting is needed here.
  const assignmentArgs = pushedEntries.map(([key, value]) => `${key}=${value}`);
  const result = await runner("heroku", ["config:set", ...assignmentArgs, "--app", options.app], { cwd });
  if (result.missingBinary) {
    stderr.write(
      `heroku CLI is not installed. Fix: ${PROVIDER_INSTALL_GUIDE.heroku}.
`
    );
    return { ok: false, exitCode: 1, pushedKeys: [], skippedKeys };
  }
  if (result.exitCode !== 0) {
    stderr.write(
      `heroku config:set failed for app "${options.app}". ${result.stderr || "No stderr output."}
`
    );
    return { ok: false, exitCode: 1, pushedKeys: [], skippedKeys };
  }
  stdout.write(
    `Pushed ${pushedEntries.length} variable(s) to Heroku app "${options.app}" for target "${options.target}".
`
  );
  return {
    ok: true,
    exitCode: 0,
    pushedKeys: pushedEntries.map(([key]) => key),
    skippedKeys
  };
}
|
|
3981
|
+
|
|
3982
|
+
// src/reconcile-command.ts
|
|
3983
|
+
/**
 * Convert an arbitrary thrown value into a printable message.
 * Error instances contribute their `.message`; anything else is coerced
 * with `String()`.
 */
function toErrorMessage2(error) {
  if (error instanceof Error) {
    return error.message;
  }
  return String(error);
}
|
|
3986
|
+
/**
 * Derive the dotenv string value for a schema definition's default.
 * Sensitive keys always serialize to "" so secret defaults never land in
 * generated env files; absent (undefined/null) defaults also become "".
 */
function envValueFromDefault(definition) {
  const { kind, sensitive, defaultValue } = definition;
  if (sensitive || defaultValue === undefined || defaultValue === null) {
    return "";
  }
  switch (kind) {
    case "number":
      if (typeof defaultValue === "number") {
        return `${defaultValue}`;
      }
      break;
    case "boolean":
      if (typeof defaultValue === "boolean") {
        return defaultValue ? "true" : "false";
      }
      break;
    case "json":
      // A string default is assumed to already be serialized JSON.
      return typeof defaultValue === "string" ? defaultValue : JSON.stringify(defaultValue);
    default:
      break;
  }
  return String(defaultValue);
}
|
|
4007
|
+
/** Format a target list as a source-code array literal, e.g. ["dev", "prod"]. */
function renderTargets(targets) {
  const quoted = targets.map((target) => JSON.stringify(target));
  return `[${quoted.join(", ")}]`;
}
|
|
4010
|
+
/**
 * Pretty-print a schema definition as 2-space-indented JSON lines.
 * Sensitive definitions have their defaultValue stripped first so a rendered
 * migration never embeds a secret default.
 */
function renderDefinition(definition) {
  let printable = definition;
  if (definition.sensitive && definition.defaultValue !== undefined) {
    // Spreading with defaultValue: undefined makes JSON.stringify drop the key.
    printable = { ...definition, defaultValue: undefined };
  }
  return JSON.stringify(printable, null, 2).split("\n");
}
|
|
4017
|
+
/**
 * Render the TypeScript source of a reconcile migration file.
 *
 * The emitted module defines `up`/`down` functions over the fluent
 * EnvMigrationContext API: `up` defines and values each added key with
 * overwrite disabled and createIfMissing enabled; `down` removes them.
 *
 * @param id        migration id; embedded in a comment noting it derives from
 *                  the `${id}.ts` filename.
 * @param additions array of { key, definition, value, targets } records.
 * @returns the complete file contents joined with "\n".
 */
function renderReconcileMigrationTemplate(id, additions) {
  const lines = [
    'import type { EnvMigrationContext } from "@wrkspace-co/env";',
    "",
    `// Migration id is derived from this filename: ${id}.ts`,
    "",
    "export async function up(ctx: EnvMigrationContext) {"
  ];
  for (const addition of additions) {
    // When an addition covers every default target file, the .targets(...)
    // call is omitted so the migration uses the library default.
    const hasAllTargets = addition.targets.length === DEFAULT_MIGRATION_TARGET_FILES.length;
    // NOTE(review): the single-space indents inside these emitted strings look
    // collapsed — confirm the intended template indentation against the repo.
    lines.push(` ctx.key(${JSON.stringify(addition.key)})`);
    lines.push(" .define(");
    const definitionLines = renderDefinition(addition.definition);
    for (let index = 0; index < definitionLines.length; index += 1) {
      lines.push(` ${definitionLines[index]}`);
    }
    lines.push(" )");
    lines.push(` .value(${JSON.stringify(addition.value)})`);
    if (!hasAllTargets) {
      lines.push(` .targets(${renderTargets(addition.targets)})`);
    }
    lines.push(" .overwrite(false)");
    lines.push(" .createIfMissing(true);");
  }
  lines.push("}");
  lines.push("");
  lines.push("export async function down(ctx: EnvMigrationContext) {");
  for (const addition of additions) {
    const hasAllTargets = addition.targets.length === DEFAULT_MIGRATION_TARGET_FILES.length;
    if (hasAllTargets) {
      lines.push(` ctx.key(${JSON.stringify(addition.key)}).remove();`);
    } else {
      lines.push(
        ` ctx.key(${JSON.stringify(addition.key)}).targets(${renderTargets(addition.targets)}).remove();`
      );
    }
  }
  lines.push("}");
  lines.push("");
  return lines.join("\n");
}
|
|
4058
|
+
/**
 * Ensure every schema key exists in the default target env files, adding any
 * missing keys with schema-derived default values (sensitive keys get "").
 *
 * Optionally (options.writeMigration) records the additions as a generated
 * "reconcile" migration file so the change is replayable.
 *
 * options: { schema?, app?, cwd?, stdout?, stderr?, writeMigration?,
 *            migrationsDir?, now? }
 * Returns { ok, exitCode, schemaPath?, changedFiles, added, migrationId?,
 * migrationPath? }. Errors are written to stderr and reported as exitCode 1
 * rather than thrown.
 */
async function runReconcileCommand(options = {}) {
  const cwd = options.cwd ?? process.cwd();
  const stdout = options.stdout ?? process.stdout;
  const stderr = options.stderr ?? process.stderr;
  try {
    const schemaPath = await resolveSchemaPath(options.schema, cwd);
    const loaded = await loadSchema(schemaPath, { app: options.app });
    const documents = await loadEnvTargetDocuments({ cwd });
    const context = createEnvMigrationContext(documents);
    const additions = [];
    for (const [key, definition] of Object.entries(loaded.schema)) {
      // Only touch targets that do not already contain the key.
      const missingTargets = DEFAULT_MIGRATION_TARGET_FILES.filter(
        (target) => !hasKeyInTargetDocuments(documents, target, key)
      );
      if (missingTargets.length === 0) {
        continue;
      }
      const value = envValueFromDefault(definition);
      // overwrite(false) + createIfMissing(true): add without clobbering.
      const chain = context.key(key).define(definition).value(value).overwrite(false).createIfMissing(true);
      if (missingTargets.length !== DEFAULT_MIGRATION_TARGET_FILES.length) {
        // Restrict the edit to just the files that are missing the key.
        chain.targets(missingTargets);
      }
      additions.push({
        key,
        definition,
        value,
        targets: missingTargets
      });
    }
    // Flush the fluent chains into the in-memory documents, then persist.
    finalizeFluentDefinitions(context);
    const changedFiles = await writeEnvTargetDocuments(documents);
    let migrationId;
    let migrationPath;
    if (options.writeMigration && additions.length > 0) {
      const created = await createMigrationFile({
        name: "reconcile",
        cwd,
        migrationsDir: options.migrationsDir,
        now: options.now,
        template: (id) => renderReconcileMigrationTemplate(id, additions)
      });
      migrationId = created.id;
      migrationPath = created.filePath;
    }
    if (additions.length === 0) {
      stdout.write("env reconcile: all schema keys already exist in target env files.\n");
    } else {
      stdout.write(
        `env reconcile added ${additions.length} schema key(s) across ${changedFiles.length} file(s).
`
      );
      stdout.write(`Updated files: ${changedFiles.join(", ")}.
`);
      if (migrationPath) {
        stdout.write(`Created reconcile migration "${migrationId}" at ${migrationPath}.
`);
      }
    }
    return {
      ok: true,
      exitCode: 0,
      schemaPath,
      changedFiles,
      added: additions,
      migrationId,
      migrationPath
    };
  } catch (error) {
    stderr.write(`${toErrorMessage2(error)}
`);
    return {
      ok: false,
      exitCode: 1,
      changedFiles: [],
      added: []
    };
  }
}
|
|
4136
|
+
|
|
4137
|
+
// src/plugin.ts
|
|
4138
|
+
/**
 * Validate a --target/--env CLI value.
 * @returns the value unchanged when it is one of dev, preview, prod.
 * @throws {Error} for any other value.
 */
function parseTarget2(value) {
  const allowed = ["dev", "preview", "prod"];
  if (!allowed.includes(value)) {
    throw new Error(`Invalid --target "${value}". Expected one of: dev, preview, prod.`);
  }
  return value;
}
|
|
4144
|
+
/**
 * Parse a CLI string as a strictly positive integer.
 *
 * Fix: `Number.parseInt` alone silently truncates ("3.5" -> 3) and ignores
 * trailing junk ("5x" -> 5), so malformed input was accepted. The value must
 * now be digits only (optional leading "+", surrounding whitespace allowed,
 * matching what parseInt already tolerated for valid input).
 *
 * @param {string} value raw option value from the CLI.
 * @returns {number} the parsed integer (> 0).
 * @throws {Error} when the value is not a well-formed positive integer.
 */
function parsePositiveInteger(value) {
  const isWellFormed = /^\s*\+?\d+\s*$/.test(value);
  const parsed = Number.parseInt(value, 10);
  if (!isWellFormed || !Number.isInteger(parsed) || parsed <= 0) {
    throw new Error(`Invalid positive integer "${value}".`);
  }
  return parsed;
}
|
|
4151
|
+
/** CLI adapter: forwards `generate` flags to runGenerateCommand. */
async function handleGenerateCommand(options) {
  const { writeLocal, writeTargetFiles, schema, app } = options;
  await runGenerateCommand({
    writeLocal: Boolean(writeLocal),
    writeTargetFiles: Boolean(writeTargetFiles),
    schema,
    app
  });
}
|
|
4159
|
+
/** CLI adapter: forwards `check` flags to runCheckCommand and returns its exit code. */
async function handleCheckCommand(options) {
  const { target, app, allowUnknown, json, schema } = options;
  const { exitCode } = await runCheckCommand({
    target,
    app,
    allowUnknown: Boolean(allowUnknown),
    json: Boolean(json),
    schema
  });
  return exitCode;
}
|
|
4169
|
+
/** CLI adapter: forwards `deprecations` flags to runDeprecationsCommand. */
async function handleDeprecationsCommand(options) {
  const { schema, app, json } = options;
  const { exitCode } = await runDeprecationsCommand({
    schema,
    app,
    json: Boolean(json)
  });
  return exitCode;
}
|
|
4177
|
+
/** CLI adapter: forwards the `doctor` schema option to runDoctorCommand. */
async function handleDoctorCommand(options) {
  const { exitCode } = await runDoctorCommand({ schema: options.schema });
  return exitCode;
}
|
|
4183
|
+
/** CLI adapter: forwards `audit` flags to runAuditCommand. */
async function handleAuditCommand(options) {
  const { schema, json } = options;
  const { exitCode } = await runAuditCommand({
    schema,
    json: Boolean(json)
  });
  return exitCode;
}
|
|
4190
|
+
/** CLI adapter: forwards the `pull vercel` env option to runPullVercelCommand. */
async function handlePullVercelCommand(options) {
  const { exitCode } = await runPullVercelCommand({ env: options.env });
  return exitCode;
}
|
|
4196
|
+
/** CLI adapter: runs the generic diff command against the Vercel provider. */
async function handleDiffVercelCommand(options) {
  const { env, schema, json, allowUnknown } = options;
  const { exitCode } = await runDiffCommand({
    provider: "vercel",
    env,
    schema,
    json: Boolean(json),
    allowUnknown: Boolean(allowUnknown)
  });
  return exitCode;
}
|
|
4206
|
+
/** CLI adapter: runs the generic diff command against the Heroku provider. */
async function handleDiffHerokuCommand(options) {
  const { app, schema, json, allowUnknown } = options;
  const { exitCode } = await runDiffCommand({
    provider: "heroku",
    app,
    schema,
    json: Boolean(json),
    allowUnknown: Boolean(allowUnknown)
  });
  return exitCode;
}
|
|
4216
|
+
/** CLI adapter: forwards `push heroku` options to runPushHerokuCommand. */
async function handlePushHerokuCommand(options) {
  const { app, target, from, schema } = options;
  const { exitCode } = await runPushHerokuCommand({ app, target, from, schema });
  return exitCode;
}
|
|
4225
|
+
/** CLI adapter: runs the export command with the docker-compose format. */
async function handleExportDockerComposeCommand(options) {
  const { schema, withValues, from } = options;
  const { exitCode } = await runExportCommand({
    format: "docker-compose",
    schema,
    withValues: Boolean(withValues),
    from
  });
  return exitCode;
}
|
|
4234
|
+
/** CLI adapter: runs the export command with the k8s format. */
async function handleExportK8sCommand(options) {
  const { schema, withValues, from } = options;
  const { exitCode } = await runExportCommand({
    format: "k8s",
    schema,
    withValues: Boolean(withValues),
    from
  });
  return exitCode;
}
|
|
4243
|
+
/** CLI adapter: runs the export command with the github-actions format. */
async function handleExportGithubActionsCommand(options) {
  const { schema, withValues, from } = options;
  const { exitCode } = await runExportCommand({
    format: "github-actions",
    schema,
    withValues: Boolean(withValues),
    from
  });
  return exitCode;
}
|
|
4252
|
+
/** CLI adapter for `migrate:make <name>`; maps --dir onto migrationsDir. */
async function handleMigrateMakeCommand(name, options) {
  const { exitCode } = await runMigrateMakeCommand({
    name,
    migrationsDir: options.dir
  });
  return exitCode;
}
|
|
4259
|
+
/** CLI adapter for `migrate`; maps --dir/--state/--schema-file onto run options. */
async function handleMigrateCommand(options) {
  const { dir, state, schemaFile } = options;
  const { exitCode } = await runMigrateCommand({
    migrationsDir: dir,
    stateFile: state,
    generatedSchemaFile: schemaFile
  });
  return exitCode;
}
|
|
4267
|
+
/** CLI adapter for `migrate:status`; maps CLI flags onto run options. */
async function handleMigrateStatusCommand(options) {
  const { dir, state, schemaFile, json } = options;
  const { exitCode } = await runMigrateStatusCommand({
    migrationsDir: dir,
    stateFile: state,
    generatedSchemaFile: schemaFile,
    json: Boolean(json)
  });
  return exitCode;
}
|
|
4276
|
+
/** CLI adapter for `migrate:rollback`; --steps is already parsed to a number. */
async function handleMigrateRollbackCommand(options) {
  const { steps, dir, state, schemaFile } = options;
  const { exitCode } = await runMigrateRollbackCommand({
    steps,
    migrationsDir: dir,
    stateFile: state,
    generatedSchemaFile: schemaFile
  });
  return exitCode;
}
|
|
4285
|
+
/** CLI adapter for `schema:build`; maps CLI flags onto run options. */
async function handleSchemaBuildCommand(options) {
  const { dir, state, schemaFile, force } = options;
  const { exitCode } = await runSchemaBuildCommand({
    migrationsDir: dir,
    stateFile: state,
    generatedSchemaFile: schemaFile,
    force: Boolean(force)
  });
  return exitCode;
}
|
|
4294
|
+
/** CLI adapter for `reconcile`; --dir becomes migrationsDir for --write-migration. */
async function handleReconcileCommand(options) {
  const { schema, app, writeMigration, dir } = options;
  const { exitCode } = await runReconcileCommand({
    schema,
    app,
    writeMigration: Boolean(writeMigration),
    migrationsDir: dir
  });
  return exitCode;
}
|
|
4303
|
+
/**
 * Record a non-zero command exit code on the process without exiting
 * immediately; a zero code leaves process.exitCode untouched.
 */
function applyExitCode(exitCode) {
  if (exitCode === 0) {
    return;
  }
  process.exitCode = exitCode;
}
|
|
4308
|
+
/**
 * Attach every airlock subcommand to the given commander command.
 * Each action delegates to its handle* adapter; non-zero exit codes are
 * recorded via applyExitCode (except `generate`, which has no exit code).
 */
function registerAirlockCommands(command) {
  command
    .command("doctor")
    .description("Check schema and provider CLI setup")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleDoctorCommand(options));
    });
  command
    .command("check")
    .description("Validate environment variables against schema and policy")
    .requiredOption("--target <target>", "Target environment (dev|preview|prod)", parseTarget2)
    .option("--app <name>", "Optional app name for composed schema selection")
    .option("--allow-unknown", "Allow variables not defined in schema")
    .option("--json", "Print stable JSON output")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleCheckCommand(options));
    });
  command
    .command("generate")
    .description("Generate schema-driven env artifacts")
    .option("--write-local", "Write .env.local with empty values for sensitive vars")
    .option(
      "--write-target-files",
      "Write .env.dev, .env.preview, and .env.prod with target-required keys"
    )
    .option("--schema <path>", "Path to schema module")
    .option("--app <name>", "Optional app name for composed schema selection")
    .action(async (options) => {
      await handleGenerateCommand(options);
    });
  command
    .command("deprecations")
    .description("Report deprecated environment keys and removal deadlines")
    .option("--json", "Print stable JSON output")
    .option("--schema <path>", "Path to schema module")
    .option("--app <name>", "Optional app name for composed schema selection")
    .action(async (options) => {
      applyExitCode(await handleDeprecationsCommand(options));
    });
  command
    .command("audit")
    .description("Detect schema drift in local env snapshots")
    .option("--json", "Print stable JSON output")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleAuditCommand(options));
    });
  const pullCommand = command
    .command("pull")
    .description("Pull environment snapshots from provider CLIs");
  pullCommand
    .command("vercel")
    .description("Pull env snapshot from Vercel")
    .requiredOption("--env <target>", "Target environment (dev|preview|prod)", parseTarget2)
    .action(async (options) => {
      applyExitCode(await handlePullVercelCommand(options));
    });
  const diffCommand = command
    .command("diff")
    .description("Diff provider snapshots against schema expectations");
  diffCommand
    .command("vercel")
    .description("Diff a Vercel snapshot against schema")
    .requiredOption("--env <target>", "Target environment (dev|preview|prod)", parseTarget2)
    .option("--json", "Print stable JSON output")
    .option("--allow-unknown", "Ignore unknown keys in snapshot")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleDiffVercelCommand(options));
    });
  diffCommand
    .command("heroku")
    .description("Diff a Heroku snapshot against schema")
    .requiredOption("--app <name>", "Heroku app name")
    .option("--json", "Print stable JSON output")
    .option("--allow-unknown", "Ignore unknown keys in snapshot")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleDiffHerokuCommand(options));
    });
  const pushCommand = command
    .command("push")
    .description("Push env values to provider CLIs with safety filters");
  pushCommand
    .command("heroku")
    .description("Push schema-approved values to Heroku")
    .requiredOption("--app <name>", "Heroku app name")
    .requiredOption("--target <target>", "Target environment (dev|preview|prod)", parseTarget2)
    .requiredOption("--from <file>", "Source dotenv file to read values from")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handlePushHerokuCommand(options));
    });
  const exportCommand = command
    .command("export")
    .description("Export env schema to deployment formats with safe defaults");
  exportCommand
    .command("docker-compose")
    .description("Export docker-compose environment block")
    .option("--with-values", "Include values from --from file")
    .option("--from <file>", "Source dotenv file (required with --with-values)")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleExportDockerComposeCommand(options));
    });
  exportCommand
    .command("k8s")
    .description("Export Kubernetes ConfigMap and Secret manifests")
    .option("--with-values", "Include values from --from file")
    .option("--from <file>", "Source dotenv file (required with --with-values)")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleExportK8sCommand(options));
    });
  exportCommand
    .command("github-actions")
    .description("Export GitHub Actions env block")
    .option("--with-values", "Include values from --from file")
    .option("--from <file>", "Source dotenv file (required with --with-values)")
    .option("--schema <path>", "Path to schema module")
    .action(async (options) => {
      applyExitCode(await handleExportGithubActionsCommand(options));
    });
  command
    .command("migrate:make")
    .description("Create a new env migration file")
    .argument("<name>", "Migration name")
    .option("--dir <path>", "Migration directory (default: env/migrations)")
    .action(async (name, options) => {
      applyExitCode(await handleMigrateMakeCommand(name, options));
    });
  command
    .command("migrate")
    .description("Apply pending env migrations")
    .option("--dir <path>", "Migration directory (default: env/migrations)")
    .option("--state <path>", "Migration state file (default: env/migrations.state.json)")
    .option(
      "--schema-file <path>",
      "Generated schema file (default: env/schema.generated.json)"
    )
    .action(async (options) => {
      applyExitCode(await handleMigrateCommand(options));
    });
  command
    .command("migrate:status")
    .description("Show env migration status")
    .option("--json", "Print stable JSON output")
    .option("--dir <path>", "Migration directory (default: env/migrations)")
    .option("--state <path>", "Migration state file (default: env/migrations.state.json)")
    .option(
      "--schema-file <path>",
      "Generated schema file (default: env/schema.generated.json)"
    )
    .action(async (options) => {
      applyExitCode(await handleMigrateStatusCommand(options));
    });
  command
    .command("migrate:rollback")
    .description("Rollback applied env migrations")
    .requiredOption("--steps <count>", "Number of steps to rollback", parsePositiveInteger)
    .option("--dir <path>", "Migration directory (default: env/migrations)")
    .option("--state <path>", "Migration state file (default: env/migrations.state.json)")
    .option(
      "--schema-file <path>",
      "Generated schema file (default: env/schema.generated.json)"
    )
    .action(async (options) => {
      applyExitCode(await handleMigrateRollbackCommand(options));
    });
  command
    .command("schema:build")
    .description("Build migration-derived schema file")
    .option("--dir <path>", "Migration directory (default: env/migrations)")
    .option("--state <path>", "Migration state file (default: env/migrations.state.json)")
    .option(
      "--schema-file <path>",
      "Generated schema file (default: env/schema.generated.json)"
    )
    .option("--force", "Force a full schema rebuild from applied migrations")
    .action(async (options) => {
      applyExitCode(await handleSchemaBuildCommand(options));
    });
  command
    .command("reconcile")
    .description("Ensure env files include schema keys with minimal edits")
    .option("--schema <path>", "Path to schema module")
    .option("--app <name>", "Optional app name for composed schema selection")
    .option("--write-migration", "Create a migration file with reconcile changes")
    .option("--dir <path>", "Migration directory used by --write-migration")
    .action(async (options) => {
      applyExitCode(await handleReconcileCommand(options));
    });
}
|
|
4383
|
+
/** Build a standalone commander program named "airlock" with all subcommands. */
function createAirlockProgram() {
  const program = new Command();
  program.name("airlock");
  program.description("Airlock environment commands");
  registerAirlockCommands(program);
  return program;
}
|
|
4388
|
+
/** Plugin hook: mount the airlock commands under `env` on a host program. */
function register(program) {
  const envCommand = program.command("env");
  envCommand.description("Airlock environment commands");
  registerAirlockCommands(envCommand);
}
|
|
4392
|
+
// Plugin facade consumed by host CLIs: a single register(program) entry point.
var plugin = { register };
var plugin_default = plugin;

// Bundled chunk exports: dotenv helpers, schema builders, migration engine,
// command runners, CLI handler adapters, and the plugin facade.
export {
  parseDotenv,
  formatDotenvValue,
  writeDotenv,
  dotenvToObject,
  string,
  url,
  number,
  boolean,
  enumBuilder,
  json,
  builders,
  defineSchema,
  prefixPolicies,
  formatValidationIssues,
  EnvValidationError,
  createEnv,
  detectDrift,
  generateArtifacts,
  generateTargetEnvFile,
  DEFAULT_MIGRATIONS_DIR2 as DEFAULT_MIGRATIONS_DIR,
  DEFAULT_MIGRATION_STATE_FILE2 as DEFAULT_MIGRATION_STATE_FILE,
  DEFAULT_GENERATED_SCHEMA_FILE2 as DEFAULT_GENERATED_SCHEMA_FILE,
  DEFAULT_MIGRATION_TARGET_FILES,
  loadEnvTargetDocuments,
  hasKeyInTargetDocuments,
  finalizeFluentDefinitions,
  createEnvMigrationContext,
  writeEnvTargetDocuments,
  loadMigrationState,
  saveMigrationState,
  discoverMigrations,
  getMigrationStatus,
  loadOrBuildMigrationSchema,
  createMigrationFile,
  applyPendingMigrations,
  rollbackMigrations,
  DEFAULT_SCHEMA_FILES,
  resolveSchemaPath,
  loadSchema,
  runAuditCommand,
  runCheckCommand,
  runDeprecationsCommand,
  renderExport,
  runExportCommand,
  runGenerateCommand,
  runMigrateMakeCommand,
  runMigrateCommand,
  runMigrateStatusCommand,
  runMigrateRollbackCommand,
  runSchemaBuildCommand,
  runExternalCommand,
  runDoctorCommand,
  runPullVercelCommand,
  runDiffCommand,
  runPushHerokuCommand,
  runReconcileCommand,
  handleGenerateCommand,
  handleCheckCommand,
  handleDeprecationsCommand,
  handleDoctorCommand,
  handleAuditCommand,
  handlePullVercelCommand,
  handleDiffVercelCommand,
  handleDiffHerokuCommand,
  handlePushHerokuCommand,
  handleExportDockerComposeCommand,
  handleExportK8sCommand,
  handleExportGithubActionsCommand,
  handleMigrateMakeCommand,
  handleMigrateCommand,
  handleMigrateStatusCommand,
  handleMigrateRollbackCommand,
  handleSchemaBuildCommand,
  handleReconcileCommand,
  createAirlockProgram,
  register,
  plugin_default
};
//# sourceMappingURL=chunk-32DQKIYR.js.map
|