qavor 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +201 -0
- package/README.md +93 -0
- package/dist/index.js +2883 -0
- package/dist/index.js.map +1 -0
- package/package.json +51 -0
package/dist/index.js
ADDED
|
@@ -0,0 +1,2883 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
|
|
3
|
+
// src/index.ts
|
|
4
|
+
import process2 from "process";
|
|
5
|
+
import { Command } from "commander";
|
|
6
|
+
|
|
7
|
+
// src/util/exit-codes.ts
|
|
8
|
+
// Process exit codes used by the CLI; paired with the QavorError hierarchy below.
var ExitCode = {
  Ok: 0,            // success
  UserError: 1,     // invalid CLI usage / user input
  ManifestError: 2, // manifest missing or malformed
  RuntimeError: 3   // failure while executing an operation
};
|
|
14
|
+
// Base class for all qavor CLI errors. Carries the exit code the process
// should terminate with when the error reaches the top-level handler.
var QavorError = class QavorError extends Error {
  // Exit code reported to the shell; defaults to ExitCode.RuntimeError.
  exitCode;
  constructor(message, exitCode = ExitCode.RuntimeError) {
    super(message);
    this.exitCode = exitCode;
    this.name = "QavorError";
  }
};
|
|
22
|
+
// User-facing misuse (bad flags, invalid arguments); maps to ExitCode.UserError.
var UserError = class UserError extends QavorError {
  constructor(message) {
    super(message, ExitCode.UserError);
    this.name = "UserError";
  }
};
|
|
28
|
+
// Manifest problems (missing file, YAML/schema failure); maps to ExitCode.ManifestError.
var ManifestError = class ManifestError extends QavorError {
  constructor(message) {
    super(message, ExitCode.ManifestError);
    this.name = "ManifestError";
  }
};
|
|
34
|
+
// Operational failure while running a command (e.g. git); maps to ExitCode.RuntimeError.
var RuntimeFailure = class RuntimeFailure extends QavorError {
  constructor(message) {
    super(message, ExitCode.RuntimeError);
    this.name = "RuntimeFailure";
  }
};
|
|
40
|
+
|
|
41
|
+
// src/util/logger.ts
|
|
42
|
+
import pino from "pino";
|
|
43
|
+
var rootLogger = null;
|
|
44
|
+
// Configure the module-level root logger from CLI options.
// JSON output or a non-TTY stderr gets raw pino on fd 2 (sync); an
// interactive stderr gets pino-pretty. Returns the configured logger.
function configureLogger(opts) {
  const level = opts.verbose ? "debug" : "info";
  const stderrIsTty = Boolean(process.stderr.isTTY);

  if (opts.json || !stderrIsTty) {
    // Machine-readable / piped output: plain NDJSON on stderr.
    rootLogger = pino({ level }, pino.destination({ fd: 2, sync: true }));
    return rootLogger;
  }

  // Interactive terminal: human-friendly pretty printing on stderr.
  const prettyOptions = {
    destination: 2,
    colorize: stderrIsTty,
    translateTime: false,
    ignore: "pid,hostname,time",
    messageFormat: "{msg}",
    singleLine: false,
    sync: true
  };
  rootLogger = pino({
    level,
    transport: { target: "pino-pretty", options: prettyOptions }
  });
  return rootLogger;
}
|
|
68
|
+
// Return the shared logger, creating a plain info-level one if
// configureLogger() was never called.
function getLogger() {
  if (rootLogger) {
    return rootLogger;
  }
  rootLogger = pino({ level: "info" });
  return rootLogger;
}
|
|
74
|
+
// Write one line of program output (data, not logs) to stdout.
function emit(text) {
  process.stdout.write(`${text}\n`);
}
|
|
77
|
+
// Write one NDJSON line of program output to stdout.
function emitJson(payload) {
  const line = JSON.stringify(payload);
  process.stdout.write(`${line}\n`);
}
|
|
80
|
+
|
|
81
|
+
// src/cli/commands/init.ts
|
|
82
|
+
import path6 from "path";
|
|
83
|
+
|
|
84
|
+
// src/workspace/init.ts
|
|
85
|
+
import { createHash as createHash2 } from "crypto";
|
|
86
|
+
import fs4 from "fs/promises";
|
|
87
|
+
import path5 from "path";
|
|
88
|
+
|
|
89
|
+
// src/util/fs.ts
|
|
90
|
+
import { createHash } from "crypto";
|
|
91
|
+
import { createReadStream } from "fs";
|
|
92
|
+
import fs from "fs/promises";
|
|
93
|
+
import path from "path";
|
|
94
|
+
// True if `target` exists and is accessible; fs.access rejects otherwise.
async function pathExists(target) {
  return fs.access(target).then(
    () => true,
    () => false
  );
}
|
|
102
|
+
// True if `target` exists and is a directory; any stat failure counts as false.
async function isDirectory(target) {
  let stats;
  try {
    stats = await fs.stat(target);
  } catch {
    return false;
  }
  return stats.isDirectory();
}
|
|
110
|
+
// True if `target` exists and is a regular file; any stat failure counts as false.
async function isFile(target) {
  let stats;
  try {
    stats = await fs.stat(target);
  } catch {
    return false;
  }
  return stats.isFile();
}
|
|
118
|
+
// mkdir -p: create `target` and any missing parents; no error when it already exists.
async function ensureDir(target) {
  await fs.mkdir(target, { recursive: true });
}
|
|
121
|
+
// Read and parse a UTF-8 JSON file. Propagates fs and JSON.parse errors.
async function readJsonFile(target) {
  return JSON.parse(await fs.readFile(target, "utf8"));
}
|
|
125
|
+
// Serialize `value` as pretty-printed JSON (2-space indent, trailing newline)
// to `target`, creating parent directories as needed.
async function writeJsonFile(target, value) {
  await ensureDir(path.dirname(target));
  const serialized = `${JSON.stringify(value, null, 2)}\n`;
  await fs.writeFile(target, serialized, "utf8");
}
|
|
129
|
+
// Resolve qavor's global cache directory. A non-empty XDG_CACHE_HOME wins;
// otherwise fall back to <HOME>/.cache/qavor (USERPROFILE on Windows, /tmp last).
function globalCacheDir(env = process.env) {
  const { XDG_CACHE_HOME: xdg } = env;
  if (xdg) {
    return path.join(xdg, "qavor");
  }
  const home = env.HOME ?? env.USERPROFILE ?? "/tmp";
  return path.join(home, ".cache", "qavor");
}
|
|
135
|
+
|
|
136
|
+
// src/git/git.ts
|
|
137
|
+
import { execa } from "execa";
|
|
138
|
+
import fs2 from "fs/promises";
|
|
139
|
+
import path2 from "path";
|
|
140
|
+
import simpleGit from "simple-git";
|
|
141
|
+
// Run `git <args>` via execa and return its stdout (empty string when stdout
// is not text). On failure, rethrow as RuntimeFailure with the command line,
// exit code, cwd, and the most informative output available.
async function runGit(args, opts) {
  const execaOptions = {
    cwd: opts.cwd,
    // Caller-provided env entries are layered over the process environment.
    env: opts.env ? { ...process.env, ...opts.env } : process.env,
    ...(opts.signal ? { cancelSignal: opts.signal } : {}),
    stdout: "pipe",
    stderr: "pipe"
  };
  try {
    const result = await execa("git", args, execaOptions);
    return typeof result.stdout === "string" ? result.stdout : "";
  } catch (err) {
    const stderrText = typeof err.stderr === "string" ? err.stderr : "";
    const stdoutText = typeof err.stdout === "string" ? err.stdout : "";
    const exitCode = err.exitCode ?? -1;
    // Prefer stderr, then stdout, then execa's own summaries.
    const detail = stderrText.trim() || stdoutText.trim() || err.shortMessage || err.message;
    throw new RuntimeFailure(`git ${args.join(" ")} (exit ${exitCode}) in ${opts.cwd}
${detail}`);
  }
}
|
|
163
|
+
// True if `dir` is a git repository. Fast path checks for a `.git` entry;
// otherwise asks `git rev-parse --is-inside-work-tree` as a fallback.
async function isGitRepo(dir) {
  if (!(await isDirectory(dir))) {
    return false;
  }
  const hasDotGit = await fs2.access(path2.join(dir, ".git")).then(
    () => true,
    () => false
  );
  if (hasDotGit) {
    return true;
  }
  try {
    const out = await runGit(["rev-parse", "--is-inside-work-tree"], { cwd: dir });
    return out.trim() === "true";
  } catch {
    return false;
  }
}
|
|
177
|
+
// Collect a best-effort status snapshot of the git repo at `dir`.
// Every probe is wrapped in its own try/catch so a single failure (no
// upstream, detached HEAD, empty repo, ...) degrades that one field to its
// default instead of failing the whole call.
// Returns { branch, ahead, behind, dirtyCount, lastCommit, lastCommitSubject }.
async function readRepoStatus(dir) {
  const git = simpleGit({ baseDir: dir });
  let branch = null;
  try {
    const summary = await git.branch();
    // `current` may be empty (e.g. detached HEAD); normalize to null.
    branch = summary.current || null;
  } catch {
    branch = null;
  }
  let ahead = 0;
  let behind = 0;
  try {
    // `--left-right --count @{u}...HEAD` prints "<behind> <ahead>".
    // Throws when no upstream is configured; counts then stay 0.
    const counts = await runGit(["rev-list", "--left-right", "--count", "@{u}...HEAD"], { cwd: dir });
    const [b, a] = counts.trim().split(/\s+/).map((n) => Number.parseInt(n, 10));
    behind = Number.isFinite(b) ? b ?? 0 : 0;
    ahead = Number.isFinite(a) ? a ?? 0 : 0;
  } catch {
    // best-effort: leave ahead/behind at 0
  }
  let dirtyCount = 0;
  try {
    const status = await git.status();
    dirtyCount = status.files.length;
  } catch {
    // best-effort: leave dirtyCount at 0
  }
  let lastCommit = null;
  let lastCommitSubject = null;
  try {
    const log = await git.log({ maxCount: 1 });
    if (log.latest) {
      // Abbreviate to the short 7-char hash for display.
      lastCommit = log.latest.hash.slice(0, 7);
      lastCommitSubject = log.latest.message;
    }
  } catch {
    // best-effort: empty repo or log failure leaves both fields null
  }
  return { branch, ahead, behind, dirtyCount, lastCommit, lastCommitSubject };
}
|
|
213
|
+
// Clone a repository to `opts.dest`. A pinned commit takes precedence over
// branch/tag: the clone uses the default branch and the commit is checked out
// afterwards. Branch beats tag when both are present and no commit is pinned.
async function gitClone(opts) {
  const args = ["clone"];
  if (!opts.commit) {
    const ref = opts.branch || opts.tag;
    if (ref) args.push("--branch", ref);
  }
  if (opts.shallow) args.push("--depth", "1");
  if (opts.submodules) args.push("--recurse-submodules");
  args.push("--", opts.url, opts.dest);

  const parentDir = path2.dirname(opts.dest);
  await fs2.mkdir(parentDir, { recursive: true });

  const runOpts = { cwd: parentDir, ...(opts.signal ? { signal: opts.signal } : {}) };
  await runGit(args, runOpts);

  if (opts.commit) {
    const checkoutOpts = { cwd: opts.dest, ...(opts.signal ? { signal: opts.signal } : {}) };
    await runGit(["checkout", opts.commit], checkoutOpts);
  }
}
|
|
230
|
+
// `git fetch --prune` in `dir`; `--prune` drops remote-tracking refs deleted upstream.
async function gitFetch(dir, signal) {
  const runOpts = signal ? { cwd: dir, signal } : { cwd: dir };
  await runGit(["fetch", "--prune"], runOpts);
}
|
|
235
|
+
// `git pull --ff-only` in `dir`: fails (via runGit) rather than creating a merge commit.
async function gitPullFastForward(dir, signal) {
  const runOpts = signal ? { cwd: dir, signal } : { cwd: dir };
  await runGit(["pull", "--ff-only"], runOpts);
}
|
|
240
|
+
// Stage everything and commit with `message`. When the worktree is clean and
// `opts.allowEmpty` is not set, no commit is made. Returns { committed }.
async function gitCommit(dir, message, opts = {}) {
  const porcelain = await runGit(["status", "--porcelain"], { cwd: dir });
  const hasChanges = porcelain.trim().length > 0;
  if (!hasChanges && !opts.allowEmpty) {
    return { committed: false };
  }
  const gitOpts = opts.signal ? { cwd: dir, signal: opts.signal } : { cwd: dir };
  await runGit(["add", "-A"], gitOpts);
  const commitArgs = ["commit", "-m", message];
  if (opts.allowEmpty) commitArgs.push("--allow-empty");
  await runGit(commitArgs, gitOpts);
  return { committed: true };
}
|
|
255
|
+
// `git push` in `dir` using the repo's configured remote/branch defaults.
async function gitPush(dir, signal) {
  const runOpts = signal ? { cwd: dir, signal } : { cwd: dir };
  await runGit(["push"], runOpts);
}
|
|
260
|
+
// Derive a clone URL for a repo: an explicit URL wins; otherwise join the
// project's git.root_url (trailing slash stripped), optional repo prefix,
// and the repo name as `<base>/<prefix><name>.git`.
// Throws RuntimeFailure when neither an explicit URL nor a root URL exists.
function deriveCloneUrl(input) {
  const { explicitUrl, rootUrl, repoPrefix, name } = input;
  if (explicitUrl) {
    return explicitUrl;
  }
  if (!rootUrl) {
    throw new RuntimeFailure(
      `Cannot derive clone URL for '${name}': project manifest has no git.root_url and no explicit url is set.`
    );
  }
  const base = rootUrl.endsWith("/") ? rootUrl.slice(0, -1) : rootUrl;
  return `${base}/${repoPrefix ?? ""}${name}.git`;
}
|
|
272
|
+
|
|
273
|
+
// src/manifest/loader.ts
|
|
274
|
+
import fs3 from "fs/promises";
|
|
275
|
+
import path3 from "path";
|
|
276
|
+
import { parseAllDocuments } from "yaml";
|
|
277
|
+
// Load a (possibly multi-document) YAML manifest file.
// Returns one record per document: { file, docIndex, kind, data, position },
// where `position` maps a JSON-pointer path to a { file, line, column }.
// Throws ManifestError for a missing file or (unless opts.throwOnParseError
// is explicitly false) for the first YAML parse error encountered.
async function loadManifestFile(filePath, opts = {}) {
  const absFile = path3.resolve(filePath);
  let source;
  try {
    source = await fs3.readFile(absFile, "utf8");
  } catch (err) {
    // Only translate "file not found"; other fs errors propagate unchanged.
    if (err.code === "ENOENT") {
      throw new ManifestError(`Manifest file not found: ${absFile}`);
    }
    throw err;
  }
  const docs = parseAllDocuments(source, {
    // Source tokens are kept so node ranges can be mapped back to line/column.
    keepSourceTokens: true
  });
  const out = [];
  let idx = 0;
  for (const doc of docs) {
    if (doc.errors.length && opts.throwOnParseError !== false) {
      // Report only the first error, with file:line:column prefix.
      const e = doc.errors[0];
      const pos = errorPosition(absFile, source, e);
      throw new ManifestError(
        `${pos.file}:${pos.line}:${pos.column}: YAML parse error: ${e.message}`
      );
    }
    // maxAliasCount: -1 disables the alias-expansion limit; empty docs become {}.
    const data = doc.toJS({ maxAliasCount: -1 }) ?? {};
    const kind = typeof data.kind === "string" ? data.kind : void 0;
    const position = makePositionResolver(absFile, source, doc);
    out.push({ file: absFile, docIndex: idx, kind, data, position });
    idx++;
  }
  return out;
}
|
|
309
|
+
// Map a yaml parser error to { file, line, column }, falling back to 1:1
// when the error carries no numeric offset.
function errorPosition(file, source, e) {
  const offset = Array.isArray(e.pos) ? e.pos[0] : void 0;
  if (typeof offset === "number") {
    const { line, col } = offsetToLineCol(source, offset);
    return { file, line, column: col };
  }
  return { file, line: 1, column: 1 };
}
|
|
316
|
+
// Convert a 0-based character offset into 1-based { line, col } by scanning
// the source for newlines. Offsets past the end clamp to the final position.
function offsetToLineCol(source, offset) {
  let line = 1;
  let col = 1;
  const limit = Math.min(offset, source.length);
  for (let i = 0; i < limit; i++) {
    if (source.charAt(i) === "\n") {
      line += 1;
      col = 1;
    } else {
      col += 1;
    }
  }
  return { line, col };
}
|
|
329
|
+
// Build a resolver that maps a JSON-pointer path inside `doc` (a yaml
// Document) to a { file, line, column } in the original source text.
// When the exact path has no node, the closest existing ancestor's position
// is used; with no node (or no range) at all, it falls back to 1:1.
function makePositionResolver(file, source, doc) {
  return (jsonPath) => {
    const keys = parseJsonPointer(jsonPath);
    let node;
    if (keys.length === 0) {
      // Empty pointer: the document root itself.
      node = doc.contents;
    } else {
      node = doc.getIn(keys, true);
      if (!node) {
        // Walk up the path until some ancestor resolves to a node.
        for (let i = keys.length - 1; i >= 0; i--) {
          const partial = keys.slice(0, i);
          const candidate = doc.getIn(partial, true);
          if (candidate) {
            node = candidate;
            break;
          }
        }
      }
    }
    // `range[0]` is the node's start offset in the source text.
    const range = node?.range;
    if (range && typeof range[0] === "number") {
      const { line, col } = offsetToLineCol(source, range[0]);
      return { file, line, column: col };
    }
    return { file, line: 1, column: 1 };
  };
}
|
|
356
|
+
// Split a JSON-pointer-ish path ("/a/b/0", leading slash optional) into keys.
// Applies RFC 6901 unescaping (~1 -> "/", then ~0 -> "~"); purely numeric
// segments become integer keys so sequence items can be addressed.
function parseJsonPointer(pointer) {
  if (!pointer) return [];
  const body = pointer.startsWith("/") ? pointer.slice(1) : pointer;
  if (!body) return [];
  return body.split("/").map((raw) => {
    const decoded = raw.replace(/~1/g, "/").replace(/~0/g, "~");
    const asNumber = Number(decoded);
    const isIndex = decoded !== "" && Number.isInteger(asNumber) && String(asNumber) === decoded;
    return isIndex ? asNumber : decoded;
  });
}
|
|
366
|
+
|
|
367
|
+
// src/manifest/validator.ts
|
|
368
|
+
import Ajv2020 from "ajv/dist/2020.js";
|
|
369
|
+
import addFormats from "ajv-formats";
|
|
370
|
+
|
|
371
|
+
// src/schema/qavor.defs.schema.json
|
|
372
|
+
// Bundled JSON Schema (draft 2020-12): shared $defs referenced via $ref by
// every kind-specific qavor manifest schema below. Data only — the schema's
// own `description` fields document each definition.
var qavor_defs_schema_default = {
  $schema: "https://json-schema.org/draft/2020-12/schema",
  $id: "https://qavor.dev/schemas/qavor.defs.schema.json",
  title: "qavor shared definitions",
  description: "Shared $defs referenced by every kind-specific qavor manifest schema. Has no top-level shape of its own; consume via $ref to /$defs/<name>.",
  type: "object",
  $defs: {
    name: {
      type: "string",
      pattern: "^[a-z0-9][a-z0-9._-]{0,62}$",
      description: "Lowercase identifier. Letters, digits, dot, dash, underscore. 1-63 chars. Used as service/stateful/profile/repo/group identifiers."
    },
    envKey: {
      type: "string",
      pattern: "^[A-Z_][A-Z0-9_]*$",
      description: "Conventional UPPER_SNAKE_CASE environment variable name."
    },
    envScalar: {
      description: "Scalar value usable on the right-hand side of an env entry. Strings support ${VAR} and ${secret:NAME} interpolation.",
      oneOf: [
        { type: "string" },
        { type: "number" },
        { type: "boolean" }
      ]
    },
    envSpec: {
      description: "Long-form env entry. Use when you need typing, validation, default vs override, secret marking, or documentation.",
      type: "object",
      additionalProperties: false,
      properties: {
        value: { $ref: "#/$defs/envScalar" },
        default: { $ref: "#/$defs/envScalar" },
        required: { type: "boolean", default: false },
        type: { type: "string", enum: ["string", "int", "number", "bool", "url", "duration"] },
        pattern: { type: "string", format: "regex" },
        secret: { type: "boolean", default: false },
        description: { type: "string" }
      }
    },
    envMap: {
      description: "Map of env names to scalar values or long-form envSpec entries.",
      type: "object",
      patternProperties: {
        "^[A-Z_][A-Z0-9_]*$": {
          oneOf: [
            { $ref: "#/$defs/envScalar" },
            { $ref: "#/$defs/envSpec" }
          ]
        }
      },
      additionalProperties: false
    },
    envBlock: {
      description: "Layered env block. `common` always applies; `native` or `docker` is layered on top depending on the active run mode. See the proposal section on Manifest Resolution Order for the full precedence chain.",
      type: "object",
      additionalProperties: false,
      properties: {
        common: { $ref: "#/$defs/envMap" },
        native: { $ref: "#/$defs/envMap" },
        docker: { $ref: "#/$defs/envMap" }
      }
    },
    statefulEnvBlock: {
      description: "Env block for a stateful service. Adds `publish`: env vars exposed to dependents at start time.",
      type: "object",
      additionalProperties: false,
      properties: {
        common: { $ref: "#/$defs/envMap" },
        native: { $ref: "#/$defs/envMap" },
        docker: { $ref: "#/$defs/envMap" },
        publish: { $ref: "#/$defs/envMap" }
      }
    },
    runtimeStep: {
      description: "Single shell step in a runtime block (check_installed, install, prepare, run).",
      type: "object",
      additionalProperties: false,
      required: ["cmd"],
      properties: {
        cmd: { type: "string", description: "Shell command. Multiline strings are treated as a script." },
        cwd: { type: "string", description: "Working directory relative to the manifest file." },
        env: { $ref: "#/$defs/envMap" },
        shell: { type: "string", description: "Override shell. Defaults to `/bin/sh -c` (POSIX) or `cmd /C` on Windows." }
      }
    },
    runtimeBackend: {
      description: "Runtime backend definition. Each of `check_installed`, `install`, `prepare`, `run` is optional but ordered: install runs only when check_installed fails; prepare runs before run.",
      type: "object",
      additionalProperties: false,
      properties: {
        enabled: { type: "boolean", default: false },
        check_installed: { $ref: "#/$defs/runtimeStep" },
        install: { $ref: "#/$defs/runtimeStep" },
        prepare: { $ref: "#/$defs/runtimeStep" },
        run: { $ref: "#/$defs/runtimeStep" }
      }
    },
    runtimeBlock: {
      description: "Available runtime backends. A service or stateful manifest may declare any subset; the active backend is chosen by the resolved `mode`.",
      type: "object",
      additionalProperties: false,
      properties: {
        native: { $ref: "#/$defs/runtimeBackend" },
        docker: { $ref: "#/$defs/runtimeBackend" },
        "docker-compose": { $ref: "#/$defs/runtimeBackend" }
      }
    },
    mode: {
      description: "Run mode. `docker-compose` is only valid for stateful manifests at v0.",
      type: "string",
      enum: ["native", "docker", "docker-compose"]
    },
    profileRef: {
      description: "Reference to a profile by name. Profiles are merged in declaration order; later entries win.",
      $ref: "#/$defs/name"
    },
    requirement: {
      description: "A single dependency edge. Exactly one of `service`, `stateful`, or `group` must be set.",
      type: "object",
      additionalProperties: false,
      properties: {
        service: { type: "string", description: "Service reference. `<service>` for same-workspace, `<repo>:<service>` permitted." },
        stateful: { $ref: "#/$defs/name" },
        group: { $ref: "#/$defs/name" },
        optional: { type: "boolean", default: false },
        condition: {
          type: "string",
          description: "Optional gating expression. Examples: `mode == 'docker'`, `profile == 'dev'`, `os == 'darwin'`."
        },
        waitFor: {
          type: "string",
          enum: ["start", "ready"],
          default: "ready",
          description: "Whether to wait for process-up or for the readiness probe to pass before starting dependents."
        }
      },
      oneOf: [
        { required: ["service"] },
        { required: ["stateful"] },
        { required: ["group"] }
      ]
    },
    hookCommands: {
      description: "One or more shell commands or paths to executable scripts, run in the manifest's directory.",
      oneOf: [
        { type: "string" },
        { type: "array", items: { type: "string" }, minItems: 1 }
      ]
    },
    hooks: {
      description: "Lifecycle hooks. Each hook list runs in the manifest's directory at the corresponding lifecycle event.",
      type: "object",
      additionalProperties: false,
      properties: {
        pre_clone: { $ref: "#/$defs/hookCommands" },
        post_clone: { $ref: "#/$defs/hookCommands" },
        pre_prepare: { $ref: "#/$defs/hookCommands" },
        post_prepare: { $ref: "#/$defs/hookCommands" },
        pre_run: { $ref: "#/$defs/hookCommands" },
        post_run: { $ref: "#/$defs/hookCommands" },
        pre_stop: { $ref: "#/$defs/hookCommands" },
        post_stop: { $ref: "#/$defs/hookCommands" }
      }
    },
    schemaVersion: {
      description: "Manifest schema version. Defaults to 1 if omitted.",
      type: "integer",
      enum: [1]
    }
  }
};
|
|
543
|
+
|
|
544
|
+
// src/schema/qavor.workspaces.schema.json
|
|
545
|
+
// Bundled JSON Schema: the workspace pointer file (`qavor.yaml`, kind: workspaces).
var qavor_workspaces_schema_default = {
  $schema: "https://json-schema.org/draft/2020-12/schema",
  $id: "https://qavor.dev/schemas/qavor.workspaces.schema.json",
  title: "qavor workspaces manifest",
  description: "Workspace pointer file. Lives at the root of the workspace directory as `qavor.yaml` and is created automatically by `qavor init`. Its only job is to point at the project repo whose `kind: project` manifest enumerates the rest of the workspace.",
  type: "object",
  additionalProperties: false,
  required: ["kind", "root_project_path"],
  properties: {
    kind: { const: "workspaces" },
    schemaVersion: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/schemaVersion" },
    root_project_path: {
      type: "string",
      description: "Workspace-relative path to the directory containing the project repo's `qavor.yaml` (kind: project)."
    }
  }
};
|
|
562
|
+
|
|
563
|
+
// src/schema/qavor.project.schema.json
|
|
564
|
+
// Bundled JSON Schema: the project-level manifest (`qavor.yaml`, kind: project).
var qavor_project_schema_default = {
  $schema: "https://json-schema.org/draft/2020-12/schema",
  $id: "https://qavor.dev/schemas/qavor.project.schema.json",
  title: "qavor project manifest",
  description: "Project-level manifest. Lives at the root of the project repo as `qavor.yaml`. Defines workspace identity and enumerates the repos that make up the workspace.",
  type: "object",
  additionalProperties: false,
  required: ["kind", "name", "repositories"],
  properties: {
    kind: { const: "project" },
    schemaVersion: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/schemaVersion" },
    name: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name",
      description: "Human-readable workspace name. Used as the compose project namespace and the on-disk workspace identifier."
    },
    description: { type: "string" },
    git: {
      type: "object",
      additionalProperties: false,
      properties: {
        root_url: {
          type: "string",
          description: "Base git URL for repos in this project. Combined with `repo_prefix` and a repo `name` to derive the clone URL when no explicit `url` is given."
        },
        repo_prefix: {
          type: "string",
          description: "Optional prefix prepended to repo names when deriving clone URLs."
        },
        default_branch: {
          type: "string",
          default: "main",
          description: "Default branch used when a repo entry does not pin its own."
        },
        remote: {
          type: "string",
          enum: ["ssh", "https"],
          description: "Hint for clone URL form when `root_url` is a host instead of a full URL."
        },
        shallow: { type: "boolean", default: false },
        submodules: { type: "boolean", default: false }
      }
    },
    groups: {
      description: "Named groups of repo names. A repo may appear in multiple groups. Repos can also self-declare additional group memberships in their own `kind: repo` or service/stateful manifests.",
      type: "object",
      patternProperties: {
        "^[a-z0-9][a-z0-9._-]{0,62}$": {
          type: "array",
          items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name" },
          uniqueItems: true,
          minItems: 1
        }
      },
      additionalProperties: false
    },
    repositories: {
      description: "Repos that compose the workspace. Each entry is either a bare name (URL derived from `git.root_url` + `git.repo_prefix` + name) or an object with explicit fields.",
      type: "array",
      minItems: 1,
      items: {
        oneOf: [
          { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name" },
          { $ref: "#/$defs/repoEntry" }
        ]
      }
    }
  },
  $defs: {
    repoEntry: {
      type: "object",
      additionalProperties: false,
      required: ["name"],
      properties: {
        name: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name" },
        url: {
          type: "string",
          description: "Explicit git URL. Overrides URL derivation from `git.root_url` + `git.repo_prefix`."
        },
        branch: { type: "string" },
        tag: { type: "string" },
        commit: {
          type: "string",
          pattern: "^[0-9a-f]{7,40}$"
        },
        path: {
          type: "string",
          description: "Workspace-relative clone path. Defaults to `./<name>` (or `./<name>.git` to match the conventional layout)."
        },
        groups: {
          type: "array",
          items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name" },
          uniqueItems: true,
          description: "Inline group memberships in addition to the top-level `groups` map."
        },
        shallow: { type: "boolean" },
        submodules: { type: "boolean" },
        optional: {
          type: "boolean",
          default: false,
          description: "Skip rather than fail if cloning is not authorized."
        }
      },
      allOf: [
        {
          description: "At most one of branch, tag, or commit may be set.",
          not: {
            anyOf: [
              { required: ["branch", "tag"] },
              { required: ["branch", "commit"] },
              { required: ["tag", "commit"] }
            ]
          }
        }
      ]
    }
  }
};
|
|
681
|
+
|
|
682
|
+
// src/schema/qavor.repo.schema.json
|
|
683
|
+
// Bundled JSON Schema: per-repo metadata manifest (`qavor.yaml`, kind: repo).
var qavor_repo_schema_default = {
  $schema: "https://json-schema.org/draft/2020-12/schema",
  $id: "https://qavor.dev/schemas/qavor.repo.schema.json",
  title: "qavor repo manifest",
  description: "Per-repo metadata. Lives at the root of an individual repo as `qavor.yaml` (or as one document of a multi-document `qavor.yaml`). Carries information that is not specific to a single service: identity, group membership, and lifecycle hooks that fire around qavor verbs at the repo level.",
  type: "object",
  additionalProperties: false,
  required: ["kind", "name"],
  properties: {
    kind: { const: "repo" },
    schemaVersion: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/schemaVersion" },
    name: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name",
      description: "Repository identifier. Must match the `name` used in the project manifest's `repositories` list."
    },
    description: { type: "string" },
    groups: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name" },
      uniqueItems: true,
      description: "Additional group memberships, layered on top of any groups assigned by the project manifest."
    },
    hooks: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/hooks" }
  }
};
|
|
708
|
+
|
|
709
|
+
// src/schema/qavor.service.schema.json
|
|
710
|
+
// Bundled JSON Schema: runnable service manifest (`qavor.yaml`, kind: service).
var qavor_service_schema_default = {
  $schema: "https://json-schema.org/draft/2020-12/schema",
  $id: "https://qavor.dev/schemas/qavor.service.schema.json",
  title: "qavor service manifest",
  description: "Runnable application. Lives at the root of a single-service repo as `qavor.yaml`, or under a sub-directory of a multi-service repo (e.g. `service-foo/qavor.yaml`). When this manifest is at the root of a repo, its `groups` also define repo-level group membership.",
  type: "object",
  additionalProperties: false,
  required: ["kind", "name"],
  properties: {
    kind: { const: "service" },
    schemaVersion: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/schemaVersion" },
    name: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name",
      description: "Service identifier. Must be unique within the workspace; cross-repo references use this name."
    },
    description: { type: "string" },
    groups: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name" },
      uniqueItems: true,
      description: "Additional group memberships. When this manifest is at the root of a repo, these also define repo group membership."
    },
    profiles: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/profileRef" },
      uniqueItems: true,
      description: "Profiles applied to this service in declaration order. Profile values are layered first; this manifest's own `runtime` and `env` are merged on top."
    },
    runtime: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/runtimeBlock",
      description: "Available runtime backends. Services typically declare `native` and/or `docker`. `docker-compose` is reserved for stateful manifests at v0."
    },
    mode: {
      description: "Default run mode for this service. Overridable per invocation via `--mode`. Must match a backend whose `enabled: true` is set on this service or one of its profiles.",
      type: "string",
      enum: ["native", "docker"]
    },
    require: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/requirement" },
      description: "Dependencies that must be running before this service starts."
    },
    env: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/envBlock" },
    hooks: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/hooks" }
  }
};
|
|
756
|
+
|
|
757
|
+
// src/schema/qavor.stateful.schema.json
// JSON Schema (draft 2020-12) for `kind: stateful` manifests, inlined into the
// bundle by the build. Shared building blocks live in qavor.defs.schema.json
// and are pulled in via absolute `$ref`s.
var qavor_stateful_schema_default = {
  $schema: "https://json-schema.org/draft/2020-12/schema",
  $id: "https://qavor.dev/schemas/qavor.stateful.schema.json",
  title: "qavor stateful service manifest",
  description: "Externally provided stateful service (postgres, kafka, redis, ...). Lives at the root of a stateful-dep repo as `qavor.yaml`, or under a sub-directory of a deps repo (e.g. `postgresql/qavor.yaml`). At v0 stateful services run via `docker-compose` and qavor owns the generated compose project (per ADR-005).",
  type: "object",
  // Reject unknown top-level keys so manifest typos fail validation loudly.
  additionalProperties: false,
  required: ["kind", "name"],
  properties: {
    kind: { const: "stateful" },
    schemaVersion: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/schemaVersion" },
    name: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name",
      description: "Stateful service identifier. Must be unique within the workspace."
    },
    description: { type: "string" },
    groups: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name" },
      uniqueItems: true,
      description: "Additional group memberships. When this manifest is at the root of a repo, these also define repo group membership."
    },
    profiles: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/profileRef" },
      uniqueItems: true
    },
    runtime: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/runtimeBlock",
      description: "Available runtime backends. v0 stateful services delegate to `docker-compose`; `native` and `docker` are reserved for future use."
    },
    mode: {
      description: "Default run mode. v0 stateful services should use `docker-compose`.",
      type: "string",
      enum: ["docker-compose", "native", "docker"],
      default: "docker-compose"
    },
    require: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/requirement" }
    },
    env: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/statefulEnvBlock",
      description: "Layered env block. `publish` is the explicit contract exposed to dependents at start time; values are interpolated against the resolved env of this stateful service."
    },
    hooks: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/hooks" }
  }
};
|
|
806
|
+
|
|
807
|
+
// src/schema/qavor.profile.schema.json
// JSON Schema (draft 2020-12) for `kind: profile` manifests, inlined into the
// bundle by the build. Shared `$defs` come from qavor.defs.schema.json.
var qavor_profile_schema_default = {
  $schema: "https://json-schema.org/draft/2020-12/schema",
  $id: "https://qavor.dev/schemas/qavor.profile.schema.json",
  title: "qavor profile manifest",
  description: "Reusable runtime + env bundle. Referenced by services and stateful manifests via the `profiles:` list. Profiles can themselves reference other profiles; resolution flattens the chain in declaration order with later entries winning. A profile's runtime/env layer below the referencing manifest's own runtime/env.",
  type: "object",
  // Unknown top-level keys are rejected so typos are caught at validation time.
  additionalProperties: false,
  required: ["kind", "name"],
  properties: {
    kind: { const: "profile" },
    schemaVersion: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/schemaVersion" },
    name: {
      $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/name",
      description: "Profile identifier. Referenced from `profiles:` lists and from CLI flags."
    },
    description: { type: "string" },
    profiles: {
      type: "array",
      items: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/profileRef" },
      uniqueItems: true,
      description: "Other profiles this one extends. Resolved in declaration order before this profile's own values are applied."
    },
    runtime: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/runtimeBlock" },
    mode: {
      type: "string",
      enum: ["native", "docker", "docker-compose"]
    },
    env: { $ref: "https://qavor.dev/schemas/qavor.defs.schema.json#/$defs/envBlock" }
  }
};
|
|
838
|
+
|
|
839
|
+
// src/manifest/validator.ts
// Maps each recognized manifest `kind` to the `$id` of the JSON schema that
// validates it. Used by getValidator() to look up compiled validators in Ajv.
var KIND_SCHEMA_IDS = {
  workspaces: qavor_workspaces_schema_default.$id,
  project: qavor_project_schema_default.$id,
  repo: qavor_repo_schema_default.$id,
  service: qavor_service_schema_default.$id,
  stateful: qavor_stateful_schema_default.$id,
  profile: qavor_profile_schema_default.$id
};
|
|
848
|
+
// Canonical list of recognized manifest kinds, used by isKnownKind() and in
// user-facing error messages. Derived from the schema registry above so the
// two can never drift apart; object keys iterate in insertion order, so the
// original ordering (workspaces, project, repo, service, stateful, profile)
// is preserved.
var ALL_KINDS = Object.keys(KIND_SCHEMA_IDS);
|
|
856
|
+
// Shared Ajv instance; created lazily by getAjv() so schema registration only
// happens when validation is actually requested.
var ajvSingleton = null;
// Cache of compiled validator functions keyed by manifest kind.
var validatorCache = /* @__PURE__ */ new Map();
|
|
858
|
+
// Builds (once) and returns the shared Ajv 2020-12 instance with every qavor
// schema registered. `strict: false` keeps Ajv from rejecting the schemas'
// annotation-style keywords; `allErrors` collects every issue in one pass.
function getAjv() {
  if (ajvSingleton) return ajvSingleton;
  const instance = new Ajv2020({
    allErrors: true,
    strict: false,
    allowUnionTypes: true
  });
  addFormats(instance);
  // Register the shared $defs schema first, then every kind-specific schema.
  const schemas = [
    qavor_defs_schema_default,
    qavor_workspaces_schema_default,
    qavor_project_schema_default,
    qavor_repo_schema_default,
    qavor_service_schema_default,
    qavor_stateful_schema_default,
    qavor_profile_schema_default
  ];
  for (const schema of schemas) instance.addSchema(schema);
  ajvSingleton = instance;
  return instance;
}
|
|
876
|
+
// Returns the compiled Ajv validator for a known manifest kind, memoizing the
// lookup. Throws a plain Error for unregistered kinds — callers are expected
// to have checked isKnownKind() first, so this indicates an internal bug.
function getValidator(kind) {
  const hit = validatorCache.get(kind);
  if (hit) return hit;
  const compiled = getAjv().getSchema(KIND_SCHEMA_IDS[kind]);
  if (!compiled) throw new Error(`Internal: schema not registered for kind=${kind}`);
  validatorCache.set(kind, compiled);
  return compiled;
}
|
|
886
|
+
// Type guard: true when `value` is one of the recognized manifest kind strings.
function isKnownKind(value) {
  if (typeof value !== "string") return false;
  return ALL_KINDS.includes(value);
}
|
|
889
|
+
// Validates a single loaded manifest document against the schema for its
// declared `kind`. Returns { ok, issues } where each issue carries a file
// position (via doc.position) for precise error reporting.
// Fails fast on a missing or unknown `kind:` before touching Ajv.
function validateDocument(doc) {
  const issues = [];
  if (!doc.kind) {
    // No kind at all — report at the document root.
    const pos = doc.position("");
    issues.push({
      file: pos.file,
      line: pos.line,
      column: pos.column,
      kind: "unknown",
      path: "",
      message: "Missing top-level `kind:` field."
    });
    return { ok: false, issues };
  }
  if (!isKnownKind(doc.kind)) {
    // Kind present but not one we have a schema for — report at /kind.
    const pos = doc.position("/kind");
    issues.push({
      file: pos.file,
      line: pos.line,
      column: pos.column,
      kind: String(doc.kind),
      path: "/kind",
      message: `Unknown kind: ${doc.kind}. Expected one of: ${ALL_KINDS.join(", ")}`
    });
    return { ok: false, issues };
  }
  // Full schema validation; Ajv runs with allErrors, so every violation is
  // collected and translated into a positioned issue.
  const validator = getValidator(doc.kind);
  const valid = validator(doc.data);
  if (valid) return { ok: true, issues: [] };
  for (const err of validator.errors ?? []) {
    issues.push(formatAjvError(doc, err));
  }
  return { ok: false, issues };
}
|
|
923
|
+
// Converts one raw Ajv error into a positioned validation issue. The most
// common Ajv keywords get rewritten into friendlier, self-contained messages;
// anything else falls back to Ajv's own message text.
function formatAjvError(doc, err) {
  const instancePath = err.instancePath ?? "";
  const pos = doc.position(instancePath);
  const params = err.params;
  let message = err.message ?? "invalid";
  switch (err.keyword) {
    case "additionalProperties":
      if (params && typeof params.additionalProperty === "string") {
        message = `Unexpected property '${params.additionalProperty}'`;
      }
      break;
    case "required":
      if (params && typeof params.missingProperty === "string") {
        message = `Missing required property '${params.missingProperty}'`;
      }
      break;
    case "enum":
      if (params && Array.isArray(params.allowedValues)) {
        message = `${message}: ${params.allowedValues.map((v) => JSON.stringify(v)).join(", ")}`;
      }
      break;
    case "pattern":
      if (params && typeof params.pattern === "string") {
        message = `Value does not match pattern /${params.pattern}/`;
      }
      break;
  }
  return {
    file: pos.file,
    line: pos.line,
    column: pos.column,
    kind: String(doc.kind ?? "unknown"),
    path: instancePath || "<root>",
    message
  };
}
|
|
949
|
+
// Renders a validation issue as a single `file:line:col [kind] path: message`
// line for human-readable CLI output.
function formatIssue(i) {
  const location = `${i.file}:${i.line}:${i.column}`;
  return `${location} [${i.kind}] ${i.path}: ${i.message}`;
}
|
|
952
|
+
|
|
953
|
+
// src/workspace/paths.ts
|
|
954
|
+
import path4 from "path";
|
|
955
|
+
// Computes every well-known path inside a workspace from its root directory.
// All generated state lives under `<root>/.qavor`; the workspace pointer
// manifest is `<root>/qavor.yaml`.
function workspacePaths(root) {
  const absRoot = path4.resolve(root);
  const stateRoot = path4.join(absRoot, ".qavor");
  const inState = (name) => path4.join(stateRoot, name);
  return {
    root: absRoot,
    workspacesFile: path4.join(absRoot, "qavor.yaml"),
    stateRoot,
    stateDir: inState("state"),
    logsDir: inState("logs"),
    composeDir: inState("compose"),
    cacheDir: inState("cache"),
    workspaceMetaFile: inState("workspace.json"),
    stateGitignore: inState(".gitignore")
  };
}
|
|
970
|
+
|
|
971
|
+
// src/workspace/init.ts
// Git transport prefixes: scp-style (`git@host:`), http(s), git, ssh, file.
var URL_RE = /^(?:git@[^:]+:|https?:\/\/|git:\/\/|ssh:\/\/|file:\/\/)/;
// True when `s` starts with a recognized git transport prefix; anything else
// is treated by callers as a local filesystem path.
function looksLikeGitUrl(s) {
  return URL_RE.test(s);
}
|
|
976
|
+
// Derives a repo directory name from a git URL, e.g.
// "https://host/org/repo.git" -> "repo", "git@host:repo.git" -> "repo".
//
// Fix: `split("/").pop()` never returns undefined on a non-empty split, so
// the original `?? split(":")` and `?? "project"` fallbacks were dead code —
// scp-style remotes without a path slash (`git@host:repo.git`) yielded the
// whole URL instead of the repo name. We now explicitly strip everything up
// to the last `:` in the final segment, and fall back to "project" when the
// URL yields an empty segment (e.g. trailing slash).
function projectRepoNameFromUrl(url) {
  // Drop any query string or fragment before inspecting the path.
  const cleaned = url.replace(/[?#].*$/, "");
  let last = cleaned.split("/").pop() ?? "";
  // scp-style remote without a slash: segment still contains `user@host:`.
  if (last.includes(":")) last = last.split(":").pop() ?? "";
  return (last || "project").replace(/\.git$/, "");
}
|
|
981
|
+
// Bootstraps a qavor workspace.
//
// Steps:
//   1. Resolve the workspace root (`opts.into` or cwd) and ensure it exists.
//   2. Obtain the project repo: clone it when `opts.source` is a git URL
//      (reusing an existing clone at `<root>/<name>.git` when present),
//      otherwise treat `opts.source` as a local directory.
//   3. Load and schema-validate the `kind: project` document from the repo's
//      qavor.yaml.
//   4. Create the `.qavor` state tree, write a self-ignoring .gitignore, the
//      `qavor.yaml` workspace pointer, and `workspace.json` metadata.
//
// Throws UserError for bad inputs (non-repo target dir, missing local path)
// and ManifestError for a missing or invalid project manifest.
// Returns { paths, projectRepoPath, project, cloned }.
async function initWorkspace(opts) {
  const workspaceRoot = path5.resolve(opts.into ?? process.cwd());
  await ensureDir(workspaceRoot);
  const paths = workspacePaths(workspaceRoot);
  let projectRepoPath;
  let cloned = false;
  if (looksLikeGitUrl(opts.source)) {
    const repoName = projectRepoNameFromUrl(opts.source);
    const target = path5.join(workspaceRoot, `${repoName}.git`);
    if (await isDirectory(target)) {
      // A directory already sits where we would clone; only reuse it when it
      // really is a git repo, otherwise refuse rather than clobber it.
      if (!await isGitRepo(target)) {
        throw new UserError(
          `Cannot reuse ${target}: directory exists but is not a git repo. Move it aside and re-run.`
        );
      }
      opts.logger.info({ target }, "reusing existing project repo clone");
      projectRepoPath = target;
    } else {
      // Per-URL cache bucket (presumably for bookkeeping across workspaces —
      // only a source.json record is written here).
      const cacheDir = path5.join(globalCacheDir(), "projects", urlHash(opts.source));
      await ensureDir(path5.dirname(cacheDir));
      opts.logger.info({ url: opts.source, target }, "cloning project repo");
      await gitClone({ url: opts.source, dest: target });
      // Best-effort: record where this URL was cloned. Failures here must not
      // fail init, hence the swallowed catch.
      try {
        await ensureDir(cacheDir);
        await fs4.writeFile(
          path5.join(cacheDir, "source.json"),
          JSON.stringify({ url: opts.source, cloned_to: target, at: (/* @__PURE__ */ new Date()).toISOString() }, null, 2)
        );
      } catch {
      }
      projectRepoPath = target;
      cloned = true;
    }
  } else {
    // Local path source: must already exist; never cloned.
    const localPath = path5.resolve(opts.source);
    if (!await isDirectory(localPath)) {
      throw new UserError(`Project repo source does not exist or is not a directory: ${localPath}`);
    }
    projectRepoPath = localPath;
  }
  // The project repo must carry a valid `kind: project` manifest.
  const projectManifestFile = path5.join(projectRepoPath, "qavor.yaml");
  const docs = await loadManifestFile(projectManifestFile);
  const projectDoc = docs.find((d) => d.kind === "project");
  if (!projectDoc) {
    throw new ManifestError(
      `Project repo at ${projectRepoPath} is missing a \`kind: project\` document in qavor.yaml.`
    );
  }
  const result = validateDocument(projectDoc);
  if (!result.ok) {
    const msg = result.issues.map((i) => ` ${i.file}:${i.line}:${i.column} ${i.path}: ${i.message}`).join("\n");
    throw new ManifestError(`Invalid project manifest:
${msg}`);
  }
  const project = projectDoc.data;
  // Materialize the .qavor state tree.
  await ensureDir(paths.stateRoot);
  await ensureDir(paths.stateDir);
  await ensureDir(paths.logsDir);
  await ensureDir(paths.composeDir);
  await ensureDir(paths.cacheDir);
  // The state dir ignores everything (including itself) except this file.
  await fs4.writeFile(
    paths.stateGitignore,
    [
      "# qavor state directory \u2014 all files are generated. Do not commit.",
      "*",
      "!.gitignore",
      ""
    ].join("\n")
  );
  // Workspace pointer uses a forward-slash relative path for portability.
  const relProjectPath = "./" + path5.relative(workspaceRoot, projectRepoPath).split(path5.sep).join("/");
  const workspacesYaml = renderWorkspacesYaml(relProjectPath);
  await fs4.writeFile(paths.workspacesFile, workspacesYaml, "utf8");
  // Record a content hash of the manifest so later runs can detect drift.
  const manifestHash = createHash2("sha256").update(await fs4.readFile(projectManifestFile)).digest("hex");
  await writeJsonFile(paths.workspaceMetaFile, {
    project_name: project.name,
    project_repo_path: projectRepoPath,
    manifest_hash: manifestHash,
    initialized_at: (/* @__PURE__ */ new Date()).toISOString()
  });
  return { paths, projectRepoPath, project, cloned };
}
|
|
1062
|
+
// Short, stable identifier for a git URL: first 16 hex chars of its SHA-256.
// Used to bucket per-URL cache directories.
function urlHash(url) {
  const digest = createHash2("sha256").update(url).digest("hex");
  return digest.slice(0, 16);
}
|
|
1065
|
+
// Renders the generated `qavor.yaml` workspace pointer. The file carries a
// provenance comment plus the relative path to the project repo, and always
// ends with a trailing newline.
function renderWorkspacesYaml(relProjectPath) {
  return `# Generated by \`qavor init\`. Points at the project repo whose
# \`kind: project\` manifest enumerates the rest of the workspace.
kind: workspaces
root_project_path: ${relProjectPath}
`;
}
|
|
1074
|
+
|
|
1075
|
+
// src/cli/options.ts
// Normalizes a commander Command's raw option bag into the shared root-option
// shape: booleans are coerced, `--jobs` is parsed to an int, and absent
// string options become undefined.
function rootOptions(cmd) {
  const { json, verbose, jobs, config } = cmd.opts();
  return {
    json: Boolean(json),
    verbose: Boolean(verbose),
    jobs: typeof jobs === "string" ? Number.parseInt(jobs, 10) : void 0,
    config: typeof config === "string" ? config : void 0
  };
}
|
|
1085
|
+
// Walks up a subcommand's parent chain to the root program and returns its
// root options. Falls back to defaults when no command was supplied at all.
function inheritRootOptions(cmd) {
  let node = cmd;
  while (node?.parent) node = node.parent;
  if (!node) return { json: false, verbose: false, jobs: void 0, config: void 0 };
  return rootOptions(node);
}
|
|
1091
|
+
|
|
1092
|
+
// src/cli/commands/init.ts
// Registers `qavor init <source> [--into <dir>]`: bootstraps a workspace from
// a project repo (local path or git URL) and reports the result in either
// JSON or human-readable form depending on the root `--json` flag.
function registerInit(program) {
  program.command("init").description("Bootstrap a workspace from a project repo (local path or git URL).").argument("<source>", "Local path to a project repo, or a git URL.").option("--into <dir>", "Workspace root directory. Defaults to the current directory.").action(async (source, opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const logger = getLogger();
    // Only forward `into` when it was actually provided, so initWorkspace's
    // own cwd default applies otherwise.
    const initOpts = { source, logger };
    if (opts.into) initOpts.into = opts.into;
    const result = await initWorkspace(initOpts);
    if (root.json) {
      emitJson({
        ok: true,
        workspace: result.paths.root,
        project_name: result.project.name,
        project_repo_path: result.projectRepoPath,
        cloned_project: result.cloned,
        repositories: result.project.repositories.length
      });
    } else {
      emit(`Workspace initialized at ${result.paths.root}`);
      emit(` project: ${result.project.name}`);
      emit(` project repo: ${path6.relative(result.paths.root, result.projectRepoPath)}`);
      emit(` repositories declared: ${result.project.repositories.length}`);
      emit(` next: qavor clone`);
    }
  });
}
|
|
1118
|
+
|
|
1119
|
+
// src/cli/commands/workspace.ts
|
|
1120
|
+
import path8 from "path";
|
|
1121
|
+
import fs6 from "fs/promises";
|
|
1122
|
+
|
|
1123
|
+
// src/workspace/locate.ts
|
|
1124
|
+
import fs5 from "fs/promises";
|
|
1125
|
+
import path7 from "path";
|
|
1126
|
+
// Walks upward from `start` looking for a directory whose qavor.yaml contains
// a `kind: workspaces` document. Returns the directory path, or null when the
// filesystem root (or a 64-level safety bound) is reached without a match.
async function findWorkspaceRoot(start) {
  let dir = path7.resolve(start);
  for (let depth = 0; depth < 64; depth++) {
    const candidate = path7.join(dir, "qavor.yaml");
    if (await isFile(candidate)) {
      try {
        const docs = await loadManifestFile(candidate, { throwOnParseError: false });
        if (docs.some((d) => d.kind === "workspaces")) return dir;
      } catch {
        // Unreadable manifest here — keep walking upward.
      }
    }
    const parent = path7.dirname(dir);
    if (parent === dir) return null; // hit the filesystem root
    dir = parent;
  }
  return null;
}
|
|
1143
|
+
// Locates the enclosing workspace (searching upward from `start`), reads its
// workspace pointer manifest, and resolves the project repo location.
// Throws UserError when no workspace is found or the pointer file is missing
// its `kind: workspaces` document or `root_project_path`.
// Returns { paths, projectRepoPath, projectManifestFile }.
async function resolveWorkspace(start = process.cwd()) {
  const root = await findWorkspaceRoot(start);
  if (!root) {
    throw new UserError(
      `No qavor workspace found searching upward from ${start}. Run \`qavor init <project-repo-source>\` first.`
    );
  }
  const paths = workspacePaths(root);
  const docs = await loadManifestFile(paths.workspacesFile);
  const workspaceDoc = docs.find((d) => d.kind === "workspaces");
  if (!workspaceDoc) {
    throw new UserError(`Workspace pointer at ${paths.workspacesFile} has no \`kind: workspaces\` document.`);
  }
  const rootProjectPath = workspaceDoc.data.root_project_path;
  if (typeof rootProjectPath !== "string" || rootProjectPath.length === 0) {
    throw new UserError(
      `Workspace pointer at ${paths.workspacesFile} is missing \`root_project_path\`.`
    );
  }
  // Pointer paths may be absolute or relative to the workspace root.
  const projectRepoPath = path7.isAbsolute(rootProjectPath) ? rootProjectPath : path7.resolve(paths.root, rootProjectPath);
  const projectManifestFile = path7.join(projectRepoPath, "qavor.yaml");
  return { paths, projectRepoPath, projectManifestFile };
}
|
|
1166
|
+
// Loads the project repo's qavor.yaml and returns its first `kind: project`
// document (wrapped as { data }). Throws UserError when none is present.
async function readProjectManifest(projectManifestFile) {
  const docs = await loadManifestFile(projectManifestFile);
  for (const doc of docs) {
    if (doc.kind === "project") return { data: doc.data };
  }
  throw new UserError(`No \`kind: project\` document found in ${projectManifestFile}.`);
}
|
|
1174
|
+
|
|
1175
|
+
// src/cli/commands/workspace.ts
// Registers `qavor workspace info`: resolves the enclosing workspace and
// prints (or emits as JSON) its key paths, project name, and any recorded
// workspace metadata from .qavor/workspace.json.
function registerWorkspace(program) {
  const ws = program.command("workspace").description("Workspace operations.");
  ws.command("info").description("Show information about the workspace at or above the cwd.").action(async (_opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const resolved = await resolveWorkspace();
    const project = await readProjectManifest(resolved.projectManifestFile);
    // Metadata file is optional — missing/unreadable just yields {}.
    let meta = {};
    try {
      meta = await readJsonFile(resolved.paths.workspaceMetaFile);
    } catch {
    }
    const info = {
      workspace_root: resolved.paths.root,
      workspaces_file: resolved.paths.workspacesFile,
      project_repo_path: resolved.projectRepoPath,
      project_manifest_file: resolved.projectManifestFile,
      project_name: typeof project.data.name === "string" ? project.data.name : null,
      state_dir: resolved.paths.stateRoot,
      meta
    };
    if (root.json) {
      emitJson(info);
      return;
    }
    emit(`Workspace root: ${info.workspace_root}`);
    emit(`Workspaces manifest: ${path8.relative(info.workspace_root, info.workspaces_file)}`);
    emit(`Project repo path: ${path8.relative(info.workspace_root, info.project_repo_path)}`);
    emit(`Project manifest: ${path8.relative(info.workspace_root, info.project_manifest_file)}`);
    emit(`Project name: ${info.project_name ?? "<unknown>"}`);
    emit(`State directory: ${path8.relative(info.workspace_root, info.state_dir)}`);
    if (Object.keys(meta).length > 0) {
      emit("Workspace meta:");
      for (const [k, v] of Object.entries(meta)) emit(` ${k}: ${typeof v === "string" ? v : JSON.stringify(v)}`);
    }
    // NOTE(review): this access check has no observable effect (result unused,
    // error swallowed) — looks like leftover/dead code; candidate for removal.
    try {
      await fs6.access(resolved.paths.workspaceMetaFile);
    } catch {
    }
  });
}
|
|
1216
|
+
|
|
1217
|
+
// src/cli/commands/validate.ts
|
|
1218
|
+
import path9 from "path";
|
|
1219
|
+
import fs7 from "fs/promises";
|
|
1220
|
+
import pMap from "p-map";
|
|
1221
|
+
|
|
1222
|
+
// src/util/concurrency.ts
|
|
1223
|
+
import os from "os";
|
|
1224
|
+
import pLimit from "p-limit";
|
|
1225
|
+
// Resolves the effective parallelism: a finite numeric override >= 1 wins
// (floored to an integer); otherwise the host's available parallelism is
// used, clamped to at least 1.
function resolveJobs(override) {
  const validOverride = typeof override === "number" && Number.isFinite(override) && override >= 1;
  if (validOverride) return Math.floor(override);
  const detected = typeof os.availableParallelism === "function" ? os.availableParallelism() : os.cpus().length;
  return Math.max(1, detected);
}
|
|
1232
|
+
|
|
1233
|
+
// src/cli/commands/validate.ts
// Registers `qavor validate <path>`: validates a single qavor.yaml, a
// directory containing one, or the immediate child directories of a directory
// of manifests. Files are validated concurrently (bounded by --jobs); any
// issue causes a ManifestError after reporting.
function registerValidate(program) {
  program.command("validate").description("Validate one or more qavor manifest files. Targets a file or a directory.").argument("<path>", "Path to a qavor.yaml file, a directory containing one, or a directory of multiple manifests.").action(async (target, _opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const logger = getLogger();
    const abs = path9.resolve(target);
    // Collect candidate manifest files: the target itself, the directory's
    // own qavor.yaml, and qavor.yaml one level down in child directories.
    const files = [];
    if (await isFile(abs)) {
      files.push(abs);
    } else if (await isDirectory(abs)) {
      const direct = path9.join(abs, "qavor.yaml");
      if (await isFile(direct)) files.push(direct);
      try {
        const entries = await fs7.readdir(abs, { withFileTypes: true });
        for (const e of entries) {
          if (e.isDirectory()) {
            const child = path9.join(abs, e.name, "qavor.yaml");
            if (await isFile(child)) files.push(child);
          }
          // NOTE(review): dead branch — a direct-child qavor.yaml is already
          // captured via `direct` above; this if-body is intentionally(?) empty.
          if (e.isFile() && e.name === "qavor.yaml") {
          }
        }
      } catch {
      }
    } else {
      throw new UserError(`Path not found: ${abs}`);
    }
    if (files.length === 0) throw new UserError(`No qavor.yaml files found under ${abs}.`);
    const jobs = resolveJobs(root.jobs);
    // Issues are appended from concurrent workers; order is not guaranteed.
    const issues = [];
    await pMap(
      files,
      async (file) => {
        try {
          const docs = await loadManifestFile(file);
          for (const d of docs) {
            const r = validateDocument(d);
            if (!r.ok) issues.push(...r.issues);
          }
        } catch (err) {
          // Parse/read failures become a synthetic issue at line 1.
          issues.push({
            file,
            line: 1,
            column: 1,
            kind: "unknown",
            path: "",
            message: err instanceof Error ? err.message : String(err)
          });
        }
      },
      { concurrency: jobs }
    );
    if (root.json) {
      emitJson({ ok: issues.length === 0, files: files.length, issues });
    } else {
      if (issues.length === 0) {
        emit(`OK \u2014 ${files.length} file(s) validated.`);
      } else {
        emit(`FAILED \u2014 ${issues.length} issue(s) across ${files.length} file(s):`);
        for (const i of issues) emit(` ${formatIssue(i)}`);
      }
    }
    if (issues.length > 0) {
      logger.debug({ count: issues.length }, "validation failed");
      // Exit with the manifest-error code after output has been emitted.
      throw new ManifestError(`Validation failed with ${issues.length} issue(s).`);
    }
  });
}
|
|
1301
|
+
|
|
1302
|
+
// src/cli/commands/git.ts
|
|
1303
|
+
import path11 from "path";
|
|
1304
|
+
import pMap2 from "p-map";
|
|
1305
|
+
|
|
1306
|
+
// src/workspace/repos.ts
|
|
1307
|
+
import path10 from "path";
|
|
1308
|
+
// Expands the project manifest's `repositories` list into fully-resolved repo
// records. Each entry may be a bare name string or an object; per-repo fields
// fall back to the project-level `git` block (default_branch, shallow,
// submodules), and the clone URL is derived via deriveCloneUrl from either an
// explicit url or the project's root_url/repo_prefix.
// Throws ManifestError on a missing or duplicate repo name.
function resolveRepos(opts) {
  const list = opts.project.repositories;
  const repos = [];
  const seen = /* @__PURE__ */ new Set();
  for (const entry of list) {
    // Bare-string entries are shorthand for { name: entry }.
    const normalized = typeof entry === "string" ? { name: entry } : entry;
    const name = normalized.name;
    if (!name) throw new ManifestError(`Project repository entry is missing a name.`);
    if (seen.has(name)) {
      throw new ManifestError(`Duplicate repository name in project manifest: '${name}'.`);
    }
    seen.add(name);
    // Checkout directory: explicit path (absolute or workspace-relative),
    // otherwise the convention `<workspaceRoot>/<name>.git`.
    const dir = normalized.path ? path10.isAbsolute(normalized.path) ? normalized.path : path10.resolve(opts.workspaceRoot, normalized.path) : path10.join(opts.workspaceRoot, `${name}.git`);
    const url = deriveCloneUrl({
      explicitUrl: normalized.url,
      rootUrl: opts.project.git?.root_url,
      repoPrefix: opts.project.git?.repo_prefix,
      name
    });
    repos.push({
      name,
      url,
      dir,
      // Per-repo values win; project-level git block supplies defaults.
      branch: normalized.branch ?? opts.project.git?.default_branch,
      tag: normalized.tag,
      commit: normalized.commit,
      shallow: normalized.shallow ?? opts.project.git?.shallow,
      submodules: normalized.submodules ?? opts.project.git?.submodules,
      optional: Boolean(normalized.optional),
      // Marks the entry that points at the already-present project repo so
      // clone commands can skip it.
      isProjectRepo: path10.resolve(dir) === path10.resolve(opts.projectRepoPath)
    });
  }
  return repos;
}
|
|
1342
|
+
|
|
1343
|
+
// src/cli/repos.ts
|
|
1344
|
+
// Filters `all` down to the repos named in `selector`, preserving the order
// of `all`. An empty/absent selector selects everything. Throws when any
// requested name does not exist.
function selectRepos(all, selector) {
  if (!selector || selector.length === 0) return all;
  const wanted = new Set(selector);
  // Set.delete returns true only for the first occurrence of each name,
  // so duplicates in `all` are kept out just like the original push/delete.
  const chosen = all.filter((repo) => wanted.delete(repo.name));
  if (wanted.size > 0) {
    throw new Error(
      `Unknown repo${wanted.size > 1 ? "s" : ""}: ${[...wanted].join(", ")}`
    );
  }
  return chosen;
}
|
|
1361
|
+
// Returns the subset of repos whose checkout directory already exists on
// disk, preserving input order. Existence checks run concurrently.
async function reposPresent(repos) {
  const present = await Promise.all(repos.map((r) => isDirectory(r.dir)));
  return repos.filter((_, index) => present[index]);
}
|
|
1368
|
+
|
|
1369
|
+
// src/cli/commands/git.ts
|
|
1370
|
+
// Resolves the current workspace, reads its project manifest, and expands the
// declared repositories. Returns { workspaceRoot, repos }.
async function loadProjectRepos() {
  const ws = await resolveWorkspace();
  const { data } = await readProjectManifest(ws.projectManifestFile);
  const repos = resolveRepos({
    workspaceRoot: ws.paths.root,
    project: data,
    projectRepoPath: ws.projectRepoPath
  });
  return { workspaceRoot: ws.paths.root, repos };
}
|
|
1380
|
+
// Attaches the shared `--repo <name...>` subset-selection option to a command
// and returns the command for chaining.
function repoOption(c) {
  const description = "Operate on a subset of repos by name.";
  return c.option("--repo <name...>", description);
}
|
|
1383
|
+
function registerGitCommands(program) {
|
|
1384
|
+
repoOption(
|
|
1385
|
+
program.command("clone").description("Clone every repo enumerated in the project manifest.")
|
|
1386
|
+
).action(async (opts, cmd) => {
|
|
1387
|
+
const root = inheritRootOptions(cmd);
|
|
1388
|
+
const logger = getLogger();
|
|
1389
|
+
const { workspaceRoot, repos } = await loadProjectRepos();
|
|
1390
|
+
const selected = selectRepos(repos, opts.repo);
|
|
1391
|
+
const jobs = resolveJobs(root.jobs);
|
|
1392
|
+
const results = [];
|
|
1393
|
+
await pMap2(
|
|
1394
|
+
selected,
|
|
1395
|
+
async (r) => {
|
|
1396
|
+
if (r.isProjectRepo) {
|
|
1397
|
+
results.push({ repo: r.name, status: "present", message: "project repo (already cloned)" });
|
|
1398
|
+
return;
|
|
1399
|
+
}
|
|
1400
|
+
if (await isGitRepo(r.dir)) {
|
|
1401
|
+
results.push({ repo: r.name, status: "present" });
|
|
1402
|
+
return;
|
|
1403
|
+
}
|
|
1404
|
+
try {
|
|
1405
|
+
logger.info({ repo: r.name, url: r.url, dir: r.dir }, "clone: starting");
|
|
1406
|
+
await gitClone({
|
|
1407
|
+
url: r.url,
|
|
1408
|
+
dest: r.dir,
|
|
1409
|
+
branch: r.branch,
|
|
1410
|
+
tag: r.tag,
|
|
1411
|
+
commit: r.commit,
|
|
1412
|
+
shallow: r.shallow,
|
|
1413
|
+
submodules: r.submodules
|
|
1414
|
+
});
|
|
1415
|
+
results.push({ repo: r.name, status: "cloned" });
|
|
1416
|
+
} catch (err) {
|
|
1417
|
+
if (r.optional) {
|
|
1418
|
+
results.push({ repo: r.name, status: "skipped", message: "optional; clone failed" });
|
|
1419
|
+
} else {
|
|
1420
|
+
throw new RuntimeFailure(
|
|
1421
|
+
`Clone failed for ${r.name}: ${err instanceof Error ? err.message : String(err)}`
|
|
1422
|
+
);
|
|
1423
|
+
}
|
|
1424
|
+
}
|
|
1425
|
+
},
|
|
1426
|
+
{ concurrency: jobs }
|
|
1427
|
+
);
|
|
1428
|
+
if (root.json) {
|
|
1429
|
+
emitJson({ workspace: workspaceRoot, results });
|
|
1430
|
+
return;
|
|
1431
|
+
}
|
|
1432
|
+
for (const r of results) {
|
|
1433
|
+
emit(`${r.status.padEnd(8)} ${r.repo}${r.message ? " \u2014 " + r.message : ""}`);
|
|
1434
|
+
}
|
|
1435
|
+
});
|
|
1436
|
+
// `qavor sync` — run `git fetch` then a fast-forward-only pull across every
// selected, already-cloned repo, in parallel. Per-repo failures are collected
// rather than aborting the batch; a RuntimeFailure at the end signals a
// non-zero exit when any repo failed.
repoOption(
  program.command("sync").description("Run `git fetch && git pull --ff-only` across selected repos.")
).action(async (opts, cmd) => {
  const root = inheritRootOptions(cmd);
  const { repos } = await loadProjectRepos();
  // Only operate on repos actually present on disk.
  const selected = await reposPresent(selectRepos(repos, opts.repo));
  const jobs = resolveJobs(root.jobs);
  const results = [];
  await pMap2(
    selected,
    async (r) => {
      try {
        await gitFetch(r.dir);
        await gitPullFastForward(r.dir);
        results.push({ repo: r.name, ok: true });
      } catch (err) {
        // Record the failure; reporting happens after the whole batch runs.
        // NOTE: `results` order reflects concurrent completion, not input order.
        results.push({ repo: r.name, ok: false, error: err instanceof Error ? err.message : String(err) });
      }
    },
    { concurrency: jobs }
  );
  if (root.json) {
    emitJson({ results });
    return;
  }
  for (const r of results) emit(`${r.ok ? "ok " : "fail"} ${r.repo}${r.error ? " \u2014 " + r.error : ""}`);
  if (results.some((r) => !r.ok)) throw new RuntimeFailure("Some repos failed to sync.");
});
|
|
1464
|
+
// `qavor status` — gather branch/ahead/behind/dirty/last-commit info for every
// selected repo in parallel, then print either JSON or a padded text table.
repoOption(
  program.command("status").description("Aggregated repo status across selected repos.")
).action(async (opts, cmd) => {
  const root = inheritRootOptions(cmd);
  const { workspaceRoot, repos } = await loadProjectRepos();
  const selected = await reposPresent(selectRepos(repos, opts.repo));
  const jobs = resolveJobs(root.jobs);
  // pMap preserves input order in its result array, so rows align with `selected`.
  const rows = await pMap2(
    selected,
    async (r) => {
      const s = await readRepoStatus(r.dir);
      return {
        repo: r.name,
        branch: s.branch,
        ahead: s.ahead,
        behind: s.behind,
        dirty: s.dirtyCount,
        last_commit: s.lastCommit,
        last_commit_subject: s.lastCommitSubject
      };
    },
    { concurrency: jobs }
  );
  if (root.json) {
    emitJson({ workspace: workspaceRoot, repos: rows });
    return;
  }
  const headers = ["REPO", "BRANCH", "AHEAD", "BEHIND", "DIRTY", "COMMIT", "SUBJECT"];
  const data = rows.map((r) => [
    r.repo,
    r.branch ?? "-",
    String(r.ahead),
    String(r.behind),
    String(r.dirty),
    r.last_commit ?? "-",
    // Only the first line of the subject, capped at 60 chars.
    (r.last_commit_subject ?? "").split("\n")[0]?.slice(0, 60) ?? ""
  ]);
  // Column width = max of header and every cell in that column.
  const widths = headers.map(
    (h, i) => Math.max(h.length, ...data.map((row) => (row[i] ?? "").length))
  );
  const fmt = (row) => row.map((c, i) => c.padEnd(widths[i] ?? 0)).join(" ");
  emit(fmt(headers));
  for (const row of data) emit(fmt(row));
});
|
|
1508
|
+
// `qavor commit` — commit pending changes across selected repos with a shared
// message. Per-repo errors are collected; any error yields a RuntimeFailure
// after all repos have been attempted.
repoOption(
  program.command("commit").description("Commit pending changes across selected repos.").requiredOption("-m, --message <msg>", "Commit message.").option("--allow-empty", "Allow empty commits.")
).action(
  async (opts, cmd) => {
    const root = inheritRootOptions(cmd);
    // Guard against a whitespace-only message (commander only checks presence).
    if (!opts.message || opts.message.trim().length === 0) {
      throw new UserError(`Commit message must not be empty.`);
    }
    const { repos } = await loadProjectRepos();
    const selected = await reposPresent(selectRepos(repos, opts.repo));
    const jobs = resolveJobs(root.jobs);
    const results = [];
    await pMap2(
      selected,
      async (r) => {
        try {
          const res = await gitCommit(r.dir, opts.message, { allowEmpty: Boolean(opts.allowEmpty) });
          // committed=false with no error means "nothing to commit" (skipped).
          results.push({ repo: r.name, committed: res.committed });
        } catch (err) {
          results.push({ repo: r.name, committed: false, error: err instanceof Error ? err.message : String(err) });
        }
      },
      { concurrency: jobs }
    );
    if (root.json) {
      emitJson({ results });
      return;
    }
    for (const r of results) {
      const verb = r.committed ? "committed" : r.error ? "failed" : "skipped";
      emit(`${verb.padEnd(10)} ${r.repo}${r.error ? " \u2014 " + r.error : ""}`);
    }
    if (results.some((r) => r.error)) throw new RuntimeFailure("Some commits failed.");
  }
);
|
|
1543
|
+
// `qavor push` — `git push` the current branch of every selected repo in
// parallel; failures are reported per repo, then surfaced as a RuntimeFailure.
repoOption(
  program.command("push").description("git push the current branch across selected repos.")
).action(async (opts, cmd) => {
  const root = inheritRootOptions(cmd);
  const { repos } = await loadProjectRepos();
  const selected = await reposPresent(selectRepos(repos, opts.repo));
  const jobs = resolveJobs(root.jobs);
  const results = [];
  await pMap2(
    selected,
    async (r) => {
      try {
        await gitPush(r.dir);
        results.push({ repo: r.name, ok: true });
      } catch (err) {
        results.push({ repo: r.name, ok: false, error: err instanceof Error ? err.message : String(err) });
      }
    },
    { concurrency: jobs }
  );
  if (root.json) {
    emitJson({ results });
    return;
  }
  for (const r of results) emit(`${r.ok ? "ok " : "fail"} ${r.repo}${r.error ? " \u2014 " + r.error : ""}`);
  if (results.some((r) => !r.ok)) throw new RuntimeFailure("Some pushes failed.");
});
|
|
1570
|
+
void path11;
|
|
1571
|
+
}
|
|
1572
|
+
|
|
1573
|
+
// src/cli/commands/prepare.ts
|
|
1574
|
+
import pMap4 from "p-map";
|
|
1575
|
+
|
|
1576
|
+
// src/manifest/discovery.ts
|
|
1577
|
+
import fs8 from "fs/promises";
|
|
1578
|
+
import path12 from "path";
|
|
1579
|
+
import pMap3 from "p-map";
|
|
1580
|
+
// Manifest discovery tuning: maximum directory depth walk() recurses below a
// repo root before giving up.
var MAX_DEPTH = 4;
// Directory names that never contain user manifests (VCS metadata, package
// caches, virtualenvs, build output) — pruned wholesale during discovery.
var SKIP_DIRS = /* @__PURE__ */ new Set([
  ".git",
  ".qavor",
  "node_modules",
  ".venv",
  "venv",
  "__pycache__",
  "dist",
  "build",
  "target",
  ".next",
  ".svelte-kit",
  ".cache"
]);
|
|
1595
|
+
async function discoverManifestFiles(repoRoot) {
  // Collect every qavor.yaml belonging to a repo: the optional root-level
  // file, everything under the dedicated `qavor/` directory, and nested
  // manifests found by walk(). A Set de-duplicates; the result is sorted so
  // discovery order is deterministic.
  const base = path12.resolve(repoRoot);
  if (!await isDirectory(base)) return [];
  const manifests = /* @__PURE__ */ new Set();
  // Root-level manifest (walk() deliberately skips it at depth 0).
  const rootManifest = path12.join(base, "qavor.yaml");
  try {
    await fs8.access(rootManifest);
    manifests.add(rootManifest);
  } catch {
    // No root manifest — nothing to record.
  }
  // Dedicated qavor/ directory gets its own full walk.
  const dedicatedDir = path12.join(base, "qavor");
  if (await isDirectory(dedicatedDir)) {
    for await (const found of walk(dedicatedDir, dedicatedDir, 0)) {
      manifests.add(found);
    }
  }
  // General repo walk (prunes SKIP_DIRS and the top-level qavor/ dir).
  for await (const found of walk(base, base, 0)) manifests.add(found);
  return [...manifests].sort();
}
|
|
1614
|
+
async function* walk(rootBase, current, depth) {
  // Depth-limited recursive search yielding paths of files named
  // `qavor.yaml`. `rootBase` only serves to suppress the repo-root manifest,
  // which discoverManifestFiles() records separately.
  if (depth > MAX_DEPTH) return;
  let entries;
  try {
    entries = await fs8.readdir(current, { withFileTypes: true });
  } catch {
    // Unreadable directory — treat it as empty.
    return;
  }
  for (const entry of entries) {
    const full = path12.join(current, entry.name);
    if (entry.isFile()) {
      if (entry.name !== "qavor.yaml") continue;
      // The root-level manifest is handled explicitly by the caller.
      if (depth === 0 && current === rootBase) continue;
      yield full;
    } else if (entry.isDirectory()) {
      if (SKIP_DIRS.has(entry.name)) continue;
      // The top-level `qavor/` directory gets its own dedicated walk.
      if (depth === 0 && entry.name === "qavor") continue;
      yield* walk(rootBase, full, depth + 1);
    }
  }
}
|
|
1634
|
+
// Build the cross-repo manifest registry: discover, parse, and validate every
// qavor.yaml in every repo (concurrently), then index valid documents by name.
//
// opts.repos       — iterable of [repoName, repoDir] pairs
// opts.concurrency — max repos scanned in parallel (default 8)
//
// Returns { byName, entries, issues }:
//   entries — every valid document. NOTE: order reflects concurrent
//             completion, so it is not guaranteed stable across runs.
//   byName  — first-registered entry per name (workspaces/project docs excluded)
//   issues  — accumulated parse/validation/duplicate diagnostics
async function buildWorkspaceRegistry(opts) {
  const issues = [];
  const all = [];
  const reposList = [];
  for (const [name, dir] of opts.repos) reposList.push({ name, dir });
  await pMap3(
    reposList,
    async ({ name: repoName, dir }) => {
      const files = await discoverManifestFiles(dir);
      for (const file of files) {
        let docs;
        try {
          docs = await loadManifestFile(file);
        } catch (err) {
          // File-level parse failure: one issue pinned at 1:1, skip the file.
          issues.push({
            file,
            line: 1,
            column: 1,
            kind: "unknown",
            path: "",
            message: err instanceof Error ? err.message : String(err)
          });
          continue;
        }
        for (const doc of docs) {
          if (!isKnownKind(doc.kind)) {
            const pos = doc.position("/kind");
            issues.push({
              file: pos.file,
              line: pos.line,
              column: pos.column,
              kind: String(doc.kind ?? "unknown"),
              path: "/kind",
              message: `Unknown or missing kind in this document`
            });
            continue;
          }
          const result = validateDocument(doc);
          if (!result.ok) {
            issues.push(...result.issues);
            continue;
          }
          const data = doc.data;
          all.push({
            kind: doc.kind,
            name: typeof data.name === "string" ? data.name : "",
            file: doc.file,
            docIndex: doc.docIndex,
            dir: path12.dirname(doc.file),
            repo: repoName,
            data: doc.data,
            position: doc.position
          });
        }
      }
    },
    { concurrency: opts.concurrency ?? 8 }
  );
  // Index by name; a duplicate of the SAME kind is reported as an issue.
  // A name shared across *different* kinds is neither flagged nor re-indexed
  // (the first entry keeps the byName slot) — NOTE(review): confirm that
  // silently dropping cross-kind collisions is intentional.
  const byName = /* @__PURE__ */ new Map();
  for (const entry of all) {
    if (!entry.name) continue;
    if (entry.kind === "workspaces" || entry.kind === "project") continue;
    const key = entry.name;
    const existing = byName.get(key);
    if (existing && existing.kind === entry.kind) {
      const pos = entry.position("/name");
      issues.push({
        file: pos.file,
        line: pos.line,
        column: pos.column,
        kind: entry.kind,
        path: "/name",
        message: `Duplicate ${entry.kind} name '${entry.name}'. Already declared at ${existing.file}.`
      });
      continue;
    }
    if (!existing) byName.set(key, entry);
  }
  return { byName, entries: all, issues };
}
|
|
1714
|
+
|
|
1715
|
+
// src/prepare/prepare.ts
|
|
1716
|
+
import { createHash as createHash3 } from "crypto";
|
|
1717
|
+
import { createReadStream as createReadStream2 } from "fs";
|
|
1718
|
+
import fs10 from "fs/promises";
|
|
1719
|
+
import path14 from "path";
|
|
1720
|
+
import { execa as execa3 } from "execa";
|
|
1721
|
+
|
|
1722
|
+
// src/env/composer.ts
|
|
1723
|
+
import path13 from "path";
|
|
1724
|
+
|
|
1725
|
+
// src/env/dotenv.ts
|
|
1726
|
+
import fs9 from "fs/promises";
|
|
1727
|
+
async function loadDotenvFile(file) {
  // Parse a dotenv file into { key, value, file, line } entries.
  // A missing file is simply an empty layer. Supported syntax: blank lines,
  // `#` comments, optional `export ` prefix, single/double-quoted values, and
  // ` #` trailing comments on unquoted values.
  if (!await pathExists(file)) return [];
  const contents = await fs9.readFile(file, "utf8");
  const entries = [];
  let lineNo = 0;
  for (const rawLine of contents.split(/\r?\n/)) {
    lineNo += 1;
    const trimmed = rawLine.trim();
    if (trimmed === "" || trimmed.startsWith("#")) continue;
    // Tolerate shell-style `export KEY=...` lines.
    const body = trimmed.startsWith("export ") ? trimmed.slice("export ".length).trimStart() : trimmed;
    const sep = body.indexOf("=");
    if (sep <= 0) continue;
    const key = body.slice(0, sep).trim();
    if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) continue;
    let value = body.slice(sep + 1);
    const quote = value[0];
    if (quote === '"' || quote === "'") {
      // Quoted: take everything between the opening quote and the LAST
      // matching quote; an unterminated quote leaves the value untouched.
      const closing = value.lastIndexOf(quote);
      if (closing > 0) value = value.slice(1, closing);
    } else {
      // Unquoted: strip a ` #` trailing comment, then surrounding whitespace.
      const comment = value.indexOf(" #");
      if (comment >= 0) value = value.slice(0, comment);
      value = value.trim();
    }
    entries.push({ key, value, file, line: lineNo });
  }
  return entries;
}
|
|
1755
|
+
|
|
1756
|
+
// src/env/composer.ts
|
|
1757
|
+
// Compose the effective environment for one service in a given mode.
// Layers are pushed lowest-precedence first; interpolateLayers() applies them
// in order, so LATER entries override EARLIER ones:
//   1. service.env.common                      (manifest)
//   2. service.env.native / service.env.docker (manifest, mode-specific)
//   3. <manifest dir>/.env
//   4. <manifest dir>/.env.native or .env.docker
//   5. <workspace root>/.env — NOTE(review): workspace values override
//      service-local ones here; confirm that precedence is intended.
//   6. CLI --env overrides (highest precedence)
// Returns interpolateLayers' { values, issues }.
async function composeServiceEnv(input) {
  const issues = [];
  const layers = [];
  const manifestDir = path13.dirname(input.serviceDoc.file);
  const env = input.service.env;
  const positionFor = input.serviceDoc.position;
  if (env?.common) {
    pushEnvMap(layers, env.common, "service.env.common", input.serviceDoc.file, positionFor, "/env/common");
  }
  if (input.mode === "native" && env?.native) {
    pushEnvMap(layers, env.native, "service.env.native", input.serviceDoc.file, positionFor, "/env/native");
  } else if (input.mode === "docker" && env?.docker) {
    pushEnvMap(layers, env.docker, "service.env.docker", input.serviceDoc.file, positionFor, "/env/docker");
  }
  // Dotenv layers carry no spec metadata (spec: null).
  const baseDotenv = await loadDotenvFile(path13.join(manifestDir, ".env"));
  for (const e of baseDotenv) {
    layers.push({
      key: e.key,
      raw: e.value,
      layer: "service.env",
      file: e.file,
      line: e.line,
      spec: null
    });
  }
  const modeDotenvFile = path13.join(manifestDir, input.mode === "native" ? ".env.native" : ".env.docker");
  const modeDotenv = await loadDotenvFile(modeDotenvFile);
  for (const e of modeDotenv) {
    layers.push({
      key: e.key,
      raw: e.value,
      layer: `service.env.${input.mode}`,
      file: e.file,
      line: e.line,
      spec: null
    });
  }
  const wsEnv = await loadDotenvFile(path13.join(input.workspaceRoot, ".env"));
  for (const e of wsEnv) {
    layers.push({
      key: e.key,
      raw: e.value,
      layer: "workspace.env",
      file: e.file,
      line: e.line,
      spec: null
    });
  }
  // CLI overrides have no source file; provenance shows "<cli>" at line 0.
  if (input.cliEnv) {
    for (const [k, v] of Object.entries(input.cliEnv)) {
      layers.push({
        key: k,
        raw: v,
        layer: "cli.--env",
        file: "<cli>",
        line: 0,
        spec: null
      });
    }
  }
  return interpolateLayers(layers, issues);
}
|
|
1819
|
+
function pushEnvMap(layers, map, layerLabel, file, positionFor, basePath) {
  // Expand one manifest env map into ordered layer entries. Plain values are
  // stringified; EnvSpec objects resolve to `value`, else `default`, else an
  // empty-raw placeholder that still carries the spec (so required/secret
  // metadata survives into interpolateLayers()).
  for (const [key, val] of Object.entries(map)) {
    const pos = positionFor(`${basePath}/${key}`);
    if (!isEnvSpec(val)) {
      layers.push({ key, raw: String(val), layer: layerLabel, file, line: pos.line, spec: null });
      continue;
    }
    const spec = val;
    const concrete = typeof spec.value !== "undefined" ? spec.value : typeof spec.default !== "undefined" ? spec.default : void 0;
    const raw = typeof concrete === "undefined" ? "" : String(concrete);
    layers.push({ key, raw, layer: layerLabel, file, line: pos.line, spec });
  }
}
|
|
1857
|
+
function isEnvSpec(v) {
  // An EnvSpec is a plain (non-array) object carrying at least one of the
  // spec-only keys; anything else is treated as a literal env value.
  if (!v || typeof v !== "object" || Array.isArray(v)) return false;
  const specKeys = ["value", "default", "required", "secret", "type", "pattern", "description"];
  return specKeys.some((k) => k in v);
}
|
|
1862
|
+
// ${VAR} interpolation pattern — non-nested, matches up to the first `}`.
// Only used via String.replace, which handles the /g flag statefully-safely.
var INTERP_RE = /\$\{([^}]+)\}/g;
// ${secret:NAME} is reserved syntax; interpolate() collects these so callers
// can reject them.
var SECRET_PREFIX = "secret:";
|
|
1864
|
+
// Fold the ordered env layers into a single resolved map.
// Later layers override earlier ones; each key accumulates a provenance trail,
// and the `secret`/`required` flags are sticky once any layer sets them.
// Entries whose interpolation fails (reserved ${secret:..} or unresolved
// ${VAR}) are reported as issues and do NOT override the prior value.
function interpolateLayers(layers, issues) {
  const values = /* @__PURE__ */ new Map();
  for (const entry of layers) {
    // A spec with required:true but neither value nor default contributes only
    // metadata — it must not clobber a value provided by an earlier layer.
    const isRequiredPlaceholder = !entry.raw && entry.spec?.required === true && typeof entry.spec?.value === "undefined" && typeof entry.spec?.default === "undefined";
    if (isRequiredPlaceholder) {
      const prior2 = values.get(entry.key);
      const provenance2 = prior2 ? prior2.provenance.slice() : [];
      provenance2.push({ file: entry.file, line: entry.line, layer: entry.layer, raw: "" });
      values.set(entry.key, {
        value: prior2?.value ?? "",
        provenance: provenance2,
        secret: Boolean(entry.spec?.secret) || (prior2?.secret ?? false),
        required: true
      });
      continue;
    }
    // ${VAR} lookups see previously-resolved keys first, then process.env.
    const { value, missing, secrets } = interpolate(entry.raw, values, process.env);
    if (secrets.length > 0) {
      issues.push({
        file: entry.file,
        line: entry.line,
        message: `\${secret:${secrets[0]}} interpolation is reserved for v1. Configure as plain env until then.`
      });
      continue;
    }
    if (missing.length > 0) {
      issues.push({
        file: entry.file,
        line: entry.line,
        message: `Unresolved interpolation in ${entry.key}: \${${missing[0]}}`
      });
      continue;
    }
    const prior = values.get(entry.key);
    const provenance = prior ? prior.provenance.slice() : [];
    provenance.push({ file: entry.file, line: entry.line, layer: entry.layer, raw: entry.raw });
    values.set(entry.key, {
      value,
      provenance,
      secret: Boolean(entry.spec?.secret) || (prior?.secret ?? false),
      required: Boolean(entry.spec?.required) || (prior?.required ?? false)
    });
  }
  // Final pass: a key flagged required anywhere must end with a non-empty
  // value; the issue points at the last layer that touched the key.
  for (const [key, val] of values) {
    if (val.required && (val.value === "" || typeof val.value === "undefined")) {
      const last = val.provenance[val.provenance.length - 1];
      issues.push({
        file: last?.file ?? "<unknown>",
        line: last?.line ?? 0,
        message: `Required env ${key} has no value.`
      });
    }
  }
  return { values, issues };
}
|
|
1919
|
+
function interpolate(raw, resolved, procEnv) {
  // Expand ${VAR} references in one raw value. Lookup order: already-resolved
  // keys, then the process environment. ${secret:NAME} references are only
  // collected (callers reject them); unresolvable names go into `missing`.
  // Failed expansions are replaced with "" in the returned value.
  if (!raw) return { value: raw, missing: [], secrets: [] };
  const missing = [];
  const secrets = [];
  const value = raw.replace(INTERP_RE, (_match, inner) => {
    const name = inner.trim();
    if (name.startsWith(SECRET_PREFIX)) {
      secrets.push(name.slice(SECRET_PREFIX.length));
      return "";
    }
    const hit = resolved.get(name);
    if (hit) return hit.value;
    const envVal = procEnv[name];
    if (typeof envVal === "string") return envVal;
    missing.push(name);
    return "";
  });
  return { value, missing, secrets };
}
|
|
1938
|
+
function parseCliEnv(items) {
  // Turn repeated `--env KEY=VALUE` arguments into a plain object.
  // Later occurrences of the same key win; malformed items raise UserError.
  const env = {};
  for (const item of items) {
    const sep = item.indexOf("=");
    if (sep <= 0) throw new UserError(`Invalid --env value '${item}'. Expected KEY=VALUE.`);
    const key = item.slice(0, sep);
    if (!/^[A-Za-z_][A-Za-z0-9_]*$/.test(key)) {
      throw new UserError(`Invalid env key '${key}' in --env. Use UPPER_SNAKE_CASE.`);
    }
    // Keep everything after the FIRST '=' so values may themselves contain '='.
    env[key] = item.slice(sep + 1);
  }
  return env;
}
|
|
1951
|
+
function toEnvObject(resolved) {
  // Flatten the resolved env map into a plain { KEY: value } object suitable
  // for passing to a child process.
  return Object.fromEntries(
    [...resolved.values].map(([key, entry]) => [key, entry.value])
  );
}
|
|
1956
|
+
function assertNoIssues(resolved) {
  // Abort with a ManifestError listing every env-composition issue
  // (file:line: message, one per line); no-op when there are none.
  if (resolved.issues.length === 0) return;
  const formatted = resolved.issues.map((i) => `${i.file}:${i.line}: ${i.message}`);
  throw new ManifestError(`Environment composition failed:
  ${formatted.join("\n  ")}`);
}
|
|
1962
|
+
|
|
1963
|
+
// src/util/hooks.ts
|
|
1964
|
+
import { execa as execa2 } from "execa";
|
|
1965
|
+
function toList(cmds) {
  // Normalize a hook value (absent | single command | command list) to an
  // array; lists are shallow-copied so callers cannot mutate the manifest.
  if (!cmds) return [];
  if (Array.isArray(cmds)) return [...cmds];
  return [cmds];
}
|
|
1969
|
+
async function runHooks(opts) {
  // Run the shell commands registered for one hook event, sequentially and in
  // declaration order; output is inherited, and the first failure aborts the
  // remaining commands with a RuntimeFailure.
  const commands = toList(opts.hooks?.[opts.event]);
  for (const command of commands) {
    opts.logger.info({ event: opts.event, cmd: command }, "hook: running");
    try {
      await execa2("/bin/sh", ["-c", command], {
        cwd: opts.cwd,
        // CLI/manifest env is layered over the current process env.
        env: opts.env ? { ...process.env, ...opts.env } : process.env,
        stdout: "inherit",
        stderr: "inherit",
        // Propagate cancellation only when the caller supplied a signal.
        ...opts.signal ? { cancelSignal: opts.signal } : {}
      });
    } catch (err) {
      const reason = err instanceof Error ? err.message : String(err);
      throw new RuntimeFailure(`Hook ${opts.event} failed (${command}): ${reason}`);
    }
  }
}
|
|
1989
|
+
|
|
1990
|
+
// src/prepare/prepare.ts
|
|
1991
|
+
// Well-known dependency lockfiles (JS, Python, Go, Rust) whose presence and
// content feed computePrepareHash() — changing any of them invalidates the
// prepare cache for a service.
var DEFAULT_LOCK_PATTERNS = [
  "package-lock.json",
  "pnpm-lock.yaml",
  "yarn.lock",
  "uv.lock",
  "poetry.lock",
  "requirements.txt",
  "Pipfile.lock",
  "go.sum",
  "Cargo.lock"
];
|
|
2002
|
+
// Run the service's `runtime.native.prepare.cmd` (typically dependency
// installation), skipping when the lockfile hash recorded in the cache is
// unchanged (unless input.force). Side effects: runs pre/post_prepare hooks,
// appends command output to <logs>/<service>/prepare.log, and rewrites the
// per-service cache record on success.
// Returns { serviceName, status: "no-prepare-cmd"|"skipped"|"ok", cacheFile, hash }.
async function prepareService(input) {
  const cmd = input.service.runtime?.native?.prepare?.cmd;
  if (!cmd) {
    // Nothing to do for services without a native prepare step.
    return {
      serviceName: input.service.name,
      status: "no-prepare-cmd",
      cacheFile: "",
      hash: null
    };
  }
  const manifestDir = path14.dirname(input.serviceDoc.file);
  const cacheFile = path14.join(input.paths.cacheDir, "prepare", `${input.service.name}.json`);
  const hash = await computePrepareHash(manifestDir, cmd);
  if (!input.force) {
    const prev = await readPrev(cacheFile);
    if (prev && prev.hash === hash) {
      input.logger.info({ service: input.service.name }, "prepare: lockfile hash unchanged; skipping");
      return { serviceName: input.service.name, status: "skipped", cacheFile, hash };
    }
  }
  // Compose the native-mode env; any composition issue aborts before running.
  const envRes = await composeServiceEnv({
    mode: "native",
    serviceDoc: input.serviceDoc,
    service: input.service,
    workspaceRoot: input.paths.root,
    ...input.cliEnv ? { cliEnv: input.cliEnv } : {}
  });
  assertNoIssues(envRes);
  const env = toEnvObject(envRes);
  await runHooks({
    hooks: input.service.hooks,
    event: "pre_prepare",
    cwd: manifestDir,
    env,
    logger: input.logger,
    ...input.signal ? { signal: input.signal } : {}
  });
  await ensureDir(path14.join(input.paths.logsDir, input.service.name));
  const logFile = path14.join(input.paths.logsDir, input.service.name, "prepare.log");
  input.logger.info({ service: input.service.name, cmd }, "prepare: starting");
  // prepare.cwd, when set, is resolved relative to the manifest directory.
  const cwd = input.service.runtime?.native?.prepare?.cwd ? path14.resolve(manifestDir, input.service.runtime.native.prepare.cwd) : manifestDir;
  const fileHandle = await fs10.open(logFile, "a");
  const shell = input.service.runtime?.native?.prepare?.shell ?? "/bin/sh";
  try {
    const opts = {
      cwd,
      env: { ...process.env, ...env },
      // Both stdout and stderr go to the log file, not the console.
      stdio: ["ignore", fileHandle.fd, fileHandle.fd],
      ...input.signal ? { cancelSignal: input.signal } : {},
      // reject:false so non-zero exits surface via exitCode, not a throw.
      reject: false
    };
    const res = await execa3(shell, ["-c", cmd], opts);
    if (res.exitCode !== 0) {
      throw new RuntimeFailure(
        `prepare failed for ${input.service.name} (exit ${res.exitCode}). See ${logFile}.`
      );
    }
  } catch (err) {
    // Re-wrap anything that is not already a RuntimeFailure (spawn errors,
    // cancellation) so the caller sees a uniform error pointing at the log.
    if (err instanceof RuntimeFailure) throw err;
    const message = err instanceof Error ? err.message : String(err);
    throw new RuntimeFailure(
      `prepare failed for ${input.service.name}: ${message}. See ${logFile}.`
    );
  } finally {
    await fileHandle.close();
  }
  await runHooks({
    hooks: input.service.hooks,
    event: "post_prepare",
    cwd: manifestDir,
    env,
    logger: input.logger,
    ...input.signal ? { signal: input.signal } : {}
  });
  // The cache is only written after a fully successful run (including
  // post_prepare hooks), so failed runs are retried next time.
  await writeJsonFile(cacheFile, {
    service: input.service.name,
    hash,
    at: (/* @__PURE__ */ new Date()).toISOString(),
    cmd
  });
  return { serviceName: input.service.name, status: "ok", cacheFile, hash };
}
|
|
2084
|
+
async function readPrev(file) {
  // Load the previous prepare-cache record for a service; a missing or
  // unparseable file simply means "no usable cache" (null), which forces a
  // fresh prepare run.
  if (!await pathExists(file)) return null;
  let record = null;
  try {
    record = await readJsonFile(file);
  } catch {
    // Corrupt cache entries are ignored rather than surfaced.
  }
  return record;
}
|
|
2092
|
+
// Build the prepare cache key: sha256 over the command string plus, for each
// well-known lockfile in the manifest dir, its presence, size, mtime, and
// content hash. NOTE(review): including mtimeMs makes the key checkout- and
// machine-specific (a plain `touch` invalidates the cache) — confirm that
// sensitivity is intended.
async function computePrepareHash(manifestDir, cmd) {
  const hash = createHash3("sha256");
  hash.update("cmd:" + cmd + "\n");
  for (const candidate of DEFAULT_LOCK_PATTERNS) {
    const file = path14.join(manifestDir, candidate);
    const stat = await safeStat(file);
    if (!stat) {
      // Absence is hashed too, so adding/removing a lockfile changes the key.
      hash.update(`missing:${candidate}
`);
      continue;
    }
    hash.update(`file:${candidate}
size:${stat.size}
mtime:${stat.mtimeMs}
`);
    const fileHash = await hashFile(file);
    hash.update(`sha:${fileHash}
`);
  }
  return hash.digest("hex");
}
|
|
2113
|
+
async function safeStat(file) {
  // fs.stat that reports "missing or unreadable" as null instead of throwing.
  return fs10.stat(file).catch(() => null);
}
|
|
2120
|
+
async function hashFile(file) {
  // Stream the file through SHA-256 (constant memory) and return the hex
  // digest; stream errors propagate to the caller via the async iterator.
  const digest = createHash3("sha256");
  for await (const chunk of createReadStream2(file)) {
    digest.update(chunk);
  }
  return digest.digest("hex");
}
|
|
2126
|
+
|
|
2127
|
+
// src/cli/commands/prepare.ts
|
|
2128
|
+
// Register `qavor prepare`: run runtime.native.prepare for every selected
// service (in parallel, lockfile-aware skip via prepareService's cache).
function registerPrepare(program) {
  program.command("prepare").description("Run `runtime.native.prepare` for every selected service (lockfile-aware skip).").option("--repo <name...>", "Limit to services in the given repo(s).").option("--service <name...>", "Limit to specific service names.").option("--force", "Re-run prepare even when the lockfile hash matches the cache.").option("--env <kv...>", "Override env values, KEY=VAL.").action(
    async (opts, cmd) => {
      const root = inheritRootOptions(cmd);
      const logger = getLogger();
      const ws = await resolveWorkspace();
      const projectDoc = await readProjectManifest(ws.projectManifestFile);
      const allRepos = resolveRepos({
        workspaceRoot: ws.paths.root,
        project: projectDoc.data,
        projectRepoPath: ws.projectRepoPath
      });
      const selectedRepos = selectRepos(allRepos, opts.repo);
      const repoMap = new Map(selectedRepos.map((r) => [r.name, r.dir]));
      // The project repo's own manifests are included only when the user did
      // not restrict the run with --repo.
      if (!opts.repo) repoMap.set("__project__", ws.projectRepoPath);
      const registry = await buildWorkspaceRegistry({
        workspaceRoot: ws.paths.root,
        repos: repoMap,
        concurrency: resolveJobs(root.jobs)
      });
      // Manifest problems are warnings here; prepare proceeds with the valid docs.
      for (const issue of registry.issues) {
        logger.warn(
          { file: issue.file, line: issue.line, message: issue.message },
          "manifest issue"
        );
      }
      let services = registry.entries.filter((e) => e.kind === "service");
      if (opts.service && opts.service.length > 0) {
        const set = new Set(opts.service);
        services = services.filter((s) => set.has(s.name));
        // Fail fast on names that matched nothing.
        const missing = [...set].filter((n) => !services.some((s) => s.name === n));
        if (missing.length > 0) {
          throw new UserError(`Unknown service(s): ${missing.join(", ")}`);
        }
      }
      const cliEnv = opts.env ? parseCliEnv(opts.env) : void 0;
      const jobs = resolveJobs(root.jobs);
      const results = await pMap4(
        services,
        async (entry) => {
          // Re-load the manifest so prepareService gets the position-aware doc.
          const docs = await loadManifestFile(entry.file);
          const serviceDoc = docs[entry.docIndex];
          const service = entry.data;
          const prepareOpts = {
            paths: ws.paths,
            serviceDoc,
            service,
            force: Boolean(opts.force),
            logger
          };
          if (cliEnv) prepareOpts.cliEnv = cliEnv;
          return prepareService(prepareOpts);
        },
        { concurrency: jobs }
      );
      if (root.json) {
        emitJson({ results });
        return;
      }
      for (const r of results) {
        emit(`${r.status.padEnd(15)} ${r.serviceName}`);
      }
    }
  );
}
|
|
2193
|
+
|
|
2194
|
+
// src/cli/commands/env.ts
|
|
2195
|
+
// Register `qavor env <service>`: print the fully-resolved environment for one
// service with per-key provenance; secret values (and their raw layer inputs)
// are always redacted in both JSON and text output.
function registerEnv(program) {
  program.command("env").description("Print the fully-resolved environment for a service, with provenance per key.").argument("<service>", "Service name.").option("--mode <mode>", "native | docker (default: native).", "native").option("--env <kv...>", "Layer KEY=VAL on top of the composed env.").action(async (service, opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const mode = opts.mode === "docker" ? "docker" : "native";
    // Reject anything other than the two supported modes.
    if (opts.mode !== "native" && opts.mode !== "docker") {
      throw new UserError(`--mode must be 'native' or 'docker'.`);
    }
    const ws = await resolveWorkspace();
    const projectDoc = await readProjectManifest(ws.projectManifestFile);
    const repos = resolveRepos({
      workspaceRoot: ws.paths.root,
      project: projectDoc.data,
      projectRepoPath: ws.projectRepoPath
    });
    const repoMap = new Map(repos.map((r) => [r.name, r.dir]));
    repoMap.set("__project__", ws.projectRepoPath);
    const registry = await buildWorkspaceRegistry({
      workspaceRoot: ws.paths.root,
      repos: repoMap,
      concurrency: resolveJobs(root.jobs)
    });
    const entry = registry.entries.find((e) => e.kind === "service" && e.name === service);
    if (!entry) throw new UserError(`Service '${service}' not found in workspace.`);
    // Re-load the manifest so composeServiceEnv gets the position-aware doc.
    const docs = await loadManifestFile(entry.file);
    const serviceDoc = docs[entry.docIndex];
    const cliEnv = opts.env ? parseCliEnv(opts.env) : void 0;
    const composeOpts = {
      mode,
      serviceDoc,
      service: entry.data,
      workspaceRoot: ws.paths.root
    };
    if (cliEnv) composeOpts.cliEnv = cliEnv;
    const resolved = await composeServiceEnv(composeOpts);
    if (root.json) {
      emitJson({
        service,
        mode,
        issues: resolved.issues,
        env: [...resolved.values].map(([k, v]) => ({
          key: k,
          value: v.secret ? "<redacted>" : v.value,
          secret: v.secret,
          required: v.required,
          provenance: v.provenance.map((p) => ({
            file: p.file,
            line: p.line,
            layer: p.layer,
            // Raw layer inputs are redacted too, since any layer of a secret
            // key may contain the sensitive value.
            raw: v.secret ? "<redacted>" : p.raw
          }))
        }))
      });
      return;
    }
    // Text output: issues first (composition still prints best-effort values).
    if (resolved.issues.length > 0) {
      emit("Issues:");
      for (const i of resolved.issues) emit(` ${i.file}:${i.line}: ${i.message}`);
      emit("");
    }
    emit(`Resolved environment for ${service} (mode=${mode}):`);
    for (const [k, v] of resolved.values) {
      const printed = v.secret ? "<redacted>" : v.value;
      emit(` ${k} = ${printed}`);
      for (const p of v.provenance) {
        const rawPrinted = v.secret ? "<redacted>" : p.raw;
        emit(` via ${p.layer} (${p.file}:${p.line}) = ${rawPrinted}`);
      }
    }
  });
}
|
|
2265
|
+
|
|
2266
|
+
// src/supervisor/native.ts
|
|
2267
|
+
import fs12 from "fs/promises";
|
|
2268
|
+
import path16 from "path";
|
|
2269
|
+
import { execa as execa4 } from "execa";
|
|
2270
|
+
|
|
2271
|
+
// src/supervisor/state.ts
|
|
2272
|
+
import fs11 from "fs/promises";
|
|
2273
|
+
import path15 from "path";
|
|
2274
|
+
// Path of the supervisor state record for one service: <stateDir>/<service>.json.
function stateFile(paths, service) {
  const fileName = `${service}.json`;
  return path15.join(paths.stateDir, fileName);
}
|
|
2277
|
+
// Load the persisted supervisor state for a service.
// Returns null when the state file is missing or unreadable/corrupt.
async function readState(paths, service) {
  const file = stateFile(paths, service);
  const exists = await pathExists(file);
  if (!exists) {
    return null;
  }
  try {
    return await readJsonFile(file);
  } catch {
    // Corrupt JSON is treated the same as an absent state file.
    return null;
  }
}
|
|
2286
|
+
// Persist a supervisor state record, stamping updatedAt with the write time.
// Note: mutates the passed-in state object before writing.
async function writeState(paths, state) {
  const now = new Date().toISOString();
  state.updatedAt = now;
  const file = stateFile(paths, state.service);
  await writeJsonFile(file, state);
}
|
|
2290
|
+
// Remove a service's state file; deleting an already-missing file is a no-op.
async function clearState(paths, service) {
  try {
    await fs11.unlink(stateFile(paths, service));
  } catch (err) {
    // Only swallow "file not found"; any other failure is surfaced.
    if (err.code !== "ENOENT") throw err;
  }
}
|
|
2298
|
+
// Report whether a process with the given pid currently exists.
// Non-positive or non-integer pids are rejected up front.
function isPidAlive(pid) {
  if (!Number.isInteger(pid) || pid <= 0) {
    return false;
  }
  try {
    // Signal 0 checks existence/permissions without delivering a signal.
    process.kill(pid, 0);
    return true;
  } catch (err) {
    // EPERM means the process exists but is owned by another user.
    return err.code === "EPERM";
  }
}
|
|
2309
|
+
// Read every *.json state record in the supervisor state directory.
// Unreadable entries are skipped (best-effort listing).
async function listSupervisorStates(paths) {
  const states = [];
  if (!await pathExists(paths.stateDir)) return states;
  const dirEntries = await fs11.readdir(paths.stateDir, { withFileTypes: true });
  for (const dirEntry of dirEntries) {
    const looksLikeState = dirEntry.isFile() && dirEntry.name.endsWith(".json");
    if (!looksLikeState) continue;
    try {
      const record = await readJsonFile(path15.join(paths.stateDir, dirEntry.name));
      states.push(record);
    } catch {
      // Corrupt record: ignore and keep listing the rest.
    }
  }
  return states;
}
|
|
2323
|
+
|
|
2324
|
+
// src/supervisor/native.ts
|
|
2325
|
+
// Per-service log file name used by the native supervisor.
var LOG_FILE_NAME = "service.log";
// Rotate the active log once it reaches 5 MiB (see rotateLog).
var LOG_MAX_BYTES = 5 * 1024 * 1024;
// Keep at most this many rotated files (service.log.1 .. service.log.N).
var LOG_BACKLOG = 5;
|
|
2328
|
+
// Start one service as a detached native process.
// Flow: validate manifest -> refuse double-start -> compose env -> rotate/open log
// -> pre_run hooks -> spawn via shell -> persist state record.
// Throws UserError for manifest/usage problems, RuntimeFailure for spawn failures.
async function startNativeService(opts) {
  const cmd = opts.service.runtime?.native?.run?.cmd;
  if (!cmd) {
    throw new UserError(
      `Service '${opts.service.name}' has no runtime.native.run.cmd. Cannot start in native mode.`
    );
  }
  if (opts.service.runtime?.native?.enabled === false) {
    throw new UserError(
      `Service '${opts.service.name}' has runtime.native.enabled: false. Cannot start in native mode.`
    );
  }
  // Refuse to start twice: a stale state file with a dead pid does not block.
  const existing = await readState(opts.paths, opts.service.name);
  if (existing && existing.status === "running" && isPidAlive(existing.pid)) {
    throw new UserError(`Service '${opts.service.name}' is already running (pid ${existing.pid}).`);
  }
  // run.cwd, when set, is resolved relative to the manifest's directory.
  const manifestDir = path16.dirname(opts.serviceDoc.file);
  const cwd = opts.service.runtime?.native?.run?.cwd ? path16.resolve(manifestDir, opts.service.runtime.native.run.cwd) : manifestDir;
  const envRes = await composeServiceEnv({
    mode: "native",
    serviceDoc: opts.serviceDoc,
    service: opts.service,
    workspaceRoot: opts.paths.root,
    ...opts.cliEnv ? { cliEnv: opts.cliEnv } : {}
  });
  // Environment composition issues abort the start before anything is spawned.
  assertNoIssues(envRes);
  const env = toEnvObject(envRes);
  // Each service logs to <logsDir>/<service>/service.log, rotated by size.
  const serviceLogDir = path16.join(opts.paths.logsDir, opts.service.name);
  await ensureDir(serviceLogDir);
  await rotateLog(serviceLogDir);
  const logFile = path16.join(serviceLogDir, LOG_FILE_NAME);
  const fh = await fs12.open(logFile, "a");
  await runHooks({
    hooks: opts.service.hooks,
    event: "pre_run",
    cwd: manifestDir,
    env,
    logger: opts.logger
  });
  let child;
  const shell = opts.service.runtime?.native?.run?.shell ?? "/bin/sh";
  try {
    const spawnOpts = {
      cwd,
      // Composed env layers on top of the parent process environment.
      env: { ...process.env, ...env },
      // detached: the child survives the CLI exiting; stdout/stderr go to the log fd.
      detached: true,
      stdio: ["ignore", fh.fd, fh.fd],
      reject: false
    };
    child = execa4(shell, ["-c", cmd], spawnOpts);
  } catch (err) {
    // Close the log handle on spawn failure; the close error itself is ignored.
    await fh.close().catch(() => void 0);
    throw new RuntimeFailure(
      `Failed to spawn ${opts.service.name}: ${err instanceof Error ? err.message : String(err)}`
    );
  }
  const pid = child.pid ?? 0;
  if (!pid) {
    await fh.close().catch(() => void 0);
    throw new RuntimeFailure(`Failed to spawn ${opts.service.name}: no pid assigned.`);
  }
  // Detach from the child so this CLI process can exit while it keeps running.
  child.unref();
  await fh.close().catch(() => void 0);
  const state = {
    service: opts.service.name,
    pid,
    // pgid is recorded as the pid; stopNativeService signals -pid (process group).
    pgid: pid,
    startedAt: (/* @__PURE__ */ new Date()).toISOString(),
    cmd,
    cwd,
    logFile,
    status: "running",
    updatedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
  await writeState(opts.paths, state);
  opts.logger.info({ service: opts.service.name, pid, logFile }, "up: started");
  return { pid, pgid: pid, logFile };
}
|
|
2406
|
+
// Default SIGTERM grace period before escalating to SIGKILL (10s).
var DEFAULT_GRACE_MS = 1e4;
// Stop a supervised native service.
// Flow: read state -> pre_stop hooks -> SIGTERM process group -> wait up to
// grace -> SIGKILL group if needed -> post_stop hooks -> clear state.
// Returns { stopped: false } only when there is no state record at all.
async function stopNativeService(opts) {
  const state = await readState(opts.paths, opts.service);
  if (!state) {
    opts.logger.info({ service: opts.service }, "down: no state file; nothing to do");
    return { stopped: false };
  }
  if (!isPidAlive(state.pid)) {
    // The process died on its own; just clean up the stale record.
    opts.logger.info({ service: opts.service, pid: state.pid }, "down: process already gone; clearing state");
    await clearState(opts.paths, opts.service);
    return { stopped: true };
  }
  // Hooks run only when the caller supplied both the hook set and a cwd for them.
  if (opts.manifestDir && opts.hooks) {
    await runHooks({
      hooks: opts.hooks,
      event: "pre_stop",
      cwd: opts.manifestDir,
      logger: opts.logger
    });
  }
  const grace = opts.graceMs ?? DEFAULT_GRACE_MS;
  try {
    // Negative pid: signal the whole process group started by startNativeService.
    process.kill(-state.pid, "SIGTERM");
  } catch (err) {
    const e = err;
    // ESRCH = group already gone; anything else is a real failure.
    if (e.code !== "ESRCH") throw err;
  }
  const stoppedGracefully = await waitForExit(state.pid, grace);
  if (!stoppedGracefully) {
    opts.logger.warn({ service: opts.service, pid: state.pid }, "down: grace exceeded; SIGKILL");
    try {
      process.kill(-state.pid, "SIGKILL");
    } catch (err) {
      const e = err;
      if (e.code !== "ESRCH") throw err;
    }
    // Give the kernel a bounded window to reap the killed group.
    await waitForExit(state.pid, 5e3);
  }
  if (opts.manifestDir && opts.hooks) {
    await runHooks({
      hooks: opts.hooks,
      event: "post_stop",
      cwd: opts.manifestDir,
      logger: opts.logger
    });
  }
  await clearState(opts.paths, opts.service);
  return { stopped: true };
}
|
|
2455
|
+
// Poll (every 100ms) until the pid disappears or the timeout elapses.
// Resolves true when the process exited within the window, false otherwise.
async function waitForExit(pid, timeoutMs) {
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    const alive = isPidAlive(pid);
    if (!alive) {
      return true;
    }
    await sleep(100);
  }
  return false;
}
|
|
2463
|
+
// Promise-based delay helper.
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
|
|
2466
|
+
// Size-based log rotation for one service log directory.
// When service.log reaches LOG_MAX_BYTES, shift service.log.N -> .(N+1)
// (dropping the oldest beyond LOG_BACKLOG) and move service.log to .1.
async function rotateLog(dir) {
  const current = path16.join(dir, LOG_FILE_NAME);
  let info;
  try {
    info = await fs12.stat(current);
  } catch {
    // No active log yet; nothing to rotate.
    return;
  }
  if (info.size < LOG_MAX_BYTES) return;
  // Shift existing backlog entries, oldest first.
  for (let i = LOG_BACKLOG - 1; i >= 1; i--) {
    const from = path16.join(dir, `${LOG_FILE_NAME}.${i}`);
    const to = path16.join(dir, `${LOG_FILE_NAME}.${i + 1}`);
    try {
      await fs12.rename(from, to);
    } catch {
      // Gaps in the backlog are fine; keep shifting the rest.
    }
  }
  try {
    await fs12.rename(current, path16.join(dir, `${LOG_FILE_NAME}.1`));
  } catch {
    // Best effort: a concurrent writer may have already rotated/removed it.
  }
}
|
|
2488
|
+
// Build the `ps` view: one row per tracked service, with liveness-derived status.
// A record that claims "running" but whose pid is gone is reported as "crashed".
async function listServicesState(paths) {
  const rows = [];
  const states = await listSupervisorStates(paths);
  for (const s of states) {
    const alive = isPidAlive(s.pid);
    let status;
    if (alive) {
      status = "running";
    } else if (s.status === "running") {
      status = "crashed";
    } else {
      status = "stopped";
    }
    const startedMs = new Date(s.startedAt).getTime();
    const uptimeSec = alive ? Math.floor((Date.now() - startedMs) / 1e3) : null;
    rows.push({
      service: s.service,
      status,
      pid: alive ? s.pid : null,
      startedAt: s.startedAt,
      uptimeSec,
      logFile: s.logFile
    });
  }
  return rows;
}
|
|
2507
|
+
|
|
2508
|
+
// src/supervisor/logs.ts
|
|
2509
|
+
import fs13 from "fs/promises";
|
|
2510
|
+
import { createReadStream as createReadStream3 } from "fs";
|
|
2511
|
+
import path17 from "path";
|
|
2512
|
+
// Print the tail of a log file and, with opts.follow, keep streaming appends.
// Change detection is belt-and-braces: fs.watch events plus a 500ms size poll
// (fs.watch can miss events on some platforms/filesystems). Truncation or
// rotation (size shrinks) restarts streaming from offset 0.
//
// BUG FIX: the abort signal is now passed to fs.promises.watch, so the
// for-await loop below actually terminates on SIGINT/SIGTERM instead of
// blocking until the next filesystem event arrives.
async function tailFile(opts) {
  if (!await pathExists(opts.file)) {
    // One-shot mode with no file: nothing to print. Follow mode waits for it.
    if (!opts.follow) return;
    await waitForFile(opts.file, opts.signal);
    if (opts.signal?.aborted) return;
  }
  // Print the last `initialBytes` (default 16 KiB) of the file.
  let offset = await initialOffset(opts.file, opts.initialBytes ?? 16 * 1024);
  await streamFrom(opts.file, offset, opts.out);
  if (!opts.follow) return;
  const stat = await fs13.stat(opts.file);
  offset = stat.size;
  const abort = opts.signal ?? new AbortController().signal;
  // Stream any bytes appended since `offset`; reset on truncation/rotation.
  const drain = async () => {
    const cur = await fs13.stat(opts.file).catch(() => null);
    if (!cur) return;
    if (cur.size < offset) {
      offset = 0;
    }
    if (cur.size > offset) {
      await streamFrom(opts.file, offset, opts.out);
      offset = cur.size;
    }
  };
  // Wire the abort signal into the watcher so aborting rejects the iterator.
  const watcher = fs13.watch(opts.file, { persistent: true, signal: abort });
  const pollTimer = setInterval(() => {
    // Poll errors are best-effort only; never let them become unhandled.
    void drain().catch(() => void 0);
  }, 500);
  try {
    for await (const _event of watcher) {
      if (abort.aborted) break;
      await drain();
    }
  } catch (err) {
    // Abort is the expected way to stop following; anything else propagates.
    if (err.code === "ABORT_ERR" || err.name === "AbortError") return;
    throw err;
  } finally {
    clearInterval(pollTimer);
  }
}
|
|
2557
|
+
// Byte offset `fromEndBytes` before EOF, clamped at the start of the file.
async function initialOffset(file, fromEndBytes) {
  const { size } = await fs13.stat(file);
  return size > fromEndBytes ? size - fromEndBytes : 0;
}
|
|
2561
|
+
// Write the byte range [start, EOF) of `file` into `out` as UTF-8 text.
// Resolves when the stream ends; rejects on a read error.
async function streamFrom(file, start, out) {
  const rs = createReadStream3(file, { start, encoding: "utf8" });
  for await (const chunk of rs) {
    out.write(chunk);
  }
}
|
|
2569
|
+
// Block until `file` exists (polling every 250ms) or the signal aborts.
// The parent directory is created first so the poll has somewhere to look.
async function waitForFile(file, signal) {
  await fs13.mkdir(path17.dirname(file), { recursive: true });
  while (true) {
    if (signal?.aborted) return;
    if (await pathExists(file)) return;
    await new Promise((resolve) => setTimeout(resolve, 250));
  }
}
|
|
2578
|
+
|
|
2579
|
+
// src/cli/commands/run.ts
|
|
2580
|
+
import path18 from "path";
|
|
2581
|
+
// Locate a service by name across all workspace repos.
// Returns the parsed manifest document, service data, workspace paths, and the
// directory containing the manifest. Throws UserError when the name is unknown.
async function findService(name, jobs) {
  const ws = await resolveWorkspace();
  const projectDoc = await readProjectManifest(ws.projectManifestFile);
  const repos = resolveRepos({
    workspaceRoot: ws.paths.root,
    project: projectDoc.data,
    projectRepoPath: ws.projectRepoPath
  });
  const repoMap = new Map();
  for (const repo of repos) {
    repoMap.set(repo.name, repo.dir);
  }
  // The project repo itself is registered under the reserved "__project__" key.
  repoMap.set("__project__", ws.projectRepoPath);
  const registry = await buildWorkspaceRegistry({
    workspaceRoot: ws.paths.root,
    repos: repoMap,
    concurrency: jobs
  });
  const entry = registry.entries.find((e) => e.kind === "service" && e.name === name);
  if (!entry) {
    throw new UserError(`Service '${name}' not found in workspace.`);
  }
  const docs = await loadManifestFile(entry.file);
  return {
    serviceDoc: docs[entry.docIndex],
    service: entry.data,
    paths: ws.paths,
    manifestDir: path18.dirname(entry.file)
  };
}
|
|
2607
|
+
// Registers the supervisor commands: up, down, logs, ps.
function registerRunCommands(program) {
  // `qavor up <service>`: start one service detached in native mode.
  program.command("up").description("Start a single service in native mode.").argument("<service>").option("--mode <mode>", "native | docker (default native; docker is v1).", "native").option("--env <kv...>", "KEY=VAL overrides.").action(async (name, opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const logger = getLogger();
    // Docker mode is accepted by the option but not implemented yet.
    if (opts.mode === "docker") {
      throw new UserError(`--mode docker is deferred to v1.`);
    }
    const cliEnv = opts.env ? parseCliEnv(opts.env) : void 0;
    const jobs = resolveJobs(root.jobs);
    const ctx = await findService(name, jobs);
    const startOpts = {
      paths: ctx.paths,
      serviceDoc: ctx.serviceDoc,
      service: ctx.service,
      logger
    };
    // Attach cliEnv only when --env was actually passed.
    if (cliEnv) startOpts.cliEnv = cliEnv;
    const result = await startNativeService(startOpts);
    if (root.json) {
      emitJson({ service: name, pid: result.pid, logFile: result.logFile });
    } else {
      emit(`Started ${name} pid=${result.pid}`);
      emit(` log file: ${result.logFile}`);
      emit(` tail with: qavor logs ${name} -f`);
    }
  });
  // `qavor down <service>`: SIGTERM the service group, escalating after --grace ms.
  program.command("down").description("Stop a single running service gracefully.").argument("<service>").option("--grace <ms>", "Grace period for SIGTERM before SIGKILL (default 10000).", "10000").action(async (name, opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const logger = getLogger();
    const jobs = resolveJobs(root.jobs);
    const ctx = await findService(name, jobs);
    const graceMs = Number.parseInt(opts.grace, 10);
    const res = await stopNativeService({
      paths: ctx.paths,
      service: name,
      manifestDir: ctx.manifestDir,
      ...ctx.service.hooks ? { hooks: ctx.service.hooks } : {},
      // Fall back to 10s when --grace is not a parseable number.
      graceMs: Number.isFinite(graceMs) ? graceMs : 1e4,
      logger
    });
    if (root.json) {
      emitJson({ service: name, stopped: res.stopped });
    } else {
      emit(`${res.stopped ? "Stopped" : "No-op"} ${name}`);
    }
  });
  // `qavor logs <service>`: print (and with -f, follow) the service log.
  program.command("logs").description("Print or tail a service log file.").argument("<service>").option("-f, --follow", "Follow the log file as new lines append.").option("--bytes <n>", "Initial bytes from the tail to print (default 16384).", "16384").action(async (name, opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const jobs = resolveJobs(root.jobs);
    const ctx = await findService(name, jobs);
    const logFile = path18.join(ctx.paths.logsDir, name, "service.log");
    // Ctrl-C / SIGTERM aborts the tail loop via this controller.
    const ac = new AbortController();
    process.on("SIGINT", () => ac.abort());
    process.on("SIGTERM", () => ac.abort());
    await tailFile({
      file: logFile,
      out: process.stdout,
      follow: Boolean(opts.follow),
      signal: ac.signal,
      initialBytes: Number.parseInt(opts.bytes, 10) || 16 * 1024
    });
    // root is inherited for consistency but unused by this command.
    void root;
  });
  // `qavor ps`: tabular (or JSON) listing of supervisor-tracked services.
  program.command("ps").description("List services tracked by the supervisor.").action(async (_opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const ws = await resolveWorkspace();
    const list = await listServicesState(ws.paths);
    if (root.json) {
      emitJson({ services: list });
      return;
    }
    if (list.length === 0) {
      emit("(no services tracked)");
      return;
    }
    const headers = ["SERVICE", "STATUS", "PID", "UPTIME", "LOG"];
    const data = list.map((s) => [
      s.service,
      s.status,
      s.pid !== null ? String(s.pid) : "-",
      s.uptimeSec !== null ? `${s.uptimeSec}s` : "-",
      s.logFile ?? "-"
    ]);
    // Column widths sized to the longest cell (header included) per column.
    const widths = headers.map((h, i) => Math.max(h.length, ...data.map((row) => (row[i] ?? "").length)));
    const fmt = (row) => row.map((c, i) => c.padEnd(widths[i] ?? 0)).join(" ");
    emit(fmt(headers));
    for (const row of data) emit(fmt(row));
  });
}
|
|
2696
|
+
|
|
2697
|
+
// src/cli/commands/doctor.ts
|
|
2698
|
+
import path19 from "path";
|
|
2699
|
+
import fs14 from "fs/promises";
|
|
2700
|
+
import { execa as execa5 } from "execa";
|
|
2701
|
+
// Run `cmd` through /bin/sh in `cwd` and report {ok, exitCode} without throwing.
async function runShell(cmd, cwd) {
  try {
    const result = await execa5("/bin/sh", ["-c", cmd], { cwd, reject: false });
    const exitCode = result.exitCode ?? -1;
    return { ok: result.exitCode === 0, exitCode };
  } catch {
    // Spawning the shell itself failed.
    return { ok: false, exitCode: -1 };
  }
}
|
|
2709
|
+
// Registers `qavor doctor`: a sequence of environment checks, each appended to
// `checks` with status ok|warn|fail. Any "fail" makes the command exit non-zero.
function registerDoctor(program) {
  program.command("doctor").description("Verify toolchain prerequisites, workspace paths, and per-service check_installed steps.").action(async (_opts, cmd) => {
    const root = inheritRootOptions(cmd);
    const logger = getLogger();
    const checks = [];
    // Check 1: git present and >= 2.30 (older versions only warn).
    try {
      const res = await execa5("git", ["--version"]);
      const version = res.stdout.trim().replace(/^git version /, "");
      const [maj, min] = version.split(".").map((s) => Number.parseInt(s, 10));
      if (Number.isFinite(maj) && Number.isFinite(min) && ((maj ?? 0) > 2 || (maj ?? 0) === 2 && (min ?? 0) >= 30)) {
        checks.push({ name: "git \u2265 2.30", status: "ok", message: version });
      } else {
        checks.push({ name: "git \u2265 2.30", status: "warn", message: `found ${version}` });
      }
    } catch {
      checks.push({ name: "git \u2265 2.30", status: "fail", message: "git not found", hint: "Install git." });
    }
    // Check 2: docker is optional in v0, so absence is only a warning.
    try {
      await execa5("docker", ["--version"]);
      checks.push({ name: "docker (optional v0)", status: "ok" });
    } catch {
      checks.push({ name: "docker (optional v0)", status: "warn", message: "docker not detected" });
    }
    // Check 3: workspace state directory is writable (probe file write+delete).
    try {
      const ws = await resolveWorkspace();
      await ensureDir(ws.paths.stateRoot);
      const probe = path19.join(ws.paths.stateRoot, ".doctor-write-check");
      await fs14.writeFile(probe, "");
      await fs14.unlink(probe);
      checks.push({ name: "workspace .qavor/ writable", status: "ok", message: ws.paths.stateRoot });
    } catch (err) {
      checks.push({
        name: "workspace .qavor/ writable",
        status: "fail",
        message: err instanceof Error ? err.message : String(err)
      });
    }
    // Check 4: global cache directory is writable (same probe technique).
    const cache = globalCacheDir();
    try {
      await ensureDir(cache);
      const probe = path19.join(cache, ".doctor-write-check");
      await fs14.writeFile(probe, "");
      await fs14.unlink(probe);
      checks.push({ name: "global cache writable", status: "ok", message: cache });
    } catch (err) {
      checks.push({
        name: "global cache writable",
        status: "fail",
        message: err instanceof Error ? err.message : String(err)
      });
    }
    // Check 5: per-service runtime.native.check_installed commands.
    try {
      const ws = await resolveWorkspace();
      const project = await readProjectManifest(ws.projectManifestFile);
      const repos = resolveRepos({
        workspaceRoot: ws.paths.root,
        project: project.data,
        projectRepoPath: ws.projectRepoPath
      });
      const repoMap = new Map(repos.map((r) => [r.name, r.dir]));
      repoMap.set("__project__", ws.projectRepoPath);
      const registry = await buildWorkspaceRegistry({
        workspaceRoot: ws.paths.root,
        repos: repoMap,
        concurrency: resolveJobs(root.jobs)
      });
      for (const entry of registry.entries) {
        if (entry.kind !== "service") continue;
        const svc = entry.data;
        const checkCmd = svc.runtime?.native?.check_installed?.cmd;
        // A missing check command is a warning, not a failure.
        if (!checkCmd) {
          checks.push({
            name: `service ${entry.name}: check_installed`,
            status: "warn",
            message: "no runtime.native.check_installed.cmd"
          });
          continue;
        }
        const docs = await loadManifestFile(entry.file);
        const serviceDoc = docs[entry.docIndex];
        // check_installed.cwd is resolved relative to the manifest's directory.
        const cwd = svc.runtime?.native?.check_installed?.cwd ? path19.resolve(path19.dirname(serviceDoc.file), svc.runtime.native.check_installed.cwd) : path19.dirname(serviceDoc.file);
        const res = await runShell(checkCmd, cwd);
        if (res.ok) {
          checks.push({ name: `service ${entry.name}: check_installed`, status: "ok" });
        } else {
          // On failure, surface the service's install command (if any) as a hint.
          const installHint = svc.runtime?.native?.install?.cmd;
          const failCheck = {
            name: `service ${entry.name}: check_installed`,
            status: "fail",
            message: `exit ${res.exitCode}`
          };
          if (installHint) failCheck.hint = `Hint: \`${installHint}\``;
          checks.push(failCheck);
        }
      }
    } catch (err) {
      // Workspace resolution can fail outside a workspace; log and keep the
      // toolchain checks that already succeeded.
      logger.debug({ err: err instanceof Error ? err.message : String(err) }, "doctor: workspace probe failed");
    }
    if (root.json) {
      emitJson({ checks, ok: checks.every((c) => c.status !== "fail") });
    } else {
      for (const c of checks) {
        const sym = c.status === "ok" ? "\u2713" : c.status === "warn" ? "!" : "\u2717";
        let line = `${sym} ${c.status.toUpperCase().padEnd(5)} ${c.name}`;
        if (c.message) line += ` \u2014 ${c.message}`;
        emit(line);
        if (c.hint) emit(` ${c.hint}`);
      }
    }
    if (checks.some((c) => c.status === "fail")) {
      throw new RuntimeFailure("doctor: one or more checks failed.");
    }
  });
}
|
|
2823
|
+
|
|
2824
|
+
// src/index.ts
|
|
2825
|
+
// CLI version string reported by -V/--version.
var PKG_VERSION = "0.1.0";
// Assemble the commander program: global options, logger setup hook, and all
// subcommand registrations.
function buildProgram() {
  const program = new Command();
  program.name("qavor").description("A CLI for managing a constellation of related repositories as one cohesive developer workspace.").version(PKG_VERSION, "-V, --version").option("--json", "Emit machine-readable JSON output. One object per line on stdout.").option("-v, --verbose", "Enable debug-level logging on stderr.").option("-c, --config <path>", "Override the path to the workspace pointer file.").option("-j, --jobs <n>", "Maximum concurrency for fan-out operations.", (raw) => {
    // Validate that --jobs parses as a positive integer, but keep the raw
    // string; resolveJobs performs the actual conversion later.
    const n = Number.parseInt(raw, 10);
    if (!Number.isFinite(n) || n < 1) {
      throw new Error(`--jobs must be a positive integer (got '${raw}').`);
    }
    return raw;
  }).hook("preAction", (thisCommand) => {
    // Configure logging before any subcommand action runs.
    const opts = thisCommand.opts();
    configureLogger({ json: Boolean(opts.json), verbose: Boolean(opts.verbose) });
  });
  registerInit(program);
  registerWorkspace(program);
  registerValidate(program);
  registerGitCommands(program);
  registerPrepare(program);
  registerEnv(program);
  registerRunCommands(program);
  registerDoctor(program);
  return program;
}
|
|
2848
|
+
// Parse and run the CLI; map any thrown error to a numeric exit code.
async function main(argv) {
  try {
    const program = buildProgram();
    await program.parseAsync(argv, { from: "node" });
    return ExitCode.Ok;
  } catch (err) {
    return handleError(err);
  }
}
|
|
2857
|
+
// Translate a thrown error into an exit code, writing diagnostics to stderr.
// Handles (in order): commander pseudo-errors, QavorError subclasses, and
// anything else as an unhandled runtime failure.
function handleError(err) {
  const e = err;
  // Commander signals help/version display via pseudo-error codes: success.
  if (e && (e.code === "commander.helpDisplayed" || e.code === "commander.help")) return ExitCode.Ok;
  if (e && e.code === "commander.version") return ExitCode.Ok;
  // Any other commander.* code is a usage problem (bad flag, missing arg, ...).
  if (e && typeof e.code === "string" && e.code.startsWith("commander.")) {
    process2.stderr.write(`${e.message ?? "command error"}\n`);
    return ExitCode.UserError;
  }
  // Our own error hierarchy carries its exit code.
  if (err instanceof QavorError) {
    process2.stderr.write(`${err.message}\n`);
    return err.exitCode;
  }
  const message = err instanceof Error ? err.stack ?? err.message : String(err);
  try {
    getLogger().error({ err: message }, "unhandled error");
  } catch {
    // The logger may not be configured yet; fall back to bare stderr.
    process2.stderr.write(`${message}\n`);
  }
  return ExitCode.RuntimeError;
}
|
|
2881
|
+
// Entry point: run the CLI against the real argv and exit with its status code.
var code = await main(process2.argv);
process2.exit(code);
//# sourceMappingURL=index.js.map
|