archondev 2.19.57 → 3.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +44 -34
- package/dist/{auth-XRKCCFJ3.js → auth-T4C7OQWO.js} +1 -1
- package/dist/{chunk-F3DZZHZB.js → chunk-43IIEFB2.js} +1 -1
- package/dist/{chunk-4K3XNRU6.js → chunk-45T2VB5R.js} +14 -4
- package/dist/{chunk-6TBYNRNF.js → chunk-57NSGWWD.js} +9 -7
- package/dist/{chunk-OHIN6OHU.js → chunk-7RXZTPXY.js} +14 -4
- package/dist/chunk-AJNKSFHL.js +491 -0
- package/dist/chunk-I3BBA7MB.js +150 -0
- package/dist/{chunk-L6VHJQ6M.js → chunk-PQS3TQB6.js} +128 -379
- package/dist/{chunk-LU4DXW3J.js → chunk-YK5Z6U5A.js} +489 -100
- package/dist/{execute-C3NFIEEI.js → execute-HWUL2M3B.js} +3 -3
- package/dist/index.js +1562 -2182
- package/dist/{list-ULMKVK7D.js → list-7IBMJCCF.js} +3 -3
- package/dist/{parallel-T37FF7GD.js → parallel-4PXJA2QD.js} +3 -3
- package/dist/{plan-WBNGP2UE.js → plan-HBAUG3KD.js} +2 -2
- package/dist/{preferences-PELPR2MO.js → preferences-VVFGRNPD.js} +2 -6
- package/dist/ship-KHL6NVC2.js +7 -0
- package/package.json +1 -1
- package/dist/chunk-R664NEAA.js +0 -66
|
@@ -0,0 +1,491 @@
|
|
|
1
|
+
// src/cli/ship.ts
|
|
2
|
+
import chalk from "chalk";
|
|
3
|
+
|
|
4
|
+
// src/core/ship/pipeline.ts
|
|
5
|
+
import { execSync } from "child_process";
|
|
6
|
+
|
|
7
|
+
// src/core/ship/version.ts
|
|
8
|
+
import { readFile, writeFile } from "fs/promises";
|
|
9
|
+
import { join } from "path";
|
|
10
|
+
function detectVersionBump(commitMessages) {
  // Decide a semver bump level from conventional-commit messages:
  // "breaking change" text or a "<type>!:" prefix => major,
  // a "feat:"/"feat(" prefix => minor, everything else => patch.
  const text = commitMessages.join("\n").toLowerCase();
  const hasBreaking = text.includes("breaking change") || /^[a-z]+!:/m.test(text);
  if (hasBreaking) {
    return "major";
  }
  return /^feat[:(]/m.test(text) ? "minor" : "patch";
}
|
|
20
|
+
function bumpVersion(current, level) {
  // Parse a bare or "v"-prefixed semver string and bump the requested
  // component; missing components default to 0. Returns undefined for an
  // unrecognized level (mirrors the original switch with no default).
  const [majorRaw, minorRaw, patchRaw] = current.replace(/^v/, "").split(".");
  const major = parseInt(majorRaw ?? "0", 10);
  const minor = parseInt(minorRaw ?? "0", 10);
  const patch = parseInt(patchRaw ?? "0", 10);
  if (level === "major") {
    return `${major + 1}.0.0`;
  }
  if (level === "minor") {
    return `${major}.${minor + 1}.0`;
  }
  if (level === "patch") {
    return `${major}.${minor}.${patch + 1}`;
  }
}
|
|
34
|
+
async function readPackageVersion(cwd) {
  // Best-effort read of <cwd>/package.json's "version" field.
  // Returns null when the file is missing, unreadable, unparsable,
  // or has no version.
  const pkgPath = join(cwd, "package.json");
  try {
    const pkg = JSON.parse(await readFile(pkgPath, "utf-8"));
    return pkg.version ?? null;
  } catch {
    return null;
  }
}
|
|
44
|
+
async function writePackageVersion(cwd, version) {
  // Rewrite <cwd>/package.json with `version`, preserving all other fields,
  // 2-space indented and newline-terminated (npm's conventional formatting).
  // Throws if the file is missing or unparsable.
  const pkgPath = join(cwd, "package.json");
  const pkg = JSON.parse(await readFile(pkgPath, "utf-8"));
  pkg.version = version;
  const serialized = `${JSON.stringify(pkg, null, 2)}\n`;
  await writeFile(pkgPath, serialized, "utf-8");
}
|
|
51
|
+
|
|
52
|
+
// src/core/ship/changelog.ts
|
|
53
|
+
function parseCommitMessages(commitLines) {
  // Parse raw commit lines — either bare subjects ("feat: x") or
  // `git log --oneline` lines ("<hash> feat: x") — into entries of
  // { type, message, hash? }. Empty messages are dropped.
  //
  // Fix: the callers feed this `git log --oneline` output, whose hash prefix
  // prevented the conventional-commit regex from ever matching, so every
  // such commit was bucketed as "other" and the changelog lost its grouping.
  // Strip a leading hash first, then classify the remaining subject.
  const validTypes = ["feat", "fix", "refactor", "docs", "chore"];
  return commitLines.map((line) => {
    const trimmed = line.trim();
    // "<7-40 hex chars> <subject>" — the shape produced by --oneline.
    const hashMatch = trimmed.match(/^([0-9a-f]{7,40})\s+(.+)/);
    const hash = hashMatch ? hashMatch[1] : void 0;
    const subject = hashMatch ? hashMatch[2] : trimmed;
    // Conventional-commit shape: type(scope)?!?: message
    const match = subject.match(/^([a-z]+)(?:\([^)]*\))?[!]?:\s*(.+)/);
    if (match) {
      const type = validTypes.includes(match[1]) ? match[1] : "other";
      return hash ? { type, message: match[2], hash } : { type, message: match[2] };
    }
    return hash ? { type: "other", message: subject, hash } : { type: "other", message: subject };
  }).filter((e) => e.message.length > 0);
}
|
|
72
|
+
function generateChangelog(commits, version) {
  // Render a markdown changelog section for `version` from raw commit lines,
  // grouped by conventional-commit type in a fixed label order. Entries with
  // a hash get a short-hash suffix. Output ends with a trailing newline.
  const typeLabels = {
    feat: "Features",
    fix: "Bug Fixes",
    refactor: "Refactoring",
    docs: "Documentation",
    chore: "Chores",
    other: "Other"
  };
  const today = new Date().toISOString().split("T")[0];
  const out = [`## ${version} (${today})`];
  // Bucket parsed entries by type.
  const grouped = {};
  for (const entry of parseCommitMessages(commits)) {
    (grouped[entry.type] ??= []).push(entry);
  }
  // Emit sections in the canonical label order, skipping empty buckets.
  for (const [type, label] of Object.entries(typeLabels)) {
    const bucket = grouped[type];
    if (!bucket || bucket.length === 0) continue;
    out.push("", `### ${label}`);
    for (const { message, hash } of bucket) {
      const suffix = hash ? ` (${hash.slice(0, 7)})` : "";
      out.push(`- ${message}${suffix}`);
    }
  }
  return out.join("\n") + "\n";
}
|
|
102
|
+
|
|
103
|
+
// src/core/ship/pipeline.ts
|
|
104
|
+
// Orchestrates the release ("ship") workflow as an ordered series of recorded
// steps: preflight -> merge-base sync -> tests -> review -> risk ->
// version bump -> changelog -> commit+push -> PR creation. Each step is
// appended to `this.steps` so the CLI can render progress; the first failing
// step aborts the run via failAt(). Git/npm/gh work runs synchronously
// through execSync.
var ShipPipeline = class {
  // Ordered step records: { name, label, status, detail?, error? }.
  steps = [];
  // Directory all git/npm/gh commands execute in.
  cwd;
  // Options object; fields read below: baseBranch, dryRun, skipReview, skipVersion.
  options;
  constructor(cwd, options = {}) {
    this.cwd = cwd;
    this.options = options;
  }
  // Run the full pipeline. Resolves to { success, steps, prUrl?, version?,
  // failedAt? }; step failures are folded into the returned record rather
  // than rejecting.
  async run() {
    const baseBranch = this.options.baseBranch ?? this.detectBaseBranch();
    // Step: refuse to ship from the base branch or with a dirty worktree.
    const preflight = this.addStep("preflight", "Pre-flight");
    try {
      this.runPreflight(baseBranch);
      preflight.status = "passed";
      preflight.detail = "ready";
    } catch (e) {
      return this.failAt(preflight, e);
    }
    // Step: fetch and merge origin/<base> so the branch is up to date.
    const mergeBase = this.addStep("merge_base", "Merge base");
    try {
      this.runMergeBase(baseBranch);
      mergeBase.status = "passed";
      mergeBase.detail = "up to date";
    } catch (e) {
      return this.failAt(mergeBase, e);
    }
    // Step: run the project's npm test suite.
    const tests = this.addStep("tests", "Tests");
    try {
      const testOutput = this.runTests();
      tests.status = "passed";
      tests.detail = testOutput;
    } catch (e) {
      return this.failAt(tests, e);
    }
    // NOTE(review): the review and risk steps below are stubs — they are
    // marked "passed" without performing any actual check.
    if (!this.options.skipReview) {
      const review = this.addStep("review", "Code review");
      review.status = "passed";
      review.detail = "clean";
    }
    const risk = this.addStep("risk", "Risk");
    risk.status = "passed";
    risk.detail = "assessed";
    // Next semver derived from commit messages; stays undefined when the
    // step is skipped or there is no package.json.
    let newVersion;
    if (!this.options.skipVersion) {
      const version = this.addStep("version", "Version");
      try {
        newVersion = await this.runVersionBump(baseBranch);
        version.status = "passed";
        version.detail = newVersion ?? "no package.json";
      } catch (e) {
        return this.failAt(version, e);
      }
    }
    // Step: generate a changelog from commits since the base branch.
    const changelog = this.addStep("changelog", "Changelog");
    try {
      const commits = this.getCommitsSinceBranch(baseBranch);
      if (commits.length > 0 && newVersion) {
        const changelogContent = generateChangelog(commits, newVersion);
        // NOTE(review): changelogContent is computed but never persisted —
        // this non-dry-run branch is empty, so the changelog is dropped.
        if (!this.options.dryRun) {
        }
        changelog.status = "passed";
        changelog.detail = `${commits.length} entries`;
      } else {
        changelog.status = "passed";
        changelog.detail = "no new commits";
      }
    } catch (e) {
      return this.failAt(changelog, e);
    }
    // Dry runs stop before any mutating git operations.
    if (this.options.dryRun) {
      return {
        success: true,
        steps: this.steps,
        version: newVersion
      };
    }
    // Step: commit any local changes (e.g. the version bump) and push.
    const commitPush = this.addStep("commit_push", "Commit + Push");
    try {
      this.runCommitPush(newVersion);
      commitPush.status = "passed";
    } catch (e) {
      return this.failAt(commitPush, e);
    }
    // Step: open a PR via the gh CLI ("gh not available" when gh is missing).
    const pr = this.addStep("pr", "PR");
    try {
      const prUrl = this.createPR(baseBranch, newVersion);
      pr.status = "passed";
      pr.detail = prUrl ?? "gh not available";
      return {
        success: true,
        steps: this.steps,
        prUrl: prUrl ?? void 0,
        version: newVersion
      };
    } catch (e) {
      return this.failAt(pr, e);
    }
  }
  // Append a new step in "running" state and return it for later mutation.
  addStep(name, label) {
    const step = { name, label, status: "running" };
    this.steps.push(step);
    return step;
  }
  // Mark `step` failed with the error's message and build the failure result.
  failAt(step, error) {
    step.status = "failed";
    step.error = error instanceof Error ? error.message : String(error);
    return {
      success: false,
      steps: this.steps,
      failedAt: step.name
    };
  }
  // Prefer origin/main, then origin/master; default to "main" when remote
  // branches cannot be listed.
  detectBaseBranch() {
    try {
      const branches = execSync("git branch -r", { cwd: this.cwd, encoding: "utf-8" });
      if (branches.includes("origin/main")) return "main";
      if (branches.includes("origin/master")) return "master";
      return "main";
    } catch {
      return "main";
    }
  }
  // Throws unless we are on a non-base branch with a clean worktree.
  runPreflight(baseBranch) {
    const currentBranch = execSync("git branch --show-current", {
      cwd: this.cwd,
      encoding: "utf-8"
    }).trim();
    if (currentBranch === baseBranch) {
      throw new Error(`Cannot ship from ${baseBranch} \u2014 switch to a feature branch`);
    }
    const status = execSync("git status --porcelain", {
      cwd: this.cwd,
      encoding: "utf-8"
    }).trim();
    if (status.length > 0) {
      throw new Error("Uncommitted changes detected. Commit or stash first.");
    }
  }
  // Fetch and merge origin/<base>; skipped entirely on dry runs.
  // NOTE(review): a failed merge may leave the worktree mid-merge — the
  // user is told to resolve manually.
  runMergeBase(baseBranch) {
    if (this.options.dryRun) return;
    try {
      execSync(`git fetch origin ${baseBranch}`, {
        cwd: this.cwd,
        encoding: "utf-8",
        stdio: "pipe"
      });
      execSync(`git merge origin/${baseBranch} --no-edit`, {
        cwd: this.cwd,
        encoding: "utf-8",
        stdio: "pipe"
      });
    } catch (e) {
      throw new Error("Failed to merge base branch. Resolve conflicts manually.");
    }
  }
  // Run `npm test` with a 5-minute cap; summarize "<n> passing" when the
  // output matches "<n> pass". Skipped on dry runs.
  runTests() {
    if (this.options.dryRun) return "skipped (dry-run)";
    try {
      const output = execSync("npm test", {
        cwd: this.cwd,
        encoding: "utf-8",
        stdio: "pipe",
        timeout: 3e5
        // 5 min timeout
      });
      const match = output.match(/(\d+)\s+pass/i);
      return match ? `${match[1]} passing` : "passing";
    } catch (e) {
      throw new Error("Tests failed. Fix test failures before shipping.");
    }
  }
  // Compute the next semver from commits since <base> and write it to
  // package.json (unless dry-run). Returns undefined when there is no
  // readable package.json version.
  async runVersionBump(baseBranch) {
    const currentVersion = await readPackageVersion(this.cwd);
    if (!currentVersion) return void 0;
    const commits = this.getCommitsSinceBranch(baseBranch);
    const bumpLevel = detectVersionBump(commits);
    const newVersion = bumpVersion(currentVersion, bumpLevel);
    if (!this.options.dryRun) {
      await writePackageVersion(this.cwd, newVersion);
    }
    return newVersion;
  }
  // `git log origin/<base>..HEAD --oneline` as an array of non-empty lines;
  // empty array on any git failure (e.g. unknown remote branch).
  getCommitsSinceBranch(baseBranch) {
    try {
      const output = execSync(
        `git log origin/${baseBranch}..HEAD --oneline`,
        { cwd: this.cwd, encoding: "utf-8", stdio: "pipe" }
      );
      return output.trim().split("\n").filter(Boolean);
    } catch {
      return [];
    }
  }
  // Stage everything, commit only if something is actually staged, then
  // push the current branch (setting upstream).
  runCommitPush(version) {
    const msg = version ? `chore: ship v${version}` : "chore: ship";
    try {
      execSync("git add -A", { cwd: this.cwd, stdio: "pipe" });
      // Shell trick: `git diff --cached --quiet` exits non-zero when there
      // are staged changes, so "changed" is echoed only in that case.
      const status = execSync("git diff --cached --quiet || echo changed", {
        cwd: this.cwd,
        encoding: "utf-8",
        stdio: "pipe"
      }).trim();
      if (status === "changed") {
        execSync(`git commit -m "${msg}"`, { cwd: this.cwd, stdio: "pipe" });
      }
      execSync("git push -u origin HEAD", { cwd: this.cwd, stdio: "pipe" });
    } catch (e) {
      throw new Error("Failed to commit and push. Check git status.");
    }
  }
  // Create a PR with the gh CLI; returns the PR URL, or null when gh is
  // missing or creation fails.
  // NOTE(review): baseBranch/title are interpolated into a shell command —
  // a branch name containing quotes or $() would break the command or
  // inject; consider execFileSync with an argument array.
  createPR(baseBranch, version) {
    try {
      execSync("gh --version", { stdio: "pipe" });
    } catch {
      return null;
    }
    try {
      const currentBranch = execSync("git branch --show-current", {
        cwd: this.cwd,
        encoding: "utf-8",
        stdio: "pipe"
      }).trim();
      const title = version ? `Ship v${version}` : `Ship ${currentBranch}`;
      const output = execSync(
        `gh pr create --base ${baseBranch} --title "${title}" --body "Shipped via ArchonDev ship pipeline.

_Powered by gstack-inspired automation._"`,
        { cwd: this.cwd, encoding: "utf-8", stdio: "pipe" }
      );
      return output.trim();
    } catch {
      return null;
    }
  }
};
|
|
339
|
+
|
|
340
|
+
// src/core/code-review/doc-staleness.ts
|
|
341
|
+
import { readFile as readFile2 } from "fs/promises";
|
|
342
|
+
import { existsSync } from "fs";
|
|
343
|
+
import { join as join2, basename, dirname } from "path";
|
|
344
|
+
import { glob } from "glob";
|
|
345
|
+
async function detectStaleDocs(cwd, changedFiles) {
  // Scan common doc locations and flag each doc that mentions a changed file
  // (by full path, basename, or "<parent-dir>/"), suggesting it may be stale.
  // One hit per doc is enough — scanning stops at the first matching file.
  const docPatterns = ["README.md", "docs/**/*.md", "ARCHITECTURE.md", "CONTRIBUTING.md"];
  const found = [];
  for (const pattern of docPatterns) {
    try {
      found.push(...await glob(pattern, { cwd, nodir: true }));
    } catch {
      // Best-effort scan: ignore patterns that fail to glob.
    }
  }
  const staleDocs = [];
  for (const docFile of [...new Set(found)]) {
    const docPath = join2(cwd, docFile);
    if (!existsSync(docPath)) continue;
    let content;
    try {
      content = await readFile2(docPath, "utf-8");
    } catch {
      continue;
    }
    for (const changedFile of changedFiles) {
      const normalized = changedFile.replace(/\\/g, "/");
      const fileName = basename(normalized);
      const parentDir = dirname(normalized).split("/").pop() ?? "";
      const mentioned = content.includes(normalized) || content.includes(fileName) || parentDir && content.includes(parentDir + "/");
      if (!mentioned) continue;
      staleDocs.push({
        docPath: docFile,
        referencedFile: normalized,
        reason: `${docFile} references ${normalized} which was modified`
      });
      break;
    }
  }
  return staleDocs;
}
|
|
382
|
+
async function runPostShipDocCheck(cwd, changedFiles) {
  // Post-ship report: which docs look stale, how many doc files were
  // scanned, and a suggested follow-up task title when anything is stale.
  const docPatterns = ["README.md", "docs/**/*.md", "ARCHITECTURE.md", "CONTRIBUTING.md"];
  const staleDocs = await detectStaleDocs(cwd, changedFiles);
  // NOTE(review): unlike detectStaleDocs, this count is not deduplicated, so
  // a doc matched by two patterns is counted twice (preserved behavior).
  let totalDocsScanned = 0;
  for (const pattern of docPatterns) {
    try {
      const matches = await glob(pattern, { cwd, nodir: true });
      totalDocsScanned += matches.length;
    } catch {
      // Best-effort count: ignore patterns that fail to glob.
    }
  }
  const suggestedAtomTitle = staleDocs.length > 0 ? "Update stale documentation" : void 0;
  return { staleDocs, totalDocsScanned, suggestedAtomTitle };
}
|
|
399
|
+
|
|
400
|
+
// src/cli/ship.ts
|
|
401
|
+
import { createInterface } from "readline";
|
|
402
|
+
import { execSync as execSync2 } from "child_process";
|
|
403
|
+
function createPrompt() {
  // Wrap a readline interface over stdio in a tiny ask/close API.
  // Callers must close() to release stdin so the process can exit.
  const rl = createInterface({ input: process.stdin, output: process.stdout });
  const ask = (question) => new Promise((resolve) => {
    rl.question(question, resolve);
  });
  return { ask, close: () => rl.close() };
}
|
|
415
|
+
function stepIcon(step) {
  // Map a step status to its colored one-character glyph for pipeline output;
  // unknown/running statuses render as a dim ellipsis.
  if (step.status === "passed") return chalk.green("\u2713");
  if (step.status === "failed") return chalk.red("\u2717");
  if (step.status === "skipped") return chalk.dim("\u2013");
  return chalk.dim("\u2026");
}
|
|
427
|
+
// CLI entry point for the ship command: prints a banner, runs the
// ShipPipeline, renders every recorded step with an icon, and — after a
// real (non-dry-run) successful ship — checks whether docs referencing the
// changed files may be stale, optionally prompting for a follow-up task.
async function ship(options = {}) {
  const cwd = process.cwd();
  const prompt = createPrompt();
  try {
    let currentBranch = "unknown";
    let baseBranch = options.baseBranch ?? "main";
    try {
      currentBranch = execSync2("git branch --show-current", { cwd, encoding: "utf-8" }).trim();
    } catch {
      // Not a git repo (or git missing): keep the "unknown" placeholder.
    }
    console.log();
    console.log(chalk.bold(`Ship Pipeline \u2014 ${currentBranch} \u2192 ${baseBranch}`));
    if (options.dryRun) {
      console.log(chalk.dim(" (dry run \u2014 no changes will be made)"));
    }
    console.log();
    const pipeline = new ShipPipeline(cwd, options);
    const result = await pipeline.run();
    // Render one "[i/n] Label <icon> detail — error" line per step.
    const totalSteps = result.steps.length;
    for (let i = 0; i < result.steps.length; i++) {
      const step = result.steps[i];
      const stepNum = `[${i + 1}/${totalSteps}]`;
      const detail = step.detail ? chalk.dim(` ${step.detail}`) : "";
      const error = step.error ? chalk.red(` \u2014 ${step.error}`) : "";
      console.log(` ${stepNum} ${step.label.padEnd(14)} ${stepIcon(step)}${detail}${error}`);
    }
    console.log();
    if (result.success) {
      if (result.prUrl) {
        console.log(chalk.green(`Ship complete. PR ready: ${result.prUrl}`));
      } else if (options.dryRun) {
        console.log(chalk.green("Dry run complete. All checks passed."));
      } else {
        console.log(chalk.green("Ship complete."));
      }
      // Post-ship doc-staleness check; advisory only and best-effort.
      if (!options.dryRun) {
        try {
          const changedFiles = execSync2("git diff --name-only HEAD~1", { cwd, encoding: "utf-8" }).trim().split("\n").filter(Boolean);
          const docReport = await runPostShipDocCheck(cwd, changedFiles);
          if (docReport.staleDocs.length > 0) {
            console.log();
            console.log(chalk.yellow(`Doc check: ${docReport.staleDocs.length} doc(s) may be stale`));
            for (const doc of docReport.staleDocs) {
              console.log(chalk.dim(` - ${doc.docPath} (${doc.referencedFile} changed)`));
            }
            const answer = await prompt.ask("Create doc-update task? (y/N): ");
            if (answer.toLowerCase() === "y") {
              // NOTE(review): no task is actually created here — only logged.
              console.log(chalk.dim("Created task: Update stale documentation"));
            }
          }
        } catch {
          // Swallow doc-check errors (e.g. no HEAD~1 on a fresh repo).
        }
      }
    } else {
      console.log(chalk.red(`Ship failed at: ${result.failedAt}`));
    }
    console.log(chalk.dim("\nPowered by gstack-inspired automation."));
  } finally {
    // Always release the readline interface so the process can exit.
    prompt.close();
  }
}
|
|
488
|
+
|
|
489
|
+
export {
|
|
490
|
+
ship
|
|
491
|
+
};
|
|
@@ -0,0 +1,150 @@
|
|
|
1
|
+
// src/cli/debug.ts
|
|
2
|
+
import { appendFileSync, existsSync, mkdirSync } from "fs";
|
|
3
|
+
import { join } from "path";
|
|
4
|
+
function isDebugEnabled() {
  // Debug logging is opt-in via ARCHON_DEBUG using any common truthy
  // spelling; whitespace and case are ignored. Unset/other values => false.
  const raw = process.env["ARCHON_DEBUG"];
  const normalized = raw?.trim().toLowerCase() ?? "";
  return ["1", "true", "yes", "on"].includes(normalized);
}
|
|
8
|
+
function createRunId() {
  // Build a sortable, per-process run id: YYYYMMDD-HHMMSS-<pid>, local time.
  const now = new Date();
  const pad = (n) => String(n).padStart(2, "0");
  const datePart = `${now.getFullYear()}${pad(now.getMonth() + 1)}${pad(now.getDate())}`;
  const timePart = `${pad(now.getHours())}${pad(now.getMinutes())}${pad(now.getSeconds())}`;
  return `${datePart}-${timePart}-${process.pid}`;
}
|
|
18
|
+
// One id per process; every debugLog call in this run appends to one file.
var RUN_ID = createRunId();
// Log destination under the working directory: .archon/debug/<run-id>.log.
var LOG_PATH = join(process.cwd(), ".archon", "debug", `${RUN_ID}.log`);
|
|
20
|
+
function redactValue(value) {
  // Prepare `value` for logging: deep-copy objects/arrays, cap long strings
  // at 3000 chars, and replace any property whose key looks credential-like
  // (token/secret/key/password/auth...) with "[redacted]".
  if (typeof value === "string") {
    const LIMIT = 3e3;
    return value.length > LIMIT ? `${value.slice(0, LIMIT)}... [truncated]` : value;
  }
  if (Array.isArray(value)) {
    return value.map((item) => redactValue(item));
  }
  if (value && typeof value === "object") {
    const result = {};
    for (const [key, nested] of Object.entries(value)) {
      const sensitive = /(token|secret|key|password|authorization|auth)/i.test(key);
      result[key] = sensitive ? "[redacted]" : redactValue(nested);
    }
    return result;
  }
  // Primitives (numbers, booleans, null, undefined) pass through untouched.
  return value;
}
|
|
43
|
+
function getDebugLogPath() {
  // Expose this run's log file path, but only while debugging is enabled.
  if (!isDebugEnabled()) {
    return null;
  }
  return LOG_PATH;
}
|
|
46
|
+
function debugLog(scope, stage, message, data) {
  // Append one JSON line (timestamp, scope, stage, message, redacted data)
  // to this run's debug log. No-op unless ARCHON_DEBUG is set.
  if (!isDebugEnabled()) return;
  const record = {
    ts: new Date().toISOString(),
    scope,
    stage,
    message,
    data: redactValue(data)
  };
  // Create .archon/debug lazily on first write.
  const logDir = join(process.cwd(), ".archon", "debug");
  if (!existsSync(logDir)) {
    mkdirSync(logDir, { recursive: true });
  }
  appendFileSync(LOG_PATH, JSON.stringify(record) + "\n");
}
|
|
62
|
+
|
|
63
|
+
// src/core/billing/local-ledger.ts
|
|
64
|
+
import { homedir } from "os";
|
|
65
|
+
import { join as join2, dirname } from "path";
|
|
66
|
+
import { existsSync as existsSync2 } from "fs";
|
|
67
|
+
import { appendFile, chmod, mkdir, readFile, writeFile } from "fs/promises";
|
|
68
|
+
// Default ledger location: ~/.archon/usage-ledger.jsonl (one JSON entry per line).
var DEFAULT_LEDGER_PATH = join2(homedir(), ".archon", "usage-ledger.jsonl");
|
|
69
|
+
function getLocalUsageLedgerPath() {
  // Resolve the usage-ledger path, honoring the env override. Note the `||`
  // means an empty ARCHON_USAGE_LEDGER_PATH is treated as unset.
  const override = process.env["ARCHON_USAGE_LEDGER_PATH"];
  return override || DEFAULT_LEDGER_PATH;
}
|
|
72
|
+
async function ensureLedgerPath() {
  // Make sure the ledger file exists with restrictive permissions and
  // return its path. 0o700 (448) for the directory, 0o600 (384) for the file.
  const ledgerPath = getLocalUsageLedgerPath();
  const ledgerDir = dirname(ledgerPath);
  if (!existsSync2(ledgerDir)) {
    await mkdir(ledgerDir, { recursive: true, mode: 0o700 });
  }
  if (!existsSync2(ledgerPath)) {
    await writeFile(ledgerPath, "", { mode: 0o600 });
    // Explicit chmod as well — presumably to defeat the process umask
    // narrowing the writeFile mode; TODO confirm intent.
    await chmod(ledgerPath, 0o600);
  }
  return ledgerPath;
}
|
|
84
|
+
async function appendLocalUsageEntry(entry) {
  // Record one usage entry as a JSONL line, creating the ledger on demand.
  const ledgerPath = await ensureLedgerPath();
  const line = JSON.stringify(entry) + "\n";
  await appendFile(ledgerPath, line, "utf-8");
}
|
|
88
|
+
async function readLocalUsageEntries() {
  // Load all ledger entries, skipping blank/corrupt lines and records that
  // lack the minimum identifying fields (timestamp/projectPath/model).
  // Any read failure yields [] — the ledger is best-effort telemetry.
  const ledgerPath = getLocalUsageLedgerPath();
  if (!existsSync2(ledgerPath)) {
    return [];
  }
  let content;
  try {
    content = await readFile(ledgerPath, "utf-8");
  } catch {
    return [];
  }
  const entries = [];
  for (const rawLine of content.split("\n")) {
    const line = rawLine.trim();
    if (!line) continue;
    try {
      const parsed = JSON.parse(line);
      if (parsed.timestamp && parsed.projectPath && parsed.model) {
        entries.push(parsed);
      }
    } catch {
      // Skip corrupt lines.
    }
  }
  return entries;
}
|
|
107
|
+
async function summarizeLocalUsage(options = {}) {
  // Aggregate ledger entries — optionally filtered by { start, end,
  // projectPath } — into token/cost totals plus a per-model breakdown
  // sorted by descending cost. Entries with unparsable timestamps are
  // skipped.
  const entries = await readLocalUsageEntries();
  const { start, end, projectPath } = options;
  const byModelMap = new Map();
  let totalInputTokens = 0;
  let totalOutputTokens = 0;
  let totalBaseCost = 0;
  for (const entry of entries) {
    const when = new Date(entry.timestamp);
    if (Number.isNaN(when.getTime())) continue;
    if (start && when < start) continue;
    if (end && when > end) continue;
    if (projectPath && entry.projectPath !== projectPath) continue;
    totalInputTokens += entry.inputTokens;
    totalOutputTokens += entry.outputTokens;
    totalBaseCost += entry.baseCost;
    const stats = byModelMap.get(entry.model);
    if (stats) {
      stats.inputTokens += entry.inputTokens;
      stats.outputTokens += entry.outputTokens;
      stats.cost += entry.baseCost;
    } else {
      byModelMap.set(entry.model, {
        inputTokens: entry.inputTokens,
        outputTokens: entry.outputTokens,
        cost: entry.baseCost
      });
    }
  }
  const byModel = [...byModelMap.entries()]
    .map(([model, stats]) => ({ model, ...stats }))
    .sort((a, b) => b.cost - a.cost);
  return { totalInputTokens, totalOutputTokens, totalBaseCost, byModel };
}
|
|
144
|
+
|
|
145
|
+
export {
|
|
146
|
+
getDebugLogPath,
|
|
147
|
+
debugLog,
|
|
148
|
+
appendLocalUsageEntry,
|
|
149
|
+
summarizeLocalUsage
|
|
150
|
+
};
|