@treeseed/sdk 0.6.33 → 0.6.35
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/operations/services/build-warning-policy.js +85 -0
- package/dist/operations/services/deploy.d.ts +1 -0
- package/dist/operations/services/deploy.js +37 -0
- package/dist/operations/services/git-workflow.d.ts +18 -1
- package/dist/operations/services/git-workflow.js +42 -8
- package/dist/operations/services/github-actions-verification.d.ts +9 -1
- package/dist/operations/services/github-actions-verification.js +75 -7
- package/dist/operations/services/github-api.d.ts +4 -0
- package/dist/operations/services/github-api.js +5 -1
- package/dist/operations/services/github-automation.d.ts +2 -0
- package/dist/operations/services/release-candidate.js +40 -4
- package/dist/operations/services/repository-save-orchestrator.d.ts +10 -0
- package/dist/operations/services/repository-save-orchestrator.js +47 -6
- package/dist/scripts/check-build-warnings.js +16 -16
- package/dist/workflow/operations.d.ts +12 -0
- package/dist/workflow/operations.js +37 -0
- package/dist/workflow-state.d.ts +14 -0
- package/dist/workflow-state.js +43 -5
- package/package.json +1 -1
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
export const DEFAULT_BUILD_WARNING_RULES = [
|
|
2
|
+
{
|
|
3
|
+
label: 'vite-browser-external-libsodium-url',
|
|
4
|
+
pattern: /Module "url" has been externalized for browser compatibility, imported by ".*libsodium-sumo.*"/u,
|
|
5
|
+
},
|
|
6
|
+
];
|
|
7
|
+
|
|
8
|
+
export function createBuildWarningRules(options = {}) {
|
|
9
|
+
const useDefaultPolicy = options.useDefaultPolicy !== false;
|
|
10
|
+
const customAllow = Array.isArray(options.allow) ? options.allow : [];
|
|
11
|
+
return [
|
|
12
|
+
...(useDefaultPolicy ? DEFAULT_BUILD_WARNING_RULES : []),
|
|
13
|
+
...customAllow.map((pattern) => ({
|
|
14
|
+
label: `custom:${pattern}`,
|
|
15
|
+
pattern: pattern instanceof RegExp ? pattern : new RegExp(String(pattern)),
|
|
16
|
+
})),
|
|
17
|
+
];
|
|
18
|
+
}
|
|
19
|
+
|
|
20
|
+
export function classifyBuildWarningLine(line, options = {}) {
|
|
21
|
+
const value = String(line ?? '');
|
|
22
|
+
if (!value.includes('[WARN]')) {
|
|
23
|
+
return { kind: 'not-warning' };
|
|
24
|
+
}
|
|
25
|
+
const allowed = createBuildWarningRules(options).find((rule) => rule.pattern.test(value));
|
|
26
|
+
if (allowed) {
|
|
27
|
+
return { kind: 'allowed', label: allowed.label };
|
|
28
|
+
}
|
|
29
|
+
return { kind: 'unexpected', line: value };
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
export function createBuildWarningSummary() {
|
|
33
|
+
const allowedWarnings = new Map();
|
|
34
|
+
const unexpectedWarnings = [];
|
|
35
|
+
return {
|
|
36
|
+
allowedWarnings,
|
|
37
|
+
unexpectedWarnings,
|
|
38
|
+
record(line, options = {}) {
|
|
39
|
+
const classified = classifyBuildWarningLine(line, options);
|
|
40
|
+
if (classified.kind === 'allowed') {
|
|
41
|
+
allowedWarnings.set(classified.label, (allowedWarnings.get(classified.label) ?? 0) + 1);
|
|
42
|
+
return classified;
|
|
43
|
+
}
|
|
44
|
+
if (classified.kind === 'unexpected') {
|
|
45
|
+
unexpectedWarnings.push(classified.line);
|
|
46
|
+
}
|
|
47
|
+
return classified;
|
|
48
|
+
},
|
|
49
|
+
};
|
|
50
|
+
}
|
|
51
|
+
|
|
52
|
+
export function mergeAllowedBuildWarnings(target, source) {
|
|
53
|
+
for (const [label, count] of source.entries()) {
|
|
54
|
+
target.set(label, (target.get(label) ?? 0) + count);
|
|
55
|
+
}
|
|
56
|
+
return target;
|
|
57
|
+
}
|
|
58
|
+
|
|
59
|
+
export function countAllowedBuildWarnings(allowedWarnings) {
|
|
60
|
+
return [...allowedWarnings.values()].reduce((sum, count) => sum + count, 0);
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export function formatAllowedBuildWarnings(allowedWarnings) {
|
|
64
|
+
const total = countAllowedBuildWarnings(allowedWarnings);
|
|
65
|
+
if (total === 0) {
|
|
66
|
+
return [];
|
|
67
|
+
}
|
|
68
|
+
return [
|
|
69
|
+
`Allowed build warnings: ${total}`,
|
|
70
|
+
...[...allowedWarnings.entries()]
|
|
71
|
+
.sort(([left], [right]) => left.localeCompare(right))
|
|
72
|
+
.map(([label, count]) => `- ${label}: ${count}`),
|
|
73
|
+
];
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
export function scanBuildWarningText(text, options = {}) {
|
|
77
|
+
const summary = createBuildWarningSummary();
|
|
78
|
+
for (const line of String(text ?? '').split(/\r?\n/u)) {
|
|
79
|
+
summary.record(line, options);
|
|
80
|
+
}
|
|
81
|
+
return {
|
|
82
|
+
allowedWarnings: summary.allowedWarnings,
|
|
83
|
+
unexpectedWarnings: summary.unexpectedWarnings,
|
|
84
|
+
};
|
|
85
|
+
}
|
|
@@ -998,6 +998,7 @@ export declare function runRemoteD1Migrations(tenantRoot: any, options?: {}): {
|
|
|
998
998
|
};
|
|
999
999
|
export declare function markDeploymentInitialized(tenantRoot: any, options?: {}): any;
|
|
1000
1000
|
export declare function markManagedServicesInitialized(tenantRoot: any, options?: {}): any;
|
|
1001
|
+
export declare function recordHostedDeploymentState(tenantRoot: any, options?: {}): any;
|
|
1001
1002
|
export declare function assertDeploymentInitialized(tenantRoot: any, options?: {}): any;
|
|
1002
1003
|
export declare function finalizeDeploymentState(tenantRoot: any, options?: {}): any;
|
|
1003
1004
|
export declare function printDeploySummary(summary: any): void;
|
|
@@ -2096,6 +2096,42 @@ function markManagedServicesInitialized(tenantRoot, options = {}) {
|
|
|
2096
2096
|
writeDeployState(tenantRoot, state, { target });
|
|
2097
2097
|
return state;
|
|
2098
2098
|
}
|
|
2099
|
+
function recordHostedDeploymentState(tenantRoot, options = {}) {
|
|
2100
|
+
const target = normalizeTarget(options.scope ?? options.target ?? "prod");
|
|
2101
|
+
const deployConfig = loadTenantDeployConfig(tenantRoot);
|
|
2102
|
+
const state = loadDeployState(tenantRoot, deployConfig, { target });
|
|
2103
|
+
const timestamp = typeof options.timestamp === "string" && options.timestamp.trim() ? options.timestamp.trim() : (/* @__PURE__ */ new Date()).toISOString();
|
|
2104
|
+
const deployedUrl = typeof options.url === "string" && options.url.trim() ? options.url.trim() : state.lastDeployedUrl ?? resolveConfiguredSurfaceBaseUrl(deployConfig, target, "web");
|
|
2105
|
+
const commit = typeof options.commit === "string" && options.commit.trim() ? options.commit.trim() : null;
|
|
2106
|
+
state.lastDeployedUrl = deployedUrl;
|
|
2107
|
+
state.lastDeploymentTimestamp = timestamp;
|
|
2108
|
+
state.lastDeployedCommit = commit;
|
|
2109
|
+
state.readiness = {
|
|
2110
|
+
...state.readiness ?? {},
|
|
2111
|
+
initialized: true,
|
|
2112
|
+
configured: true,
|
|
2113
|
+
provisioned: true,
|
|
2114
|
+
deployable: true,
|
|
2115
|
+
phase: "provisioned",
|
|
2116
|
+
initializedAt: state.readiness?.initializedAt ?? timestamp,
|
|
2117
|
+
lastValidatedAt: timestamp,
|
|
2118
|
+
blockers: [],
|
|
2119
|
+
warnings: state.readiness?.warnings ?? []
|
|
2120
|
+
};
|
|
2121
|
+
const nextHistoryEntry = {
|
|
2122
|
+
commit,
|
|
2123
|
+
timestamp,
|
|
2124
|
+
url: deployedUrl,
|
|
2125
|
+
target: deployTargetLabel(target),
|
|
2126
|
+
source: options.source ?? "hosted-github-workflow",
|
|
2127
|
+
workflow: options.workflow ?? null,
|
|
2128
|
+
runId: options.runId ?? null
|
|
2129
|
+
};
|
|
2130
|
+
const history = Array.isArray(state.deploymentHistory) ? state.deploymentHistory : [];
|
|
2131
|
+
state.deploymentHistory = [...history, nextHistoryEntry].slice(-20);
|
|
2132
|
+
writeDeployState(tenantRoot, state, { target });
|
|
2133
|
+
return state;
|
|
2134
|
+
}
|
|
2099
2135
|
function assertDeploymentInitialized(tenantRoot, options = {}) {
|
|
2100
2136
|
const target = normalizeTarget(options.scope ?? options.target ?? "prod");
|
|
2101
2137
|
const deployConfig = loadTenantDeployConfig(tenantRoot);
|
|
@@ -2225,6 +2261,7 @@ export {
|
|
|
2225
2261
|
queueId,
|
|
2226
2262
|
queueName,
|
|
2227
2263
|
reconcileCloudflareWebCacheRules,
|
|
2264
|
+
recordHostedDeploymentState,
|
|
2228
2265
|
resolveCloudflareZoneIdForHost,
|
|
2229
2266
|
resolveConfiguredCloudflareAccountId,
|
|
2230
2267
|
resolveConfiguredSurfaceBaseUrl,
|
|
@@ -119,15 +119,32 @@ export declare function mergeCurrentBranchIntoStaging(cwd: any, featureBranch: a
|
|
|
119
119
|
committed: boolean;
|
|
120
120
|
commitSha: string;
|
|
121
121
|
pushed: boolean;
|
|
122
|
+
generatedMetadataReconciliation: {
|
|
123
|
+
commitSha: null;
|
|
124
|
+
resolved: boolean;
|
|
125
|
+
repoDir: any;
|
|
126
|
+
targetBranch: string;
|
|
127
|
+
reconciledFiles: string[];
|
|
128
|
+
allConflictsWereGeneratedMetadata: boolean;
|
|
129
|
+
} | null;
|
|
122
130
|
};
|
|
123
|
-
export declare function squashMergeBranchIntoStaging(cwd: any, featureBranch: any, message: any, { pushTarget }?: {
|
|
131
|
+
export declare function squashMergeBranchIntoStaging(cwd: any, featureBranch: any, message: any, { pushTarget, reportGeneratedMetadataReconciliation }?: {
|
|
124
132
|
pushTarget?: boolean | undefined;
|
|
133
|
+
reportGeneratedMetadataReconciliation?: boolean | undefined;
|
|
125
134
|
}): {
|
|
126
135
|
repoDir: string;
|
|
127
136
|
targetBranch: string;
|
|
128
137
|
committed: boolean;
|
|
129
138
|
commitSha: string;
|
|
130
139
|
pushed: boolean;
|
|
140
|
+
generatedMetadataReconciliation: {
|
|
141
|
+
commitSha: null;
|
|
142
|
+
resolved: boolean;
|
|
143
|
+
repoDir: any;
|
|
144
|
+
targetBranch: string;
|
|
145
|
+
reconciledFiles: string[];
|
|
146
|
+
allConflictsWereGeneratedMetadata: boolean;
|
|
147
|
+
} | null;
|
|
131
148
|
};
|
|
132
149
|
export declare function currentManagedBranch(cwd?: any): string;
|
|
133
150
|
export declare function isTaskBranch(branchName: any): boolean;
|
|
@@ -28,14 +28,34 @@ function conflictedFiles(repoDir) {
|
|
|
28
28
|
}
|
|
29
29
|
function resolveGeneratedPackageMetadataConflicts(repoDir) {
|
|
30
30
|
const files = conflictedFiles(repoDir);
|
|
31
|
-
if (files.length === 0)
|
|
31
|
+
if (files.length === 0) {
|
|
32
|
+
return {
|
|
33
|
+
resolved: false,
|
|
34
|
+
repoDir,
|
|
35
|
+
targetBranch: STAGING_BRANCH,
|
|
36
|
+
reconciledFiles: [],
|
|
37
|
+
allConflictsWereGeneratedMetadata: false
|
|
38
|
+
};
|
|
39
|
+
}
|
|
32
40
|
const generatedMetadataFiles = /* @__PURE__ */ new Set(["package.json", "package-lock.json"]);
|
|
33
41
|
if (files.some((file) => !generatedMetadataFiles.has(file))) {
|
|
34
|
-
return
|
|
42
|
+
return {
|
|
43
|
+
resolved: false,
|
|
44
|
+
repoDir,
|
|
45
|
+
targetBranch: STAGING_BRANCH,
|
|
46
|
+
reconciledFiles: files,
|
|
47
|
+
allConflictsWereGeneratedMetadata: false
|
|
48
|
+
};
|
|
35
49
|
}
|
|
36
50
|
runGit(["checkout", "--theirs", "--", ...files], { cwd: repoDir });
|
|
37
51
|
runGit(["add", "--", ...files], { cwd: repoDir });
|
|
38
|
-
return
|
|
52
|
+
return {
|
|
53
|
+
resolved: true,
|
|
54
|
+
repoDir,
|
|
55
|
+
targetBranch: STAGING_BRANCH,
|
|
56
|
+
reconciledFiles: files,
|
|
57
|
+
allConflictsWereGeneratedMetadata: true
|
|
58
|
+
};
|
|
39
59
|
}
|
|
40
60
|
function headCommit(repoDir, ref = "HEAD") {
|
|
41
61
|
return runGit(["rev-parse", ref], { cwd: repoDir, capture: true }).trim();
|
|
@@ -301,22 +321,35 @@ function deleteRemoteBranch(repoDir, branchName) {
|
|
|
301
321
|
function mergeCurrentBranchIntoStaging(cwd, featureBranch) {
|
|
302
322
|
return squashMergeBranchIntoStaging(cwd, featureBranch, `stage: ${featureBranch}`);
|
|
303
323
|
}
|
|
304
|
-
function squashMergeBranchIntoStaging(cwd, featureBranch, message, { pushTarget = true } = {}) {
|
|
324
|
+
function squashMergeBranchIntoStaging(cwd, featureBranch, message, { pushTarget = true, reportGeneratedMetadataReconciliation = true } = {}) {
|
|
305
325
|
const repoDir = assertCleanWorktree(cwd);
|
|
306
326
|
fetchOrigin(repoDir);
|
|
307
327
|
syncBranchWithOrigin(repoDir, STAGING_BRANCH);
|
|
328
|
+
let generatedMetadataReconciliation = null;
|
|
308
329
|
try {
|
|
309
|
-
runGit(["merge", "--squash", featureBranch], { cwd: repoDir });
|
|
330
|
+
runGit(["merge", "--squash", featureBranch], { cwd: repoDir, capture: true });
|
|
310
331
|
} catch (error) {
|
|
311
|
-
|
|
332
|
+
const reconciliation = resolveGeneratedPackageMetadataConflicts(repoDir);
|
|
333
|
+
if (!reconciliation.resolved) {
|
|
312
334
|
throw error;
|
|
313
335
|
}
|
|
336
|
+
if (reportGeneratedMetadataReconciliation) {
|
|
337
|
+
console.log(`Resolving generated package metadata reconciliation for ${reconciliation.reconciledFiles.join(", ")}.`);
|
|
338
|
+
}
|
|
339
|
+
generatedMetadataReconciliation = {
|
|
340
|
+
...reconciliation,
|
|
341
|
+
commitSha: null
|
|
342
|
+
};
|
|
314
343
|
}
|
|
315
344
|
let committed = false;
|
|
316
345
|
if (repoHasStagedChanges(repoDir)) {
|
|
317
346
|
runGit(["commit", "-m", message], { cwd: repoDir });
|
|
318
347
|
committed = true;
|
|
319
348
|
}
|
|
349
|
+
const commitSha = headCommit(repoDir);
|
|
350
|
+
if (generatedMetadataReconciliation) {
|
|
351
|
+
generatedMetadataReconciliation.commitSha = commitSha;
|
|
352
|
+
}
|
|
320
353
|
if (pushTarget) {
|
|
321
354
|
pushBranch(repoDir, STAGING_BRANCH);
|
|
322
355
|
}
|
|
@@ -324,8 +357,9 @@ function squashMergeBranchIntoStaging(cwd, featureBranch, message, { pushTarget
|
|
|
324
357
|
repoDir,
|
|
325
358
|
targetBranch: STAGING_BRANCH,
|
|
326
359
|
committed,
|
|
327
|
-
commitSha
|
|
328
|
-
pushed: pushTarget
|
|
360
|
+
commitSha,
|
|
361
|
+
pushed: pushTarget,
|
|
362
|
+
generatedMetadataReconciliation
|
|
329
363
|
};
|
|
330
364
|
}
|
|
331
365
|
function currentManagedBranch(cwd = workspaceRoot()) {
|
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
import { type GitHubApiClient } from './github-api.ts';
|
|
1
|
+
import { type GitHubApiClient, type GitHubWorkflowProgressEvent } from './github-api.ts';
|
|
2
2
|
export type GitHubActionsWorkflowState = 'success' | 'failure' | 'pending' | 'missing' | 'not_pushed' | 'error';
|
|
3
3
|
export type GitHubActionsVerificationTarget = {
|
|
4
4
|
name: string;
|
|
@@ -114,8 +114,16 @@ export declare function skippedGitHubActionsGate(gate: GitHubActionsWorkflowGate
|
|
|
114
114
|
conclusion: null;
|
|
115
115
|
runId: null;
|
|
116
116
|
url: null;
|
|
117
|
+
createdAt: null;
|
|
118
|
+
updatedAt: null;
|
|
117
119
|
};
|
|
118
120
|
export declare function formatGitHubActionsGateFailure(gate: GitHubActionsWorkflowGate, result: Record<string, unknown>): string;
|
|
121
|
+
export declare function createGitHubActionsGateProgressReporter(gate: GitHubActionsWorkflowGate, options?: {
|
|
122
|
+
operation?: string;
|
|
123
|
+
now?: () => number;
|
|
124
|
+
minRepeatMs?: number;
|
|
125
|
+
onProgress?: (message: string, stream?: 'stdout' | 'stderr') => void;
|
|
126
|
+
}): (event: GitHubWorkflowProgressEvent) => void;
|
|
119
127
|
export declare function waitForGitHubActionsGate(gate: GitHubActionsWorkflowGate, options?: {
|
|
120
128
|
timeoutSeconds?: number;
|
|
121
129
|
pollSeconds?: number;
|
|
@@ -406,7 +406,9 @@ function skippedGitHubActionsGate(gate, reason) {
|
|
|
406
406
|
reason,
|
|
407
407
|
conclusion: null,
|
|
408
408
|
runId: null,
|
|
409
|
-
url: null
|
|
409
|
+
url: null,
|
|
410
|
+
createdAt: null,
|
|
411
|
+
updatedAt: null
|
|
410
412
|
};
|
|
411
413
|
}
|
|
412
414
|
function formatGitHubActionsGateFailure(gate, result) {
|
|
@@ -431,13 +433,15 @@ function formatElapsed(seconds) {
|
|
|
431
433
|
function shortSha(value) {
|
|
432
434
|
return value ? value.slice(0, 12) : "(unknown)";
|
|
433
435
|
}
|
|
434
|
-
function
|
|
436
|
+
function activeJobSummaries(event) {
|
|
435
437
|
const activeJobs = event.activeJobs ?? [];
|
|
436
|
-
|
|
437
|
-
const summaries = activeJobs.slice(0, 2).map((job) => {
|
|
438
|
+
return activeJobs.slice(0, 2).map((job) => {
|
|
438
439
|
const activeStep = (job.steps ?? []).find((step) => step.status && step.status !== "completed");
|
|
439
440
|
return activeStep?.name ? `${job.name} > ${activeStep.name}` : job.name;
|
|
440
441
|
}).filter(Boolean);
|
|
442
|
+
}
|
|
443
|
+
function activeJobSummary(event) {
|
|
444
|
+
const summaries = activeJobSummaries(event);
|
|
441
445
|
return summaries.length > 0 ? `; active: ${summaries.join(", ")}` : "";
|
|
442
446
|
}
|
|
443
447
|
function failedJobSummary(event) {
|
|
@@ -461,8 +465,73 @@ function formatGitHubActionsGateProgress(gate, event, operation) {
|
|
|
461
465
|
const run = event.runId ? ` run ${event.runId}` : "";
|
|
462
466
|
return `${prefix}${run} ${status}${activeJobSummary(event)}${url} (${formatElapsed(event.elapsedSeconds)} elapsed)`;
|
|
463
467
|
}
|
|
468
|
+
function progressCompactKey(gate, event) {
|
|
469
|
+
const active = activeJobSummaries(event).join(",");
|
|
470
|
+
return [
|
|
471
|
+
gate.name,
|
|
472
|
+
event.workflow,
|
|
473
|
+
event.runId ?? "none",
|
|
474
|
+
event.type,
|
|
475
|
+
event.status ?? "none",
|
|
476
|
+
event.conclusion ?? "none",
|
|
477
|
+
active
|
|
478
|
+
].join("|");
|
|
479
|
+
}
|
|
480
|
+
function formatCompactedGitHubActionsGateProgress(gate, event, operation, repeatedPolls, lastChangeSeconds) {
|
|
481
|
+
const prefix = `[${operation}][gate][${gate.name}] ${event.workflow}`;
|
|
482
|
+
const run = event.runId ? ` run ${event.runId}` : "";
|
|
483
|
+
const active = activeJobSummaries(event);
|
|
484
|
+
const activeText = active.length > 0 ? active.join(", ") : event.status ?? "waiting";
|
|
485
|
+
const url = event.url ? `: ${event.url}` : "";
|
|
486
|
+
return `${prefix}${run} still active: ${activeText} (${repeatedPolls} polls, ${formatElapsed(lastChangeSeconds)} since last change)${url}`;
|
|
487
|
+
}
|
|
488
|
+
function createGitHubActionsGateProgressReporter(gate, options = {}) {
|
|
489
|
+
const operation = options.operation ?? "workflow";
|
|
490
|
+
const now = options.now ?? (() => Date.now());
|
|
491
|
+
const minRepeatMs = options.minRepeatMs ?? 6e4;
|
|
492
|
+
let lastKey = null;
|
|
493
|
+
let lastChangeAt = now();
|
|
494
|
+
let lastEmitAt = 0;
|
|
495
|
+
let repeatedPolls = 0;
|
|
496
|
+
return (event) => {
|
|
497
|
+
if (event.type === "completed") {
|
|
498
|
+
options.onProgress?.(formatGitHubActionsGateProgress(gate, event, operation));
|
|
499
|
+
lastKey = null;
|
|
500
|
+
repeatedPolls = 0;
|
|
501
|
+
lastEmitAt = now();
|
|
502
|
+
lastChangeAt = lastEmitAt;
|
|
503
|
+
return;
|
|
504
|
+
}
|
|
505
|
+
const currentKey = progressCompactKey(gate, event);
|
|
506
|
+
const currentTime = now();
|
|
507
|
+
if (currentKey !== lastKey) {
|
|
508
|
+
lastKey = currentKey;
|
|
509
|
+
lastChangeAt = currentTime;
|
|
510
|
+
lastEmitAt = currentTime;
|
|
511
|
+
repeatedPolls = 0;
|
|
512
|
+
options.onProgress?.(formatGitHubActionsGateProgress(gate, event, operation));
|
|
513
|
+
return;
|
|
514
|
+
}
|
|
515
|
+
repeatedPolls += 1;
|
|
516
|
+
if (currentTime - lastEmitAt >= minRepeatMs) {
|
|
517
|
+
options.onProgress?.(formatCompactedGitHubActionsGateProgress(
|
|
518
|
+
gate,
|
|
519
|
+
event,
|
|
520
|
+
operation,
|
|
521
|
+
repeatedPolls,
|
|
522
|
+
Math.max(0, Math.round((currentTime - lastChangeAt) / 1e3))
|
|
523
|
+
));
|
|
524
|
+
lastEmitAt = currentTime;
|
|
525
|
+
repeatedPolls = 0;
|
|
526
|
+
}
|
|
527
|
+
};
|
|
528
|
+
}
|
|
464
529
|
async function waitForGitHubActionsGate(gate, options = {}) {
|
|
465
530
|
const { waitForGitHubWorkflowCompletion } = await import("./github-automation.js");
|
|
531
|
+
const reportProgress = createGitHubActionsGateProgressReporter(gate, {
|
|
532
|
+
operation: options.operation ?? "workflow",
|
|
533
|
+
onProgress: options.onProgress
|
|
534
|
+
});
|
|
466
535
|
return await waitForGitHubWorkflowCompletion(gate.repoPath, {
|
|
467
536
|
repository: gate.repository,
|
|
468
537
|
workflow: gate.workflow,
|
|
@@ -470,12 +539,11 @@ async function waitForGitHubActionsGate(gate, options = {}) {
|
|
|
470
539
|
branch: gate.branch,
|
|
471
540
|
timeoutSeconds: options.timeoutSeconds,
|
|
472
541
|
pollSeconds: options.pollSeconds,
|
|
473
|
-
onProgress:
|
|
474
|
-
options.onProgress?.(formatGitHubActionsGateProgress(gate, event, options.operation ?? "workflow"));
|
|
475
|
-
}
|
|
542
|
+
onProgress: reportProgress
|
|
476
543
|
});
|
|
477
544
|
}
|
|
478
545
|
export {
|
|
546
|
+
createGitHubActionsGateProgressReporter,
|
|
479
547
|
formatGitHubActionsGateFailure,
|
|
480
548
|
inspectGitHubActionsVerification,
|
|
481
549
|
skippedGitHubActionsGate,
|
|
@@ -26,6 +26,8 @@ export interface GitHubWorkflowRunSummary {
|
|
|
26
26
|
url: string | null;
|
|
27
27
|
headSha: string | null;
|
|
28
28
|
headBranch: string | null;
|
|
29
|
+
createdAt: string | null;
|
|
30
|
+
updatedAt: string | null;
|
|
29
31
|
}
|
|
30
32
|
export interface GitHubWorkflowJobSummary {
|
|
31
33
|
id: number;
|
|
@@ -162,6 +164,8 @@ export declare function waitForGitHubWorkflowRunCompletion(repository: string |
|
|
|
162
164
|
runId: number;
|
|
163
165
|
headSha: string | null;
|
|
164
166
|
branch: string | null;
|
|
167
|
+
createdAt: string | null;
|
|
168
|
+
updatedAt: string | null;
|
|
165
169
|
conclusion: string | null;
|
|
166
170
|
url: string | null;
|
|
167
171
|
jobs: GitHubWorkflowJobSummary[];
|
|
@@ -520,7 +520,9 @@ function normalizeWorkflowRun(run) {
|
|
|
520
520
|
conclusion: typeof run.conclusion === "string" ? run.conclusion : null,
|
|
521
521
|
url: typeof run.html_url === "string" ? run.html_url : null,
|
|
522
522
|
headSha: typeof run.head_sha === "string" ? run.head_sha : null,
|
|
523
|
-
headBranch: typeof run.head_branch === "string" ? run.head_branch : null
|
|
523
|
+
headBranch: typeof run.head_branch === "string" ? run.head_branch : null,
|
|
524
|
+
createdAt: typeof run.created_at === "string" ? run.created_at : null,
|
|
525
|
+
updatedAt: typeof run.updated_at === "string" ? run.updated_at : null
|
|
524
526
|
};
|
|
525
527
|
}
|
|
526
528
|
function normalizeWorkflowJob(job) {
|
|
@@ -623,6 +625,8 @@ async function waitForGitHubWorkflowRunCompletion(repository, {
|
|
|
623
625
|
runId: normalized.id,
|
|
624
626
|
headSha: normalized.headSha,
|
|
625
627
|
branch: normalized.headBranch,
|
|
628
|
+
createdAt: normalized.createdAt,
|
|
629
|
+
updatedAt: normalized.updatedAt,
|
|
626
630
|
conclusion: normalized.conclusion,
|
|
627
631
|
url: normalized.url,
|
|
628
632
|
jobs: normalizedJobs,
|
|
@@ -284,6 +284,8 @@ export declare function waitForGitHubWorkflowCompletion(tenantRoot: any, { repos
|
|
|
284
284
|
runId: number;
|
|
285
285
|
headSha: string | null;
|
|
286
286
|
branch: string | null;
|
|
287
|
+
createdAt: string | null;
|
|
288
|
+
updatedAt: string | null;
|
|
287
289
|
conclusion: string | null;
|
|
288
290
|
url: string | null;
|
|
289
291
|
jobs: import("./github-api.ts").GitHubWorkflowJobSummary[];
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import { createHash } from "node:crypto";
|
|
2
|
+
import { spawnSync } from "node:child_process";
|
|
2
3
|
import { cpSync, existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs";
|
|
3
4
|
import { tmpdir } from "node:os";
|
|
4
5
|
import { dirname, join, relative, resolve } from "node:path";
|
|
@@ -10,6 +11,7 @@ import { collectTreeseedEnvironmentContext, resolveTreeseedMachineEnvironmentVal
|
|
|
10
11
|
import { loadDeployState } from "./deploy.js";
|
|
11
12
|
import { loadCliDeployConfig } from "./runtime-tools.js";
|
|
12
13
|
import { packagesWithScript, run, workspacePackages } from "./workspace-tools.js";
|
|
14
|
+
import { createBuildWarningSummary, formatAllowedBuildWarnings } from "./build-warning-policy.js";
|
|
13
15
|
const RELEASE_CANDIDATE_CACHE_DIR = ".treeseed/workflow/release-candidates";
|
|
14
16
|
const STABLE_SEMVER = /^\d+\.\d+\.\d+$/u;
|
|
15
17
|
const REHEARSAL_IGNORED_SEGMENTS = /* @__PURE__ */ new Set([
|
|
@@ -224,9 +226,43 @@ function rehearsalVerifyScript(root) {
|
|
|
224
226
|
}
|
|
225
227
|
return null;
|
|
226
228
|
}
|
|
229
|
+
function runNpmRehearsalCommand(args, options) {
|
|
230
|
+
const result = spawnSync("npm", args, {
|
|
231
|
+
cwd: options.cwd,
|
|
232
|
+
env: process.env,
|
|
233
|
+
stdio: "pipe",
|
|
234
|
+
encoding: "utf8",
|
|
235
|
+
timeout: options.timeoutMs
|
|
236
|
+
});
|
|
237
|
+
const stdout = result.stdout ?? "";
|
|
238
|
+
const stderr = result.stderr ?? "";
|
|
239
|
+
if (result.status !== 0) {
|
|
240
|
+
if (stdout) process.stdout.write(stdout);
|
|
241
|
+
if (stderr) process.stderr.write(stderr);
|
|
242
|
+
const message = (result.error?.message ? `${result.error.message}
|
|
243
|
+
` : "") + (stderr.trim() || stdout.trim() || `npm ${args.join(" ")} failed`);
|
|
244
|
+
throw new Error(message);
|
|
245
|
+
}
|
|
246
|
+
const warningSummary = createBuildWarningSummary();
|
|
247
|
+
const emitFiltered = (text, stream) => {
|
|
248
|
+
for (const line of text.split(/\r?\n/u)) {
|
|
249
|
+
if (!line) continue;
|
|
250
|
+
const classified = warningSummary.record(line);
|
|
251
|
+
if (classified.kind === "allowed") continue;
|
|
252
|
+
stream.write(`${line}
|
|
253
|
+
`);
|
|
254
|
+
}
|
|
255
|
+
};
|
|
256
|
+
emitFiltered(stdout, process.stdout);
|
|
257
|
+
emitFiltered(stderr, process.stderr);
|
|
258
|
+
for (const line of formatAllowedBuildWarnings(warningSummary.allowedWarnings)) {
|
|
259
|
+
process.stdout.write(`${line}
|
|
260
|
+
`);
|
|
261
|
+
}
|
|
262
|
+
}
|
|
227
263
|
function buildRehearsalWorkspacePackageArtifacts(root) {
|
|
228
264
|
for (const pkg of packagesWithScript("build:dist", root)) {
|
|
229
|
-
|
|
265
|
+
runNpmRehearsalCommand(["--prefix", pkg.dir, "run", "build:dist"], { cwd: root, timeoutMs: 3e5 });
|
|
230
266
|
}
|
|
231
267
|
}
|
|
232
268
|
function runProductionDependencyRehearsal(root, plannedVersions, selectedPackageNames, failures) {
|
|
@@ -239,12 +275,12 @@ function runProductionDependencyRehearsal(root, plannedVersions, selectedPackage
|
|
|
239
275
|
const copied = copyWorkspaceForProductionRehearsal(root);
|
|
240
276
|
tempParent = copied.tempParent;
|
|
241
277
|
applyPlannedStableMetadata(copied.tempRoot, plannedVersions);
|
|
242
|
-
|
|
243
|
-
|
|
278
|
+
runNpmRehearsalCommand(["install", "--package-lock-only", "--ignore-scripts"], { cwd: copied.tempRoot, timeoutMs: 3e5 });
|
|
279
|
+
runNpmRehearsalCommand(["ci", "--ignore-scripts"], { cwd: copied.tempRoot, timeoutMs: 6e5 });
|
|
244
280
|
buildRehearsalWorkspacePackageArtifacts(copied.tempRoot);
|
|
245
281
|
const scriptName = rehearsalVerifyScript(copied.tempRoot);
|
|
246
282
|
if (scriptName) {
|
|
247
|
-
|
|
283
|
+
runNpmRehearsalCommand(["run", scriptName], { cwd: copied.tempRoot, timeoutMs: 9e5 });
|
|
248
284
|
}
|
|
249
285
|
const postInstallIssues = collectInternalDevReferenceIssues(copied.tempRoot, selectedPackageSet);
|
|
250
286
|
if (postInstallIssues.length > 0) {
|
|
@@ -1,6 +1,7 @@
|
|
|
1
1
|
import { type GitRemoteWriteMode } from './git-remote-policy.ts';
|
|
2
2
|
import { type CommitMessageContext, type CommitMessageProvider, type CommitMessageProviderMode } from './commit-message-provider.ts';
|
|
3
3
|
import { type DevDependencyReferenceMode, type GitDependencyProtocol } from './package-reference-policy.ts';
|
|
4
|
+
import { type BuildWarningPolicyOptions } from './build-warning-policy.js';
|
|
4
5
|
export type RepoKind = 'package' | 'project';
|
|
5
6
|
export type RepoBranchMode = 'package-release-main' | 'package-dev-save' | 'project-save';
|
|
6
7
|
export type SaveVerifyMode = 'action-first' | 'local-only' | 'skip';
|
|
@@ -150,6 +151,15 @@ export type RepositorySaveOptions = {
|
|
|
150
151
|
stablePackageRelease?: boolean;
|
|
151
152
|
onProgress?: (message: string, stream?: 'stdout' | 'stderr') => void;
|
|
152
153
|
};
|
|
154
|
+
export declare function runStreamingCommand(node: Pick<RepositorySaveNode, 'name' | 'path'>, options: Pick<RepositorySaveOptions, 'onProgress'>, phase: string, command: string, args: string[], commandOptions?: {
|
|
155
|
+
cwd?: string;
|
|
156
|
+
env?: NodeJS.ProcessEnv;
|
|
157
|
+
timeoutMs?: number;
|
|
158
|
+
buildWarningPolicy?: BuildWarningPolicyOptions | false;
|
|
159
|
+
}): Promise<{
|
|
160
|
+
stdout: string;
|
|
161
|
+
stderr: string;
|
|
162
|
+
}>;
|
|
153
163
|
export declare function nextDevVersion(version: string, branch: string, date?: Date): string;
|
|
154
164
|
export declare function discoverRepositorySaveNodes(root: string, gitRoot?: string, branch?: string, options?: {
|
|
155
165
|
stablePackageRelease?: boolean;
|
|
@@ -39,6 +39,10 @@ import {
|
|
|
39
39
|
workspacePackages
|
|
40
40
|
} from "./workspace-tools.js";
|
|
41
41
|
import { collectDeploymentLockfileWorkspaceIssues } from "./workspace-dependency-mode.js";
|
|
42
|
+
import {
|
|
43
|
+
createBuildWarningSummary,
|
|
44
|
+
formatAllowedBuildWarnings
|
|
45
|
+
} from "./build-warning-policy.js";
|
|
42
46
|
class RepositorySaveError extends Error {
|
|
43
47
|
exitCode;
|
|
44
48
|
details;
|
|
@@ -79,8 +83,10 @@ function runCapturedCommand(node, options, phase, command, args, commandOptions
|
|
|
79
83
|
});
|
|
80
84
|
const stdout = result.stdout?.trim() ?? "";
|
|
81
85
|
const stderr = result.stderr?.trim() ?? "";
|
|
82
|
-
if (
|
|
83
|
-
|
|
86
|
+
if (commandOptions.emitOutputOnSuccess !== false) {
|
|
87
|
+
if (stdout) emitProgress(options, node, phase, stdout);
|
|
88
|
+
if (stderr) emitProgress(options, node, phase, stderr, "stderr");
|
|
89
|
+
}
|
|
84
90
|
if (result.status !== 0) {
|
|
85
91
|
const message = (result.error?.message ? `${result.error.message}
|
|
86
92
|
` : "") + (prefixedOutput(node, phase, stderr) || prefixedOutput(node, phase, stdout) || `${progressPrefix(node, phase)} ${command} ${args.join(" ")} failed`);
|
|
@@ -94,6 +100,21 @@ function runCapturedCommand(node, options, phase, command, args, commandOptions
|
|
|
94
100
|
}
|
|
95
101
|
return stdout;
|
|
96
102
|
}
|
|
103
|
+
function npmLockfilePackageCount(repoDir) {
|
|
104
|
+
try {
|
|
105
|
+
const lockfile = readJson(resolve(repoDir, "package-lock.json"));
|
|
106
|
+
const packages = lockfile.packages;
|
|
107
|
+
if (packages && typeof packages === "object" && !Array.isArray(packages)) {
|
|
108
|
+
return Math.max(0, Object.keys(packages).filter((entry) => entry !== "").length);
|
|
109
|
+
}
|
|
110
|
+
const dependencies = lockfile.dependencies;
|
|
111
|
+
if (dependencies && typeof dependencies === "object" && !Array.isArray(dependencies)) {
|
|
112
|
+
return Object.keys(dependencies).length;
|
|
113
|
+
}
|
|
114
|
+
} catch {
|
|
115
|
+
}
|
|
116
|
+
return null;
|
|
117
|
+
}
|
|
97
118
|
function isNoOpGitCommitError(error) {
|
|
98
119
|
if (!(error instanceof RepositorySaveError)) return false;
|
|
99
120
|
const command = typeof error.details?.command === "string" ? error.details.command : "";
|
|
@@ -140,6 +161,7 @@ async function runStreamingCommand(node, options, phase, command, args, commandO
|
|
|
140
161
|
let stdoutRemainder = "";
|
|
141
162
|
let stderrRemainder = "";
|
|
142
163
|
let settled = false;
|
|
164
|
+
const warningSummary = commandOptions.buildWarningPolicy === false ? null : createBuildWarningSummary();
|
|
143
165
|
const flush = (chunk, stream) => {
|
|
144
166
|
const combined = stream === "stdout" ? stdoutRemainder + chunk : stderrRemainder + chunk;
|
|
145
167
|
const parts = combined.split(/\r?\n/u);
|
|
@@ -147,6 +169,10 @@ async function runStreamingCommand(node, options, phase, command, args, commandO
|
|
|
147
169
|
if (stream === "stdout") stdoutRemainder = parts.at(-1) ?? "";
|
|
148
170
|
else stderrRemainder = parts.at(-1) ?? "";
|
|
149
171
|
for (const line of complete) {
|
|
172
|
+
const classified = warningSummary?.record(line, commandOptions.buildWarningPolicy || void 0);
|
|
173
|
+
if (classified?.kind === "allowed") {
|
|
174
|
+
continue;
|
|
175
|
+
}
|
|
150
176
|
emitProgress(options, node, phase, line, stream);
|
|
151
177
|
}
|
|
152
178
|
};
|
|
@@ -176,9 +202,20 @@ async function runStreamingCommand(node, options, phase, command, args, commandO
|
|
|
176
202
|
if (settled) return;
|
|
177
203
|
settled = true;
|
|
178
204
|
if (timeout) clearTimeout(timeout);
|
|
179
|
-
if (stdoutRemainder)
|
|
180
|
-
|
|
205
|
+
if (stdoutRemainder) {
|
|
206
|
+
const classified = warningSummary?.record(stdoutRemainder, commandOptions.buildWarningPolicy || void 0);
|
|
207
|
+
if (classified?.kind !== "allowed") emitProgress(options, node, phase, stdoutRemainder);
|
|
208
|
+
}
|
|
209
|
+
if (stderrRemainder) {
|
|
210
|
+
const classified = warningSummary?.record(stderrRemainder, commandOptions.buildWarningPolicy || void 0);
|
|
211
|
+
if (classified?.kind !== "allowed") emitProgress(options, node, phase, stderrRemainder, "stderr");
|
|
212
|
+
}
|
|
181
213
|
if (code === 0) {
|
|
214
|
+
if (warningSummary) {
|
|
215
|
+
for (const line of formatAllowedBuildWarnings(warningSummary.allowedWarnings)) {
|
|
216
|
+
emitProgress(options, node, phase, line);
|
|
217
|
+
}
|
|
218
|
+
}
|
|
182
219
|
resolvePromise({ stdout, stderr });
|
|
183
220
|
return;
|
|
184
221
|
}
|
|
@@ -644,7 +681,10 @@ async function validateRepositoryLockfile(node, options) {
|
|
|
644
681
|
return { status: "skipped", command: commandText, issues: [], error: "stubbed" };
|
|
645
682
|
}
|
|
646
683
|
try {
|
|
647
|
-
runCapturedCommand(node, options, "lockfile", command, args, { timeoutMs: 12e4 });
|
|
684
|
+
runCapturedCommand(node, options, "lockfile", command, args, { timeoutMs: 12e4, emitOutputOnSuccess: false });
|
|
685
|
+
const packageCount = npmLockfilePackageCount(node.path);
|
|
686
|
+
const countText = packageCount === null ? "package-lock entries" : `${packageCount} package${packageCount === 1 ? "" : "s"}`;
|
|
687
|
+
emitProgress(options, node, "lockfile", `Lockfile validation passed: ${countText} checked, 0 issues.`);
|
|
648
688
|
return { status: "passed", command: commandText, issues: [], error: null };
|
|
649
689
|
} catch (error) {
|
|
650
690
|
const message = error instanceof Error ? error.message : String(error);
|
|
@@ -1520,5 +1560,6 @@ export {
|
|
|
1520
1560
|
refreshAndValidateRootWorkspaceLockfileForSave,
|
|
1521
1561
|
repositorySaveErrorDetails,
|
|
1522
1562
|
repositorySaveWaves,
|
|
1523
|
-
runRepositorySaveOrchestrator
|
|
1563
|
+
runRepositorySaveOrchestrator,
|
|
1564
|
+
runStreamingCommand
|
|
1524
1565
|
};
|
|
@@ -2,11 +2,12 @@
|
|
|
2
2
|
|
|
3
3
|
import { readFileSync } from 'node:fs';
|
|
4
4
|
import { resolve } from 'node:path';
|
|
5
|
+
import {
|
|
6
|
+
formatAllowedBuildWarnings,
|
|
7
|
+
scanBuildWarningText,
|
|
8
|
+
} from '../operations/services/build-warning-policy.js';
|
|
5
9
|
|
|
6
10
|
const args = process.argv.slice(2);
|
|
7
|
-
const defaultAllowlisted = [
|
|
8
|
-
/Module "url" has been externalized for browser compatibility, imported by ".*libsodium-sumo.*"/u,
|
|
9
|
-
];
|
|
10
11
|
const allowlisted = [];
|
|
11
12
|
const files = [];
|
|
12
13
|
let useDefaultPolicy = true;
|
|
@@ -22,7 +23,7 @@ for (let index = 0; index < args.length; index += 1) {
|
|
|
22
23
|
if (!pattern) {
|
|
23
24
|
throw new Error('Missing value for --allow.');
|
|
24
25
|
}
|
|
25
|
-
allowlisted.push(
|
|
26
|
+
allowlisted.push(pattern);
|
|
26
27
|
index += 1;
|
|
27
28
|
continue;
|
|
28
29
|
}
|
|
@@ -34,21 +35,17 @@ if (files.length === 0) {
|
|
|
34
35
|
}
|
|
35
36
|
|
|
36
37
|
const warningLines = [];
|
|
37
|
-
const
|
|
38
|
-
...(useDefaultPolicy ? defaultAllowlisted : []),
|
|
39
|
-
...allowlisted,
|
|
40
|
-
];
|
|
38
|
+
const allowedWarnings = new Map();
|
|
41
39
|
for (const file of files) {
|
|
42
40
|
const contents = readFileSync(resolve(process.cwd(), file), 'utf8');
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
}
|
|
50
|
-
warningLines.push(line);
|
|
41
|
+
const scan = scanBuildWarningText(contents, {
|
|
42
|
+
useDefaultPolicy,
|
|
43
|
+
allow: allowlisted,
|
|
44
|
+
});
|
|
45
|
+
for (const [label, count] of scan.allowedWarnings.entries()) {
|
|
46
|
+
allowedWarnings.set(label, (allowedWarnings.get(label) ?? 0) + count);
|
|
51
47
|
}
|
|
48
|
+
warningLines.push(...scan.unexpectedWarnings);
|
|
52
49
|
}
|
|
53
50
|
|
|
54
51
|
if (warningLines.length > 0) {
|
|
@@ -59,4 +56,7 @@ if (warningLines.length > 0) {
|
|
|
59
56
|
process.exit(1);
|
|
60
57
|
}
|
|
61
58
|
|
|
59
|
+
for (const line of formatAllowedBuildWarnings(allowedWarnings)) {
|
|
60
|
+
console.log(line);
|
|
61
|
+
}
|
|
62
62
|
console.log('No unexpected build warnings detected.');
|
|
@@ -415,6 +415,8 @@ export declare function workflowSave(helpers: WorkflowOperationHelpers, input: T
|
|
|
415
415
|
conclusion: null;
|
|
416
416
|
runId: null;
|
|
417
417
|
url: null;
|
|
418
|
+
createdAt: null;
|
|
419
|
+
updatedAt: null;
|
|
418
420
|
}[];
|
|
419
421
|
releaseCandidate: ReleaseCandidateReport | null;
|
|
420
422
|
} & {
|
|
@@ -553,6 +555,8 @@ export declare function workflowClose(helpers: WorkflowOperationHelpers, input:
|
|
|
553
555
|
conclusion: null;
|
|
554
556
|
runId: null;
|
|
555
557
|
url: null;
|
|
558
|
+
createdAt: null;
|
|
559
|
+
updatedAt: null;
|
|
556
560
|
}[];
|
|
557
561
|
releaseCandidate: ReleaseCandidateReport | null;
|
|
558
562
|
} & {
|
|
@@ -732,6 +736,8 @@ export declare function workflowStage(helpers: WorkflowOperationHelpers, input:
|
|
|
732
736
|
conclusion: null;
|
|
733
737
|
runId: null;
|
|
734
738
|
url: null;
|
|
739
|
+
createdAt: null;
|
|
740
|
+
updatedAt: null;
|
|
735
741
|
}[];
|
|
736
742
|
releaseCandidate: ReleaseCandidateReport | null;
|
|
737
743
|
} & {
|
|
@@ -886,6 +892,7 @@ export declare function workflowRelease(helpers: WorkflowOperationHelpers, input
|
|
|
886
892
|
targetBranch: string;
|
|
887
893
|
commitSha: string;
|
|
888
894
|
};
|
|
895
|
+
hostedDeploymentState: Record<string, unknown>[];
|
|
889
896
|
finalBranch: string;
|
|
890
897
|
pushStatus: {
|
|
891
898
|
stagingPushed: boolean;
|
|
@@ -905,6 +912,8 @@ export declare function workflowRelease(helpers: WorkflowOperationHelpers, input
|
|
|
905
912
|
conclusion: null;
|
|
906
913
|
runId: null;
|
|
907
914
|
url: null;
|
|
915
|
+
createdAt: null;
|
|
916
|
+
updatedAt: null;
|
|
908
917
|
}[];
|
|
909
918
|
} & {
|
|
910
919
|
finalState?: WorkflowStatePayload;
|
|
@@ -968,6 +977,7 @@ export declare function workflowRelease(helpers: WorkflowOperationHelpers, input
|
|
|
968
977
|
targetBranch: string;
|
|
969
978
|
commitSha: string;
|
|
970
979
|
};
|
|
980
|
+
hostedDeploymentState: Record<string, unknown>[];
|
|
971
981
|
finalBranch: string;
|
|
972
982
|
pushStatus: {
|
|
973
983
|
stagingPushed: boolean;
|
|
@@ -987,6 +997,8 @@ export declare function workflowRelease(helpers: WorkflowOperationHelpers, input
|
|
|
987
997
|
conclusion: null;
|
|
988
998
|
runId: null;
|
|
989
999
|
url: null;
|
|
1000
|
+
createdAt: null;
|
|
1001
|
+
updatedAt: null;
|
|
990
1002
|
})[];
|
|
991
1003
|
} & {
|
|
992
1004
|
finalState?: WorkflowStatePayload;
|
|
@@ -36,6 +36,7 @@ import {
|
|
|
36
36
|
ensureGeneratedWranglerConfig,
|
|
37
37
|
finalizeDeploymentState,
|
|
38
38
|
loadDeployState,
|
|
39
|
+
recordHostedDeploymentState,
|
|
39
40
|
runRemoteD1Migrations,
|
|
40
41
|
validateDeployPrerequisites,
|
|
41
42
|
validateDestroyPrerequisites
|
|
@@ -418,6 +419,38 @@ async function waitForWorkflowGates(operation, gates, ciMode, options = {}) {
|
|
|
418
419
|
}
|
|
419
420
|
return results;
|
|
420
421
|
}
|
|
422
|
+
/**
 * Record hosted-deployment state for the staging and prod scopes from the
 * root workflow-gate results, and return one report per recorded deployment.
 *
 * For each target (staging branch / prod branch) this looks for a completed,
 * successful `deploy.yml` gate on that branch. Targets without such a gate,
 * or whose gate lacks a non-blank `updatedAt` timestamp, are skipped. For the
 * rest, `recordHostedDeploymentState` persists the deployment and the report
 * echoes the recorded timestamp/url (falling back to the gate timestamp).
 *
 * @param root - Repository root handle passed through to the state recorder.
 * @param rootRelease - Release info; read via stringRecord for stagingCommit /
 *   releasedCommit (non-string commits are recorded as null).
 * @param workflowGates - Gate results; non-array input yields no reports.
 * @returns Array of {scope, branch, commit, timestamp, url, workflow, runId}.
 */
function recordHostedDeploymentStatesFromRootGates(root, rootRelease, workflowGates) {
  // Normalize gate entries up front; anything stringRecord rejects is dropped.
  const normalizedGates = [];
  if (Array.isArray(workflowGates)) {
    for (const raw of workflowGates) {
      const gate = stringRecord(raw);
      if (gate) normalizedGates.push(gate);
    }
  }
  const releaseRecord = stringRecord(rootRelease) ?? {};
  const targets = [
    { scope: "staging", branch: STAGING_BRANCH, commit: releaseRecord.stagingCommit },
    { scope: "prod", branch: PRODUCTION_BRANCH, commit: releaseRecord.releasedCommit }
  ];
  const reports = [];
  for (const target of targets) {
    const gate = normalizedGates.find(
      (candidate) =>
        candidate.workflow === "deploy.yml" &&
        candidate.branch === target.branch &&
        candidate.status === "completed" &&
        candidate.conclusion === "success"
    );
    // Require a non-blank string timestamp; otherwise skip this target.
    const rawTimestamp = gate?.updatedAt;
    const timestamp = typeof rawTimestamp === "string" && rawTimestamp.trim() ? rawTimestamp : null;
    if (!gate || !timestamp) continue;
    const commit = typeof target.commit === "string" ? target.commit : null;
    const runId = gate.runId ?? null;
    const state = recordHostedDeploymentState(root, {
      scope: target.scope,
      commit,
      timestamp,
      workflow: gate.workflow,
      runId
    });
    reports.push({
      scope: target.scope,
      branch: target.branch,
      commit,
      // Prefer what the recorder persisted; fall back to the gate timestamp.
      timestamp: state.lastDeploymentTimestamp ?? timestamp,
      url: state.lastDeployedUrl ?? null,
      workflow: gate.workflow,
      runId
    });
  }
  return reports;
}
|
|
421
454
|
function ensureTreeseedCommandReadiness(root) {
|
|
422
455
|
if (getGitHubAutomationMode() === "stub") {
|
|
423
456
|
return {
|
|
@@ -3625,6 +3658,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3625
3658
|
runId: workflowRun.runId,
|
|
3626
3659
|
onProgress: (line, stream) => helpers.write(line, stream)
|
|
3627
3660
|
}).then((workflowGates) => ({ workflowGates })));
|
|
3661
|
+
const hostedDeploymentState2 = recordHostedDeploymentStatesFromRootGates(root, rootRelease2, rootWorkflowGateResult2?.workflowGates);
|
|
3628
3662
|
const releaseBackMerge2 = await executeJournalStep(root, workflowRun.runId, "release-back-merge", () => backMergeRootProductionIntoStaging(root, false));
|
|
3629
3663
|
const workspaceLinks2 = ensureWorkflowWorkspaceLinks(root, helpers, effectiveInput.workspaceLinks ?? "auto");
|
|
3630
3664
|
const payload2 = {
|
|
@@ -3648,6 +3682,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3648
3682
|
rootRepo,
|
|
3649
3683
|
releaseCandidate,
|
|
3650
3684
|
releaseBackMerge: releaseBackMerge2,
|
|
3685
|
+
hostedDeploymentState: hostedDeploymentState2,
|
|
3651
3686
|
finalBranch: currentBranch(gitRoot) || STAGING_BRANCH,
|
|
3652
3687
|
pushStatus: { stagingPushed: true, productionPushed: true, tagPushed: true },
|
|
3653
3688
|
workspaceLinks: workspaceLinks2,
|
|
@@ -3870,6 +3905,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3870
3905
|
runId: workflowRun.runId,
|
|
3871
3906
|
onProgress: (line, stream) => helpers.write(line, stream)
|
|
3872
3907
|
}).then((workflowGates) => ({ workflowGates })));
|
|
3908
|
+
const hostedDeploymentState = recordHostedDeploymentStatesFromRootGates(root, rootRelease, rootWorkflowGateResult?.workflowGates);
|
|
3873
3909
|
const releaseBackMerge = await executeJournalStep(root, workflowRun.runId, "release-back-merge", () => backMergeRootProductionIntoStaging(root, true));
|
|
3874
3910
|
const devTagCleanupMode = effectiveInput.devTagCleanup ?? "safe-after-release";
|
|
3875
3911
|
const devTagCleanup = devTagCleanupMode === "off" ? (skipJournalStep(root, workflowRun.runId, "cleanup-dev-tags", { status: "skipped", reason: "disabled" }), { status: "skipped", reason: "disabled" }) : await executeJournalStep(root, workflowRun.runId, "cleanup-dev-tags", () => {
|
|
@@ -3921,6 +3957,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3921
3957
|
rootRepo,
|
|
3922
3958
|
releaseCandidate,
|
|
3923
3959
|
releaseBackMerge,
|
|
3960
|
+
hostedDeploymentState,
|
|
3924
3961
|
finalBranch: currentBranch(gitRoot) || STAGING_BRANCH,
|
|
3925
3962
|
pushStatus: {
|
|
3926
3963
|
stagingPushed: true,
|
package/dist/workflow-state.d.ts
CHANGED
|
@@ -35,6 +35,7 @@ export type TreeseedWorkflowProviderStatus = Record<'local' | 'staging' | 'prod'
|
|
|
35
35
|
}>;
|
|
36
36
|
export type TreeseedWorkflowStatusOptions = {
|
|
37
37
|
live?: boolean;
|
|
38
|
+
history?: 'recent' | 'all';
|
|
38
39
|
env?: NodeJS.ProcessEnv;
|
|
39
40
|
};
|
|
40
41
|
export type TreeseedWorkflowState = {
|
|
@@ -75,6 +76,9 @@ export type TreeseedWorkflowState = {
|
|
|
75
76
|
updatedAt: string;
|
|
76
77
|
reasons: string[];
|
|
77
78
|
}>;
|
|
79
|
+
historyMode: 'recent' | 'all';
|
|
80
|
+
obsoleteRunsTotal: number;
|
|
81
|
+
obsoleteRunsOmitted: number;
|
|
78
82
|
blockers: string[];
|
|
79
83
|
};
|
|
80
84
|
packageSync: {
|
|
@@ -189,6 +193,7 @@ export type TreeseedWorkflowState = {
|
|
|
189
193
|
releaseHistory: {
|
|
190
194
|
stagingAheadMain: number | null;
|
|
191
195
|
stagingBehindMain: number | null;
|
|
196
|
+
unreleasedStagingCommits: number | null;
|
|
192
197
|
backMerged: boolean | null;
|
|
193
198
|
detail: string;
|
|
194
199
|
};
|
|
@@ -217,5 +222,14 @@ export type TreeseedWorkflowState = {
|
|
|
217
222
|
}>;
|
|
218
223
|
recommendations: TreeseedWorkflowRecommendation[];
|
|
219
224
|
};
|
|
225
|
+
export declare function capObsoleteWorkflowRuns<T>(obsoleteRuns: T[], options?: {
|
|
226
|
+
history?: 'recent' | 'all';
|
|
227
|
+
limit?: number;
|
|
228
|
+
}): {
|
|
229
|
+
historyMode: string;
|
|
230
|
+
obsoleteRuns: T[];
|
|
231
|
+
obsoleteRunsTotal: number;
|
|
232
|
+
obsoleteRunsOmitted: number;
|
|
233
|
+
};
|
|
220
234
|
export declare function resolveTreeseedWorkflowState(cwd: string, options?: TreeseedWorkflowStatusOptions): TreeseedWorkflowState;
|
|
221
235
|
export declare function recommendTreeseedNextSteps(state: TreeseedWorkflowState): TreeseedWorkflowRecommendation[];
|
package/dist/workflow-state.js
CHANGED
|
@@ -261,6 +261,7 @@ function safeReleaseHistory(repoDir) {
|
|
|
261
261
|
return {
|
|
262
262
|
stagingAheadMain: null,
|
|
263
263
|
stagingBehindMain: null,
|
|
264
|
+
unreleasedStagingCommits: null,
|
|
264
265
|
backMerged: null,
|
|
265
266
|
detail: "Repository root is unavailable."
|
|
266
267
|
};
|
|
@@ -273,21 +274,46 @@ function safeReleaseHistory(repoDir) {
|
|
|
273
274
|
if (!Number.isFinite(stagingAheadMain) || !Number.isFinite(stagingBehindMain)) {
|
|
274
275
|
throw new Error("invalid rev-list output");
|
|
275
276
|
}
|
|
277
|
+
const stagingOnlySubjects = run("git", ["log", "--format=%s", "main..staging"], { cwd: repoDir, capture: true }).split("\n").map((line) => line.trim()).filter(Boolean);
|
|
278
|
+
const unreleasedStagingCommits = stagingOnlySubjects.filter((subject) => subject !== "release: sync package staging heads" && subject !== "release: back-merge main into staging" && !subject.startsWith("release: back-merge main into staging ")).length;
|
|
276
279
|
return {
|
|
277
280
|
stagingAheadMain,
|
|
278
281
|
stagingBehindMain,
|
|
282
|
+
unreleasedStagingCommits,
|
|
279
283
|
backMerged: stagingBehindMain === 0,
|
|
280
|
-
detail: stagingBehindMain === 0 ? "Staging contains current main release history." : `Staging is missing ${stagingBehindMain} main commit${stagingBehindMain === 1 ? "" : "s"}.`
|
|
284
|
+
detail: stagingBehindMain === 0 && unreleasedStagingCommits === 0 ? stagingAheadMain > 0 ? "Staging contains current main release history and is only ahead by release sync commits." : "Staging contains current main release history." : stagingBehindMain === 0 ? `Staging has ${unreleasedStagingCommits} unreleased commit${unreleasedStagingCommits === 1 ? "" : "s"} and contains current main release history.` : `Staging is missing ${stagingBehindMain} main commit${stagingBehindMain === 1 ? "" : "s"}.`
|
|
281
285
|
};
|
|
282
286
|
} catch {
|
|
283
287
|
return {
|
|
284
288
|
stagingAheadMain: null,
|
|
285
289
|
stagingBehindMain: null,
|
|
290
|
+
unreleasedStagingCommits: null,
|
|
286
291
|
backMerged: null,
|
|
287
292
|
detail: "Could not compare staging and main release history."
|
|
288
293
|
};
|
|
289
294
|
}
|
|
290
295
|
}
|
|
296
|
+
// Default number of obsolete runs shown in "recent" history mode.
const DEFAULT_OBSOLETE_RUN_HISTORY_LIMIT = 20;
/**
 * Cap the obsolete-workflow-run list according to the requested history mode.
 *
 * In "all" mode the full list is returned untouched. In "recent" mode (the
 * default, used for any value other than "all") the list is truncated to
 * `options.limit` entries (default 20) and the omitted count is reported.
 *
 * @param obsoleteRuns - Runs to cap; never mutated.
 * @param options - Optional { history: 'recent' | 'all', limit: number }.
 * @returns { historyMode, obsoleteRuns, obsoleteRunsTotal, obsoleteRunsOmitted }
 */
function capObsoleteWorkflowRuns(obsoleteRuns, options = {}) {
  const historyMode = options.history === "all" ? "all" : "recent";
  const obsoleteRunsTotal = obsoleteRuns.length;
  if (historyMode === "all") {
    return { historyMode, obsoleteRuns, obsoleteRunsTotal, obsoleteRunsOmitted: 0 };
  }
  const limit = options.limit ?? DEFAULT_OBSOLETE_RUN_HISTORY_LIMIT;
  const visible = obsoleteRuns.slice(0, limit);
  return {
    historyMode,
    obsoleteRuns: visible,
    obsoleteRunsTotal,
    obsoleteRunsOmitted: Math.max(0, obsoleteRunsTotal - visible.length)
  };
}
|
|
291
317
|
function resolveLocalStatusUrl(deployConfig) {
|
|
292
318
|
return deployConfig.surfaces?.web?.localBaseUrl ?? deployConfig.surfaces?.api?.localBaseUrl ?? Object.values(deployConfig.services ?? {}).find((service) => service?.enabled !== false && service.environments?.local?.baseUrl)?.environments?.local?.baseUrl ?? null;
|
|
293
319
|
}
|
|
@@ -404,6 +430,7 @@ function resolveTreeseedWorkflowState(cwd, options = {}) {
|
|
|
404
430
|
updatedAt: journal.updatedAt,
|
|
405
431
|
reasons: classification.reasons
|
|
406
432
|
}));
|
|
433
|
+
const obsoleteHistory = capObsoleteWorkflowRuns(obsoleteRuns, { history: options.history });
|
|
407
434
|
const workflowBlockers = [];
|
|
408
435
|
if (workflowLock.active && workflowLock.lock) {
|
|
409
436
|
workflowBlockers.push(`Workflow lock active for ${workflowLock.lock.command} (${workflowLock.lock.runId}).`);
|
|
@@ -414,6 +441,8 @@ function resolveTreeseedWorkflowState(cwd, options = {}) {
|
|
|
414
441
|
if (interruptedRuns.length > 0) {
|
|
415
442
|
workflowBlockers.push(`Interrupted workflow runs detected: ${interruptedRuns.map((run2) => run2.runId).join(", ")}.`);
|
|
416
443
|
}
|
|
444
|
+
const releaseHistory = safeReleaseHistory(root);
|
|
445
|
+
const releaseReady = branchRole === "staging" && !dirtyWorktree && (releaseHistory.unreleasedStagingCommits ?? 0) > 0;
|
|
417
446
|
const state = {
|
|
418
447
|
cwd: effectiveCwd,
|
|
419
448
|
workspaceRoot,
|
|
@@ -435,7 +464,10 @@ function resolveTreeseedWorkflowState(cwd, options = {}) {
|
|
|
435
464
|
},
|
|
436
465
|
interruptedRuns,
|
|
437
466
|
staleRuns,
|
|
438
|
-
obsoleteRuns,
|
|
467
|
+
obsoleteRuns: obsoleteHistory.obsoleteRuns,
|
|
468
|
+
historyMode: obsoleteHistory.historyMode,
|
|
469
|
+
obsoleteRunsTotal: obsoleteHistory.obsoleteRunsTotal,
|
|
470
|
+
obsoleteRunsOmitted: obsoleteHistory.obsoleteRunsOmitted,
|
|
439
471
|
blockers: workflowBlockers
|
|
440
472
|
},
|
|
441
473
|
packageSync: {
|
|
@@ -527,8 +559,8 @@ function resolveTreeseedWorkflowState(cwd, options = {}) {
|
|
|
527
559
|
idleRemainingMs: keyStatus.idleRemainingMs,
|
|
528
560
|
startupPassphraseConfigured: Boolean(process.env.TREESEED_KEY_PASSPHRASE?.trim())
|
|
529
561
|
},
|
|
530
|
-
releaseReady
|
|
531
|
-
releaseHistory
|
|
562
|
+
releaseReady,
|
|
563
|
+
releaseHistory,
|
|
532
564
|
readiness: {
|
|
533
565
|
local: { ready: false, blockers: [], warnings: [] },
|
|
534
566
|
staging: { ready: false, blockers: [], warnings: [] },
|
|
@@ -747,8 +779,13 @@ function recommendTreeseedNextSteps(state) {
|
|
|
747
779
|
}
|
|
748
780
|
if (!state.persistentEnvironments.staging.initialized) {
|
|
749
781
|
recommendations.push({ operation: "config", reason: "Initialize the staging environment before releasing.", input: { environment: ["staging"] } });
|
|
782
|
+
} else if ((state.releaseHistory.unreleasedStagingCommits ?? 0) > 0) {
|
|
783
|
+
recommendations.push({ operation: "release", reason: "Promote unreleased staging commits into production.", input: { bump: "patch" } });
|
|
784
|
+
if (state.managedServices.api.enabled) {
|
|
785
|
+
recommendations.push({ operation: "auth:login", reason: "Keep the local runtime authenticated to the remote API used by managed services." });
|
|
786
|
+
}
|
|
750
787
|
} else {
|
|
751
|
-
recommendations.push({ operation: "
|
|
788
|
+
recommendations.push({ operation: "status", reason: "Inspect staging and production state; no unreleased staging commits are pending." });
|
|
752
789
|
if (state.managedServices.api.enabled) {
|
|
753
790
|
recommendations.push({ operation: "auth:login", reason: "Keep the local runtime authenticated to the remote API used by managed services." });
|
|
754
791
|
}
|
|
@@ -771,6 +808,7 @@ function recommendTreeseedNextSteps(state) {
|
|
|
771
808
|
return recommendations.slice(0, 3);
|
|
772
809
|
}
|
|
773
810
|
export {
|
|
811
|
+
capObsoleteWorkflowRuns,
|
|
774
812
|
recommendTreeseedNextSteps,
|
|
775
813
|
resolveTreeseedWorkflowState
|
|
776
814
|
};
|