@treeseed/sdk 0.6.19 → 0.6.21
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/operations/services/release-candidate.d.ts +39 -0
- package/dist/operations/services/release-candidate.js +531 -0
- package/dist/workflow/operations.d.ts +42 -0
- package/dist/workflow/operations.js +134 -2
- package/dist/workflow/runs.js +12 -2
- package/dist/workflow-state.d.ts +6 -0
- package/dist/workflow-state.js +33 -0
- package/package.json +1 -1
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
/** Overall outcome of a release-candidate gate run. */
export type ReleaseCandidateStatus = 'passed' | 'failed';
/** One problem discovered by a gate check. */
export type ReleaseCandidateFailure = {
    /** Machine-readable failure code (e.g. "missing_publish_script"). */
    code: string;
    /** Package name or release scope the failure applies to. */
    scope: string;
    /** Provider the failure relates to (e.g. "github"), when applicable. */
    provider?: string | null;
    /** Human-readable description of the failure. */
    message: string;
    /** Optional structured context for the failure. */
    details?: Record<string, unknown> | null;
};
/** Deterministic identity of a candidate; `key` doubles as the cache-file name. */
export type ReleaseCandidateFingerprint = {
    /** Hash derived from the other fields. */
    key: string;
    /** Workspace-root HEAD commit, or null when unavailable. */
    rootSha: string | null;
    /** HEAD commit per workspace package, or null when unavailable. */
    packageShas: Record<string, string | null>;
    /** Planned release version per package. */
    plannedVersions: Record<string, string>;
    /** package-lock.json content hash per package, or null when absent. */
    lockfiles: Record<string, string | null>;
    /** Sorted, de-duplicated names of packages selected for release. */
    selectedPackages: string[];
};
/** Summary entry for a single gate check. */
export type ReleaseCandidateCheck = {
    name: string;
    status: 'passed' | 'skipped' | 'failed';
    detail: string;
};
/** Full result of a gate run, cached per fingerprint key. */
export type ReleaseCandidateReport = {
    status: ReleaseCandidateStatus;
    fingerprint: ReleaseCandidateFingerprint;
    /** True when this report was served from cache rather than re-checked. */
    reused: boolean;
    /** ISO-8601 timestamp of when the checks ran. */
    checkedAt: string;
    failures: ReleaseCandidateFailure[];
    checks: ReleaseCandidateCheck[];
};
/** Arguments to the gate: workspace root, planned versions, and package selection. */
export type ReleaseCandidateInput = {
    root: string;
    plannedVersions: Record<string, unknown>;
    selectedPackageNames?: string[];
    /** Set to false to force a fresh run even when a passing report is cached. */
    allowReuse?: boolean;
};
/** Computes the deterministic fingerprint for `input`. */
export declare function buildReleaseCandidateFingerprint(input: ReleaseCandidateInput): ReleaseCandidateFingerprint;
/** Loads the cached report for `key`, or null when absent or unreadable. */
export declare function readCachedReleaseCandidateReport(root: string, key: string): ReleaseCandidateReport | null;
/** Persists `report` in the cache, keyed by its fingerprint, and returns it. */
export declare function writeReleaseCandidateReport(root: string, report: ReleaseCandidateReport): ReleaseCandidateReport;
/** Runs (or reuses) the full release-candidate gate and returns its report. */
export declare function runReleaseCandidateGate(input: ReleaseCandidateInput): Promise<ReleaseCandidateReport>;
|
|
@@ -0,0 +1,531 @@
|
|
|
1
|
+
import { createHash } from "node:crypto";
|
|
2
|
+
import { cpSync, existsSync, mkdirSync, mkdtempSync, readFileSync, rmSync, writeFileSync } from "node:fs";
|
|
3
|
+
import { tmpdir } from "node:os";
|
|
4
|
+
import { dirname, join, relative, resolve } from "node:path";
|
|
5
|
+
import { isTreeseedEnvironmentEntryRelevant, isTreeseedEnvironmentEntryRequired } from "../../platform/environment.js";
|
|
6
|
+
import { getGitHubAutomationMode, maybeResolveGitHubRepositorySlug } from "./github-automation.js";
|
|
7
|
+
import { createGitHubApiClient, listGitHubEnvironmentSecretNames, listGitHubEnvironmentVariableNames } from "./github-api.js";
|
|
8
|
+
import { collectInternalDevReferenceIssues } from "./package-reference-policy.js";
|
|
9
|
+
import { collectTreeseedEnvironmentContext, resolveTreeseedMachineEnvironmentValues, validateTreeseedCommandEnvironment } from "./config-runtime.js";
|
|
10
|
+
import { loadDeployState } from "./deploy.js";
|
|
11
|
+
import { loadCliDeployConfig } from "./runtime-tools.js";
|
|
12
|
+
import { run, workspacePackages } from "./workspace-tools.js";
|
|
13
|
+
// Workspace-relative directory where release-candidate gate reports are cached.
const RELEASE_CANDIDATE_CACHE_DIR = ".treeseed/workflow/release-candidates";
// Matches stable semver only (e.g. "1.2.3"); pre-release/build suffixes do not match.
const STABLE_SEMVER = /^\d+\.\d+\.\d+$/u;
// Path segments excluded when copying the workspace for the production rehearsal.
const REHEARSAL_IGNORED_SEGMENTS = /* @__PURE__ */ new Set([
  ".git",
  ".treeseed",
  ".wrangler",
  ".astro",
  "coverage",
  "dist",
  "node_modules"
]);
|
|
24
|
+
// Current time as an ISO-8601 UTC timestamp string.
function nowIso() {
  const timestamp = new Date();
  return timestamp.toISOString();
}
|
|
27
|
+
// Shallow copy of `record` with keys in locale-compare order, so
// serialized output (and anything hashed from it) is deterministic.
function sortedRecord(record) {
  const entries = Object.entries(record);
  entries.sort((a, b) => a[0].localeCompare(b[0]));
  return Object.fromEntries(entries);
}
|
|
30
|
+
// Hex-encoded SHA-256 digest of a string or buffer.
function sha256(value) {
  const hasher = createHash("sha256");
  hasher.update(value);
  return hasher.digest("hex");
}
|
|
33
|
+
// Hex SHA-256 of a file's contents, or null when the file does not exist.
function fileSha256(filePath) {
  if (!existsSync(filePath)) {
    return null;
  }
  const contents = readFileSync(filePath);
  return createHash("sha256").update(contents).digest("hex");
}
|
|
37
|
+
// HEAD commit sha of the git checkout at `repoDir`, or null when the
// lookup fails (not a repository, git unavailable, etc.).
function safeGitHead(repoDir) {
  try {
    const output = run("git", ["rev-parse", "HEAD"], { cwd: repoDir, capture: true });
    return output.trim();
  } catch {
    return null;
  }
}
|
|
44
|
+
// Parses the JSON file at `filePath`; returns null when the file is
// missing or its contents are not valid JSON.
function safePackageJson(filePath) {
  try {
    const raw = readFileSync(filePath, "utf8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
51
|
+
// Serializes `value` as pretty-printed (2-space) JSON with a trailing
// newline and writes it to `filePath`.
function writeJsonFile(filePath, value) {
  const serialized = JSON.stringify(value, null, 2);
  writeFileSync(filePath, `${serialized}\n`, "utf8");
}
|
|
55
|
+
// The `scripts` map from the package.json at `filePath`, or {} when the
// file is unreadable or `scripts` is not a plain object.
function packageScripts(filePath) {
  const packageJson = safePackageJson(filePath);
  const scripts = packageJson?.scripts;
  if (scripts && typeof scripts === "object" && !Array.isArray(scripts)) {
    return scripts;
  }
  return {};
}
|
|
59
|
+
// Absolute path of the cached report for fingerprint `key` under `root`.
function releaseCandidateCachePath(root, key) {
  const fileName = `${key}.json`;
  return resolve(root, RELEASE_CANDIDATE_CACHE_DIR, fileName);
}
|
|
62
|
+
// Creates the release-candidate cache directory, plus a .gitignore under
// .treeseed/workflow whose rules ignore everything except itself and the
// runs/ structure. Returns the cache directory path.
function ensureReleaseCandidateCacheDir(root) {
  const cacheDir = resolve(root, RELEASE_CANDIDATE_CACHE_DIR);
  const gitignorePath = resolve(root, ".treeseed", "workflow", ".gitignore");
  mkdirSync(cacheDir, { recursive: true });
  mkdirSync(dirname(gitignorePath), { recursive: true });
  // Never clobber an existing .gitignore the user may have edited.
  if (!existsSync(gitignorePath)) {
    writeFileSync(gitignorePath, "*\n!.gitignore\n!runs/\nruns/*\n!runs/.gitignore\n", "utf8");
  }
  return cacheDir;
}
|
|
72
|
+
// Builds a deterministic fingerprint of everything that influences a
// release: git heads, lockfile hashes, planned versions, and the package
// selection. `key` is a sha256 of the sorted payload and indexes the
// report cache.
function buildReleaseCandidateFingerprint(input) {
  // De-duplicate and sort the selection so equal inputs hash equally.
  const selectedPackages = [...new Set((input.selectedPackageNames ?? []).map(String))].sort();
  const selectedPackageSet = new Set(selectedPackages);
  // Only internal @treeseed/* workspace packages participate.
  const packages = workspacePackages(input.root).filter((pkg) => typeof pkg.name === "string" && pkg.name.startsWith("@treeseed/"));
  const packageShas = sortedRecord(Object.fromEntries(
    packages.map((pkg) => [pkg.name, safeGitHead(pkg.dir)])
  ));
  // Planned versions are restricted to the root package and the selection.
  const plannedVersions = sortedRecord(Object.fromEntries(
    Object.entries(input.plannedVersions).filter(([name]) => name === "@treeseed/market" || selectedPackageSet.has(name)).map(([name, version]) => [name, String(version)])
  ));
  // Content hashes of the root and per-package lockfiles (null when absent).
  const lockfiles = sortedRecord({
    "@treeseed/market": fileSha256(resolve(input.root, "package-lock.json")),
    ...Object.fromEntries(packages.map((pkg) => [pkg.name, fileSha256(resolve(pkg.dir, "package-lock.json"))]))
  });
  const base = {
    rootSha: safeGitHead(input.root),
    packageShas,
    plannedVersions,
    lockfiles,
    selectedPackages
  };
  // Records above are pre-sorted, so JSON.stringify(base) is stable.
  return {
    ...base,
    key: sha256(JSON.stringify(base))
  };
}
|
|
98
|
+
// Loads a previously cached report for fingerprint `key`, or null when
// the cache entry is absent or cannot be parsed.
function readCachedReleaseCandidateReport(root, key) {
  const cachePath = releaseCandidateCachePath(root, key);
  if (!existsSync(cachePath)) {
    return null;
  }
  try {
    const raw = readFileSync(cachePath, "utf8");
    return JSON.parse(raw);
  } catch {
    return null;
  }
}
|
|
107
|
+
// Persists `report` in the cache, keyed by its fingerprint, and returns it.
// Delegates serialization to writeJsonFile so the on-disk format (2-space
// indent plus trailing newline) stays consistent with every other JSON
// file this module writes, instead of duplicating that template inline.
function writeReleaseCandidateReport(root, report) {
  ensureReleaseCandidateCacheDir(root);
  writeJsonFile(releaseCandidateCachePath(root, report.fingerprint.key), report);
  return report;
}
|
|
113
|
+
// Appends `failure` to `failures`, normalizing optional fields so every
// stored failure carries explicit (possibly null) `details` and `provider`.
function addFailure(failures, failure) {
  const normalized = {
    ...failure,
    details: failure.details ?? null,
    provider: failure.provider ?? null
  };
  failures.push(normalized);
}
|
|
120
|
+
// Verifies each selected package is ready to publish: has a publish
// workflow, a release:publish script, some verify script, and survives
// `npm pack --dry-run`. Appends problems to `failures` and returns a
// summary check entry.
function packageReadinessChecks(root, selectedPackageNames, failures) {
  if (selectedPackageNames.length === 0) {
    return { name: "package-release-readiness", status: "skipped", detail: "No packages are selected for this release." };
  }
  if (getGitHubAutomationMode() === "stub") {
    return { name: "package-release-readiness", status: "skipped", detail: "GitHub automation is stubbed." };
  }
  // Count only failures this check adds when computing its status: the
  // shared `failures` array may already hold entries from earlier checks,
  // and matching on code prefixes is fragile across checks. This mirrors
  // the before/after pattern used by the sibling checks.
  const before = failures.length;
  const selected = new Set(selectedPackageNames);
  const packages = workspacePackages(root).filter((pkg) => selected.has(pkg.name));
  for (const pkg of packages) {
    // Reuse packageScripts so script extraction stays consistent module-wide.
    const scripts = packageScripts(resolve(pkg.dir, "package.json"));
    if (!existsSync(resolve(pkg.dir, ".github", "workflows", "publish.yml"))) {
      addFailure(failures, {
        code: "missing_publish_workflow",
        scope: pkg.name,
        provider: "github",
        message: `${pkg.name} is missing .github/workflows/publish.yml.`
      });
    }
    if (typeof scripts["release:publish"] !== "string") {
      addFailure(failures, {
        code: "missing_publish_script",
        scope: pkg.name,
        message: `${pkg.name} is missing a release:publish script.`
      });
    }
    if (typeof scripts["verify:local"] !== "string" && typeof scripts["verify"] !== "string" && typeof scripts["verify:action"] !== "string") {
      addFailure(failures, {
        code: "missing_verify_script",
        scope: pkg.name,
        message: `${pkg.name} is missing a release-ready verify script.`
      });
    }
    try {
      // Dry-run pack surfaces broken files/exports without publishing anything.
      run("npm", ["pack", "--dry-run"], { cwd: pkg.dir, capture: true, timeoutMs: 12e4 });
    } catch (error) {
      addFailure(failures, {
        code: "npm_pack_dry_run_failed",
        scope: pkg.name,
        message: `${pkg.name} failed npm pack --dry-run.`,
        details: { error: error instanceof Error ? error.message : String(error) }
      });
    }
  }
  return {
    name: "package-release-readiness",
    status: failures.length > before ? "failed" : "passed",
    detail: `Checked ${packages.length} selected package${packages.length === 1 ? "" : "s"}.`
  };
}
|
|
171
|
+
// Copies the workspace into a fresh temp directory, skipping VCS and
// build artifacts, so the rehearsal can mutate files without touching
// `root`. Returns both the temp parent (for cleanup) and the copy root.
function copyWorkspaceForProductionRehearsal(root) {
  const tempParent = mkdtempSync(join(tmpdir(), "treeseed-release-candidate-"));
  const tempRoot = join(tempParent, "workspace");
  const shouldCopy = (source) => {
    const rel = relative(root, source);
    // The root itself has an empty relative path; always copy it.
    if (!rel) return true;
    for (const segment of rel.split(/[\\/]+/u)) {
      if (REHEARSAL_IGNORED_SEGMENTS.has(segment)) return false;
    }
    return true;
  };
  cpSync(root, tempRoot, { recursive: true, filter: shouldCopy });
  return { tempParent, tempRoot };
}
|
|
185
|
+
// Rewrites package.json files under `root` so that every package planned
// at a stable semver carries that version, and every internal dependency
// on such a package points at the stable version. Only files that
// actually change are rewritten.
function applyPlannedStableMetadata(root, plannedVersions) {
  // Only stable x.y.z versions participate; other plans are ignored here.
  const stableVersions = new Map(
    Object.entries(plannedVersions).filter(([, version]) => STABLE_SEMVER.test(version))
  );
  // The workspace root plus every nested workspace package.
  const targets = [
    { name: "@treeseed/market", dir: root },
    ...workspacePackages(root).map((pkg) => ({ name: pkg.name, dir: pkg.dir }))
  ];
  for (const target of targets) {
    const packageJsonPath = resolve(target.dir, "package.json");
    const packageJson = safePackageJson(packageJsonPath);
    if (!packageJson) continue;
    let changed = false;
    const plannedVersion = stableVersions.get(target.name);
    if (plannedVersion && packageJson.version !== plannedVersion) {
      packageJson.version = plannedVersion;
      changed = true;
    }
    // Point internal dependencies, wherever they appear, at the planned
    // stable versions.
    for (const field of ["dependencies", "optionalDependencies", "peerDependencies", "devDependencies"]) {
      const values = packageJson[field];
      if (!values || typeof values !== "object" || Array.isArray(values)) continue;
      for (const [dependencyName, version] of stableVersions.entries()) {
        if (!(dependencyName in values)) continue;
        if (String(values[dependencyName]) === version) continue;
        values[dependencyName] = version;
        changed = true;
      }
    }
    // Avoid churn: write only when something actually changed.
    if (changed) {
      writeJsonFile(packageJsonPath, packageJson);
    }
  }
}
|
|
218
|
+
// Picks the first verification script present in the root package.json,
// in order of preference; null when none exists.
function rehearsalVerifyScript(root) {
  const scripts = packageScripts(resolve(root, "package.json"));
  const candidates = ["verify:direct", "verify:local", "verify", "build"];
  const found = candidates.find((name) => typeof scripts[name] === "string");
  return found ?? null;
}
|
|
227
|
+
// Performs the clean-install rehearsal: copies the workspace to a temp
// directory, rewrites metadata to the planned stable versions,
// regenerates the lockfile, runs `npm ci` plus an available verify
// script, then re-checks for lingering internal dev references. Problems
// are appended to `failures`; the return value is a human-readable detail
// string. The temp copy is always removed.
function runProductionDependencyRehearsal(root, plannedVersions, selectedPackageNames, failures) {
  // Expensive; allow opting out via stub mode or an explicit env flag.
  if (getGitHubAutomationMode() === "stub" || process.env.TREESEED_RELEASE_CANDIDATE_REHEARSAL_MODE === "skip") {
    return "Skipped clean install rehearsal in stub/skip mode.";
  }
  const selectedPackageSet = new Set(selectedPackageNames);
  let tempParent = null;
  try {
    const copied = copyWorkspaceForProductionRehearsal(root);
    tempParent = copied.tempParent;
    applyPlannedStableMetadata(copied.tempRoot, plannedVersions);
    // Regenerate the lockfile against the rewritten metadata, then do a
    // clean install from it.
    run("npm", ["install", "--package-lock-only", "--ignore-scripts"], { cwd: copied.tempRoot, timeoutMs: 3e5 });
    run("npm", ["ci"], { cwd: copied.tempRoot, timeoutMs: 6e5 });
    const scriptName = rehearsalVerifyScript(copied.tempRoot);
    if (scriptName) {
      run("npm", ["run", scriptName], { cwd: copied.tempRoot, timeoutMs: 9e5 });
    }
    // After a clean install no internal dev references may remain.
    const postInstallIssues = collectInternalDevReferenceIssues(copied.tempRoot, selectedPackageSet);
    if (postInstallIssues.length > 0) {
      addFailure(failures, {
        code: "internal_dev_references_after_rehearsal",
        scope: "@treeseed/market",
        message: "Production dependency rehearsal still found internal dev references after clean install.",
        details: {
          references: postInstallIssues.map((issue) => ({
            filePath: issue.filePath,
            field: issue.field,
            dependencyName: issue.dependencyName,
            spec: issue.spec,
            reason: issue.reason
          }))
        }
      });
    }
    return scriptName ? `Ran clean install and npm run ${scriptName} in a temporary production rehearsal workspace.` : "Ran clean install in a temporary production rehearsal workspace.";
  } catch (error) {
    // Any command failure becomes a gate failure rather than an exception.
    addFailure(failures, {
      code: "production_dependency_rehearsal_failed",
      scope: "@treeseed/market",
      message: "Production dependency rehearsal failed in the temporary workspace.",
      details: { error: error instanceof Error ? error.message : String(error) }
    });
    return "Production dependency rehearsal failed.";
  } finally {
    // Always remove the temp copy, on success or failure.
    if (tempParent) {
      rmSync(tempParent, { recursive: true, force: true });
    }
  }
}
|
|
275
|
+
// Gate check: ensures planned versions are stable, internal dev
// references have stable planned replacements, and — when that metadata
// is complete — runs the clean-install production rehearsal. Returns a
// summary check entry; problems are appended to `failures`.
function dependencyRehearsalChecks(root, plannedVersions, selectedPackageNames, failures) {
  // Snapshot so status reflects only failures added by this check.
  const before = failures.length;
  // The root package and every selected package must be planned at x.y.z.
  for (const [name, version] of Object.entries(plannedVersions)) {
    if ((name === "@treeseed/market" || selectedPackageNames.includes(name)) && !STABLE_SEMVER.test(version)) {
      addFailure(failures, {
        code: "unstable_planned_version",
        scope: name,
        message: `${name} planned release version is not stable semver: ${version}.`
      });
    }
  }
  const selectedPackageSet = new Set(selectedPackageNames);
  const devReferenceIssues = collectInternalDevReferenceIssues(root);
  // A dev reference is rehearsable only when its package is in this
  // release AND planned at a stable version.
  const unrehearsableDevReferences = devReferenceIssues.filter((issue) => {
    const dependencyName = issue.dependencyName ?? "";
    const planned = plannedVersions[dependencyName];
    return !selectedPackageSet.has(dependencyName) || !planned || !STABLE_SEMVER.test(planned);
  });
  if (unrehearsableDevReferences.length > 0) {
    addFailure(failures, {
      code: "internal_dev_references",
      scope: "@treeseed/market",
      message: "Production dependency rehearsal found internal dev references without a stable planned replacement.",
      details: {
        references: unrehearsableDevReferences.map((issue) => ({
          filePath: issue.filePath,
          field: issue.field,
          dependencyName: issue.dependencyName,
          spec: issue.spec,
          reason: issue.reason
        }))
      }
    });
  }
  // Only run the expensive clean-install rehearsal when no failure has
  // been recorded so far; otherwise record why it was skipped.
  const rehearsalDetail = unrehearsableDevReferences.length === 0 && failures.length === before ? runProductionDependencyRehearsal(root, plannedVersions, selectedPackageNames, failures) : "Skipped clean install rehearsal because stable dependency metadata is incomplete.";
  return {
    name: "production-dependency-rehearsal",
    status: failures.length > before ? "failed" : "passed",
    detail: `${devReferenceIssues.length > 0 ? `Rehearsed stable replacements for ${devReferenceIssues.length} internal dev reference${devReferenceIssues.length === 1 ? "" : "s"}.` : "Checked planned stable versions and internal dependency references."} ${rehearsalDetail}`
  };
}
|
|
316
|
+
// Validates scope-local deploy configuration, recording each missing or
// invalid entry as a failure. A thrown validation error becomes a single
// config_validation_failed entry instead of aborting the gate.
function localConfigCheck(root, scope, failures) {
  try {
    const report = validateTreeseedCommandEnvironment({ tenantRoot: root, scope, purpose: "deploy" });
    const { missing, invalid } = report.validation;
    for (const problem of [...missing, ...invalid]) {
      const provider = problem.entry.group ?? "config";
      addFailure(failures, {
        code: "missing_local_config",
        scope,
        provider,
        message: `${scope} deploy config is missing or invalid: ${problem.id}.`,
        details: { key: problem.id, provider: problem.entry.group ?? null }
      });
    }
  } catch (error) {
    addFailure(failures, {
      code: "config_validation_failed",
      scope,
      provider: "config",
      message: `${scope} deploy config could not be validated.`,
      details: { error: error instanceof Error ? error.message : String(error) }
    });
  }
}
|
|
339
|
+
// Compares the GitHub environment's secret/variable *names* (values are
// never read) for `scope` against what the local registry expects;
// missing entries become failures. Repository-resolution and API errors
// are recorded as failures rather than thrown.
async function githubRemoteConfigCheck(root, scope, failures) {
  if (getGitHubAutomationMode() === "stub") {
    return;
  }
  const repository = maybeResolveGitHubRepositorySlug(root);
  if (!repository) {
    addFailure(failures, {
      code: "missing_github_repository",
      scope,
      provider: "github",
      message: `${scope} GitHub config parity could not determine the repository from origin.`
    });
    return;
  }
  try {
    // GitHub environments use "production" rather than the internal "prod" scope name.
    const environment = scope === "prod" ? "production" : scope;
    const expected = expectedGitHubDeployEnvironment(root, scope);
    const client = createGitHubApiClient();
    // Secret and variable listings are independent; fetch them in parallel.
    const [secretNames, variableNames] = await Promise.all([
      listGitHubEnvironmentSecretNames(repository, environment, { client }),
      listGitHubEnvironmentVariableNames(repository, environment, { client })
    ]);
    const missingSecrets = expected.secrets.filter((key) => !secretNames.has(key));
    const missingVariables = expected.variables.filter((key) => !variableNames.has(key));
    for (const key of missingSecrets) {
      addFailure(failures, {
        code: "missing_remote_config",
        scope,
        provider: "github-secret",
        message: `${scope} GitHub secret is missing: ${key}.`,
        details: { key, provider: "github-secret", repository, environment }
      });
    }
    for (const key of missingVariables) {
      addFailure(failures, {
        code: "missing_remote_config",
        scope,
        provider: "github-variable",
        message: `${scope} GitHub variable is missing: ${key}.`,
        details: { key, provider: "github-variable", repository, environment }
      });
    }
  } catch (error) {
    addFailure(failures, {
      code: "remote_config_check_failed",
      scope,
      provider: "github",
      message: `${scope} GitHub config parity check failed.`,
      details: { error: error instanceof Error ? error.message : String(error) }
    });
  }
}
|
|
391
|
+
// Computes which config entry ids should exist as GitHub environment
// secrets/variables for `scope`: every required deploy entry, plus
// optional relevant entries that have a non-empty local value.
function expectedGitHubDeployEnvironment(root, scope) {
  const registry = collectTreeseedEnvironmentContext(root);
  const values = resolveTreeseedMachineEnvironmentValues(root, scope);
  const isExpected = (entry) => {
    if (!isTreeseedEnvironmentEntryRelevant(entry, registry.context, scope, "deploy")) return false;
    if (isTreeseedEnvironmentEntryRequired(entry, registry.context, scope, "deploy")) return true;
    const value = values[entry.id];
    return typeof value === "string" && value.trim().length > 0;
  };
  const expectedEntries = registry.entries.filter(isExpected);
  const idsFor = (target) => [...new Set(expectedEntries.filter((entry) => entry.targets.includes(target)).map((entry) => entry.id))];
  return {
    secrets: idsFor("github-secret"),
    variables: idsFor("github-variable")
  };
}
|
|
404
|
+
// Verifies deploy state records the concrete provider resource
// identifiers (Cloudflare D1 database, Railway project/service) needed to
// deploy `scope`; gaps and load errors are appended to `failures`.
function providerResourceIdentifierCheck(root, scope, failures) {
  try {
    const deployConfig = loadCliDeployConfig(root);
    const state = loadDeployState(root, deployConfig, { scope });
    const siteDataDb = state.d1Databases?.SITE_DATA_DB;
    // D1 needs both the human name and the database id.
    if (!siteDataDb?.databaseName || !siteDataDb?.databaseId) {
      addFailure(failures, {
        code: "missing_remote_resource_identifier",
        scope,
        provider: "cloudflare-d1",
        message: `${scope} Cloudflare D1 SITE_DATA_DB is missing a database name or id.`,
        details: { resource: "SITE_DATA_DB", required: ["databaseName", "databaseId"] }
      });
    }
    const services = state.services && typeof state.services === "object" && !Array.isArray(state.services) ? state.services : {};
    for (const [serviceKey, service] of Object.entries(services)) {
      // Only Railway-backed services are checked; "railway" is the default provider.
      if ((service.provider ?? "railway") !== "railway") continue;
      // Either a name or an id suffices for project and for service.
      if (!service.projectName && !service.projectId) {
        addFailure(failures, {
          code: "missing_remote_resource_identifier",
          scope,
          provider: "railway",
          message: `${scope} Railway service ${serviceKey} is missing a project name or id.`,
          details: { service: serviceKey, required: ["projectName", "projectId"] }
        });
      }
      if (!service.serviceName && !service.serviceId) {
        addFailure(failures, {
          code: "missing_remote_resource_identifier",
          scope,
          provider: "railway",
          message: `${scope} Railway service ${serviceKey} is missing a service name or id.`,
          details: { service: serviceKey, required: ["serviceName", "serviceId"] }
        });
      }
    }
  } catch (error) {
    addFailure(failures, {
      code: "provider_resource_check_failed",
      scope,
      provider: "deployment-state",
      message: `${scope} provider resource identifiers could not be validated.`,
      details: { error: error instanceof Error ? error.message : String(error) }
    });
  }
}
|
|
450
|
+
// Runs every config-parity probe for both deploy scopes and summarizes
// them as a single check entry. Skipped entirely in stubbed mode.
async function configParityChecks(root, failures) {
  if (getGitHubAutomationMode() === "stub") {
    return { name: "config-parity", status: "skipped", detail: "GitHub automation is stubbed." };
  }
  const before = failures.length;
  const scopes = ["staging", "prod"];
  for (const scope of scopes) {
    localConfigCheck(root, scope, failures);
  }
  for (const scope of scopes) {
    await githubRemoteConfigCheck(root, scope, failures);
  }
  for (const scope of scopes) {
    providerResourceIdentifierCheck(root, scope, failures);
  }
  return {
    name: "config-parity",
    status: failures.length > before ? "failed" : "passed",
    detail: "Checked staging and production config, GitHub names, Railway service identifiers, and D1 identifiers without reading secret values."
  };
}
|
|
467
|
+
// Confirms the workspace still carries the required legacy migration
// files; a workspace with no migrations directory skips the check.
function migrationCompatibilityChecks(root, failures) {
  if (!existsSync(resolve(root, "migrations"))) {
    return {
      name: "migration-compatibility",
      status: "skipped",
      detail: "No migrations directory is present in this workspace."
    };
  }
  const requiredMigrations = [
    "migrations/0007_site_web_sessions.sql",
    "migrations/0014_better_auth_integer_timestamps.sql"
  ];
  const missing = [];
  for (const path of requiredMigrations) {
    if (existsSync(resolve(root, path))) continue;
    missing.push(path);
    addFailure(failures, {
      code: "missing_migration_fixture",
      scope: "@treeseed/market",
      message: `Migration compatibility check is missing ${path}.`,
      details: { path }
    });
  }
  return {
    name: "migration-compatibility",
    status: missing.length > 0 ? "failed" : "passed",
    detail: "Checked required legacy web session and Better Auth migration coverage."
  };
}
|
|
494
|
+
// Entry point for the release-candidate gate. Reuses a cached *passing*
// report for the same fingerprint (unless allowReuse is false); otherwise
// runs every check, caches the resulting report, and returns it.
async function runReleaseCandidateGate(input) {
  const fingerprint = buildReleaseCandidateFingerprint(input);
  if (input.allowReuse !== false) {
    const cached = readCachedReleaseCandidateReport(input.root, fingerprint.key);
    // Failed cached reports are never reused; they are always re-checked.
    if (cached?.status === "passed") {
      return {
        ...cached,
        reused: true
      };
    }
  }
  // Normalize selection (dedupe + sort) and coerce versions to strings.
  const selectedPackageNames = [...new Set((input.selectedPackageNames ?? []).map(String))].sort();
  const plannedVersions = Object.fromEntries(
    Object.entries(input.plannedVersions).map(([name, version]) => [name, String(version)])
  );
  const failures = [];
  const checks = [];
  // Checks share the `failures` accumulator; each returns a summary entry.
  checks.push(dependencyRehearsalChecks(input.root, plannedVersions, selectedPackageNames, failures));
  checks.push(packageReadinessChecks(input.root, selectedPackageNames, failures));
  checks.push(await configParityChecks(input.root, failures));
  checks.push(migrationCompatibilityChecks(input.root, failures));
  const report = {
    status: failures.length === 0 ? "passed" : "failed",
    fingerprint,
    reused: false,
    checkedAt: nowIso(),
    failures,
    checks
  };
  writeReleaseCandidateReport(input.root, report);
  return report;
}
|
|
526
|
+
// Public surface: fingerprinting, cache read/write, and the gate runner.
export {
  buildReleaseCandidateFingerprint,
  readCachedReleaseCandidateReport,
  runReleaseCandidateGate,
  writeReleaseCandidateReport
};
|
|
@@ -1,3 +1,4 @@
|
|
|
1
|
+
import { type ReleaseCandidateReport } from '../operations/services/release-candidate.ts';
|
|
1
2
|
import { resolveTreeseedWorkflowState, type TreeseedWorkflowStatusOptions } from '../workflow-state.ts';
|
|
2
3
|
import { type TreeseedWorkflowRunCommand, type TreeseedWorkflowRunJournal } from './runs.ts';
|
|
3
4
|
import { type TreeseedWorkflowMode } from './session.ts';
|
|
@@ -42,6 +43,7 @@ type WorkflowRepoReport = {
|
|
|
42
43
|
skippedReason: string | null;
|
|
43
44
|
publishWait: Record<string, unknown> | null;
|
|
44
45
|
workflowGates: Array<Record<string, unknown>>;
|
|
46
|
+
backMerge: Record<string, unknown> | null;
|
|
45
47
|
};
|
|
46
48
|
export declare function workflowStatus(helpers: WorkflowOperationHelpers, input?: TreeseedWorkflowStatusOptions): Promise<TreeseedWorkflowResult<import("../workflow-state.ts").TreeseedWorkflowState>>;
|
|
47
49
|
export declare function workflowCi(helpers: WorkflowOperationHelpers, input?: TreeseedCiInput): Promise<TreeseedWorkflowResult<TreeseedCiResult>>;
|
|
@@ -414,6 +416,7 @@ export declare function workflowSave(helpers: WorkflowOperationHelpers, input: T
|
|
|
414
416
|
runId: null;
|
|
415
417
|
url: null;
|
|
416
418
|
}[];
|
|
419
|
+
releaseCandidate: ReleaseCandidateReport | null;
|
|
417
420
|
} & {
|
|
418
421
|
finalState?: WorkflowStatePayload;
|
|
419
422
|
}>>;
|
|
@@ -551,6 +554,7 @@ export declare function workflowClose(helpers: WorkflowOperationHelpers, input:
|
|
|
551
554
|
runId: null;
|
|
552
555
|
url: null;
|
|
553
556
|
}[];
|
|
557
|
+
releaseCandidate: ReleaseCandidateReport | null;
|
|
554
558
|
} & {
|
|
555
559
|
finalState?: WorkflowStatePayload;
|
|
556
560
|
}) | null;
|
|
@@ -729,6 +733,7 @@ export declare function workflowStage(helpers: WorkflowOperationHelpers, input:
|
|
|
729
733
|
runId: null;
|
|
730
734
|
url: null;
|
|
731
735
|
}[];
|
|
736
|
+
releaseCandidate: ReleaseCandidateReport | null;
|
|
732
737
|
} & {
|
|
733
738
|
finalState?: WorkflowStatePayload;
|
|
734
739
|
}) | null;
|
|
@@ -742,6 +747,7 @@ export declare function workflowStage(helpers: WorkflowOperationHelpers, input:
|
|
|
742
747
|
status: string;
|
|
743
748
|
workflowGates: any;
|
|
744
749
|
};
|
|
750
|
+
releaseCandidate: ReleaseCandidateReport;
|
|
745
751
|
previewCleanup: {
|
|
746
752
|
performed: boolean;
|
|
747
753
|
state: any;
|
|
@@ -855,6 +861,24 @@ export declare function workflowRelease(helpers: WorkflowOperationHelpers, input
|
|
|
855
861
|
publishWait: never[];
|
|
856
862
|
repos: never[];
|
|
857
863
|
rootRepo: WorkflowRepoReport;
|
|
864
|
+
releaseCandidate: ReleaseCandidateReport;
|
|
865
|
+
releaseBackMerge: {
|
|
866
|
+
status: string;
|
|
867
|
+
merged: boolean;
|
|
868
|
+
repoName: string;
|
|
869
|
+
sourceBranch: string;
|
|
870
|
+
targetBranch: string;
|
|
871
|
+
commitSha: string;
|
|
872
|
+
} | {
|
|
873
|
+
packageStagingPointersSynced: boolean;
|
|
874
|
+
packageStagingPointerCommit: string;
|
|
875
|
+
status: string;
|
|
876
|
+
merged: boolean;
|
|
877
|
+
repoName: string;
|
|
878
|
+
sourceBranch: string;
|
|
879
|
+
targetBranch: string;
|
|
880
|
+
commitSha: string;
|
|
881
|
+
};
|
|
858
882
|
finalBranch: string;
|
|
859
883
|
pushStatus: {
|
|
860
884
|
stagingPushed: boolean;
|
|
@@ -914,6 +938,24 @@ export declare function workflowRelease(helpers: WorkflowOperationHelpers, input
|
|
|
914
938
|
publishWait: Record<string, unknown>[];
|
|
915
939
|
repos: WorkflowRepoReport[];
|
|
916
940
|
rootRepo: WorkflowRepoReport;
|
|
941
|
+
releaseCandidate: ReleaseCandidateReport;
|
|
942
|
+
releaseBackMerge: {
|
|
943
|
+
status: string;
|
|
944
|
+
merged: boolean;
|
|
945
|
+
repoName: string;
|
|
946
|
+
sourceBranch: string;
|
|
947
|
+
targetBranch: string;
|
|
948
|
+
commitSha: string;
|
|
949
|
+
} | {
|
|
950
|
+
packageStagingPointersSynced: boolean;
|
|
951
|
+
packageStagingPointerCommit: string;
|
|
952
|
+
status: string;
|
|
953
|
+
merged: boolean;
|
|
954
|
+
repoName: string;
|
|
955
|
+
sourceBranch: string;
|
|
956
|
+
targetBranch: string;
|
|
957
|
+
commitSha: string;
|
|
958
|
+
};
|
|
917
959
|
finalBranch: string;
|
|
918
960
|
pushStatus: {
|
|
919
961
|
stagingPushed: boolean;
|
|
@@ -73,6 +73,9 @@ import {
|
|
|
73
73
|
skippedGitHubActionsGate,
|
|
74
74
|
waitForGitHubActionsGate
|
|
75
75
|
} from "../operations/services/github-actions-verification.js";
|
|
76
|
+
import {
|
|
77
|
+
runReleaseCandidateGate
|
|
78
|
+
} from "../operations/services/release-candidate.js";
|
|
76
79
|
import { loadCliDeployConfig, packageScriptPath, resolveWranglerBin } from "../operations/services/runtime-tools.js";
|
|
77
80
|
import { runTenantDeployPreflight, runWorkspaceReleasePreflight, runWorkspaceSavePreflight } from "../operations/services/save-deploy-preflight.js";
|
|
78
81
|
import { collectCliPreflight } from "../operations/services/workspace-preflight.js";
|
|
@@ -517,7 +520,8 @@ function createRepoReport(name, path, branch, dirty) {
|
|
|
517
520
|
commitSha: branch ? headCommit(path) : null,
|
|
518
521
|
skippedReason: null,
|
|
519
522
|
publishWait: null,
|
|
520
|
-
workflowGates: []
|
|
523
|
+
workflowGates: [],
|
|
524
|
+
backMerge: null
|
|
521
525
|
};
|
|
522
526
|
}
|
|
523
527
|
function createWorkspaceRootRepoReport(root) {
|
|
@@ -1247,6 +1251,58 @@ function assertNoInternalDevReferencesForRepo(root, repoDir, packageNames) {
|
|
|
1247
1251
|
throw new Error(`Stable release still contains internal Git/dev dependency references.
|
|
1248
1252
|
${rendered}`);
|
|
1249
1253
|
}
|
|
1254
|
+
function backMergeProductionIntoStaging(repoDir, repoName) {
|
|
1255
|
+
syncBranchWithOrigin(repoDir, PRODUCTION_BRANCH);
|
|
1256
|
+
syncBranchWithOrigin(repoDir, STAGING_BRANCH);
|
|
1257
|
+
checkoutBranch(repoDir, STAGING_BRANCH);
|
|
1258
|
+
try {
|
|
1259
|
+
run("git", ["merge-base", "--is-ancestor", `origin/${PRODUCTION_BRANCH}`, "HEAD"], { cwd: repoDir, capture: true });
|
|
1260
|
+
return {
|
|
1261
|
+
status: "up-to-date",
|
|
1262
|
+
merged: false,
|
|
1263
|
+
repoName,
|
|
1264
|
+
sourceBranch: PRODUCTION_BRANCH,
|
|
1265
|
+
targetBranch: STAGING_BRANCH,
|
|
1266
|
+
commitSha: headCommit(repoDir)
|
|
1267
|
+
};
|
|
1268
|
+
} catch {
|
|
1269
|
+
}
|
|
1270
|
+
try {
|
|
1271
|
+
run("git", ["merge", "--no-ff", `origin/${PRODUCTION_BRANCH}`, "-m", `release: back-merge ${PRODUCTION_BRANCH} into ${STAGING_BRANCH}`], { cwd: repoDir });
|
|
1272
|
+
} catch (error) {
|
|
1273
|
+
const report = collectMergeConflictReport(repoDir);
|
|
1274
|
+
throw new TreeseedWorkflowError("release", "merge_conflict", formatMergeConflictReport(report, repoDir, STAGING_BRANCH), {
|
|
1275
|
+
details: { repoName, branch: STAGING_BRANCH, sourceBranch: PRODUCTION_BRANCH, report, originalError: error instanceof Error ? error.message : String(error) },
|
|
1276
|
+
exitCode: 12
|
|
1277
|
+
});
|
|
1278
|
+
}
|
|
1279
|
+
pushBranch(repoDir, STAGING_BRANCH);
|
|
1280
|
+
return {
|
|
1281
|
+
status: "merged",
|
|
1282
|
+
merged: true,
|
|
1283
|
+
repoName,
|
|
1284
|
+
sourceBranch: PRODUCTION_BRANCH,
|
|
1285
|
+
targetBranch: STAGING_BRANCH,
|
|
1286
|
+
commitSha: headCommit(repoDir)
|
|
1287
|
+
};
|
|
1288
|
+
}
|
|
1289
|
+
function backMergeRootProductionIntoStaging(root, syncPackageStagingHeads) {
|
|
1290
|
+
const gitRoot = repoRoot(root);
|
|
1291
|
+
const backMerge = backMergeProductionIntoStaging(gitRoot, "@treeseed/market");
|
|
1292
|
+
if (!syncPackageStagingHeads) {
|
|
1293
|
+
return backMerge;
|
|
1294
|
+
}
|
|
1295
|
+
syncAllCheckedOutPackageRepos(root, STAGING_BRANCH);
|
|
1296
|
+
const pointerSync = commitAllIfChanged(gitRoot, "release: sync package staging heads");
|
|
1297
|
+
if (pointerSync.committed) {
|
|
1298
|
+
pushBranch(gitRoot, STAGING_BRANCH);
|
|
1299
|
+
}
|
|
1300
|
+
return {
|
|
1301
|
+
...backMerge,
|
|
1302
|
+
packageStagingPointersSynced: pointerSync.committed,
|
|
1303
|
+
packageStagingPointerCommit: pointerSync.commitSha
|
|
1304
|
+
};
|
|
1305
|
+
}
|
|
1250
1306
|
function collectActiveDevTagReferences(root) {
|
|
1251
1307
|
return collectInternalDevReferenceIssues(root).map((issue) => devTagFromDependencySpec(issue.spec) ?? (issue.spec.includes("-dev.") ? issue.spec : null)).filter((value) => Boolean(value));
|
|
1252
1308
|
}
|
|
@@ -1263,6 +1319,32 @@ function releasePlanPackageSelection(value) {
|
|
|
1263
1319
|
selected: Array.isArray(record.selected) ? record.selected.map(String) : []
|
|
1264
1320
|
};
|
|
1265
1321
|
}
|
|
1322
|
+
function assertReleaseCandidatePassed(operation, report) {
|
|
1323
|
+
if (report.status === "passed") {
|
|
1324
|
+
return;
|
|
1325
|
+
}
|
|
1326
|
+
const rendered = report.failures.map((failure) => `- ${failure.scope}${failure.provider ? ` ${failure.provider}` : ""}: ${failure.message}`);
|
|
1327
|
+
workflowError(operation, "validation_failed", [
|
|
1328
|
+
"Treeseed release-candidate readiness failed.",
|
|
1329
|
+
...rendered
|
|
1330
|
+
].join("\n"), {
|
|
1331
|
+
details: {
|
|
1332
|
+
releaseCandidate: report
|
|
1333
|
+
}
|
|
1334
|
+
});
|
|
1335
|
+
}
|
|
1336
|
+
async function runReleaseCandidateForPlan(operation, root, plannedRelease, options = {}) {
|
|
1337
|
+
const plannedVersions = plannedRelease.plannedVersions && typeof plannedRelease.plannedVersions === "object" && !Array.isArray(plannedRelease.plannedVersions) ? plannedRelease.plannedVersions : {};
|
|
1338
|
+
const packageSelection = releasePlanPackageSelection(plannedRelease.packageSelection);
|
|
1339
|
+
const report = await runReleaseCandidateGate({
|
|
1340
|
+
root,
|
|
1341
|
+
plannedVersions,
|
|
1342
|
+
selectedPackageNames: packageSelection.selected,
|
|
1343
|
+
allowReuse: options.allowReuse
|
|
1344
|
+
});
|
|
1345
|
+
assertReleaseCandidatePassed(operation, report);
|
|
1346
|
+
return report;
|
|
1347
|
+
}
|
|
1266
1348
|
function buildReleasePlanSnapshot(input) {
|
|
1267
1349
|
const selectedPackageNames = new Set(input.packageSelection.selected);
|
|
1268
1350
|
const versionPlan = planWorkspaceReleaseBump(input.level, input.root, input.mode === "recursive-workspace" ? { selectedPackageNames } : {});
|
|
@@ -1295,6 +1377,7 @@ function buildReleasePlanSnapshot(input) {
|
|
|
1295
1377
|
finalBranch: STAGING_BRANCH,
|
|
1296
1378
|
plannedSteps: [
|
|
1297
1379
|
{ id: "release-plan", description: "Record immutable release plan and target versions" },
|
|
1380
|
+
{ id: "release-candidate", description: "Run exact staging release-candidate readiness checks" },
|
|
1298
1381
|
{ id: "workspace-unlink", description: "Remove local workspace links before stable release install" },
|
|
1299
1382
|
{ id: "prepare-release-metadata", description: "Rewrite package metadata and lockfiles to production dependency mode" },
|
|
1300
1383
|
...input.packageReports.filter((report) => selectedPackageNames.has(report.name)).map((report) => ({
|
|
@@ -1302,6 +1385,7 @@ function buildReleasePlanSnapshot(input) {
|
|
|
1302
1385
|
description: `Release ${report.name} from staging to main and tag ${plannedVersions[report.name] ?? "(planned)"}`
|
|
1303
1386
|
})),
|
|
1304
1387
|
{ id: "release-root", description: `Release market ${rootVersion}` },
|
|
1388
|
+
{ id: "release-back-merge", description: "Back-merge production release history into staging" },
|
|
1305
1389
|
{ id: "cleanup-dev-tags", description: "Clean replaced Treeseed dev tags after stable release" },
|
|
1306
1390
|
{ id: "workspace-link", description: "Restore local workspace links after release syncs back to staging" }
|
|
1307
1391
|
],
|
|
@@ -2400,6 +2484,7 @@ async function workflowSave(helpers, input) {
|
|
|
2400
2484
|
{ id: "workspace-unlink", description: "Remove local workspace links before deployment install and lockfile updates" },
|
|
2401
2485
|
...repositoryPlan.plannedSteps,
|
|
2402
2486
|
{ id: "lockfile-validation", description: "Validate refreshed package-lock.json files before any save commit is pushed" },
|
|
2487
|
+
...branch === STAGING_BRANCH ? [{ id: "release-candidate", description: "Run release-candidate readiness checks for the saved staging state" }] : [],
|
|
2403
2488
|
{ id: "workspace-link", description: "Restore local workspace links after save" },
|
|
2404
2489
|
...beforeState.branchRole === "feature" && (effectiveInput.preview === true || previewInitialized) ? [{ id: "preview", description: `Refresh preview deployment for ${branch}` }] : []
|
|
2405
2490
|
]
|
|
@@ -2457,6 +2542,14 @@ async function workflowSave(helpers, input) {
|
|
|
2457
2542
|
branch,
|
|
2458
2543
|
resumable: true
|
|
2459
2544
|
}] : [],
|
|
2545
|
+
...branch === STAGING_BRANCH ? [{
|
|
2546
|
+
id: "release-candidate",
|
|
2547
|
+
description: "Run release-candidate readiness checks",
|
|
2548
|
+
repoName: rootRepo.name,
|
|
2549
|
+
repoPath: rootRepo.path,
|
|
2550
|
+
branch,
|
|
2551
|
+
resumable: true
|
|
2552
|
+
}] : [],
|
|
2460
2553
|
...beforeState.branchRole === "feature" && (effectiveInput.preview === true || effectiveInput.refreshPreview !== false && previewInitialized) ? [{
|
|
2461
2554
|
id: "preview",
|
|
2462
2555
|
description: `Refresh preview ${branch}`,
|
|
@@ -2534,6 +2627,19 @@ async function workflowSave(helpers, input) {
|
|
|
2534
2627
|
headSha: String(repo.commitSha)
|
|
2535
2628
|
}))
|
|
2536
2629
|
], "hosted", { root, runId: workflowRun.runId }).then((workflowGates) => ({ workflowGates }))) : { workflowGates: [] };
|
|
2630
|
+
const releaseCandidate = branch === STAGING_BRANCH ? await executeJournalStep(root, workflowRun.runId, "release-candidate", () => {
|
|
2631
|
+
const releaseSession = resolveTreeseedWorkflowSession(root);
|
|
2632
|
+
const stagingReleasePlan = buildReleasePlanSnapshot({
|
|
2633
|
+
root,
|
|
2634
|
+
mode,
|
|
2635
|
+
level: effectiveInput.bump ?? "patch",
|
|
2636
|
+
packageSelection: releaseSession.packageSelection,
|
|
2637
|
+
packageReports: savedPackageReports,
|
|
2638
|
+
rootRepo: savedRootRepo,
|
|
2639
|
+
blockers: []
|
|
2640
|
+
});
|
|
2641
|
+
return runReleaseCandidateForPlan("save", root, stagingReleasePlan);
|
|
2642
|
+
}) : null;
|
|
2537
2643
|
let previewAction = { status: "skipped" };
|
|
2538
2644
|
if (beforeState.branchRole === "feature" && branch) {
|
|
2539
2645
|
if (effectiveInput.preview === true) {
|
|
@@ -2574,6 +2680,7 @@ async function workflowSave(helpers, input) {
|
|
|
2574
2680
|
ciMode: normalizeCiMode(effectiveInput.ciMode, "save"),
|
|
2575
2681
|
verifyMode: effectiveInput.verifyMode ?? "fast",
|
|
2576
2682
|
workflowGates: saveWorkflowGates?.workflowGates ?? [],
|
|
2683
|
+
releaseCandidate,
|
|
2577
2684
|
...worktreePayload(root, effectiveInput.worktreeMode)
|
|
2578
2685
|
};
|
|
2579
2686
|
completeWorkflowRun(root, workflowRun.runId, payload);
|
|
@@ -2869,6 +2976,7 @@ async function workflowStage(helpers, input) {
|
|
|
2869
2976
|
{ id: "merge-root", description: `Squash-merge ${initialSession.branchName ?? "(current task)"} into market staging` },
|
|
2870
2977
|
{ id: "lockfile-validation", description: "Refresh and validate the merged root workspace lockfile before pushing staging" },
|
|
2871
2978
|
{ id: "wait-staging", description: "Wait for exact-SHA staging GitHub Actions gates" },
|
|
2979
|
+
{ id: "release-candidate", description: "Run release-candidate readiness checks for the exact staging state" },
|
|
2872
2980
|
{ id: "preview-cleanup", description: "Destroy preview resources" },
|
|
2873
2981
|
{ id: "cleanup-root", description: "Archive and delete the task branch from market" },
|
|
2874
2982
|
...checkedOutWorkspacePackageRepos(root).map((pkg) => ({
|
|
@@ -2929,6 +3037,7 @@ async function workflowStage(helpers, input) {
|
|
|
2929
3037
|
})),
|
|
2930
3038
|
{ id: "merge-root", description: `Merge ${featureBranch} into market staging`, repoName: rootRepo.name, repoPath: rootRepo.path, branch: featureBranch, resumable: true },
|
|
2931
3039
|
{ id: "wait-staging", description: "Wait for exact-SHA staging GitHub Actions gates", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true },
|
|
3040
|
+
{ id: "release-candidate", description: "Run release-candidate readiness checks", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true },
|
|
2932
3041
|
{ id: "preview-cleanup", description: "Destroy preview resources", repoName: rootRepo.name, repoPath: rootRepo.path, branch: featureBranch, resumable: true },
|
|
2933
3042
|
{ id: "cleanup-root", description: `Archive ${featureBranch} in market`, repoName: rootRepo.name, repoPath: rootRepo.path, branch: featureBranch, resumable: true },
|
|
2934
3043
|
...packageReports.map((report) => ({
|
|
@@ -3060,6 +3169,16 @@ async function workflowStage(helpers, input) {
|
|
|
3060
3169
|
status: String(stageWorkflowGateResult?.status ?? "completed"),
|
|
3061
3170
|
workflowGates: Array.isArray(stageWorkflowGateResult?.workflowGates) ? stageWorkflowGateResult.workflowGates : []
|
|
3062
3171
|
};
|
|
3172
|
+
const stageReleasePlan = buildReleasePlanSnapshot({
|
|
3173
|
+
root,
|
|
3174
|
+
mode,
|
|
3175
|
+
level: "patch",
|
|
3176
|
+
packageSelection: session.packageSelection,
|
|
3177
|
+
packageReports,
|
|
3178
|
+
rootRepo,
|
|
3179
|
+
blockers: []
|
|
3180
|
+
});
|
|
3181
|
+
const releaseCandidate = await executeJournalStep(root, workflowRun.runId, "release-candidate", () => runReleaseCandidateForPlan("stage", root, stageReleasePlan));
|
|
3063
3182
|
const previewCleanup = effectiveInput.deletePreview === false ? (skipJournalStep(root, workflowRun.runId, "preview-cleanup", { performed: false }), { performed: false }) : await executeJournalStep(root, workflowRun.runId, "preview-cleanup", () => destroyPreviewIfPresent(root, featureBranch));
|
|
3064
3183
|
const rootCleanup = await executeJournalStep(root, workflowRun.runId, "cleanup-root", () => {
|
|
3065
3184
|
const deprecatedTag = createDeprecatedTaskTag(repoDir, featureBranch, `stage: ${message}`);
|
|
@@ -3106,6 +3225,7 @@ async function workflowStage(helpers, input) {
|
|
|
3106
3225
|
repos: packageReports,
|
|
3107
3226
|
rootRepo,
|
|
3108
3227
|
stagingWait,
|
|
3228
|
+
releaseCandidate,
|
|
3109
3229
|
previewCleanup,
|
|
3110
3230
|
lockfileValidation: rootMerge?.lockfileValidation ?? null,
|
|
3111
3231
|
lockfileInstall: rootMerge?.lockfileInstall ?? null,
|
|
@@ -3216,6 +3336,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3216
3336
|
},
|
|
3217
3337
|
[
|
|
3218
3338
|
{ id: "release-plan", description: "Record release plan", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true },
|
|
3339
|
+
{ id: "release-candidate", description: "Run release-candidate readiness checks", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true },
|
|
3219
3340
|
{ id: "workspace-unlink", description: "Remove local workspace links before release", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true },
|
|
3220
3341
|
...mode === "recursive-workspace" ? [{ id: "prepare-release-metadata", description: "Rewrite stable release metadata", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true }] : [],
|
|
3221
3342
|
...packageReports.filter((report) => selectedPackageNames.has(report.name)).map((report) => ({
|
|
@@ -3228,6 +3349,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3228
3349
|
})),
|
|
3229
3350
|
{ id: "release-root", description: "Release market repo", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true },
|
|
3230
3351
|
{ id: "release-root-gates", description: "Wait for market release GitHub Actions gates", repoName: rootRepo.name, repoPath: rootRepo.path, branch: PRODUCTION_BRANCH, resumable: true },
|
|
3352
|
+
{ id: "release-back-merge", description: "Back-merge main into staging", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true },
|
|
3231
3353
|
...mode === "recursive-workspace" ? [{ id: "cleanup-dev-tags", description: "Clean replaced dev package tags", repoName: rootRepo.name, repoPath: rootRepo.path, branch: STAGING_BRANCH, resumable: true }] : []
|
|
3232
3354
|
],
|
|
3233
3355
|
autoResumeRun ? {
|
|
@@ -3249,6 +3371,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3249
3371
|
const rootVersion = String(releasePlan.rootVersion);
|
|
3250
3372
|
applyTreeseedEnvironmentToProcess({ tenantRoot: root, scope: "staging", override: true });
|
|
3251
3373
|
assertReleaseGitHubAutomationReady(root, effectiveSelectedPackageNames);
|
|
3374
|
+
const releaseCandidate = await executeJournalStep(root, workflowRun.runId, "release-candidate", () => runReleaseCandidateForPlan("release", root, releasePlan, { allowReuse: true }));
|
|
3252
3375
|
if (!isResume) {
|
|
3253
3376
|
assertSessionBranchSafety("release", session, { requireCleanPackages: true, requireCurrentBranch: true });
|
|
3254
3377
|
assertCleanWorktree(root);
|
|
@@ -3317,6 +3440,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3317
3440
|
headSha: String(rootRelease2?.releasedCommit ?? rootRepo.commitSha ?? "")
|
|
3318
3441
|
}
|
|
3319
3442
|
].filter((gate) => gate.headSha), ciMode, { root, runId: workflowRun.runId }).then((workflowGates) => ({ workflowGates })));
|
|
3443
|
+
const releaseBackMerge2 = await executeJournalStep(root, workflowRun.runId, "release-back-merge", () => backMergeRootProductionIntoStaging(root, false));
|
|
3320
3444
|
const workspaceLinks2 = ensureWorkflowWorkspaceLinks(root, helpers, effectiveInput.workspaceLinks ?? "auto");
|
|
3321
3445
|
const payload2 = {
|
|
3322
3446
|
mode,
|
|
@@ -3335,6 +3459,8 @@ async function workflowRelease(helpers, input) {
|
|
|
3335
3459
|
publishWait: [],
|
|
3336
3460
|
repos: [],
|
|
3337
3461
|
rootRepo,
|
|
3462
|
+
releaseCandidate,
|
|
3463
|
+
releaseBackMerge: releaseBackMerge2,
|
|
3338
3464
|
finalBranch: currentBranch(gitRoot) || STAGING_BRANCH,
|
|
3339
3465
|
pushStatus: { stagingPushed: true, productionPushed: true, tagPushed: true },
|
|
3340
3466
|
workspaceLinks: workspaceLinks2,
|
|
@@ -3433,13 +3559,15 @@ async function workflowRelease(helpers, input) {
|
|
|
3433
3559
|
], ciMode, { root, runId: workflowRun.runId });
|
|
3434
3560
|
const publish = workflowGates.find((gate) => gate.workflow === "publish.yml") ?? workflowGates[0] ?? null;
|
|
3435
3561
|
assertReleaseGitHubWorkflowSucceeded(pkg.name, publish);
|
|
3562
|
+
const backMerge = backMergeProductionIntoStaging(pkg.dir, pkg.name);
|
|
3436
3563
|
syncBranchWithOrigin(pkg.dir, STAGING_BRANCH);
|
|
3437
3564
|
return {
|
|
3438
3565
|
commitSha: mergeResult.commitSha,
|
|
3439
3566
|
tagName,
|
|
3440
3567
|
tag,
|
|
3441
3568
|
publish,
|
|
3442
|
-
workflowGates
|
|
3569
|
+
workflowGates,
|
|
3570
|
+
backMerge
|
|
3443
3571
|
};
|
|
3444
3572
|
});
|
|
3445
3573
|
report.committed = true;
|
|
@@ -3449,6 +3577,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3449
3577
|
report.commitSha = String(releasedPackage?.commitSha ?? report.commitSha ?? "");
|
|
3450
3578
|
report.publishWait = releasedPackage?.publish ?? null;
|
|
3451
3579
|
report.workflowGates = Array.isArray(releasedPackage?.workflowGates) ? releasedPackage.workflowGates : [];
|
|
3580
|
+
report.backMerge = releasedPackage?.backMerge ?? null;
|
|
3452
3581
|
report.branch = STAGING_BRANCH;
|
|
3453
3582
|
publishWait.push({
|
|
3454
3583
|
name: report.name,
|
|
@@ -3539,6 +3668,7 @@ async function workflowRelease(helpers, input) {
|
|
|
3539
3668
|
headSha: String(rootRelease?.releasedCommit ?? rootRepo.commitSha ?? "")
|
|
3540
3669
|
}
|
|
3541
3670
|
].filter((gate) => gate.headSha), ciMode, { root, runId: workflowRun.runId }).then((workflowGates) => ({ workflowGates })));
|
|
3671
|
+
const releaseBackMerge = await executeJournalStep(root, workflowRun.runId, "release-back-merge", () => backMergeRootProductionIntoStaging(root, true));
|
|
3542
3672
|
const devTagCleanupMode = effectiveInput.devTagCleanup ?? "safe-after-release";
|
|
3543
3673
|
const devTagCleanup = devTagCleanupMode === "off" ? (skipJournalStep(root, workflowRun.runId, "cleanup-dev-tags", { status: "skipped", reason: "disabled" }), { status: "skipped", reason: "disabled" }) : await executeJournalStep(root, workflowRun.runId, "cleanup-dev-tags", () => {
|
|
3544
3674
|
const activeDevTags = collectActiveDevTagReferences(root);
|
|
@@ -3585,6 +3715,8 @@ async function workflowRelease(helpers, input) {
|
|
|
3585
3715
|
publishWait,
|
|
3586
3716
|
repos: packageReports,
|
|
3587
3717
|
rootRepo,
|
|
3718
|
+
releaseCandidate,
|
|
3719
|
+
releaseBackMerge,
|
|
3588
3720
|
finalBranch: currentBranch(gitRoot) || STAGING_BRANCH,
|
|
3589
3721
|
pushStatus: {
|
|
3590
3722
|
stagingPushed: true,
|
package/dist/workflow/runs.js
CHANGED
|
@@ -238,6 +238,15 @@ function selectedReleasePackageNames(plan) {
|
|
|
238
238
|
const selected = Array.isArray(selection?.selected) ? selection.selected.filter((name) => typeof name === "string") : [];
|
|
239
239
|
return selected;
|
|
240
240
|
}
|
|
241
|
+
function isReleaseGateOnlyCompletion(journal) {
|
|
242
|
+
if (journal.command !== "release") return false;
|
|
243
|
+
const releaseRoot = journal.steps.find((step) => step.id === "release-root");
|
|
244
|
+
if (releaseRoot?.status !== "completed") return false;
|
|
245
|
+
const releaseRootData = stringRecord(releaseRoot.data);
|
|
246
|
+
if (typeof releaseRootData?.releasedCommit !== "string") return false;
|
|
247
|
+
const pendingStep = journal.steps.find((step) => step.status === "pending");
|
|
248
|
+
return pendingStep?.id === "release-root-gates" || pendingStep?.id === "release-back-merge" || pendingStep?.id === "cleanup-dev-tags";
|
|
249
|
+
}
|
|
241
250
|
function classifyWorkflowRunJournal(journal, options = {}) {
|
|
242
251
|
const reasons = [];
|
|
243
252
|
const now = options.now ?? nowIso();
|
|
@@ -273,7 +282,8 @@ function classifyWorkflowRunJournal(journal, options = {}) {
|
|
|
273
282
|
if (options.currentBranch && journal.session.branchName && options.currentBranch !== journal.session.branchName) {
|
|
274
283
|
reasons.push(`current branch ${options.currentBranch} does not match journal branch ${journal.session.branchName}`);
|
|
275
284
|
}
|
|
276
|
-
|
|
285
|
+
const releaseGateOnlyCompletion = isReleaseGateOnlyCompletion(journal);
|
|
286
|
+
if (journal.command === "release" && options.currentHeads && !releaseGateOnlyCompletion) {
|
|
277
287
|
const releasePlan = stringRecord(journal.steps.find((step) => step.id === "release-plan")?.data);
|
|
278
288
|
if (releasePlan) {
|
|
279
289
|
const rootHead = options.currentHeads["@treeseed/market"];
|
|
@@ -292,7 +302,7 @@ function classifyWorkflowRunJournal(journal, options = {}) {
|
|
|
292
302
|
}
|
|
293
303
|
return {
|
|
294
304
|
state: reasons.length > 0 ? "stale" : "resumable",
|
|
295
|
-
reasons: reasons.length > 0 ? reasons : ["workflow run can be resumed"],
|
|
305
|
+
reasons: reasons.length > 0 ? reasons : releaseGateOnlyCompletion ? ["release commits already exist; remaining release gates can be rechecked"] : ["workflow run can be resumed"],
|
|
296
306
|
classifiedAt: now
|
|
297
307
|
};
|
|
298
308
|
}
|
package/dist/workflow-state.d.ts
CHANGED
|
@@ -183,6 +183,12 @@ export type TreeseedWorkflowState = {
|
|
|
183
183
|
startupPassphraseConfigured: boolean;
|
|
184
184
|
};
|
|
185
185
|
releaseReady: boolean;
|
|
186
|
+
releaseHistory: {
|
|
187
|
+
stagingAheadMain: number | null;
|
|
188
|
+
stagingBehindMain: number | null;
|
|
189
|
+
backMerged: boolean | null;
|
|
190
|
+
detail: string;
|
|
191
|
+
};
|
|
186
192
|
readiness: {
|
|
187
193
|
local: {
|
|
188
194
|
ready: boolean;
|
package/dist/workflow-state.js
CHANGED
|
@@ -255,6 +255,38 @@ function safeHeadCommit(repoDir) {
|
|
|
255
255
|
return null;
|
|
256
256
|
}
|
|
257
257
|
}
|
|
258
|
+
function safeReleaseHistory(repoDir) {
|
|
259
|
+
if (!repoDir) {
|
|
260
|
+
return {
|
|
261
|
+
stagingAheadMain: null,
|
|
262
|
+
stagingBehindMain: null,
|
|
263
|
+
backMerged: null,
|
|
264
|
+
detail: "Repository root is unavailable."
|
|
265
|
+
};
|
|
266
|
+
}
|
|
267
|
+
try {
|
|
268
|
+
const output = run("git", ["rev-list", "--left-right", "--count", "staging...main"], { cwd: repoDir, capture: true }).trim();
|
|
269
|
+
const [aheadRaw, behindRaw] = output.split(/\s+/u);
|
|
270
|
+
const stagingAheadMain = Number.parseInt(aheadRaw ?? "", 10);
|
|
271
|
+
const stagingBehindMain = Number.parseInt(behindRaw ?? "", 10);
|
|
272
|
+
if (!Number.isFinite(stagingAheadMain) || !Number.isFinite(stagingBehindMain)) {
|
|
273
|
+
throw new Error("invalid rev-list output");
|
|
274
|
+
}
|
|
275
|
+
return {
|
|
276
|
+
stagingAheadMain,
|
|
277
|
+
stagingBehindMain,
|
|
278
|
+
backMerged: stagingBehindMain === 0,
|
|
279
|
+
detail: stagingBehindMain === 0 ? "Staging contains current main release history." : `Staging is missing ${stagingBehindMain} main commit${stagingBehindMain === 1 ? "" : "s"}.`
|
|
280
|
+
};
|
|
281
|
+
} catch {
|
|
282
|
+
return {
|
|
283
|
+
stagingAheadMain: null,
|
|
284
|
+
stagingBehindMain: null,
|
|
285
|
+
backMerged: null,
|
|
286
|
+
detail: "Could not compare staging and main release history."
|
|
287
|
+
};
|
|
288
|
+
}
|
|
289
|
+
}
|
|
258
290
|
function resolveLocalStatusUrl(deployConfig) {
|
|
259
291
|
return deployConfig.surfaces?.web?.localBaseUrl ?? deployConfig.surfaces?.api?.localBaseUrl ?? Object.values(deployConfig.services ?? {}).find((service) => service?.enabled !== false && service.environments?.local?.baseUrl)?.environments?.local?.baseUrl ?? null;
|
|
260
292
|
}
|
|
@@ -472,6 +504,7 @@ function resolveTreeseedWorkflowState(cwd, options = {}) {
|
|
|
472
504
|
startupPassphraseConfigured: Boolean(process.env.TREESEED_KEY_PASSPHRASE?.trim())
|
|
473
505
|
},
|
|
474
506
|
releaseReady: branchRole === "staging" && !dirtyWorktree,
|
|
507
|
+
releaseHistory: safeReleaseHistory(root),
|
|
475
508
|
readiness: {
|
|
476
509
|
local: { ready: false, blockers: [], warnings: [] },
|
|
477
510
|
staging: { ready: false, blockers: [], warnings: [] },
|