@treeseed/cli 0.1.1 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +27 -26
- package/dist/cli/handlers/auth-login.d.ts +2 -0
- package/dist/cli/handlers/auth-login.js +67 -0
- package/dist/cli/handlers/auth-logout.d.ts +2 -0
- package/dist/cli/handlers/auth-logout.js +20 -0
- package/dist/cli/handlers/auth-whoami.d.ts +2 -0
- package/dist/cli/handlers/auth-whoami.js +24 -0
- package/dist/cli/handlers/close.js +19 -53
- package/dist/cli/handlers/config.js +33 -53
- package/dist/cli/handlers/destroy.js +34 -79
- package/dist/{src/cli/handlers/ship.d.ts → cli/handlers/dev.d.ts} +1 -1
- package/dist/cli/handlers/dev.js +19 -0
- package/dist/cli/handlers/doctor.js +13 -6
- package/dist/cli/handlers/init.js +32 -8
- package/dist/cli/handlers/release.js +21 -53
- package/dist/cli/handlers/rollback.js +8 -8
- package/dist/cli/handlers/save.js +21 -79
- package/dist/cli/handlers/stage.d.ts +2 -0
- package/dist/cli/handlers/stage.js +28 -0
- package/dist/cli/handlers/status.js +35 -26
- package/dist/{src/cli/handlers/deploy.d.ts → cli/handlers/switch.d.ts} +1 -1
- package/dist/cli/handlers/switch.js +29 -0
- package/dist/{src/cli/handlers/next.d.ts → cli/handlers/sync.d.ts} +1 -1
- package/dist/cli/handlers/sync.js +26 -0
- package/dist/cli/handlers/tasks.d.ts +2 -0
- package/dist/cli/handlers/tasks.js +31 -0
- package/dist/cli/handlers/template.d.ts +2 -0
- package/dist/cli/handlers/template.js +27 -0
- package/dist/cli/handlers/workflow.d.ts +6 -0
- package/dist/cli/handlers/workflow.js +71 -0
- package/dist/{src/cli → cli}/help.d.ts +2 -2
- package/dist/cli/help.js +36 -24
- package/dist/cli/main.d.ts +6 -0
- package/dist/cli/main.js +14 -19
- package/dist/cli/operations-help.d.ts +1 -0
- package/dist/cli/operations-help.js +1 -0
- package/dist/cli/operations-parser.d.ts +1 -0
- package/dist/cli/operations-parser.js +1 -0
- package/dist/cli/operations-registry.d.ts +5 -0
- package/dist/cli/operations-registry.js +260 -0
- package/dist/cli/operations-types.d.ts +72 -0
- package/dist/cli/parser.d.ts +3 -0
- package/dist/cli/parser.js +1 -6
- package/dist/cli/registry.d.ts +25 -0
- package/dist/cli/registry.js +28 -416
- package/dist/cli/repair.js +6 -4
- package/dist/cli/runtime.d.ts +31 -0
- package/dist/cli/runtime.js +240 -111
- package/dist/cli/types.d.ts +1 -0
- package/dist/{src/cli → cli}/workflow-state.d.ts +9 -0
- package/dist/cli/workflow-state.js +45 -21
- package/package.json +13 -13
- package/dist/cli/handlers/continue.js +0 -23
- package/dist/cli/handlers/deploy.js +0 -139
- package/dist/cli/handlers/next.js +0 -27
- package/dist/cli/handlers/prepare.js +0 -8
- package/dist/cli/handlers/promote.js +0 -8
- package/dist/cli/handlers/publish.js +0 -8
- package/dist/cli/handlers/setup.js +0 -48
- package/dist/cli/handlers/ship.js +0 -49
- package/dist/cli/handlers/start.js +0 -97
- package/dist/cli/handlers/teardown.js +0 -50
- package/dist/cli/handlers/work.js +0 -85
- package/dist/scripts/aggregate-book.d.ts +0 -1
- package/dist/scripts/aggregate-book.js +0 -121
- package/dist/scripts/assert-release-tag-version.d.ts +0 -1
- package/dist/scripts/assert-release-tag-version.js +0 -21
- package/dist/scripts/build-dist.d.ts +0 -1
- package/dist/scripts/build-dist.js +0 -108
- package/dist/scripts/build-tenant-worker.d.ts +0 -1
- package/dist/scripts/build-tenant-worker.js +0 -36
- package/dist/scripts/cleanup-markdown.d.ts +0 -2
- package/dist/scripts/cleanup-markdown.js +0 -373
- package/dist/scripts/config-runtime-lib.d.ts +0 -122
- package/dist/scripts/config-runtime-lib.js +0 -505
- package/dist/scripts/config-treeseed.d.ts +0 -2
- package/dist/scripts/config-treeseed.js +0 -81
- package/dist/scripts/d1-migration-lib.d.ts +0 -6
- package/dist/scripts/d1-migration-lib.js +0 -90
- package/dist/scripts/deploy-lib.d.ts +0 -127
- package/dist/scripts/deploy-lib.js +0 -841
- package/dist/scripts/ensure-mailpit.d.ts +0 -1
- package/dist/scripts/ensure-mailpit.js +0 -29
- package/dist/scripts/git-workflow-lib.d.ts +0 -25
- package/dist/scripts/git-workflow-lib.js +0 -136
- package/dist/scripts/github-automation-lib.d.ts +0 -156
- package/dist/scripts/github-automation-lib.js +0 -242
- package/dist/scripts/local-dev-lib.d.ts +0 -9
- package/dist/scripts/local-dev-lib.js +0 -84
- package/dist/scripts/local-dev.d.ts +0 -1
- package/dist/scripts/local-dev.js +0 -129
- package/dist/scripts/logs-mailpit.d.ts +0 -1
- package/dist/scripts/logs-mailpit.js +0 -2
- package/dist/scripts/mailpit-runtime.d.ts +0 -4
- package/dist/scripts/mailpit-runtime.js +0 -57
- package/dist/scripts/package-tools.d.ts +0 -22
- package/dist/scripts/package-tools.js +0 -255
- package/dist/scripts/patch-starlight-content-path.d.ts +0 -1
- package/dist/scripts/patch-starlight-content-path.js +0 -172
- package/dist/scripts/paths.d.ts +0 -17
- package/dist/scripts/paths.js +0 -26
- package/dist/scripts/publish-package.d.ts +0 -1
- package/dist/scripts/publish-package.js +0 -19
- package/dist/scripts/release-verify.d.ts +0 -1
- package/dist/scripts/release-verify.js +0 -136
- package/dist/scripts/run-fixture-astro-command.d.ts +0 -1
- package/dist/scripts/run-fixture-astro-command.js +0 -18
- package/dist/scripts/save-deploy-preflight-lib.d.ts +0 -34
- package/dist/scripts/save-deploy-preflight-lib.js +0 -69
- package/dist/scripts/scaffold-site.d.ts +0 -2
- package/dist/scripts/scaffold-site.js +0 -92
- package/dist/scripts/stop-mailpit.d.ts +0 -1
- package/dist/scripts/stop-mailpit.js +0 -5
- package/dist/scripts/sync-dev-vars.d.ts +0 -1
- package/dist/scripts/sync-dev-vars.js +0 -6
- package/dist/scripts/template-registry-lib.d.ts +0 -47
- package/dist/scripts/template-registry-lib.js +0 -137
- package/dist/scripts/tenant-astro-command.d.ts +0 -1
- package/dist/scripts/tenant-astro-command.js +0 -3
- package/dist/scripts/tenant-build.d.ts +0 -1
- package/dist/scripts/tenant-build.js +0 -16
- package/dist/scripts/tenant-check.d.ts +0 -1
- package/dist/scripts/tenant-check.js +0 -7
- package/dist/scripts/tenant-d1-migrate-local.d.ts +0 -1
- package/dist/scripts/tenant-d1-migrate-local.js +0 -11
- package/dist/scripts/tenant-deploy.d.ts +0 -2
- package/dist/scripts/tenant-deploy.js +0 -180
- package/dist/scripts/tenant-destroy.d.ts +0 -2
- package/dist/scripts/tenant-destroy.js +0 -104
- package/dist/scripts/tenant-dev.d.ts +0 -1
- package/dist/scripts/tenant-dev.js +0 -171
- package/dist/scripts/tenant-lint.d.ts +0 -1
- package/dist/scripts/tenant-lint.js +0 -4
- package/dist/scripts/tenant-test.d.ts +0 -1
- package/dist/scripts/tenant-test.js +0 -4
- package/dist/scripts/test-cloudflare-local.d.ts +0 -1
- package/dist/scripts/test-cloudflare-local.js +0 -212
- package/dist/scripts/test-scaffold.d.ts +0 -2
- package/dist/scripts/test-scaffold.js +0 -297
- package/dist/scripts/treeseed.d.ts +0 -2
- package/dist/scripts/treeseed.js +0 -4
- package/dist/scripts/validate-templates.d.ts +0 -2
- package/dist/scripts/validate-templates.js +0 -4
- package/dist/scripts/watch-dev-lib.d.ts +0 -21
- package/dist/scripts/watch-dev-lib.js +0 -277
- package/dist/scripts/workspace-close.d.ts +0 -2
- package/dist/scripts/workspace-close.js +0 -24
- package/dist/scripts/workspace-command-e2e.d.ts +0 -2
- package/dist/scripts/workspace-command-e2e.js +0 -718
- package/dist/scripts/workspace-lint.d.ts +0 -1
- package/dist/scripts/workspace-lint.js +0 -9
- package/dist/scripts/workspace-preflight-lib.d.ts +0 -36
- package/dist/scripts/workspace-preflight-lib.js +0 -179
- package/dist/scripts/workspace-preflight.d.ts +0 -2
- package/dist/scripts/workspace-preflight.js +0 -22
- package/dist/scripts/workspace-publish-changed-packages.d.ts +0 -1
- package/dist/scripts/workspace-publish-changed-packages.js +0 -16
- package/dist/scripts/workspace-release-verify.d.ts +0 -1
- package/dist/scripts/workspace-release-verify.js +0 -81
- package/dist/scripts/workspace-release.d.ts +0 -2
- package/dist/scripts/workspace-release.js +0 -42
- package/dist/scripts/workspace-save-lib.d.ts +0 -42
- package/dist/scripts/workspace-save-lib.js +0 -220
- package/dist/scripts/workspace-save.d.ts +0 -2
- package/dist/scripts/workspace-save.js +0 -124
- package/dist/scripts/workspace-start-warning.js +0 -3
- package/dist/scripts/workspace-start.d.ts +0 -2
- package/dist/scripts/workspace-start.js +0 -71
- package/dist/scripts/workspace-test-unit.d.ts +0 -1
- package/dist/scripts/workspace-test-unit.js +0 -4
- package/dist/scripts/workspace-test.d.ts +0 -1
- package/dist/scripts/workspace-test.js +0 -11
- package/dist/scripts/workspace-tools.d.ts +0 -13
- package/dist/scripts/workspace-tools.js +0 -226
- package/dist/src/cli/handlers/continue.d.ts +0 -2
- package/dist/src/cli/handlers/prepare.d.ts +0 -2
- package/dist/src/cli/handlers/promote.d.ts +0 -2
- package/dist/src/cli/handlers/publish.d.ts +0 -2
- package/dist/src/cli/handlers/setup.d.ts +0 -2
- package/dist/src/cli/handlers/start.d.ts +0 -3
- package/dist/src/cli/handlers/teardown.d.ts +0 -2
- package/dist/src/cli/handlers/work.d.ts +0 -2
- package/dist/src/cli/main.d.ts +0 -6
- package/dist/src/cli/parser.d.ts +0 -3
- package/dist/src/cli/registry.d.ts +0 -27
- package/dist/src/cli/runtime.d.ts +0 -4
- package/dist/src/cli/types.d.ts +0 -71
- /package/dist/{src/cli → cli}/handlers/close.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/config.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/destroy.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/doctor.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/init.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/release.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/rollback.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/save.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/status.d.ts +0 -0
- /package/dist/{src/cli → cli}/handlers/utils.d.ts +0 -0
- /package/dist/{scripts/workspace-start-warning.d.ts → cli/operations-types.js} +0 -0
- /package/dist/{src/cli → cli}/repair.d.ts +0 -0
- /package/dist/{src/index.d.ts → index.d.ts} +0 -0
|
@@ -1,718 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env node
|
|
2
|
-
import { cpSync, mkdirSync, mkdtempSync, readdirSync, readFileSync, rmSync, symlinkSync, writeFileSync } from 'node:fs';
|
|
3
|
-
import { spawn, spawnSync } from 'node:child_process';
|
|
4
|
-
import { tmpdir } from 'node:os';
|
|
5
|
-
import { dirname, join, resolve } from 'node:path';
|
|
6
|
-
import yaml from 'yaml';
|
|
7
|
-
import { packageScriptPath } from './package-tools.js';
|
|
8
|
-
import { collectCliPreflight, createWranglerCommandEnv, formatCliPreflightReport, writeJsonArtifact } from './workspace-preflight-lib.js';
|
|
9
|
-
import { ensureDeployWorkflow, parseGitHubRepositoryFromRemote } from './github-automation-lib.js';
|
|
10
|
-
import { MERGE_CONFLICT_EXIT_CODE } from './workspace-save-lib.js';
|
|
11
|
-
import { createTempDir, run, workspacePackages, workspaceRoot } from './workspace-tools.js';
|
|
12
|
-
const root = workspaceRoot();
|
|
13
|
-
const argv = new Set(process.argv.slice(2));
|
|
14
|
-
const mode = argv.has('--mode=staging') || argv.has('--staging')
|
|
15
|
-
? 'staging'
|
|
16
|
-
: argv.has('--mode=full') || argv.has('--full')
|
|
17
|
-
? 'full'
|
|
18
|
-
: 'local';
|
|
19
|
-
const runLocal = mode === 'local' || mode === 'full';
|
|
20
|
-
const runStaging = mode === 'staging' || mode === 'full';
|
|
21
|
-
const npmCacheRoot = resolve(process.env.TREESEED_RELEASE_NPM_CACHE_DIR
|
|
22
|
-
?? resolve(tmpdir(), 'treeseed-npm-cache'));
|
|
23
|
-
const artifactsRoot = process.env.TREESEED_E2E_ARTIFACTS_DIR
|
|
24
|
-
? resolve(process.env.TREESEED_E2E_ARTIFACTS_DIR)
|
|
25
|
-
: createTempDir('treeseed-command-e2e-');
|
|
26
|
-
mkdirSync(artifactsRoot, { recursive: true });
|
|
27
|
-
const report = {
|
|
28
|
-
mode,
|
|
29
|
-
startedAt: new Date().toISOString(),
|
|
30
|
-
artifactsRoot,
|
|
31
|
-
steps: [],
|
|
32
|
-
summary: {
|
|
33
|
-
ok: true,
|
|
34
|
-
},
|
|
35
|
-
};
|
|
36
|
-
function log(message) {
|
|
37
|
-
console.log(`[treeseed:e2e ${new Date().toISOString()}] ${message}`);
|
|
38
|
-
}
|
|
39
|
-
function sanitizeFileName(value) {
|
|
40
|
-
return value.replace(/[^A-Za-z0-9._-]+/g, '-');
|
|
41
|
-
}
|
|
42
|
-
function shellEscape(value) {
|
|
43
|
-
return `'${String(value).replaceAll("'", `'\\''`)}'`;
|
|
44
|
-
}
|
|
45
|
-
function cacheEnv(extraEnv = {}) {
|
|
46
|
-
return {
|
|
47
|
-
npm_config_cache: npmCacheRoot,
|
|
48
|
-
NPM_CONFIG_CACHE: npmCacheRoot,
|
|
49
|
-
npm_config_prefer_offline: 'true',
|
|
50
|
-
npm_config_audit: 'false',
|
|
51
|
-
npm_config_fund: 'false',
|
|
52
|
-
...extraEnv,
|
|
53
|
-
};
|
|
54
|
-
}
|
|
55
|
-
function writeCommandArtifact(label, payload) {
|
|
56
|
-
writeJsonArtifact(resolve(artifactsRoot, 'commands', `${sanitizeFileName(label)}.json`), payload);
|
|
57
|
-
}
|
|
58
|
-
function recordStep(name, data) {
|
|
59
|
-
report.steps.push({
|
|
60
|
-
name,
|
|
61
|
-
...data,
|
|
62
|
-
});
|
|
63
|
-
}
|
|
64
|
-
async function withStep(name, action) {
|
|
65
|
-
const startedAt = Date.now();
|
|
66
|
-
log(`${name} started`);
|
|
67
|
-
try {
|
|
68
|
-
const result = await action();
|
|
69
|
-
recordStep(name, {
|
|
70
|
-
status: 'completed',
|
|
71
|
-
durationMs: Date.now() - startedAt,
|
|
72
|
-
result,
|
|
73
|
-
});
|
|
74
|
-
log(`${name} completed in ${((Date.now() - startedAt) / 1000).toFixed(1)}s`);
|
|
75
|
-
return result;
|
|
76
|
-
}
|
|
77
|
-
catch (error) {
|
|
78
|
-
report.summary.ok = false;
|
|
79
|
-
recordStep(name, {
|
|
80
|
-
status: 'failed',
|
|
81
|
-
durationMs: Date.now() - startedAt,
|
|
82
|
-
error: error instanceof Error ? error.message : String(error),
|
|
83
|
-
});
|
|
84
|
-
log(`${name} failed in ${((Date.now() - startedAt) / 1000).toFixed(1)}s`);
|
|
85
|
-
throw error;
|
|
86
|
-
}
|
|
87
|
-
}
|
|
88
|
-
function writeReport() {
|
|
89
|
-
report.finishedAt = new Date().toISOString();
|
|
90
|
-
writeJsonArtifact(resolve(artifactsRoot, 'report.json'), report);
|
|
91
|
-
}
|
|
92
|
-
function runCommand(label, command, commandArgs, options = {}) {
|
|
93
|
-
const startedAt = Date.now();
|
|
94
|
-
const result = spawnSync(command, commandArgs, {
|
|
95
|
-
cwd: options.cwd ?? root,
|
|
96
|
-
env: { ...process.env, ...(options.env ?? {}) },
|
|
97
|
-
stdio: 'pipe',
|
|
98
|
-
encoding: 'utf8',
|
|
99
|
-
timeout: options.timeoutMs,
|
|
100
|
-
});
|
|
101
|
-
const entry = {
|
|
102
|
-
label,
|
|
103
|
-
command,
|
|
104
|
-
args: commandArgs,
|
|
105
|
-
cwd: options.cwd ?? root,
|
|
106
|
-
status: result.status ?? 1,
|
|
107
|
-
signal: result.signal ?? null,
|
|
108
|
-
durationMs: Date.now() - startedAt,
|
|
109
|
-
stdout: result.stdout ?? '',
|
|
110
|
-
stderr: result.stderr ?? '',
|
|
111
|
-
};
|
|
112
|
-
writeCommandArtifact(label, entry);
|
|
113
|
-
const allowedExitCodes = new Set(options.allowedExitCodes ?? [0]);
|
|
114
|
-
if (!allowedExitCodes.has(entry.status)) {
|
|
115
|
-
throw new Error([
|
|
116
|
-
`${label} failed with exit ${entry.status}.`,
|
|
117
|
-
entry.stdout.trim(),
|
|
118
|
-
entry.stderr.trim(),
|
|
119
|
-
].filter(Boolean).join('\n'));
|
|
120
|
-
}
|
|
121
|
-
return entry;
|
|
122
|
-
}
|
|
123
|
-
function randomPort(base) {
|
|
124
|
-
return base + Math.floor(Math.random() * 500);
|
|
125
|
-
}
|
|
126
|
-
function waitForRegexOutput(bufferRef, regex, timeoutMs, label) {
|
|
127
|
-
return new Promise((resolvePromise, reject) => {
|
|
128
|
-
const startedAt = Date.now();
|
|
129
|
-
const interval = setInterval(() => {
|
|
130
|
-
if (regex.test(bufferRef.text)) {
|
|
131
|
-
clearInterval(interval);
|
|
132
|
-
resolvePromise({
|
|
133
|
-
durationMs: Date.now() - startedAt,
|
|
134
|
-
});
|
|
135
|
-
return;
|
|
136
|
-
}
|
|
137
|
-
if (Date.now() - startedAt >= timeoutMs) {
|
|
138
|
-
clearInterval(interval);
|
|
139
|
-
reject(new Error(`${label} timed out waiting for ${regex}.`));
|
|
140
|
-
}
|
|
141
|
-
}, 250);
|
|
142
|
-
});
|
|
143
|
-
}
|
|
144
|
-
function isBindFailureOutput(source) {
|
|
145
|
-
return /listen EPERM|Address already in use|failed: ::bind\(|EADDRINUSE/i.test(source);
|
|
146
|
-
}
|
|
147
|
-
function portFromArgs(args) {
|
|
148
|
-
const portFlagIndex = args.findIndex((value) => value === '--port');
|
|
149
|
-
if (portFlagIndex >= 0) {
|
|
150
|
-
const candidate = Number(args[portFlagIndex + 1]);
|
|
151
|
-
return Number.isFinite(candidate) ? candidate : null;
|
|
152
|
-
}
|
|
153
|
-
return null;
|
|
154
|
-
}
|
|
155
|
-
async function waitForLocalDevReady(bufferRef, port, timeoutMs, label, readyPattern, childState) {
|
|
156
|
-
const startedAt = Date.now();
|
|
157
|
-
while (Date.now() - startedAt < timeoutMs) {
|
|
158
|
-
if (readyPattern.test(bufferRef.text)) {
|
|
159
|
-
return;
|
|
160
|
-
}
|
|
161
|
-
if (childState.exited) {
|
|
162
|
-
throw new Error(`${label} exited before reaching readiness (exit=${childState.code ?? 'unknown'} signal=${childState.signal ?? 'none'}).\n${bufferRef.text.trim()}`);
|
|
163
|
-
}
|
|
164
|
-
if (port) {
|
|
165
|
-
try {
|
|
166
|
-
const response = await fetch(`http://127.0.0.1:${port}`);
|
|
167
|
-
if (response.ok || response.status === 404) {
|
|
168
|
-
return;
|
|
169
|
-
}
|
|
170
|
-
}
|
|
171
|
-
catch {
|
|
172
|
-
// Keep polling.
|
|
173
|
-
}
|
|
174
|
-
}
|
|
175
|
-
await new Promise((resolvePromise) => setTimeout(resolvePromise, 500));
|
|
176
|
-
}
|
|
177
|
-
throw new Error(`${label} timed out waiting for dev readiness.`);
|
|
178
|
-
}
|
|
179
|
-
function runBuildSmoke(label, cwd, extraArgs = []) {
|
|
180
|
-
const commandLine = ['npm', 'run', 'build', '--', ...extraArgs].map(shellEscape).join(' ');
|
|
181
|
-
const result = spawnSync('script', ['-qefc', commandLine, '/dev/null'], {
|
|
182
|
-
cwd,
|
|
183
|
-
env: { ...process.env, ...cacheEnv() },
|
|
184
|
-
stdio: 'pipe',
|
|
185
|
-
encoding: 'utf8',
|
|
186
|
-
timeout: 1800000,
|
|
187
|
-
});
|
|
188
|
-
const entry = {
|
|
189
|
-
label,
|
|
190
|
-
command: 'script',
|
|
191
|
-
args: ['-qefc', commandLine, '/dev/null'],
|
|
192
|
-
cwd,
|
|
193
|
-
status: result.status ?? 1,
|
|
194
|
-
signal: result.signal ?? null,
|
|
195
|
-
durationMs: 0,
|
|
196
|
-
stdout: result.stdout ?? '',
|
|
197
|
-
stderr: result.stderr ?? '',
|
|
198
|
-
};
|
|
199
|
-
writeCommandArtifact(label, entry);
|
|
200
|
-
if (entry.status !== 0) {
|
|
201
|
-
throw new Error(`${label} failed with exit ${entry.status}.\n${entry.stdout}\n${entry.stderr}`.trim());
|
|
202
|
-
}
|
|
203
|
-
return entry;
|
|
204
|
-
}
|
|
205
|
-
function createManualPackageTarball(pkg) {
|
|
206
|
-
const stageRoot = mkdtempSync(join(tmpdir(), 'treeseed-package-stage-'));
|
|
207
|
-
const packageStageRoot = resolve(stageRoot, 'package');
|
|
208
|
-
mkdirSync(packageStageRoot, { recursive: true });
|
|
209
|
-
for (const entry of ['package.json', 'README.md', ...(pkg.packageJson.files ?? [])]) {
|
|
210
|
-
const sourcePath = resolve(pkg.dir, entry);
|
|
211
|
-
const targetPath = resolve(packageStageRoot, entry);
|
|
212
|
-
cpSync(sourcePath, targetPath, { recursive: true });
|
|
213
|
-
}
|
|
214
|
-
const tarballPath = resolve(tmpdir(), `${pkg.name.replace(/^@/, '').replaceAll('/', '-')}-${pkg.packageJson.version}-e2e.tgz`);
|
|
215
|
-
run('tar', ['-czf', tarballPath, '-C', stageRoot, 'package'], { cwd: root });
|
|
216
|
-
return {
|
|
217
|
-
packageName: pkg.name,
|
|
218
|
-
tarballPath,
|
|
219
|
-
stageRoot,
|
|
220
|
-
};
|
|
221
|
-
}
|
|
222
|
-
async function runDevSession(label, cwd, { args: devArgs = [], env = {}, readyPattern = /ready on|http:\/\/127\.0\.0\.1|http:\/\/localhost|starting unified wrangler watch mode/i, rebuildPattern = /rebuild complete|detected \d+ change/i, mutate = null, timeoutMs = 180000, } = {}) {
|
|
223
|
-
const port = portFromArgs(devArgs);
|
|
224
|
-
const commandLine = [process.execPath, packageScriptPath('treeseed'), 'dev', ...devArgs].map(shellEscape).join(' ');
|
|
225
|
-
const child = spawn('script', ['-qefc', commandLine, '/dev/null'], {
|
|
226
|
-
cwd,
|
|
227
|
-
env: {
|
|
228
|
-
...process.env,
|
|
229
|
-
...cacheEnv(createWranglerCommandEnv(env)),
|
|
230
|
-
},
|
|
231
|
-
stdio: ['ignore', 'pipe', 'pipe'],
|
|
232
|
-
});
|
|
233
|
-
const output = { text: '' };
|
|
234
|
-
const childState = { exited: false, code: null, signal: null };
|
|
235
|
-
child.stdout.on('data', (chunk) => {
|
|
236
|
-
output.text += String(chunk);
|
|
237
|
-
});
|
|
238
|
-
child.stderr.on('data', (chunk) => {
|
|
239
|
-
output.text += String(chunk);
|
|
240
|
-
});
|
|
241
|
-
child.on('exit', (code, signal) => {
|
|
242
|
-
childState.exited = true;
|
|
243
|
-
childState.code = code;
|
|
244
|
-
childState.signal = signal;
|
|
245
|
-
});
|
|
246
|
-
const logPath = resolve(artifactsRoot, 'commands', `${sanitizeFileName(label)}.log`);
|
|
247
|
-
mkdirSync(dirname(logPath), { recursive: true });
|
|
248
|
-
let pendingError = null;
|
|
249
|
-
try {
|
|
250
|
-
await waitForLocalDevReady(output, port, timeoutMs, `${label} readiness`, readyPattern, childState);
|
|
251
|
-
if (mutate) {
|
|
252
|
-
await mutate();
|
|
253
|
-
await waitForRegexOutput(output, rebuildPattern, timeoutMs, `${label} rebuild`);
|
|
254
|
-
}
|
|
255
|
-
}
|
|
256
|
-
catch (error) {
|
|
257
|
-
pendingError = error;
|
|
258
|
-
}
|
|
259
|
-
finally {
|
|
260
|
-
if (!childState.exited) {
|
|
261
|
-
child.kill('SIGTERM');
|
|
262
|
-
await new Promise((resolvePromise) => child.once('exit', () => resolvePromise()));
|
|
263
|
-
}
|
|
264
|
-
writeFileSync(logPath, output.text, 'utf8');
|
|
265
|
-
}
|
|
266
|
-
if (pendingError && childState.code && isBindFailureOutput(output.text)) {
|
|
267
|
-
if (mutate) {
|
|
268
|
-
await mutate();
|
|
269
|
-
runBuildSmoke(`${label}-fallback-build`, cwd);
|
|
270
|
-
}
|
|
271
|
-
return {
|
|
272
|
-
logPath,
|
|
273
|
-
outputPreview: output.text.slice(-4000),
|
|
274
|
-
degraded: 'bind_failure_fallback',
|
|
275
|
-
};
|
|
276
|
-
}
|
|
277
|
-
if (pendingError) {
|
|
278
|
-
throw pendingError;
|
|
279
|
-
}
|
|
280
|
-
return {
|
|
281
|
-
logPath,
|
|
282
|
-
outputPreview: output.text.slice(-4000),
|
|
283
|
-
};
|
|
284
|
-
}
|
|
285
|
-
function rewriteScaffoldDependencies(siteRoot, dependencies) {
|
|
286
|
-
const packageJsonPath = resolve(siteRoot, 'package.json');
|
|
287
|
-
const packageJson = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
|
|
288
|
-
packageJson.dependencies = packageJson.dependencies ?? {};
|
|
289
|
-
for (const [name, specifier] of dependencies.entries()) {
|
|
290
|
-
packageJson.dependencies[name] = specifier;
|
|
291
|
-
}
|
|
292
|
-
writeFileSync(packageJsonPath, `${JSON.stringify(packageJson, null, 2)}\n`, 'utf8');
|
|
293
|
-
}
|
|
294
|
-
function installManualPackageTarball(siteRoot, packaged) {
|
|
295
|
-
const extractRoot = mkdtempSync(join(tmpdir(), 'treeseed-package-extract-'));
|
|
296
|
-
run('tar', ['-xzf', packaged.tarballPath, '-C', extractRoot], { cwd: root });
|
|
297
|
-
const scopeRoot = resolve(siteRoot, 'node_modules', '@treeseed');
|
|
298
|
-
mkdirSync(scopeRoot, { recursive: true });
|
|
299
|
-
const targetPath = resolve(scopeRoot, packaged.packageName.split('/')[1]);
|
|
300
|
-
cpSync(resolve(extractRoot, 'package'), targetPath, { recursive: true });
|
|
301
|
-
rmSync(extractRoot, { recursive: true, force: true });
|
|
302
|
-
}
|
|
303
|
-
function scaffoldTenant(siteRoot, dependencies, packagedTarballs) {
|
|
304
|
-
run(process.execPath, [
|
|
305
|
-
packageScriptPath('scaffold-site'),
|
|
306
|
-
siteRoot,
|
|
307
|
-
'--name',
|
|
308
|
-
'Treeseed E2E Site',
|
|
309
|
-
'--site-url',
|
|
310
|
-
'https://staging.treeseed-e2e.example.com',
|
|
311
|
-
'--contact-email',
|
|
312
|
-
'e2e@example.com',
|
|
313
|
-
], { cwd: root });
|
|
314
|
-
rewriteScaffoldDependencies(siteRoot, dependencies);
|
|
315
|
-
for (const packaged of packagedTarballs) {
|
|
316
|
-
installManualPackageTarball(siteRoot, packaged);
|
|
317
|
-
}
|
|
318
|
-
const sharedNodeModules = resolve(root, 'node_modules');
|
|
319
|
-
const sharedLinks = readdirSync(sharedNodeModules, { withFileTypes: true })
|
|
320
|
-
.filter((entry) => entry.name !== '.bin' && entry.name !== '@treeseed')
|
|
321
|
-
.map((entry) => [entry.name, resolve(sharedNodeModules, entry.name)]);
|
|
322
|
-
const copiedSharedPackages = new Set(['astro', '@astrojs']);
|
|
323
|
-
for (const [name, target] of sharedLinks) {
|
|
324
|
-
const targetPath = resolve(siteRoot, 'node_modules', name);
|
|
325
|
-
mkdirSync(dirname(targetPath), { recursive: true });
|
|
326
|
-
if (copiedSharedPackages.has(name)) {
|
|
327
|
-
cpSync(target, targetPath, { recursive: true });
|
|
328
|
-
continue;
|
|
329
|
-
}
|
|
330
|
-
try {
|
|
331
|
-
symlinkSync(target, targetPath, 'dir');
|
|
332
|
-
}
|
|
333
|
-
catch (error) {
|
|
334
|
-
if (!(error instanceof Error) || !String(error.message).includes('EEXIST')) {
|
|
335
|
-
throw error;
|
|
336
|
-
}
|
|
337
|
-
}
|
|
338
|
-
}
|
|
339
|
-
}
|
|
340
|
-
function readTenantConfig(tenantRoot) {
|
|
341
|
-
return yaml.parse(readFileSync(resolve(tenantRoot, 'treeseed.site.yaml'), 'utf8'));
|
|
342
|
-
}
|
|
343
|
-
function appendSaveMarker(filePath, marker) {
|
|
344
|
-
const source = readFileSync(filePath, 'utf8');
|
|
345
|
-
writeFileSync(filePath, `${source.trimEnd()}\n\nTreeseed E2E marker: ${marker}\n`, 'utf8');
|
|
346
|
-
}
|
|
347
|
-
function writeWorkspaceStub(repoDir) {
|
|
348
|
-
writeFileSync(resolve(repoDir, 'package.json'), `${JSON.stringify({
|
|
349
|
-
name: 'treeseed-save-stub',
|
|
350
|
-
private: true,
|
|
351
|
-
workspaces: ['packages/*'],
|
|
352
|
-
}, null, 2)}\n`, 'utf8');
|
|
353
|
-
}
|
|
354
|
-
function cloneLocalWorkspace() {
|
|
355
|
-
const cloneRoot = mkdtempSync(join(tmpdir(), 'treeseed-local-workspace-'));
|
|
356
|
-
run('git', ['clone', '--depth', '1', resolve(root, '..'), cloneRoot], { cwd: root });
|
|
357
|
-
const workingRoot = resolve(cloneRoot, 'docs');
|
|
358
|
-
run('git', ['config', 'user.name', 'Treeseed E2E'], { cwd: cloneRoot });
|
|
359
|
-
run('git', ['config', 'user.email', 'e2e@treeseed.dev'], { cwd: cloneRoot });
|
|
360
|
-
const workflow = ensureDeployWorkflow(workingRoot);
|
|
361
|
-
if (workflow.changed) {
|
|
362
|
-
run('git', ['add', 'docs/.github/workflows/deploy.yml'], { cwd: cloneRoot });
|
|
363
|
-
run('git', ['commit', '-m', 'test: sync deploy workflow for no-op save guard'], { cwd: cloneRoot });
|
|
364
|
-
}
|
|
365
|
-
return {
|
|
366
|
-
cloneRoot,
|
|
367
|
-
workingRoot,
|
|
368
|
-
};
|
|
369
|
-
}
|
|
370
|
-
function cloneLocalWorkspaceWithBareOrigin() {
|
|
371
|
-
const bareRoot = mkdtempSync(join(tmpdir(), 'treeseed-local-origin-'));
|
|
372
|
-
const cloneRoot = mkdtempSync(join(tmpdir(), 'treeseed-local-workspace-'));
|
|
373
|
-
run('git', ['clone', '--bare', resolve(root, '..'), bareRoot], { cwd: root });
|
|
374
|
-
run('git', ['clone', bareRoot, cloneRoot], { cwd: root });
|
|
375
|
-
const workingRoot = resolve(cloneRoot, 'docs');
|
|
376
|
-
run('git', ['config', 'user.name', 'Treeseed E2E'], { cwd: cloneRoot });
|
|
377
|
-
run('git', ['config', 'user.email', 'e2e@treeseed.dev'], { cwd: cloneRoot });
|
|
378
|
-
return {
|
|
379
|
-
bareRoot,
|
|
380
|
-
cloneRoot,
|
|
381
|
-
workingRoot,
|
|
382
|
-
};
|
|
383
|
-
}
|
|
384
|
-
function resolveRepositorySlug(repoDir) {
|
|
385
|
-
const remote = run('git', ['remote', 'get-url', 'origin'], { cwd: repoDir, capture: true }).trim();
|
|
386
|
-
return parseGitHubRepositoryFromRemote(remote);
|
|
387
|
-
}
|
|
388
|
-
function isProductionLikeTarget(repository, siteUrl) {
|
|
389
|
-
return repository === 'karyon-life/karyon' || /karyon\.life/i.test(siteUrl ?? '');
|
|
390
|
-
}
|
|
391
|
-
async function waitForGitHubWorkflow(repository, headSha, { timeoutMs = 900000 } = {}) {
|
|
392
|
-
const startedAt = Date.now();
|
|
393
|
-
while (Date.now() - startedAt < timeoutMs) {
|
|
394
|
-
const result = runCommand(`gh-run-list-${headSha.slice(0, 7)}`, 'gh', ['run', 'list', '--repo', repository, '--limit', '20', '--json', 'databaseId,headSha,status,conclusion,url,workflowName,event,displayTitle'], { cwd: root });
|
|
395
|
-
const runs = JSON.parse(result.stdout || '[]');
|
|
396
|
-
const match = runs.find((entry) => entry.headSha === headSha && /deploy/i.test(entry.workflowName ?? ''));
|
|
397
|
-
if (match?.status === 'completed') {
|
|
398
|
-
if (match.conclusion !== 'success') {
|
|
399
|
-
throw new Error(`GitHub workflow ${match.workflowName} for ${headSha} concluded with ${match.conclusion}.`);
|
|
400
|
-
}
|
|
401
|
-
return match;
|
|
402
|
-
}
|
|
403
|
-
await new Promise((resolvePromise) => setTimeout(resolvePromise, 10000));
|
|
404
|
-
}
|
|
405
|
-
throw new Error(`Timed out waiting for GitHub deploy workflow for ${headSha}.`);
|
|
406
|
-
}
|
|
407
|
-
async function waitForUrl(url, { contains = null, timeoutMs = 300000 } = {}) {
|
|
408
|
-
const startedAt = Date.now();
|
|
409
|
-
while (Date.now() - startedAt < timeoutMs) {
|
|
410
|
-
try {
|
|
411
|
-
const response = await fetch(url);
|
|
412
|
-
const body = await response.text();
|
|
413
|
-
if (response.ok && (!contains || body.includes(contains))) {
|
|
414
|
-
return {
|
|
415
|
-
status: response.status,
|
|
416
|
-
bodyPreview: body.slice(0, 1000),
|
|
417
|
-
};
|
|
418
|
-
}
|
|
419
|
-
}
|
|
420
|
-
catch {
|
|
421
|
-
// Keep polling until timeout.
|
|
422
|
-
}
|
|
423
|
-
await new Promise((resolvePromise) => setTimeout(resolvePromise, 5000));
|
|
424
|
-
}
|
|
425
|
-
throw new Error(`Timed out waiting for ${url}${contains ? ` to contain "${contains}"` : ''}.`);
|
|
426
|
-
}
|
|
427
|
-
function cloneStagingRepository() {
|
|
428
|
-
const gitUrl = process.env.TREESEED_E2E_STAGING_GIT_URL;
|
|
429
|
-
if (!gitUrl) {
|
|
430
|
-
throw new Error('TREESEED_E2E_STAGING_GIT_URL is required for staging E2E runs.');
|
|
431
|
-
}
|
|
432
|
-
const cloneRoot = mkdtempSync(join(tmpdir(), 'treeseed-staging-clone-'));
|
|
433
|
-
run('git', ['clone', '--depth', '1', gitUrl, cloneRoot], { cwd: root });
|
|
434
|
-
const subdir = process.env.TREESEED_E2E_STAGING_WORKING_DIRECTORY?.trim() || 'docs';
|
|
435
|
-
const workingRoot = resolve(cloneRoot, subdir);
|
|
436
|
-
run('npm', ['install', '--prefer-offline', '--no-audit', '--no-fund'], {
|
|
437
|
-
cwd: workingRoot,
|
|
438
|
-
env: cacheEnv(),
|
|
439
|
-
});
|
|
440
|
-
run('git', ['config', 'user.name', 'Treeseed E2E'], { cwd: cloneRoot });
|
|
441
|
-
run('git', ['config', 'user.email', 'e2e@treeseed.dev'], { cwd: cloneRoot });
|
|
442
|
-
return {
|
|
443
|
-
cloneRoot,
|
|
444
|
-
workingRoot,
|
|
445
|
-
};
|
|
446
|
-
}
|
|
447
|
-
/**
 * Run the local (no live credentials) E2E suite: preflight, dev-server smoke
 * tests, tarball packaging of workspace packages, a scaffolded tenant
 * lifecycle (init/dev/build/dry-run deploy/dry-run destroy), and a series of
 * `save` command guard and success scenarios using local git repositories.
 * Temp directories and tarballs are cleaned up in finally blocks.
 */
async function runLocalSuite() {
    // Preflight does not require auth for the local suite; the report is
    // archived as an artifact and echoed to the log.
    const preflight = collectCliPreflight({ cwd: root, requireAuth: false });
    writeJsonArtifact(resolve(artifactsRoot, 'preflight.local.json'), preflight);
    log(formatCliPreflightReport(preflight));
    // Smoke-test the dev server inside the workspace itself.
    await withStep('workspace dev smoke', async () => {
        return await runDevSession('workspace-dev-smoke', root, {
            // randomPort(8800) presumably picks a free-ish port near 8800 to
            // avoid collisions with other local services — TODO confirm.
            args: ['--port', String(randomPort(8800))],
        });
    });
    // packageName -> tarball path, consumed by scaffoldTenant below.
    const dependencies = new Map();
    const manualTarballs = [];
    await withStep('local package tarball preparation', async () => {
        for (const packageName of ['@treeseed/sdk', '@treeseed/core']) {
            const pkg = workspacePackages(root).find((entry) => entry.name === packageName);
            if (!pkg) {
                throw new Error(`Unable to find workspace package ${packageName}.`);
            }
            // Build dist output before packing so the tarball is current.
            run('npm', ['run', 'build:dist'], { cwd: pkg.dir });
            const packaged = createManualPackageTarball(pkg);
            manualTarballs.push(packaged);
            dependencies.set(packageName, packaged.tarballPath);
        }
        // Returned value is recorded by withStep as the step's result.
        return Object.fromEntries(dependencies);
    });
    // Scaffolded-tenant lifecycle in an isolated temp directory.
    const siteRoot = mkdtempSync(join(tmpdir(), 'treeseed-command-e2e-site-'));
    try {
        await withStep('scaffold tenant init', async () => {
            scaffoldTenant(siteRoot, dependencies, manualTarballs);
            return { siteRoot };
        });
        await withStep('scaffold tenant dev watch smoke', async () => {
            const notePath = resolve(siteRoot, 'src', 'content', 'notes', 'first-note.mdx');
            return await runDevSession('scaffold-dev-watch', siteRoot, {
                args: ['--watch', '--port', String(randomPort(9300))],
                // Mutate a content file mid-session to exercise watch mode.
                mutate: async () => {
                    appendSaveMarker(notePath, 'treeseed-e2e-local-watch');
                },
            });
        });
        await withStep('scaffold tenant build', async () => {
            return runCommand('scaffold-build', process.execPath, [packageScriptPath('treeseed'), 'build'], {
                cwd: siteRoot,
                env: cacheEnv(),
            });
        });
        // Deploy/destroy run with --dry-run so the local suite never touches
        // real infrastructure.
        await withStep('scaffold tenant deploy dry-run', async () => {
            return runCommand('scaffold-deploy-dry-run', process.execPath, [packageScriptPath('treeseed'), 'deploy', '--dry-run'], {
                cwd: siteRoot,
                env: cacheEnv(),
            });
        });
        await withStep('scaffold tenant destroy dry-run', async () => {
            const deployConfig = readTenantConfig(siteRoot);
            return runCommand('scaffold-destroy-dry-run', process.execPath, [packageScriptPath('treeseed'), 'destroy', '--dry-run', '--skip-confirmation', '--confirm', String(deployConfig.slug)], {
                cwd: siteRoot,
                env: cacheEnv(),
            });
        });
    }
    finally {
        // Always remove the scaffold dir and every packed tarball/stage dir,
        // even when a step above failed.
        rmSync(siteRoot, { recursive: true, force: true });
        for (const packaged of manualTarballs) {
            rmSync(packaged.tarballPath, { force: true });
            rmSync(packaged.stageRoot, { recursive: true, force: true });
        }
    }
    // --- `save` guard scenarios: each expects the CLI to fail with exit 1 ---
    // Guard 1: `save` without a commit message must be rejected.
    await withStep('save guard: missing message', async () => {
        return runCommand('save-missing-message', process.execPath, [packageScriptPath('treeseed'), 'save'], {
            cwd: root,
            allowedExitCodes: [1],
        });
    });
    // Guard 2: `save` on a non-main branch must be rejected.
    await withStep('save guard: wrong branch', async () => {
        const repoDir = mkdtempSync(join(tmpdir(), 'treeseed-save-branch-'));
        try {
            run('git', ['init', '--initial-branch=feature/e2e'], { cwd: repoDir });
            writeWorkspaceStub(repoDir);
            return runCommand('save-wrong-branch', process.execPath, [packageScriptPath('treeseed'), 'save', 'test: wrong branch'], {
                cwd: repoDir,
                allowedExitCodes: [1],
            });
        }
        finally {
            rmSync(repoDir, { recursive: true, force: true });
        }
    });
    // Guard 3: `save` in a repo with no `origin` remote must be rejected.
    await withStep('save guard: missing origin', async () => {
        const repoDir = mkdtempSync(join(tmpdir(), 'treeseed-save-origin-'));
        try {
            run('git', ['init', '--initial-branch=main'], { cwd: repoDir });
            writeWorkspaceStub(repoDir);
            return runCommand('save-missing-origin', process.execPath, [packageScriptPath('treeseed'), 'save', 'test: missing origin'], {
                cwd: repoDir,
                allowedExitCodes: [1],
            });
        }
        finally {
            rmSync(repoDir, { recursive: true, force: true });
        }
    });
    // Guard 4: `save` with nothing to commit must be rejected.
    await withStep('save guard: no changes', async () => {
        const clonedWorkspace = cloneLocalWorkspace();
        try {
            return runCommand('save-no-changes', process.execPath, [packageScriptPath('treeseed'), 'save', 'test: no-op save'], {
                cwd: clonedWorkspace.workingRoot,
                allowedExitCodes: [1],
                env: cacheEnv(),
            });
        }
        finally {
            rmSync(clonedWorkspace.cloneRoot, { recursive: true, force: true });
        }
    });
    // Happy path: save against a local bare "origin" with GitHub automation
    // stubbed out, then verify the commit was actually pushed.
    await withStep('save success: local bare origin with stubbed automation', async () => {
        const clonedWorkspace = cloneLocalWorkspaceWithBareOrigin();
        try {
            const notePath = resolve(clonedWorkspace.workingRoot, 'src/content/notes/first-note.mdx');
            appendSaveMarker(notePath, 'treeseed-e2e-local-save-success');
            const saveReportPath = resolve(artifactsRoot, 'save-local-success.json');
            const result = runCommand('save-local-success', process.execPath, [packageScriptPath('treeseed'), 'save', 'test: local save success'], {
                cwd: clonedWorkspace.workingRoot,
                env: {
                    ...cacheEnv(),
                    // Stub mode keeps the success path off the real GitHub API.
                    TREESEED_GITHUB_AUTOMATION_MODE: 'stub',
                    TREESEED_SAVE_REPORT_PATH: saveReportPath,
                },
                // 30 minutes: save may build/install under the hood.
                timeoutMs: 1800000,
            });
            // Assert the push landed: local HEAD must equal the bare origin's
            // refs/heads/main.
            const localHead = run('git', ['rev-parse', 'HEAD'], { cwd: clonedWorkspace.cloneRoot, capture: true }).trim();
            const remoteHead = run('git', ['--git-dir', clonedWorkspace.bareRoot, 'rev-parse', 'refs/heads/main'], { cwd: root, capture: true }).trim();
            if (localHead !== remoteHead) {
                throw new Error(`Expected pushed head ${remoteHead} to match local head ${localHead}.`);
            }
            return {
                ...result,
                saveReportPath,
                localHead,
                remoteHead,
            };
        }
        finally {
            rmSync(clonedWorkspace.cloneRoot, { recursive: true, force: true });
            rmSync(clonedWorkspace.bareRoot, { recursive: true, force: true });
        }
    });
}
|
|
593
|
-
/**
 * Run the staging (live credentials) E2E suite against a real staging
 * repository and site: preflight with auth, deploy, reachability check, two
 * real `save` iterations verified end-to-end (git push -> GitHub workflow ->
 * deployed content), a no-op save guard, merge-conflict reporting, and a
 * final destroy. A production-safety check aborts early unless the target is
 * staging-like or TREESEED_E2E_ALLOW_PRODUCTION is set.
 */
async function runStagingSuite() {
    // Staging requires working auth; the preflight report is archived and a
    // failure aborts the suite before any live resources are touched.
    const preflight = collectCliPreflight({ cwd: root, requireAuth: true });
    writeJsonArtifact(resolve(artifactsRoot, 'preflight.staging.json'), preflight);
    log(formatCliPreflightReport(preflight));
    if (!preflight.ok) {
        throw new Error('Staging preflight failed.');
    }
    const staging = cloneStagingRepository();
    try {
        const repository = resolveRepositorySlug(staging.cloneRoot);
        const deployConfig = readTenantConfig(staging.workingRoot);
        // Safety rail: never run destructive staging steps against anything
        // that looks like production, unless explicitly allowed via env.
        if (!process.env.TREESEED_E2E_ALLOW_PRODUCTION && isProductionLikeTarget(repository, deployConfig.siteUrl)) {
            throw new Error(`Refusing to run staging E2E against production-like target ${repository} / ${deployConfig.siteUrl}.`);
        }
        await withStep('staging deploy', async () => {
            return runCommand('staging-deploy', 'npm', ['run', 'deploy', '--', '--name', 'treeseed-e2e-staging'], {
                cwd: staging.workingRoot,
                env: cacheEnv(createWranglerCommandEnv()),
                // 15 minutes for a full live deploy.
                timeoutMs: 900000,
            });
        });
        await withStep('staging site reachability', async () => {
            return await waitForUrl(deployConfig.siteUrl);
        });
        // File edited by each save iteration; overridable via env.
        const notePath = resolve(staging.workingRoot, process.env.TREESEED_E2E_SAVE_FILE ?? 'src/content/notes/first-note.mdx');
        // Two iterations prove the pipeline works repeatedly, not just once.
        for (const iteration of [1, 2]) {
            await withStep(`staging save iteration ${iteration}`, async () => {
                const marker = `treeseed-e2e-save-${iteration}`;
                appendSaveMarker(notePath, marker);
                const saveReportPath = resolve(artifactsRoot, `save-iteration-${iteration}.json`);
                runCommand(`staging-save-${iteration}`, 'npm', ['run', 'save', '--', `test: treeseed e2e save iteration ${iteration}`], {
                    cwd: staging.workingRoot,
                    env: {
                        ...cacheEnv(createWranglerCommandEnv()),
                        TREESEED_SAVE_REPORT_PATH: saveReportPath,
                    },
                    // 30 minutes: full save pipeline on a live target.
                    timeoutMs: 1800000,
                });
                // End-to-end verification: the pushed commit triggers a GitHub
                // workflow, and the marker text becomes visible on the site
                // (10-minute ceiling for propagation).
                const headSha = run('git', ['rev-parse', 'HEAD'], { cwd: staging.cloneRoot, capture: true }).trim();
                const workflow = await waitForGitHubWorkflow(repository, headSha);
                const siteCheck = await waitForUrl(deployConfig.siteUrl, { contains: marker, timeoutMs: 600000 });
                return {
                    headSha,
                    workflow,
                    siteCheck,
                    saveReportPath,
                };
            });
        }
        // Guard: a save with no changes must exit 1 rather than push.
        await withStep('staging save no-op guard', async () => {
            return runCommand('staging-save-no-op', 'npm', ['run', 'save', '--', 'test: staging no-op save'], {
                cwd: staging.workingRoot,
                env: cacheEnv(createWranglerCommandEnv()),
                allowedExitCodes: [1],
                timeoutMs: 180000,
            });
        });
        // Conflict scenario: two independent clones edit the same file; the
        // "remote" clone pushes first, so the "local" save must surface a
        // merge conflict with the dedicated exit code.
        await withStep('staging merge-conflict reporting', async () => {
            const local = cloneStagingRepository();
            const remote = cloneStagingRepository();
            const relativeSaveFile = process.env.TREESEED_E2E_SAVE_FILE ?? 'src/content/notes/first-note.mdx';
            try {
                // Seed the conflict from the "remote" clone.
                appendSaveMarker(resolve(remote.workingRoot, relativeSaveFile), 'treeseed-e2e-remote-conflict');
                run('git', ['add', '.'], { cwd: remote.cloneRoot });
                run('git', ['commit', '-m', 'test: remote conflict seed'], { cwd: remote.cloneRoot });
                run('git', ['push', 'origin', 'main'], { cwd: remote.cloneRoot });
                // Now make a conflicting local edit and attempt a save.
                appendSaveMarker(resolve(local.workingRoot, relativeSaveFile), 'treeseed-e2e-local-conflict');
                const saveReportPath = resolve(artifactsRoot, 'save-conflict.json');
                const result = runCommand('staging-save-conflict', 'npm', ['run', 'save', '--', 'test: staging merge conflict'], {
                    cwd: local.workingRoot,
                    env: {
                        ...cacheEnv(createWranglerCommandEnv()),
                        TREESEED_SAVE_REPORT_PATH: saveReportPath,
                    },
                    // Only the dedicated merge-conflict exit code is accepted.
                    allowedExitCodes: [MERGE_CONFLICT_EXIT_CODE],
                    timeoutMs: 1800000,
                });
                return {
                    exitCode: result.status,
                    saveReportPath,
                };
            }
            finally {
                // The failed save may leave an in-progress rebase behind;
                // abort it if present before deleting the clones.
                try {
                    run('git', ['rebase', '--abort'], { cwd: local.cloneRoot, capture: true });
                }
                catch {
                    // Best effort cleanup for the temporary clone.
                }
                rmSync(local.cloneRoot, { recursive: true, force: true });
                rmSync(remote.cloneRoot, { recursive: true, force: true });
            }
        });
        // Tear down the staging deployment created by this suite.
        await withStep('staging destroy', async () => {
            return runCommand('staging-destroy', 'npm', ['run', 'destroy', '--', '--force', '--skip-confirmation', '--confirm', String(deployConfig.slug), '--remove-build-artifacts'], {
                cwd: staging.workingRoot,
                env: cacheEnv(createWranglerCommandEnv()),
                timeoutMs: 900000,
            });
        });
    }
    finally {
        rmSync(staging.cloneRoot, { recursive: true, force: true });
    }
}
|
|
698
|
-
/**
 * Entry point: run whichever suites were selected, record the overall
 * outcome in the report, and exit non-zero on failure. Artifacts are
 * written in both the success and failure paths.
 */
void (async () => {
    try {
        if (runLocal) {
            await runLocalSuite();
        }
        if (runStaging) {
            await runStagingSuite();
        }
        report.summary.ok = true;
        writeReport();
        console.log(`Treeseed command E2E completed successfully. Artifacts: ${artifactsRoot}`);
    }
    catch (error) {
        // Normalize non-Error throwables before recording/printing.
        const message = error instanceof Error ? error.message : String(error);
        report.summary.ok = false;
        report.summary.error = message;
        // Persist the failed report so the artifact reflects the failure.
        writeReport();
        console.error(message);
        console.error(`Treeseed command E2E artifacts: ${artifactsRoot}`);
        process.exit(1);
    }
})();
|