@valescoagency/runway 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +199 -0
- package/dist/cli.js +88 -0
- package/dist/commands/doctor.js +464 -0
- package/dist/commands/init.js +421 -0
- package/dist/commands/run.js +61 -0
- package/dist/commands/upgrade-repo.js +325 -0
- package/dist/commands/upgrade.js +177 -0
- package/dist/config.js +45 -0
- package/dist/github.js +34 -0
- package/dist/linear.js +81 -0
- package/dist/orchestrator.js +191 -0
- package/dist/prompts.js +40 -0
- package/package.json +63 -0
- package/templates/.env.schema.target-repo +32 -0
- package/templates/Dockerfile.claude-code.base +55 -0
- package/templates/dockerfile-varlock.snippet +43 -0
|
@@ -0,0 +1,325 @@
|
|
|
1
|
+
import { existsSync, readFileSync, writeFileSync } from "node:fs";
|
|
2
|
+
import { join, dirname } from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { buildAgentImage, preflight, verify, } from "./init.js";
|
|
5
|
+
// ESM has no __dirname builtin; derive it from this module's URL.
const __dirname = dirname(fileURLToPath(import.meta.url));
// runway/src/commands/upgrade-repo.ts → runway/templates/
// (two hops up from dist/commands/ lands at the package root)
const TEMPLATES_DIR = join(__dirname, "..", "..", "templates");
|
|
8
|
+
/**
 * Print the `runway upgrade-repo` help text to stdout.
 *
 * Pure output — no parsing, no side effects beyond console.log. Called by
 * the CLI dispatcher and by the arg parser on `--help`/`-h`.
 */
export function printUpgradeRepoUsage() {
    console.log(`runway upgrade-repo — refresh a target repo's runway scaffold

Re-renders \`.sandcastle/Dockerfile\` (and tier-2 \`.env.schema\`) from the
current vendored templates, preserving user-set values like the op:// vault
and item names. Use after a runway version bump that changed the Dockerfile
or template shape.

USAGE
cd /path/to/your/target/repo
runway upgrade-repo [--check] [--skip-build] [--allow-dirty] [--force]
[--op-vault=...] [--anthropic-item=...] [--gh-token-item=...]

OPTIONS
--check Dry-run. Compute the diff but do not write. Exits 0 if
already up to date, 1 if drift exists. Useful in CI.
--skip-build Don't \`docker build\` the agent image after rendering.
--allow-dirty Skip the "working tree clean" preflight check.
--force Overwrite even when the existing Dockerfile contains
lines outside the runway templates (manual edits).
--op-vault=NAME Override the 1Password vault. By default, upgrade-repo
extracts this from the existing .env.schema.
--anthropic-item=N Override the ANTHROPIC_API_KEY item name.
--gh-token-item=N Override the GH_TOKEN item name.
--help, -h Show this help.

WHAT THIS COMMAND DOES
1. Preflight: same checks as \`runway init\` (docker, gh, varlock, op, git).
2. Detect tier from existing artefacts:
- .env.schema present with op:// references → tier 2
- .sandcastle/Dockerfile present, no .env.schema → tier 1
- neither → error: run \`runway init\` first
3. Tier 2: extract op://<vault>/<item> from the existing .env.schema so
vault/item names round-trip. Override via flags if extraction fails.
4. Re-render .sandcastle/Dockerfile from the vendored templates.
5. Tier 2: re-render .env.schema, preserving user-added shell-call lines
for keys other than ANTHROPIC_API_KEY and GH_TOKEN.
6. Diff before writing. If unchanged → "already up to date". Otherwise
write the new files and print a +N/-M summary per file.
7. \`docker build\` the agent image (skip with --skip-build).
8. Verify pass (same checks as \`runway init\`).

WHAT IT DOES NOT DO
- Prompt for op:// values. They round-trip from the existing schema.
- Touch \`.sandcastle/.env\` (only present in tier-1-style setups). It is
left alone with a one-time warning.
- Modify any non-template Dockerfile lines without --force.
`);
}
|
|
57
|
+
/**
 * Parse CLI arguments for `runway upgrade-repo`.
 *
 * Recognizes four boolean flags, three `--key=value` overrides, and
 * `--help`/`-h` (which prints usage and exits the process). Any other
 * argument is rejected with an Error.
 *
 * @param {string[]} argv - raw argument list (after the subcommand name)
 * @returns {{check: boolean, skipBuild: boolean, allowDirty: boolean,
 *            force: boolean, opVault: string|undefined,
 *            anthropicItem: string|undefined, ghTokenItem: string|undefined}}
 * @throws {Error} on an unrecognized argument
 */
function parseUpgradeRepoArgs(argv) {
    const opts = {
        check: false,
        skipBuild: false,
        allowDirty: false,
        force: false,
        opVault: undefined,
        anthropicItem: undefined,
        ghTokenItem: undefined,
    };
    // Flag tables keep the loop body flat: exact-match booleans on the
    // left, `--key=` prefixed string overrides on the right.
    const boolFlags = new Map([
        ["--check", "check"],
        ["--skip-build", "skipBuild"],
        ["--allow-dirty", "allowDirty"],
        ["--force", "force"],
    ]);
    const valueFlags = new Map([
        ["--op-vault=", "opVault"],
        ["--anthropic-item=", "anthropicItem"],
        ["--gh-token-item=", "ghTokenItem"],
    ]);
    for (const arg of argv) {
        if (arg === "--help" || arg === "-h") {
            printUpgradeRepoUsage();
            process.exit(0);
        }
        const boolKey = boolFlags.get(arg);
        if (boolKey !== undefined) {
            opts[boolKey] = true;
            continue;
        }
        const prefix = [...valueFlags.keys()].find((p) => arg.startsWith(p));
        if (prefix !== undefined) {
            opts[valueFlags.get(prefix)] = arg.slice(prefix.length);
            continue;
        }
        throw new Error(`unknown argument: ${arg}`);
    }
    return opts;
}
|
|
105
|
+
/**
 * Entry point for `runway upgrade-repo`.
 *
 * Re-renders the target repo's scaffold (Dockerfile, and .env.schema for
 * tier 2) from the vendored templates, diffing before writing. Runs from
 * the target repo's cwd. Exits the process directly: 0 when already up to
 * date, 1 when --check detects drift.
 *
 * @param {string[]} argv - raw CLI arguments after the subcommand name
 */
export async function upgradeRepoCommand(argv) {
    const opts = parseUpgradeRepoArgs(argv);
    const cwd = process.cwd();
    const tier = detectTier(cwd);
    console.log(`[runway upgrade-repo] detected tier ${tier}`);
    // --check shouldn't refuse on a dirty tree — drift detection is read-only.
    const allowDirty = opts.check ? true : opts.allowDirty;
    // Preflight uses init's helper; satisfy its InitOptions shape.
    // The "placeholder" op values are never read by preflight's checks.
    const preflightOpts = {
        tier,
        allowDirty,
        force: opts.force,
        skipBuild: opts.skipBuild,
        opVault: "placeholder",
        anthropicItem: "placeholder",
        ghTokenItem: "placeholder",
    };
    await preflight(cwd, preflightOpts);
    // Render new file contents in memory.
    // In --check we suppress the manual-edit refusal; drift detection is
    // read-only and the user wants the diff signal, not an exception.
    const dockerfileChange = renderDockerfile(cwd, tier, opts, !opts.check);
    let schemaChange = null;
    let resolved = null;
    if (tier === 2) {
        // Round-trip the op:// vault/item names from the existing schema
        // (flags override), then re-render the schema with them.
        resolved = resolveOpRefs(cwd, opts);
        schemaChange = renderEnvSchema(cwd, resolved);
    }
    // Sandcastle .env warn-once (tier-1 leftover).
    const sandcastleEnv = join(cwd, ".sandcastle", ".env");
    if (existsSync(sandcastleEnv)) {
        console.log(" ⚠ .sandcastle/.env present — runway upgrade-repo leaves it alone (delete manually if you've moved to tier 2)");
    }
    // Keep only real changes; schemaChange is null on tier 1.
    const changes = [dockerfileChange, schemaChange].filter((c) => c !== null && c.before !== c.after);
    if (changes.length === 0) {
        console.log("[runway upgrade-repo] already up to date");
        process.exit(0);
    }
    // Print diff summary.
    for (const c of changes) {
        console.log(` ~ ${c.relPath} (+${c.added}/-${c.removed} lines)`);
    }
    if (opts.check) {
        console.log("[runway upgrade-repo] drift detected (--check); not writing");
        process.exit(1);
    }
    // Write files.
    for (const c of changes) {
        writeFileSync(c.path, c.after);
        console.log(` ✓ wrote ${c.relPath}`);
    }
    if (!opts.skipBuild) {
        await buildAgentImage(cwd);
    }
    // Verify uses init's helper; we need a complete InitOptions for tier 2.
    const verifyOpts = {
        tier,
        allowDirty: true, // we already preflighted
        force: opts.force,
        skipBuild: opts.skipBuild,
        opVault: resolved?.opVault ?? "placeholder",
        anthropicItem: resolved?.anthropicItem ?? "placeholder",
        ghTokenItem: resolved?.ghTokenItem ?? "placeholder",
    };
    await verify(cwd, verifyOpts);
    console.log(`[runway upgrade-repo] done — tier ${tier} scaffold refreshed`);
}
|
|
172
|
+
// ---------------------------------------------------------------------------
|
|
173
|
+
// Tier detection
|
|
174
|
+
// ---------------------------------------------------------------------------
|
|
175
|
+
/**
 * Infer the scaffold tier of the target repo from its on-disk artefacts.
 *
 * Tier 2 means `.env.schema` exists and its ANTHROPIC_API_KEY uses the
 * varlock shell-call form; tier 1 means only `.sandcastle/Dockerfile`
 * exists (or a schema without the tier-2 marker).
 *
 * @param {string} cwd - target repo root
 * @returns {1|2} detected tier
 * @throws {Error} when no scaffold exists at all (user must run `runway init`)
 */
function detectTier(cwd) {
    const sandcastleDockerfile = join(cwd, ".sandcastle", "Dockerfile");
    if (!existsSync(sandcastleDockerfile)) {
        throw new Error("no runway scaffold found (.sandcastle/Dockerfile missing); run `runway init` first");
    }
    const envSchemaPath = join(cwd, ".env.schema");
    if (!existsSync(envSchemaPath)) {
        return 1;
    }
    const schemaText = readFileSync(envSchemaPath, "utf8");
    // Tier-2 marker: ANTHROPIC_API_KEY uses the varlock shell-call form.
    const tier2Marker = new RegExp(`ANTHROPIC_API_KEY\\s*=\\s*${execName()}\\(`);
    return tier2Marker.test(schemaText) ? 2 : 1;
}
// `varlock`'s shell-call helper keyword. Kept behind a getter so the literal
// token stays out of source-code search heuristics that would flag it as a
// JS `child_process` call (it is varlock-DSL syntax, not Node).
function execName() {
    return "exec";
}
|
|
197
|
+
// ---------------------------------------------------------------------------
|
|
198
|
+
// op:// extraction
|
|
199
|
+
// ---------------------------------------------------------------------------
|
|
200
|
+
// Build the extraction regex for one owned key. Matches a whole line of
// the form  KEY = exec('op read "op://<vault>/<item>"')  and captures
// (1) the vault and (2) the item name. Multiline so ^/$ anchor per line.
const opRefPattern = (key) => new RegExp(`^\\s*${key}\\s*=\\s*${execName()}\\(\\s*['"]op read "op://([^/"]+)/([^"]+)"['"]\\s*\\)\\s*$`, "m");
const ANTHROPIC_RE = opRefPattern("ANTHROPIC_API_KEY");
const GH_TOKEN_RE = opRefPattern("GH_TOKEN");
/**
 * Recover the op:// vault/item names from the target repo's existing
 * `.env.schema` so they round-trip through a re-render. CLI flags always
 * win over extracted values.
 *
 * @param {string} cwd - target repo root
 * @param {{opVault?: string, anthropicItem?: string, ghTokenItem?: string}} opts
 * @returns {{opVault: string, anthropicItem: string, ghTokenItem: string}}
 * @throws {Error} when a value can be neither extracted nor overridden, or
 *   when the two extracted lines disagree on the vault name
 */
function resolveOpRefs(cwd, opts) {
    const schemaText = readFileSync(join(cwd, ".env.schema"), "utf8");
    const anthropicMatch = schemaText.match(ANTHROPIC_RE);
    const ghTokenMatch = schemaText.match(GH_TOKEN_RE);
    // Per-field override > extracted > error.
    const opVault = opts.opVault ?? anthropicMatch?.[1] ?? ghTokenMatch?.[1] ?? null;
    const anthropicItem = opts.anthropicItem ?? anthropicMatch?.[2] ?? null;
    const ghTokenItem = opts.ghTokenItem ?? ghTokenMatch?.[2] ?? null;
    if (!opVault || !anthropicItem || !ghTokenItem) {
        throw new Error("could not parse existing .env.schema; pass --op-vault, --anthropic-item, --gh-token-item explicitly to override.");
    }
    // Vault sanity: if both lines exist, they must point at the same vault.
    // (extraction-only path; an explicit --op-vault already overrode both,
    // so we trust the user there.)
    const vaultsDisagree = !opts.opVault
        && anthropicMatch !== null
        && ghTokenMatch !== null
        && anthropicMatch[1] !== ghTokenMatch[1];
    if (vaultsDisagree) {
        throw new Error(`vault mismatch in .env.schema: ANTHROPIC_API_KEY uses "${anthropicMatch[1]}", GH_TOKEN uses "${ghTokenMatch[1]}". Pass --op-vault to disambiguate.`);
    }
    return { opVault, anthropicItem, ghTokenItem };
}
|
|
225
|
+
// ---------------------------------------------------------------------------
|
|
226
|
+
// Render: Dockerfile
|
|
227
|
+
// ---------------------------------------------------------------------------
|
|
228
|
+
/**
 * Render the expected `.sandcastle/Dockerfile` content from the vendored
 * templates and diff it against what's on disk.
 *
 * Tier 1 is the base template verbatim; tier 2 splices the varlock snippet
 * in front of the final `ENTRYPOINT ["sleep", "infinity"]` line.
 *
 * @param {string} cwd - target repo root
 * @param {1|2} tier - detected scaffold tier
 * @param {{force: boolean}} opts - only `force` is read here
 * @param {boolean} enforceManualEditGuard - false in --check mode, where
 *   the user wants the drift signal rather than an exception
 * @returns diff-stat record (see diffStat) for the Dockerfile
 * @throws {Error} when the base template lost its ENTRYPOINT anchor, or
 *   when manual edits are detected without --force
 */
function renderDockerfile(cwd, tier, opts, enforceManualEditGuard) {
    const dockerfilePath = join(cwd, ".sandcastle", "Dockerfile");
    const before = readFileSync(dockerfilePath, "utf8");
    const base = readFileSync(join(TEMPLATES_DIR, "Dockerfile.claude-code.base"), "utf8");
    let after;
    if (tier === 1) {
        after = base;
    }
    else {
        const snippet = readFileSync(join(TEMPLATES_DIR, "dockerfile-varlock.snippet"), "utf8");
        // Anchor: the snippet is inserted immediately before this line.
        const entrypointRe = /^ENTRYPOINT \["sleep", "infinity"\]$/m;
        if (!entrypointRe.test(base)) {
            throw new Error("templates/Dockerfile.claude-code.base no longer ends with the sleep ENTRYPOINT — refresh templates/dockerfile-varlock.snippet logic.");
        }
        after = base.replace(entrypointRe, `${snippet.trimEnd()}\n\nENTRYPOINT ["sleep", "infinity"]`);
    }
    // Detect manual user edits: any line in `before` that isn't in the
    // expected re-rendered output is foreign. Warn loudly unless --force.
    // NOTE(review): this is a per-line set membership check, so reordered
    // template lines would not be flagged — only genuinely new lines are.
    if (before !== after && !opts.force && enforceManualEditGuard) {
        const expectedLines = new Set(after.split("\n"));
        const foreign = before
            .split("\n")
            .filter((l) => l.trim().length && !expectedLines.has(l));
        if (foreign.length > 0) {
            console.log(" ⚠ .sandcastle/Dockerfile contains lines outside the runway templates:");
            // Cap the listing at 10 lines to keep the console readable.
            for (const l of foreign.slice(0, 10)) {
                console.log(` ${l}`);
            }
            if (foreign.length > 10) {
                console.log(` … and ${foreign.length - 10} more`);
            }
            console.log(" re-render WILL clobber these. Re-run with --force to proceed,");
            console.log(" or move the customizations into a downstream Dockerfile layer.");
            throw new Error("manual Dockerfile edits detected; pass --force to overwrite");
        }
    }
    return diffStat(dockerfilePath, ".sandcastle/Dockerfile", before, after);
}
|
|
266
|
+
// ---------------------------------------------------------------------------
|
|
267
|
+
// Render: .env.schema (tier 2)
|
|
268
|
+
// ---------------------------------------------------------------------------
|
|
269
|
+
/**
 * Render the expected tier-2 `.env.schema` from the vendored template and
 * diff it against what's on disk.
 *
 * Vault/item placeholders are substituted from `resolved`; user-added
 * shell-call secret lines (keys other than ANTHROPIC_API_KEY / GH_TOKEN)
 * are preserved verbatim and appended under a trailer comment, together
 * with a directly-preceding `# @...` varlock annotation line if present.
 *
 * Fix over the previous revision: a preserved match sitting on the very
 * first line of the file made `prevLineEnd` negative, so the prev-line
 * slice used a negative end index and grabbed almost the whole file; the
 * preceding-line lookup is now guarded by `lineStart > 0`.
 *
 * @param {string} cwd - target repo root
 * @param {{opVault: string, anthropicItem: string, ghTokenItem: string}} resolved
 * @returns diff-stat record (see diffStat) for .env.schema
 */
function renderEnvSchema(cwd, resolved) {
    const schemaPath = join(cwd, ".env.schema");
    const before = readFileSync(schemaPath, "utf8");
    const tmpl = readFileSync(join(TEMPLATES_DIR, ".env.schema.target-repo"), "utf8");
    let body = tmpl
        .replaceAll("{{OP_VAULT}}", resolved.opVault)
        .replaceAll("{{ANTHROPIC_ITEM}}", resolved.anthropicItem)
        .replaceAll("{{GH_TOKEN_ITEM}}", resolved.ghTokenItem);
    // Preserve user-added `KEY=<call>(...)` lines for keys other than the two
    // we own. Match any `KEY = <execName>(` line in the existing schema and
    // append the whole line if its key isn't ANTHROPIC_API_KEY or GH_TOKEN.
    const userExecRe = new RegExp(`^([A-Z_][A-Z0-9_]*)\\s*=\\s*${execName()}\\(`, "gm");
    const ownedKeys = new Set(["ANTHROPIC_API_KEY", "GH_TOKEN"]);
    const preservedLines = [];
    for (const match of before.matchAll(userExecRe)) {
        const key = match[1];
        if (!key || ownedKeys.has(key))
            continue;
        // Pull the full line from `before`.
        const lineStart = before.lastIndexOf("\n", match.index ?? 0) + 1;
        const lineEnd = before.indexOf("\n", lineStart);
        const line = lineEnd === -1 ? before.slice(lineStart) : before.slice(lineStart, lineEnd);
        // Pull a directly-preceding `# @sensitive ...` comment if present, so
        // varlock annotations round-trip. A match on the file's first line has
        // no preceding line; guard so the slice end index can't go negative
        // (a negative end would wrap and capture spurious text).
        if (lineStart > 0) {
            const prevLineEnd = lineStart - 1;
            const prevLineStart = before.lastIndexOf("\n", prevLineEnd - 1) + 1;
            const prevLine = before.slice(prevLineStart, prevLineEnd);
            if (prevLine.trim().startsWith("# @")) {
                preservedLines.push(prevLine);
            }
        }
        preservedLines.push(line);
    }
    if (preservedLines.length > 0) {
        const trailer = "\n# --- preserved by `runway upgrade-repo` (user-added secrets) ---\n";
        body = `${body.replace(/\n+$/, "")}\n${trailer}${preservedLines.join("\n")}\n`;
    }
    return diffStat(schemaPath, ".env.schema", before, body);
}
|
|
307
|
+
// ---------------------------------------------------------------------------
|
|
308
|
+
// Diff stat
|
|
309
|
+
// ---------------------------------------------------------------------------
|
|
310
|
+
/**
 * Cheap line-level change summary between two file contents.
 *
 * This is a set difference over unique lines, not a real diff: a line is
 * "added" if it appears (at least once) only in `after`, "removed" if only
 * in `before`. Duplicated lines count once; moved lines count zero.
 *
 * @param {string} path - absolute path (passed through for the writer)
 * @param {string} relPath - repo-relative path (for display)
 * @param {string} before - current on-disk content
 * @param {string} after - freshly rendered content
 * @returns {{path, relPath, before, after, added: number, removed: number}}
 */
function diffStat(path, relPath, before, after) {
    const record = { path, relPath, before, after };
    if (before === after) {
        return { ...record, added: 0, removed: 0 };
    }
    const beforeLines = new Set(before.split("\n"));
    const afterLines = new Set(after.split("\n"));
    const added = [...afterLines].filter((line) => !beforeLines.has(line)).length;
    const removed = [...beforeLines].filter((line) => !afterLines.has(line)).length;
    return { ...record, added, removed };
}
|
|
@@ -0,0 +1,177 @@
|
|
|
1
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
2
|
+
import { dirname, join } from "node:path";
|
|
3
|
+
import { fileURLToPath } from "node:url";
|
|
4
|
+
import { execa } from "execa";
|
|
5
|
+
/**
 * Print the `runway upgrade` (self-update) help text to stdout.
 *
 * Pure output — no parsing, no side effects beyond console.log. Called by
 * the CLI dispatcher and by the arg parser on `--help`/`-h`.
 */
export function printUpgradeUsage() {
    console.log(`runway upgrade — update the runway CLI itself

Pulls the latest \`master\` of your local runway clone, reinstalls
dependencies, and re-typechecks. Run from anywhere; the command
locates its own clone via \`import.meta.url\`.

USAGE
runway upgrade [--force] [--check]

OPTIONS
--force Skip the dirty-tree / non-default-branch refusals.
--check Print "current vs latest" without actually pulling.
--help, -h Show this help.

WHAT THIS COMMAND DOES
1. Resolve the runway clone root from the running script's path.
2. Refuse if the clone is dirty or on a non-master branch (--force overrides).
3. \`git fetch --tags\` and \`git pull --ff-only\` against origin/master.
4. \`pnpm install\` (lockfile may have changed).
5. \`pnpm typecheck\` as a final sanity check (warns, does not fail).
`);
}
|
|
28
|
+
/**
 * Parse CLI arguments for `runway upgrade`.
 *
 * Only `--force` and `--check` are recognized; `--help`/`-h` prints usage
 * and exits the process. Anything else is rejected with an Error.
 *
 * @param {string[]} argv - raw argument list (after the subcommand name)
 * @returns {{force: boolean, check: boolean}}
 * @throws {Error} on an unrecognized argument
 */
function parseUpgradeArgs(argv) {
    const parsed = { force: false, check: false };
    for (const arg of argv) {
        switch (arg) {
            case "--help":
            case "-h":
                printUpgradeUsage();
                process.exit(0);
                break;
            case "--force":
                parsed.force = true;
                break;
            case "--check":
                parsed.check = true;
                break;
            default:
                throw new Error(`unknown argument: ${arg}`);
        }
    }
    return parsed;
}
|
|
48
|
+
/**
 * Locate the runway clone root from the running script's own path.
 *
 * Whether compiled or run via tsx, this file lives at
 * `<root>/src/commands/upgrade.ts` (or `<root>/dist/commands/upgrade.js`),
 * so the root is exactly two `dirname` hops above this module.
 *
 * Sanity check: the resolved root's package.json must declare
 * `"name": "@valescoagency/runway"`. Any other name means the binary is
 * running from an unexpected layout (e.g. a published npm tarball) and
 * git-based self-update does not apply.
 *
 * @returns {string} absolute path of the runway clone root
 * @throws {Error} when package.json is missing, unparsable, or names a
 *   different package
 */
function resolveRunwayRoot() {
    const moduleDir = dirname(fileURLToPath(import.meta.url));
    const cloneRoot = dirname(dirname(moduleDir));
    const manifestPath = join(cloneRoot, "package.json");
    if (!existsSync(manifestPath)) {
        throw new Error(`[runway upgrade] could not locate package.json at expected root: ${cloneRoot}`);
    }
    let manifest;
    try {
        manifest = JSON.parse(readFileSync(manifestPath, "utf8"));
    }
    catch (err) {
        const reason = err instanceof Error ? err.message : String(err);
        throw new Error(`[runway upgrade] failed to parse ${manifestPath}: ${reason}`);
    }
    if (manifest.name !== "@valescoagency/runway") {
        throw new Error(`[runway upgrade] resolved root ${cloneRoot} is not the runway clone (package.json name="${manifest.name}"). ` +
            "self-update only works from a git clone of ValescoAgency/runway.");
    }
    return cloneRoot;
}
|
|
78
|
+
/**
 * Read the package version from `<root>/package.json`.
 *
 * @param {string} root - runway clone root
 * @returns {string} the declared version, or "unknown" when the field is
 *   absent or not a string
 */
function readVersion(root) {
    const manifestPath = join(root, "package.json");
    const { version } = JSON.parse(readFileSync(manifestPath, "utf8"));
    if (typeof version !== "string") {
        return "unknown";
    }
    return version;
}
|
|
82
|
+
/**
 * Entry point for `runway upgrade` (self-update).
 *
 * From an npm install: prints manual upgrade instructions and returns.
 * From a git clone: fetches, fast-forwards master, reinstalls deps, and
 * typechecks. `--check` only reports whether an update is available.
 *
 * @param {string[]} argv - raw CLI arguments after the subcommand name
 * @throws {Error} on dirty tree / wrong branch (without --force), failed
 *   fetch, non-fast-forward pull, or failed `pnpm install`
 */
export async function upgradeCommand(argv) {
    const opts = parseUpgradeArgs(argv);
    const root = resolveRunwayRoot();
    // Detect npm-installed vs git-clone. An npm install has no .git/ at
    // the package root; a git clone does. The two flavors have completely
    // different upgrade paths.
    const isGitClone = existsSync(join(root, ".git"));
    if (!isGitClone) {
        // npm-install path: we can't safely replace our own running binary,
        // so print the exact commands and let the user pick one.
        const current = readVersion(root);
        console.log(`[runway upgrade] running from an npm install at ${root}`);
        console.log(`[runway upgrade] current: ${current}`);
        console.log("[runway upgrade] self-update via the same package manager you used to install:");
        console.log(" pnpm install -g @valescoagency/runway@latest");
        console.log(" npm install -g @valescoagency/runway@latest");
        console.log(" yarn global add @valescoagency/runway@latest");
        console.log("[runway upgrade] (runway can't reliably self-replace its own binary; pick one and run it manually)");
        return;
    }
    console.log(`[runway upgrade] runway clone: ${root}`);
    // --check: fetch and diff HEAD vs origin/master, then exit.
    if (opts.check) {
        await runFetch(root);
        const localSha = (await execa("git", ["rev-parse", "HEAD"], { cwd: root })).stdout.trim();
        const remoteSha = (await execa("git", ["rev-parse", "origin/master"], { cwd: root })).stdout.trim();
        const current = readVersion(root);
        console.log(`[runway upgrade] current: ${current} (${localSha.slice(0, 7)})`);
        console.log(`[runway upgrade] origin/master: ${remoteSha.slice(0, 7)}`);
        if (localSha === remoteSha) {
            console.log("[runway upgrade] already up to date");
        }
        else {
            console.log("[runway upgrade] update available — run `runway upgrade` to apply");
        }
        return;
    }
    // Refuse on dirty tree.
    const { stdout: porcelain } = await execa("git", ["status", "--porcelain"], { cwd: root });
    if (porcelain.trim().length && !opts.force) {
        throw new Error("runway clone has uncommitted changes — commit/stash first, or pass --force");
    }
    // Refuse on non-master branch.
    const { stdout: branch } = await execa("git", ["rev-parse", "--abbrev-ref", "HEAD"], { cwd: root });
    if (branch.trim() !== "master" && !opts.force) {
        throw new Error(`runway clone is on branch "${branch.trim()}" (expected "master") — switch to master, or pass --force`);
    }
    const before = readVersion(root);
    console.log(`[runway upgrade] current: ${before}`);
    await runFetch(root);
    // Fast-forward pull.
    try {
        await execa("git", ["pull", "--ff-only", "origin", "master"], {
            cwd: root,
            stdio: "inherit",
        });
    }
    catch {
        throw new Error("git pull --ff-only failed (likely non-fast-forward). " +
            "Resolve manually in the runway clone, then re-run `runway upgrade`.");
    }
    // pnpm install — lockfile may have changed.
    try {
        await execa("pnpm", ["install"], { cwd: root, stdio: "inherit" });
    }
    catch {
        // Note: the clone has already moved to the new ref at this point, so
        // the error message tells the user how to finish the job by hand.
        throw new Error("pnpm install failed — git pull already succeeded, so the clone is on the new ref. " +
            "Run `pnpm install` manually in " +
            root +
            " to recover.");
    }
    const after = readVersion(root);
    if (before === after) {
        console.log("[runway upgrade] already up to date");
    }
    else {
        console.log(`[runway upgrade] upgraded: ${before} → ${after}`);
    }
    // Final typecheck — warn but don't fail.
    try {
        await execa("pnpm", ["typecheck"], { cwd: root, stdio: "inherit" });
        console.log("[runway upgrade] typecheck OK");
    }
    catch {
        console.log("[runway upgrade] ⚠ pnpm typecheck reported errors — clone is on the new ref but may need attention");
    }
}
|
|
167
|
+
/**
 * `git fetch --tags origin` in the runway clone.
 *
 * @param {string} root - runway clone root (used as git's cwd)
 * @throws {Error} with a short actionable message on any git failure;
 *   git's own stderr is already visible because stdio is inherited
 */
async function runFetch(root) {
    const fetchArgs = ["fetch", "--tags", "origin"];
    try {
        await execa("git", fetchArgs, { cwd: root, stdio: "inherit" });
    }
    catch {
        throw new Error("git fetch failed — check network connectivity and that `origin` is reachable");
    }
}
|
package/dist/config.js
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import { z } from "zod";
|
|
2
|
+
/**
 * Runway runtime config. Loaded from process.env at startup. We fail
 * fast if a required value is missing — no point starting the loop and
 * blowing up halfway through an issue.
 *
 * Notable absences vs. typical agent runners:
 * - No ANTHROPIC_API_KEY here. Sandcastle reads it from the target
 *   repo's `.sandcastle/.env` per its own conventions.
 * - No GH_TOKEN here. We use the `gh` CLI for PR creation; if the
 *   user is logged in (`gh auth status`), it Just Works. If they
 *   aren't, `gh pr create` errors out with a clear message — no need
 *   for runway to second-guess.
 * - No RUNWAY_TARGET_REPO. Runway runs from inside the target repo
 *   (`process.cwd()`), the same way `sandcastle run` does.
 */
const ConfigSchema = z.object({
    // Required — the only field with no default; its absence aborts startup.
    linearApiKey: z.string().min(1, "LINEAR_API_KEY required"),
    /**
     * Optional. If present, forwarded into the sandcastle container so
     * the in-container varlock + 1Password-CLI shim can resolve agent
     * secrets at run time. If absent, the container falls back to
     * sandcastle's normal `.sandcastle/.env` flow. See
     * docs/secrets-with-varlock.md.
     */
    opServiceAccountToken: z.string().optional(),
    // Linear team key (the short identifier, e.g. "VA"), not the team name.
    linearTeam: z.string().default("VA"),
    // Workflow state names used by the orchestrator's issue transitions.
    readyStatus: z.string().default("Todo"),
    inProgressStatus: z.string().default("In Progress"),
    inReviewStatus: z.string().default("In Review"),
    // Label applied when an issue needs human intervention.
    hitlLabel: z.string().default("needs-human"),
    // coerce: env vars arrive as strings; must parse to a positive integer.
    maxIterations: z.coerce.number().int().positive().default(5),
});
/**
 * Build the runtime config from process.env, applying schema defaults.
 *
 * @returns the validated config object
 * @throws {z.ZodError} when LINEAR_API_KEY is missing/empty or any
 *   override fails validation (e.g. non-numeric RUNWAY_MAX_ITERATIONS)
 */
export function loadConfig() {
    return ConfigSchema.parse({
        linearApiKey: process.env.LINEAR_API_KEY,
        opServiceAccountToken: process.env.OP_SERVICE_ACCOUNT_TOKEN,
        linearTeam: process.env.RUNWAY_LINEAR_TEAM,
        readyStatus: process.env.RUNWAY_READY_STATUS,
        inProgressStatus: process.env.RUNWAY_IN_PROGRESS_STATUS,
        inReviewStatus: process.env.RUNWAY_IN_REVIEW_STATUS,
        hitlLabel: process.env.RUNWAY_HITL_LABEL,
        maxIterations: process.env.RUNWAY_MAX_ITERATIONS,
    });
}
|
package/dist/github.js
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import { execa } from "execa";
|
|
2
|
+
/**
 * `gh` CLI-backed gateway. Runway runs on a host with `gh` authenticated
 * (via `GH_TOKEN` or the user's keychain login); we don't reimplement
 * REST calls when `gh` is already there and handles the auth dance.
 *
 * Change: the PR base branch is no longer hard-coded to "main" — callers
 * may pass `baseBranch` (default "main", preserving prior behavior) so
 * repos whose default branch is e.g. "master" or "develop" work too.
 */
export function createGithubGateway() {
    return {
        /**
         * Push `branch` to origin with upstream tracking (-u), streaming
         * git's output to the console.
         *
         * @param {string} repoPath - working tree to push from
         * @param {string} branch - local branch name
         */
        async pushBranch(repoPath, branch) {
            await execa("git", ["push", "-u", "origin", branch], {
                cwd: repoPath,
                stdio: "inherit",
            });
        },
        /**
         * Open a pull request for `branch` via `gh pr create`.
         *
         * @param {object} params
         * @param {string} params.repoPath - working tree (gh infers the repo)
         * @param {string} params.branch - head branch
         * @param {{identifier: string, title: string}} params.issue - used
         *   to build the PR title as "IDENTIFIER: title"
         * @param {string} params.body - PR body markdown
         * @param {string} [params.baseBranch="main"] - base branch to merge
         *   into; default matches the previous hard-coded value
         * @returns {Promise<string>} the created PR's URL ("" if gh printed
         *   nothing)
         */
        async openPullRequest({ repoPath, branch, issue, body, baseBranch = "main" }) {
            const title = `${issue.identifier}: ${issue.title}`;
            const { stdout } = await execa("gh", [
                "pr",
                "create",
                "--base",
                baseBranch,
                "--head",
                branch,
                "--title",
                title,
                "--body",
                body,
            ], { cwd: repoPath });
            // `gh pr create` prints the URL on the last line.
            const url = stdout.trim().split("\n").at(-1) ?? "";
            return url;
        },
    };
}
|
package/dist/linear.js
ADDED
|
@@ -0,0 +1,81 @@
|
|
|
1
|
+
import { LinearClient } from "@linear/sdk";
|
|
2
|
+
/**
 * Concrete @linear/sdk-backed implementation. Tests inject their own
 * gateway; never reach for the SDK directly outside this file.
 *
 * All lookups resolve human-readable names (team key, state name, label
 * name) to Linear IDs at call time; a missing name throws rather than
 * silently no-opping.
 */
export function createLinearGateway(config) {
    const client = new LinearClient({ apiKey: config.linearApiKey });
    // Resolve a workflow-state name (e.g. "In Progress") to its ID on a team.
    async function findStateId(teamId, name) {
        const states = await client.workflowStates({
            filter: { team: { id: { eq: teamId } }, name: { eq: name } },
        });
        const state = states.nodes[0];
        if (!state) {
            throw new Error(`Linear workflow state "${name}" not found on team ${teamId}`);
        }
        return state.id;
    }
    // Resolve the configured team key (e.g. "VA") to the team's ID.
    async function findTeamId() {
        const teams = await client.teams({
            filter: { key: { eq: config.linearTeam } },
        });
        const team = teams.nodes[0];
        if (!team) {
            throw new Error(`Linear team "${config.linearTeam}" not found`);
        }
        return team.id;
    }
    return {
        /**
         * List issues currently in the configured "ready" state, oldest
         * first, mapped to the gateway's plain issue shape.
         */
        async fetchReady() {
            const teamId = await findTeamId();
            const readyStateId = await findStateId(teamId, config.readyStatus);
            const issues = await client.issues({
                filter: {
                    team: { id: { eq: teamId } },
                    state: { id: { eq: readyStateId } },
                },
                // Stable order: oldest first so the queue drains FIFO.
                orderBy: "createdAt",
            });
            return issues.nodes.map((i) => ({
                id: i.id,
                identifier: i.identifier,
                title: i.title,
                description: i.description ?? "",
            }));
        },
        /**
         * Move an issue to the workflow state named `statusName` on the
         * issue's own team.
         */
        async transition(issueId, statusName) {
            const issue = await client.issue(issueId);
            const team = await issue.team;
            if (!team)
                throw new Error(`Issue ${issueId} has no team`);
            const stateId = await findStateId(team.id, statusName);
            await client.updateIssue(issueId, { stateId });
        },
        /**
         * Add the team label named `labelName` to an issue, keeping all
         * labels already applied (updateIssue replaces the full label set,
         * so we re-send existing IDs plus the new one).
         */
        async applyLabel(issueId, labelName) {
            const issue = await client.issue(issueId);
            const team = await issue.team;
            if (!team)
                throw new Error(`Issue ${issueId} has no team`);
            const labels = await client.issueLabels({
                filter: {
                    team: { id: { eq: team.id } },
                    name: { eq: labelName },
                },
            });
            const label = labels.nodes[0];
            if (!label) {
                throw new Error(`Linear label "${labelName}" not found on team ${team.id}`);
            }
            const existing = await issue.labels();
            const labelIds = [
                ...existing.nodes.map((l) => l.id),
                label.id,
            ];
            await client.updateIssue(issueId, { labelIds });
        },
        /** Post a markdown comment on an issue. */
        async comment(issueId, body) {
            await client.createComment({ issueId, body });
        },
    };
}
|