@weldr/runr 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +216 -0
- package/LICENSE +190 -0
- package/NOTICE +4 -0
- package/README.md +200 -0
- package/dist/cli.js +464 -0
- package/dist/commands/__tests__/report.test.js +202 -0
- package/dist/commands/compare.js +168 -0
- package/dist/commands/doctor.js +124 -0
- package/dist/commands/follow.js +251 -0
- package/dist/commands/gc.js +161 -0
- package/dist/commands/guards-only.js +89 -0
- package/dist/commands/metrics.js +441 -0
- package/dist/commands/orchestrate.js +800 -0
- package/dist/commands/paths.js +31 -0
- package/dist/commands/preflight.js +152 -0
- package/dist/commands/report.js +478 -0
- package/dist/commands/resume.js +149 -0
- package/dist/commands/run.js +538 -0
- package/dist/commands/status.js +189 -0
- package/dist/commands/summarize.js +220 -0
- package/dist/commands/version.js +82 -0
- package/dist/commands/wait.js +170 -0
- package/dist/config/__tests__/presets.test.js +104 -0
- package/dist/config/load.js +66 -0
- package/dist/config/schema.js +160 -0
- package/dist/context/__tests__/artifact.test.js +130 -0
- package/dist/context/__tests__/pack.test.js +191 -0
- package/dist/context/artifact.js +67 -0
- package/dist/context/index.js +2 -0
- package/dist/context/pack.js +273 -0
- package/dist/diagnosis/analyzer.js +678 -0
- package/dist/diagnosis/formatter.js +136 -0
- package/dist/diagnosis/index.js +6 -0
- package/dist/diagnosis/types.js +7 -0
- package/dist/env/__tests__/fingerprint.test.js +116 -0
- package/dist/env/fingerprint.js +111 -0
- package/dist/orchestrator/__tests__/policy.test.js +185 -0
- package/dist/orchestrator/__tests__/schema-version.test.js +65 -0
- package/dist/orchestrator/artifacts.js +405 -0
- package/dist/orchestrator/state-machine.js +646 -0
- package/dist/orchestrator/types.js +88 -0
- package/dist/ownership/normalize.js +45 -0
- package/dist/repo/context.js +90 -0
- package/dist/repo/git.js +13 -0
- package/dist/repo/worktree.js +239 -0
- package/dist/store/run-store.js +107 -0
- package/dist/store/run-utils.js +69 -0
- package/dist/store/runs-root.js +126 -0
- package/dist/supervisor/__tests__/evidence-gate.test.js +111 -0
- package/dist/supervisor/__tests__/ownership.test.js +103 -0
- package/dist/supervisor/__tests__/state-machine.test.js +290 -0
- package/dist/supervisor/collision.js +240 -0
- package/dist/supervisor/evidence-gate.js +98 -0
- package/dist/supervisor/planner.js +18 -0
- package/dist/supervisor/runner.js +1562 -0
- package/dist/supervisor/scope-guard.js +55 -0
- package/dist/supervisor/state-machine.js +121 -0
- package/dist/supervisor/verification-policy.js +64 -0
- package/dist/tasks/task-metadata.js +72 -0
- package/dist/types/schemas.js +1 -0
- package/dist/verification/engine.js +49 -0
- package/dist/workers/__tests__/claude.test.js +88 -0
- package/dist/workers/__tests__/codex.test.js +81 -0
- package/dist/workers/claude.js +119 -0
- package/dist/workers/codex.js +162 -0
- package/dist/workers/json.js +22 -0
- package/dist/workers/mock.js +193 -0
- package/dist/workers/prompts.js +98 -0
- package/dist/workers/schemas.js +39 -0
- package/package.json +47 -0
- package/templates/prompts/implementer.md +70 -0
- package/templates/prompts/planner.md +62 -0
- package/templates/prompts/reviewer.md +77 -0
|
@@ -0,0 +1,88 @@
|
|
|
1
|
+
/**
 * Orchestrator types for multi-track run coordination.
 *
 * The orchestrator manages parallel tracks of serial steps,
 * with collision-aware scheduling to prevent merge conflicts.
 */
import { z } from 'zod';
// Zod schemas for validation
// A single step as declared in user configuration: the task file to run
// plus an optional file allowlist restricting what the step may touch.
export const stepConfigSchema = z.object({
    task: z.string().min(1),
    allowlist: z.array(z.string()).optional()
});
// A named track: an ordered, non-empty list of serially-executed steps.
export const trackConfigSchema = z.object({
    name: z.string().min(1),
    steps: z.array(stepConfigSchema).min(1)
});
// Top-level orchestration config: one or more tracks run in parallel.
export const orchestrationConfigSchema = z.object({
    tracks: z.array(trackConfigSchema).min(1)
});
// Outcome of a finished step. `stop_reason` is only populated for
// non-complete terminal states.
export const stepResultSchema = z.object({
    status: z.enum(['complete', 'stopped', 'timeout']),
    stop_reason: z.string().optional(),
    elapsed_ms: z.number()
});
// Runtime state of a step: the resolved task path, ownership patterns
// (raw as declared and normalized for matching), and — once the step has
// been dispatched — the run id/dir and eventual result.
export const stepSchema = z.object({
    task_path: z.string(),
    allowlist: z.array(z.string()).optional(),
    owns_raw: z.array(z.string()).optional(),
    owns_normalized: z.array(z.string()).optional(),
    run_id: z.string().optional(),
    run_dir: z.string().optional(),
    result: stepResultSchema.optional()
});
// Runtime state of a track: `current_step` indexes into `steps`;
// `error` is set when status is 'failed'.
export const trackSchema = z.object({
    id: z.string(),
    name: z.string(),
    steps: z.array(stepSchema),
    current_step: z.number(),
    status: z.enum(['pending', 'running', 'waiting', 'complete', 'stopped', 'failed']),
    error: z.string().optional()
});
// Scheduling policy knobs (schema v1+). `collision_policy` decides what
// happens when two tracks claim overlapping files.
export const orchestratorPolicySchema = z.object({
    collision_policy: z.enum(['serialize', 'force', 'fail']),
    parallel: z.number(),
    fast: z.boolean(),
    auto_resume: z.boolean(),
    ownership_required: z.boolean().optional(),
    time_budget_minutes: z.number(),
    max_ticks: z.number()
});
// A track's current ownership claim over a set of file patterns.
// Not exported: only referenced inside orchestratorStateSchema.
const ownershipClaimSchema = z.object({
    track_id: z.string(),
    run_id: z.string().optional(),
    owns_raw: z.array(z.string()),
    owns_normalized: z.array(z.string())
});
// Audit-log entry recording when a track acquired or released claims.
const ownershipClaimEventSchema = z.object({
    timestamp: z.string(),
    action: z.enum(['acquire', 'release']),
    track_id: z.string(),
    run_id: z.string().optional(),
    claims: z.array(z.string()),
    owns_raw: z.array(z.string()),
    owns_normalized: z.array(z.string())
});
// Persisted orchestrator state. `file_claims` values accept a plain string
// (legacy v0 format) or a structured claim (v1+); `policy` is v1+ while the
// trailing flat fields are the v0 layout kept for backward compatibility.
export const orchestratorStateSchema = z.object({
    orchestrator_id: z.string(),
    repo_path: z.string(),
    tracks: z.array(trackSchema),
    active_runs: z.record(z.string()),
    file_claims: z.record(z.union([z.string(), ownershipClaimSchema])),
    claim_events: z.array(ownershipClaimEventSchema).optional(),
    status: z.enum(['running', 'complete', 'stopped', 'failed']),
    started_at: z.string(),
    ended_at: z.string().optional(),
    // v1+ policy block
    policy: orchestratorPolicySchema.optional(),
    // Legacy fields (v0) - kept for backward compatibility
    collision_policy: z.enum(['serialize', 'force', 'fail']),
    time_budget_minutes: z.number(),
    max_ticks: z.number(),
    fast: z.boolean().optional()
});
/**
 * Current schema version for orchestration artifacts.
 * Increment when making breaking changes to the structure.
 */
export const ORCHESTRATOR_ARTIFACT_SCHEMA_VERSION = 1;
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
/**
 * Normalize a single ownership pattern to POSIX format with glob suffix.
 * - Strips leading ./ and / prefixes (repeatedly, so mixed prefixes like
 *   '././src' or '/./src' fully resolve)
 * - Converts backslashes to forward slashes
 * - Collapses duplicate slashes
 * - Adds /** suffix to non-glob (bare) paths
 * - Returns null for empty/invalid patterns
 *
 * @param {string} pattern - Raw ownership pattern (may use Windows separators)
 * @returns {string|null} Normalized POSIX pattern, or null when nothing remains
 */
export function normalizeOwnPattern(pattern) {
    let normalized = pattern.replace(/\\/g, '/').trim();
    // Strip any run of leading './' and '/' prefixes. The previous two
    // single-pass replaces left residue for inputs like '././src' (-> './src')
    // and '/./src' (-> './src'); the repeated group handles every interleaving.
    normalized = normalized.replace(/^(?:\.\/+|\/+)+/, '');
    normalized = normalized.replace(/\/{2,}/g, '/');
    if (!normalized) {
        return null;
    }
    // Any glob metacharacter means the author already chose the match shape.
    const hasGlob = /[*?[\]]/.test(normalized);
    if (!hasGlob) {
        // Bare path: drop trailing slashes and claim the whole subtree.
        normalized = normalized.replace(/\/+$/, '');
        if (!normalized) {
            return null;
        }
        return `${normalized}/**`;
    }
    return normalized;
}
|
|
26
|
+
/**
 * Normalize an array of ownership patterns.
 * Invalid entries are dropped and duplicates removed (first occurrence wins).
 */
export function normalizeOwnsPatterns(patterns) {
    const cleaned = patterns
        .map((pattern) => normalizeOwnPattern(pattern))
        .filter((entry) => entry !== null);
    return [...new Set(cleaned)];
}
|
|
40
|
+
/**
 * Convert a file path to POSIX format (forward slashes) for pattern matching.
 */
export function toPosixPath(filePath) {
    return filePath.replaceAll(path.sep, '/');
}
|
|
@@ -0,0 +1,90 @@
|
|
|
1
|
+
import path from 'node:path';
|
|
2
|
+
import { git, gitOptional } from './git.js';
|
|
3
|
+
/** Resolve the repository toplevel directory via `git rev-parse --show-toplevel`. */
export async function getGitRoot(repoPath) {
    const { stdout } = await git(['rev-parse', '--show-toplevel'], repoPath);
    return stdout.trim();
}
|
|
7
|
+
/**
 * Determine the repository's default branch from the origin/HEAD symbolic
 * ref, falling back to the provided value when the ref is absent, unreadable,
 * or yields an empty branch name.
 */
export async function getDefaultBranch(repoPath, fallback) {
    const result = await gitOptional(['symbolic-ref', 'refs/remotes/origin/HEAD'], repoPath);
    if (result?.stdout) {
        // e.g. 'refs/remotes/origin/main' -> 'main'
        const branch = result.stdout.trim().split('/').pop();
        if (branch) {
            return branch;
        }
    }
    return fallback;
}
|
|
18
|
+
/** Return the abbreviated name of the currently checked-out branch (or 'HEAD' when detached). */
export async function getCurrentBranch(repoPath) {
    const { stdout } = await git(['rev-parse', '--abbrev-ref', 'HEAD'], repoPath);
    return stdout.trim();
}
|
|
22
|
+
/**
 * List working-tree paths reported by `git status --porcelain`.
 * Rename entries contribute BOTH the old and new paths so ownership/scope
 * enforcement sees every location that was touched (deleted-from and
 * created-at). The result is deduplicated in first-seen order.
 */
export async function listChangedFiles(gitRoot) {
    const { stdout } = await git(['status', '--porcelain'], gitRoot);
    const seen = new Set();
    for (const rawLine of stdout.split('\n')) {
        if (rawLine.trim().length === 0) {
            continue;
        }
        // Drop the two porcelain status characters plus the separating space.
        const entry = rawLine.slice(3);
        const arrow = entry.indexOf('->');
        const candidates = arrow === -1
            ? [entry.trim()]
            : [entry.slice(0, arrow).trim(), entry.slice(arrow + 2).trim()];
        for (const candidate of candidates) {
            if (candidate) {
                seen.add(candidate);
            }
        }
    }
    return [...seen];
}
|
|
51
|
+
/**
 * Derive the set of touched workspace packages from a list of changed files.
 *
 * Git emits paths with forward slashes on every platform, so paths are split
 * on '/' rather than `path.sep` (the previous version split on `path.sep`,
 * which is '\\' on Windows and never matched git output there, and joined
 * results with OS-specific separators, inconsistent with the POSIX paths
 * used elsewhere). Package identifiers are emitted POSIX-style
 * ('packages/<name>'); the root package.json maps to the sentinel 'root'.
 *
 * @param {string[]} changedFiles - Paths as reported by `git status`
 * @returns {string[]} Unique package identifiers in first-seen order
 */
export function getTouchedPackages(changedFiles) {
    const packages = new Set();
    for (const file of changedFiles) {
        const parts = file.split('/');
        const idx = parts.indexOf('packages');
        if (idx !== -1 && parts.length > idx + 1) {
            packages.add(`packages/${parts[idx + 1]}`);
            continue;
        }
        if (parts[0] === 'package.json') {
            packages.add('root');
        }
    }
    return Array.from(packages);
}
|
|
66
|
+
/**
 * Build the run branch name `agent/<runId>/<slug>` from a free-form slug.
 * The slug is lowercased, non-alphanumeric characters become hyphens, runs
 * of hyphens collapse to one, and edge hyphens are trimmed; if nothing
 * survives, 'task' is used.
 */
export function toRunBranch(runId, slug) {
    let safeSlug = slug.toLowerCase();
    safeSlug = safeSlug.replace(/[^a-z0-9-]/g, '-');
    safeSlug = safeSlug.replace(/-+/g, '-');
    safeSlug = safeSlug.replace(/^-+|-+$/g, '');
    if (!safeSlug) {
        safeSlug = 'task';
    }
    return `agent/${runId}/${safeSlug}`;
}
|
|
74
|
+
/**
 * Assemble the repository context snapshot used at run start: git root,
 * default/current/run branch names, dirty files, and touched packages.
 */
export async function buildRepoContext(repoPath, runId, slug, defaultBranchFallback) {
    const gitRoot = await getGitRoot(repoPath);
    // Branch and status queries all run against the resolved git root.
    const defaultBranch = await getDefaultBranch(gitRoot, defaultBranchFallback);
    const currentBranch = await getCurrentBranch(gitRoot);
    const changedFiles = await listChangedFiles(gitRoot);
    return {
        repo_path: repoPath,
        git_root: gitRoot,
        default_branch: defaultBranch,
        run_branch: toRunBranch(runId, slug),
        current_branch: currentBranch,
        changed_files: changedFiles,
        touched_packages: getTouchedPackages(changedFiles)
    };
}
|
package/dist/repo/git.js
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { execa } from 'execa';
|
|
2
|
+
/** Run a git command in `cwd`; rejects (via execa) on a non-zero exit status. */
export async function git(args, cwd) {
    const { stdout, stderr } = await execa('git', args, { cwd });
    return { stdout, stderr };
}
|
|
6
|
+
/** Run a git command, resolving to null instead of rejecting on any failure. */
export async function gitOptional(args, cwd) {
    return git(args, cwd).catch(() => null);
}
|
|
@@ -0,0 +1,239 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { git, gitOptional } from './git.js';
|
|
4
|
+
/**
 * Resolve the actual git directory backing a worktree.
 * In a linked worktree, `.git` is a FILE containing "gitdir: <path>";
 * in a normal repository, `.git` is the directory itself.
 * Throws with a descriptive message when `.git` is missing or malformed.
 */
function resolveWorktreeGitDir(worktreePath) {
    const dotGitPath = path.join(worktreePath, '.git');
    let stat;
    try {
        stat = fs.statSync(dotGitPath);
    }
    catch (err) {
        if (err.code === 'ENOENT') {
            throw new Error(`Worktree missing .git file/dir at ${dotGitPath} (worktreePath=${worktreePath})`);
        }
        throw err;
    }
    if (stat.isDirectory()) {
        // Normal repository: .git IS the git directory.
        return dotGitPath;
    }
    // Worktree: .git is a file with "gitdir: ..."
    const content = fs.readFileSync(dotGitPath, 'utf8').trim();
    const match = content.match(/^gitdir:\s*(.+)\s*$/i);
    if (match === null) {
        throw new Error(`Unexpected .git file format at ${dotGitPath}: ${content.slice(0, 120)}`);
    }
    const gitdirPath = match[1].trim();
    // gitdir can be relative to worktreePath
    if (path.isAbsolute(gitdirPath)) {
        return gitdirPath;
    }
    return path.resolve(worktreePath, gitdirPath);
}
|
|
37
|
+
/**
 * Add the given patterns to <gitdir>/info/exclude, creating the file (and
 * the info/ directory) if needed. Patterns already present — compared
 * against trimmed, non-comment lines — are not re-added, so repeated calls
 * are idempotent. This prevents env artifacts like node_modules symlinks
 * from showing as untracked.
 */
function upsertInfoExclude(gitdir, patterns) {
    const infoDir = path.join(gitdir, 'info');
    const excludePath = path.join(infoDir, 'exclude');
    fs.mkdirSync(infoDir, { recursive: true });
    const existing = fs.existsSync(excludePath)
        ? fs.readFileSync(excludePath, 'utf8')
        : '';
    // Existing pattern lines (trimmed; blank and comment lines excluded),
    // used purely for duplicate detection.
    const existingLines = new Set(existing
        .split(/\r?\n/)
        .map(l => l.trim())
        .filter(l => l.length > 0 && !l.startsWith('#')));
    // Find patterns we need to add
    const toAdd = patterns.filter(p => !existingLines.has(p));
    if (toAdd.length === 0)
        return;
    // Build new content. Keep the existing file newline-terminated before
    // appending, and emit the marker header only once — the legacy
    // '# agent-framework env ignores' marker also counts, so upgraded repos
    // do not accumulate duplicate headers.
    const needsNewline = existing.length > 0 && !existing.endsWith('\n');
    const header = existing.includes('# runr env ignores') || existing.includes('# agent-framework env ignores')
        ? ''
        : '# runr env ignores\n';
    const addition = (needsNewline ? '\n' : '') + header + toAdd.map(p => `${p}\n`).join('');
    fs.writeFileSync(excludePath, existing + addition, 'utf8');
}
|
|
64
|
+
/**
 * Ensure repository-level git excludes for runr artifacts.
 * Call at run start (before preflight) so paths like .runr/ and
 * .runr-worktrees/ (or legacy .agent/ equivalents) never show as dirty.
 *
 * Writes to the MAIN repo's .git/info/exclude (untracked, so no history
 * pollution). Silently does nothing when repoRoot does not look like a
 * plain git repository (no .git directory).
 *
 * @param repoRoot - The target repository root path
 * @param patterns - Patterns to add (e.g., ['.runr', '.runr/', '.runr-worktrees'])
 */
export function ensureRepoInfoExclude(repoRoot, patterns) {
    const mainGitDir = path.join(repoRoot, '.git');
    const isGitRepo = fs.existsSync(mainGitDir) && fs.statSync(mainGitDir).isDirectory();
    if (isGitRepo) {
        upsertInfoExclude(mainGitDir, patterns);
    }
}
|
|
82
|
+
/**
 * Create a git worktree for isolated run execution.
 *
 * Sequence: capture current HEAD as the base SHA, clear any stale worktree
 * at the target path (from a previously failed run), add the worktree —
 * attached to `runBranch` when given, otherwise detached at the base SHA —
 * verify it starts clean, inject node_modules excludes into the main repo,
 * symlink node_modules for dependency reuse, then verify it is still clean.
 *
 * @param originalRepoPath - The source repository path
 * @param worktreePath - Where to create the worktree (e.g., worktrees/<id>)
 * @param runBranch - Optional branch name to create/use
 * @returns WorktreeInfo with paths and base SHA
 * @throws Error when the new worktree is dirty immediately after creation
 *         or after environment setup
 */
export async function createWorktree(originalRepoPath, worktreePath, runBranch) {
    // Get current HEAD SHA as base
    const headResult = await git(['rev-parse', 'HEAD'], originalRepoPath);
    const baseSha = headResult.stdout.trim();
    // Ensure parent directory exists
    const parentDir = path.dirname(worktreePath);
    if (!fs.existsSync(parentDir)) {
        fs.mkdirSync(parentDir, { recursive: true });
    }
    // Remove existing worktree if present (from failed run)
    if (fs.existsSync(worktreePath)) {
        await gitOptional(['worktree', 'remove', '--force', worktreePath], originalRepoPath);
        // If git worktree remove failed, fall back to deleting the directory
        if (fs.existsSync(worktreePath)) {
            fs.rmSync(worktreePath, { recursive: true, force: true });
        }
    }
    if (runBranch) {
        // Check if branch exists (gitOptional returns null on error/non-existent)
        const branchCheck = await gitOptional(['rev-parse', '--verify', runBranch], originalRepoPath);
        if (branchCheck !== null) {
            // Branch exists, create worktree attached to it
            await git(['worktree', 'add', worktreePath, runBranch], originalRepoPath);
        }
        else {
            // Create new branch and worktree together, rooted at the base SHA
            await git(['worktree', 'add', '-b', runBranch, worktreePath, baseSha], originalRepoPath);
        }
    }
    else {
        // Detached HEAD at base SHA
        await git(['worktree', 'add', '--detach', worktreePath, baseSha], originalRepoPath);
    }
    // Validate worktree was created successfully and is clean
    const statusBefore = await git(['status', '--porcelain'], worktreePath);
    if (statusBefore.stdout.trim().length > 0) {
        throw new Error(`Newly created worktree is not clean:\n${statusBefore.stdout}`);
    }
    // Inject excludes into MAIN repo's .git/info/exclude (git only reads from there, not worktree gitdir)
    // This prevents env artifacts like node_modules symlinks from showing as untracked
    const mainGitDir = path.join(originalRepoPath, '.git');
    upsertInfoExclude(mainGitDir, [
        'node_modules',
        'node_modules/',
        '/node_modules',
    ]);
    // Symlink node_modules from original repo if present (for npm/pnpm projects)
    const originalNodeModules = path.join(originalRepoPath, 'node_modules');
    const worktreeNodeModules = path.join(worktreePath, 'node_modules');
    if (fs.existsSync(originalNodeModules) && !fs.existsSync(worktreeNodeModules)) {
        fs.symlinkSync(originalNodeModules, worktreeNodeModules, 'dir');
    }
    // Sanity check: worktree should still be clean after env setup
    const statusAfter = await git(['status', '--porcelain'], worktreePath);
    if (statusAfter.stdout.trim().length > 0) {
        throw new Error(`Worktree became dirty after env setup:\n${statusAfter.stdout}`);
    }
    return {
        worktree_enabled: true,
        original_repo_path: originalRepoPath,
        effective_repo_path: worktreePath,
        base_sha: baseSha,
        run_branch: runBranch,
        created_at: new Date().toISOString()
    };
}
|
|
156
|
+
/**
 * Validate that a worktree exists on disk and that git recognizes it as a
 * working tree.
 *
 * @param worktreePath - Path to the worktree
 * @returns true if valid, false otherwise
 */
export async function validateWorktree(worktreePath) {
    if (!fs.existsSync(worktreePath)) {
        return false;
    }
    let probe;
    try {
        probe = await gitOptional(['rev-parse', '--is-inside-work-tree'], worktreePath);
    }
    catch {
        return false;
    }
    return probe?.stdout?.trim() === 'true';
}
|
|
174
|
+
/**
 * Remove a worktree cleanly: prefer `git worktree remove --force`, then fall
 * back to deleting the directory if git could not do it. No-op when the
 * worktree path does not exist.
 *
 * @param originalRepoPath - The source repository path
 * @param worktreePath - Path to the worktree to remove
 */
export async function removeWorktree(originalRepoPath, worktreePath) {
    if (!fs.existsSync(worktreePath)) {
        return;
    }
    const removed = await gitOptional(['worktree', 'remove', '--force', worktreePath], originalRepoPath);
    const gitFailed = removed === null;
    if (gitFailed && fs.existsSync(worktreePath)) {
        fs.rmSync(worktreePath, { recursive: true, force: true });
    }
}
|
|
191
|
+
/**
 * Recreate a worktree from saved info (for resume).
 *
 * If the saved worktree still validates it is reused as-is (after
 * re-ensuring the node_modules excludes in the main repo). A branch
 * mismatch aborts unless `force` is set, in which case the mismatch is
 * reported in the result. Otherwise the worktree is recreated from the
 * original repository.
 *
 * @param info - Saved worktree info from config snapshot
 * @param force - Allow reuse despite branch mismatch
 * @returns Result with updated info and flags, or throws if recreation fails
 */
export async function recreateWorktree(info, force = false) {
    // Check if worktree already exists and is valid
    if (await validateWorktree(info.effective_repo_path)) {
        // Ensure excludes are present in MAIN repo (upgrades old worktrees created before this fix)
        const mainGitDir = path.join(info.original_repo_path, '.git');
        upsertInfoExclude(mainGitDir, ['node_modules', 'node_modules/', '/node_modules']);
        // Verify branch matches if one was specified
        if (info.run_branch) {
            const currentBranchResult = await gitOptional(['rev-parse', '--abbrev-ref', 'HEAD'], info.effective_repo_path);
            const currentBranch = currentBranchResult?.stdout?.trim();
            if (currentBranch && currentBranch !== info.run_branch) {
                if (!force) {
                    throw new Error(`Branch mismatch: worktree is on '${currentBranch}' but run was on '${info.run_branch}'. ` +
                        `Use --force to override.`);
                }
                console.warn(`WARNING: Branch mismatch (expected '${info.run_branch}', found '${currentBranch}'). Continuing due to --force.`);
                return {
                    info,
                    recreated: false,
                    branchMismatch: true,
                    nodeModulesSymlinked: fs.existsSync(path.join(info.effective_repo_path, 'node_modules'))
                };
            }
        }
        // Valid worktree on the expected branch (or no branch recorded): reuse it.
        return {
            info,
            recreated: false,
            branchMismatch: false,
            nodeModulesSymlinked: fs.existsSync(path.join(info.effective_repo_path, 'node_modules'))
        };
    }
    // Recreate from original repo
    const recreatedInfo = await createWorktree(info.original_repo_path, info.effective_repo_path, info.run_branch);
    const nodeModulesPath = path.join(info.effective_repo_path, 'node_modules');
    return {
        info: recreatedInfo,
        recreated: true,
        branchMismatch: false,
        nodeModulesSymlinked: fs.existsSync(nodeModulesPath)
    };
}
|
|
@@ -0,0 +1,107 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { getRunsRoot } from './runs-root.js';
|
|
4
|
+
/**
 * Filesystem-backed store for a single run's artifacts: an append-only
 * JSONL timeline with a monotonically increasing sequence counter, plus
 * plan/state/summary/fingerprint documents and artifact/handoff files.
 */
export class RunStore {
    runDir;
    timelinePath;
    seqPath;
    lastEvent = null;
    lastWorkerCall = null;
    constructor(runDir) {
        this.runDir = runDir;
        this.timelinePath = path.join(runDir, 'timeline.jsonl');
        this.seqPath = path.join(runDir, 'seq.txt');
    }
    /**
     * Initialize a RunStore for a new or existing run, creating the run
     * directory layout (artifacts/, handoffs/, timeline.jsonl) if missing.
     * @param runId - The run ID (timestamp format)
     * @param repoPath - The target repository path (required)
     */
    static init(runId, repoPath) {
        const runDir = path.join(getRunsRoot(repoPath), runId);
        // Creating the subdirectories recursively also creates runDir itself.
        for (const sub of ['artifacts', 'handoffs']) {
            fs.mkdirSync(path.join(runDir, sub), { recursive: true });
        }
        const timeline = path.join(runDir, 'timeline.jsonl');
        if (!fs.existsSync(timeline)) {
            fs.writeFileSync(timeline, '');
        }
        return new RunStore(runDir);
    }
    /** Absolute path of the run directory. */
    get path() {
        return this.runDir;
    }
    // Write raw content at a path relative to the run directory.
    #write(relative, content) {
        fs.writeFileSync(path.join(this.runDir, relative), content);
    }
    // Write a value as pretty-printed JSON at a run-relative path.
    #writeJson(relative, value) {
        this.#write(relative, JSON.stringify(value, null, 2));
    }
    /** Persist the effective configuration used by this run. */
    writeConfigSnapshot(config) {
        this.#writeJson('config.snapshot.json', config);
    }
    /** Persist the plan document (markdown). */
    writePlan(content) {
        this.#write('plan.md', content);
    }
    /** Persist the run state machine snapshot. */
    writeState(state) {
        this.#writeJson('state.json', state);
    }
    /** Read back the run state; throws if state.json does not exist. */
    readState() {
        const target = path.join(this.runDir, 'state.json');
        return JSON.parse(fs.readFileSync(target, 'utf-8'));
    }
    /** Persist the run summary (markdown). */
    writeSummary(content) {
        this.#write('summary.md', content);
    }
    /** Persist a named file under artifacts/. */
    writeArtifact(name, content) {
        this.#write(path.join('artifacts', name), content);
    }
    /** Persist a named handoff memo under handoffs/. */
    writeMemo(name, content) {
        this.#write(path.join('handoffs', name), content);
    }
    /** Persist the environment fingerprint. */
    writeFingerprint(fingerprint) {
        this.#writeJson('env.fingerprint.json', fingerprint);
    }
    /** Read the environment fingerprint, or null when absent. */
    readFingerprint() {
        const target = path.join(this.runDir, 'env.fingerprint.json');
        if (!fs.existsSync(target)) {
            return null;
        }
        return JSON.parse(fs.readFileSync(target, 'utf-8'));
    }
    /**
     * Append an event to the timeline, stamping it with the next sequence
     * number and the current ISO timestamp. Returns the stamped event.
     */
    appendEvent(event) {
        const full = {
            ...event,
            seq: this.nextSeq(),
            timestamp: new Date().toISOString()
        };
        fs.appendFileSync(this.timelinePath, `${JSON.stringify(full)}\n`);
        this.lastEvent = full;
        return full;
    }
    /** Most recently appended event (in-memory; null before any append). */
    getLastEvent() {
        return this.lastEvent;
    }
    /** Record metadata about the most recent worker invocation. */
    recordWorkerCall(info) {
        this.lastWorkerCall = info;
    }
    /** Most recently recorded worker call, or null. */
    getLastWorkerCall() {
        return this.lastWorkerCall;
    }
    /**
     * Advance and persist the sequence counter in seq.txt, returning the new
     * value. A missing/empty/unparseable file restarts the count at 1.
     * NOTE(review): this read-modify-write is not atomic — presumably each
     * run directory has a single writer; confirm before sharing a RunStore
     * across processes.
     */
    nextSeq() {
        let current = 0;
        if (fs.existsSync(this.seqPath)) {
            const raw = fs.readFileSync(this.seqPath, 'utf-8').trim();
            if (raw) {
                current = Number.parseInt(raw, 10) || 0;
            }
        }
        const next = current + 1;
        fs.writeFileSync(this.seqPath, String(next));
        return next;
    }
}
|
|
@@ -0,0 +1,69 @@
|
|
|
1
|
+
import fs from 'node:fs';
|
|
2
|
+
import path from 'node:path';
|
|
3
|
+
import { getRunsRoot } from './runs-root.js';
|
|
4
|
+
/**
 * Find the most recent run ID by scanning the runs directory.
 * Run IDs are 14-digit timestamps (YYYYMMDDHHmmss), so the lexicographically
 * largest directory name is the newest run.
 * @param repoPath - The target repository path (defaults to current working directory)
 * @returns The latest run ID or null if no runs exist
 */
export function findLatestRunId(repoPath = process.cwd()) {
    const rootDir = getRunsRoot(repoPath);
    if (!fs.existsSync(rootDir)) {
        return null;
    }
    const runIds = fs
        .readdirSync(rootDir, { withFileTypes: true })
        .filter((entry) => entry.isDirectory() && /^\d{14}$/.test(entry.name))
        .map((entry) => entry.name);
    if (runIds.length === 0) {
        return null;
    }
    runIds.sort();
    return runIds[runIds.length - 1];
}
|
|
23
|
+
/**
 * Resolve a run ID, supporting 'latest' as a special value, and verify the
 * resolved run directory actually exists on disk.
 * @param runId - The run ID or 'latest'
 * @param repoPath - The target repository path (defaults to current working directory)
 * @returns The resolved run ID
 * @throws Error if 'latest' is specified but no runs exist
 * @throws Error if the specified run directory does not exist
 */
export function resolveRunId(runId, repoPath = process.cwd()) {
    let resolvedId = runId;
    if (runId === 'latest') {
        const latest = findLatestRunId(repoPath);
        if (!latest) {
            throw new Error('No runs found. Run a task first with `agent run`.');
        }
        resolvedId = latest;
    }
    const runDir = path.join(getRunsRoot(repoPath), resolvedId);
    if (!fs.existsSync(runDir)) {
        // Include recent run IDs in the error for a more helpful message.
        const knownRuns = listRecentRunIds(repoPath, 5);
        const hint = knownRuns.length > 0 ? `Known runs: ${knownRuns.join(', ')}` : 'No runs found.';
        throw new Error(`Run not found: ${resolvedId}. ${hint}`);
    }
    return resolvedId;
}
|
|
51
|
+
/**
 * List recent run IDs, most recent first (run IDs are sortable timestamps).
 * @param repoPath - The target repository path (defaults to current working directory)
 * @param limit - Maximum number of runs to return
 * @returns Array of run IDs
 */
export function listRecentRunIds(repoPath = process.cwd(), limit = 10) {
    const rootDir = getRunsRoot(repoPath);
    if (!fs.existsSync(rootDir)) {
        return [];
    }
    const runIds = fs
        .readdirSync(rootDir, { withFileTypes: true })
        .filter((entry) => entry.isDirectory() && /^\d{14}$/.test(entry.name))
        .map((entry) => entry.name);
    runIds.sort();
    runIds.reverse();
    return runIds.slice(0, limit);
}
|