everything-claude-code 1.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +739 -0
- package/README.zh-CN.md +523 -0
- package/crates/ecc-kernel/Cargo.lock +160 -0
- package/crates/ecc-kernel/Cargo.toml +15 -0
- package/crates/ecc-kernel/src/main.rs +710 -0
- package/docs/ecc.md +117 -0
- package/package.json +45 -0
- package/packs/blueprint.json +8 -0
- package/packs/forge.json +16 -0
- package/packs/instinct.json +16 -0
- package/packs/orchestra.json +15 -0
- package/packs/proof.json +8 -0
- package/packs/sentinel.json +8 -0
- package/prompts/ecc/patch.md +25 -0
- package/prompts/ecc/plan.md +28 -0
- package/schemas/ecc.apply.schema.json +35 -0
- package/schemas/ecc.config.schema.json +37 -0
- package/schemas/ecc.lock.schema.json +34 -0
- package/schemas/ecc.patch.schema.json +25 -0
- package/schemas/ecc.plan.schema.json +32 -0
- package/schemas/ecc.run.schema.json +67 -0
- package/schemas/ecc.verify.schema.json +27 -0
- package/schemas/hooks.schema.json +81 -0
- package/schemas/package-manager.schema.json +17 -0
- package/schemas/plugin.schema.json +13 -0
- package/scripts/ecc/catalog.js +82 -0
- package/scripts/ecc/config.js +43 -0
- package/scripts/ecc/diff.js +113 -0
- package/scripts/ecc/exec.js +121 -0
- package/scripts/ecc/fixtures/basic/patches/impl-core.diff +8 -0
- package/scripts/ecc/fixtures/basic/patches/tests.diff +8 -0
- package/scripts/ecc/fixtures/basic/plan.json +23 -0
- package/scripts/ecc/fixtures/unauthorized/patches/impl-core.diff +8 -0
- package/scripts/ecc/fixtures/unauthorized/plan.json +15 -0
- package/scripts/ecc/git.js +139 -0
- package/scripts/ecc/id.js +37 -0
- package/scripts/ecc/install-kernel.js +344 -0
- package/scripts/ecc/json-extract.js +301 -0
- package/scripts/ecc/json.js +26 -0
- package/scripts/ecc/kernel.js +144 -0
- package/scripts/ecc/lock.js +36 -0
- package/scripts/ecc/paths.js +28 -0
- package/scripts/ecc/plan.js +57 -0
- package/scripts/ecc/project.js +37 -0
- package/scripts/ecc/providers/codex.js +168 -0
- package/scripts/ecc/providers/index.js +23 -0
- package/scripts/ecc/providers/mock.js +49 -0
- package/scripts/ecc/report.js +127 -0
- package/scripts/ecc/run.js +105 -0
- package/scripts/ecc/validate.js +325 -0
- package/scripts/ecc/verify.js +125 -0
- package/scripts/ecc.js +532 -0
- package/scripts/lib/package-manager.js +390 -0
- package/scripts/lib/session-aliases.js +432 -0
- package/scripts/lib/session-manager.js +396 -0
- package/scripts/lib/utils.js +426 -0
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
{
|
|
2
|
+
"$schema": "http://json-schema.org/draft-07/schema#",
|
|
3
|
+
"title": "Claude Plugin Configuration",
|
|
4
|
+
"type": "object",
|
|
5
|
+
"required": ["name"],
|
|
6
|
+
"properties": {
|
|
7
|
+
"name": { "type": "string" },
|
|
8
|
+
"description": { "type": "string" },
|
|
9
|
+
"author": { "type": "string" },
|
|
10
|
+
"repository": { "type": "string" },
|
|
11
|
+
"license": { "type": "string" }
|
|
12
|
+
}
|
|
13
|
+
}
|
|
@@ -0,0 +1,82 @@
|
|
|
1
|
+
const crypto = require('crypto');
|
|
2
|
+
const fs = require('fs');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
|
|
5
|
+
const { packsDir, promptsDir, ENGINE_ROOT } = require('./paths');
|
|
6
|
+
|
|
7
|
+
/**
 * List absolute paths of regular files in `dirAbs` whose name ends with
 * `ext`, sorted lexicographically. Returns [] when the directory is missing.
 * Shared by listJsonFiles/listPromptFiles, which previously duplicated it.
 * @param {string} dirAbs - absolute directory path
 * @param {string} ext - required filename suffix (e.g. '.json')
 * @returns {string[]} sorted absolute file paths
 */
function listFilesWithExt(dirAbs, ext) {
  if (!fs.existsSync(dirAbs)) return [];
  return fs
    .readdirSync(dirAbs, { withFileTypes: true })
    .filter(e => e.isFile() && e.name.endsWith(ext))
    .map(e => path.join(dirAbs, e.name))
    .sort();
}

/** Sorted absolute paths of *.json files in `dirAbs` ([] if dir missing). */
function listJsonFiles(dirAbs) {
  return listFilesWithExt(dirAbs, '.json');
}

/** Sorted absolute paths of *.md prompt files in `dirAbs` ([] if dir missing). */
function listPromptFiles(dirAbs) {
  return listFilesWithExt(dirAbs, '.md');
}

/**
 * Read and parse a UTF-8 JSON file.
 * @param {string} filePath
 * @returns {*} parsed JSON value
 * @throws {SyntaxError} when the file contents are not valid JSON
 */
function readJson(filePath) {
  return JSON.parse(fs.readFileSync(filePath, 'utf8'));
}
|
|
29
|
+
|
|
30
|
+
/**
 * Load every pack definition from the packs directory.
 * Each *.json file becomes one normalized record (missing fields default to
 * empty string/array); the result is sorted by pack id.
 * @returns {{id:string,name:string,description:string,tags:string[],modules:string[],path:string}[]}
 */
function loadPacks() {
  const onlyStrings = arr => (Array.isArray(arr) ? arr.filter(v => typeof v === 'string') : []);

  const packs = listJsonFiles(packsDir()).map(filePath => {
    const data = readJson(filePath);
    return {
      id: String(data.id || '').trim(),
      name: String(data.name || '').trim(),
      description: String(data.description || '').trim(),
      tags: onlyStrings(data.tags),
      modules: onlyStrings(data.modules),
      path: filePath
    };
  });

  return packs.sort((a, b) => a.id.localeCompare(b.id));
}

/** Pack ids enabled by default when the user does not pick explicitly. */
function getDefaultPacks() {
  return ['blueprint', 'forge', 'proof', 'sentinel'];
}
|
|
53
|
+
|
|
54
|
+
/**
|
|
55
|
+
* Compute a digest for the embedded catalog.
|
|
56
|
+
*
|
|
57
|
+
* P0 definition: hash packs/*.json + prompts/ecc/*.md.
|
|
58
|
+
*/
|
|
59
|
+
function computeEmbeddedCatalogDigest() {
|
|
60
|
+
const hash = crypto.createHash('sha256');
|
|
61
|
+
|
|
62
|
+
const packFiles = listJsonFiles(packsDir());
|
|
63
|
+
const promptFiles = listPromptFiles(promptsDir());
|
|
64
|
+
const all = [...packFiles, ...promptFiles].sort();
|
|
65
|
+
|
|
66
|
+
for (const filePath of all) {
|
|
67
|
+
const rel = path.relative(ENGINE_ROOT, filePath).split(path.sep).join('/');
|
|
68
|
+
hash.update(rel);
|
|
69
|
+
hash.update('\n');
|
|
70
|
+
hash.update(fs.readFileSync(filePath));
|
|
71
|
+
hash.update('\n');
|
|
72
|
+
}
|
|
73
|
+
|
|
74
|
+
return `sha256:${hash.digest('hex')}`;
|
|
75
|
+
}
|
|
76
|
+
|
|
77
|
+
module.exports = {
|
|
78
|
+
loadPacks,
|
|
79
|
+
getDefaultPacks,
|
|
80
|
+
computeEmbeddedCatalogDigest
|
|
81
|
+
};
|
|
82
|
+
|
|
@@ -0,0 +1,43 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
|
|
3
|
+
const { readJson, writeJson } = require('./json');
|
|
4
|
+
const { configPath } = require('./project');
|
|
5
|
+
const { validateConfig, throwIfErrors } = require('./validate');
|
|
6
|
+
|
|
7
|
+
/** @returns {string} current time as an ISO-8601 UTC timestamp */
function nowIso() {
  return new Date().toISOString();
}

/** Default verification settings for a freshly created config. */
function defaultVerifyConfig() {
  return { mode: 'auto' };
}

/**
 * Build a brand-new ecc config object (schema version 1).
 * @param {{backend: string, packs: string[]}} opts - chosen backend and pack ids
 * @returns {object} config with default verify settings and a creation timestamp
 */
function createConfig({ backend, packs }) {
  const verify = defaultVerifyConfig();
  const createdAt = nowIso();
  return { version: 1, backend, packs, verify, createdAt };
}
|
|
24
|
+
|
|
25
|
+
function loadConfig(projectRoot) {
|
|
26
|
+
const p = configPath(projectRoot);
|
|
27
|
+
if (!fs.existsSync(p)) return null;
|
|
28
|
+
const cfg = readJson(p);
|
|
29
|
+
throwIfErrors(validateConfig(cfg), 'ecc config');
|
|
30
|
+
return cfg;
|
|
31
|
+
}
|
|
32
|
+
|
|
33
|
+
function saveConfig(projectRoot, cfg) {
|
|
34
|
+
throwIfErrors(validateConfig(cfg), 'ecc config');
|
|
35
|
+
writeJson(configPath(projectRoot), cfg);
|
|
36
|
+
}
|
|
37
|
+
|
|
38
|
+
module.exports = {
|
|
39
|
+
createConfig,
|
|
40
|
+
loadConfig,
|
|
41
|
+
saveConfig
|
|
42
|
+
};
|
|
43
|
+
|
|
@@ -0,0 +1,113 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
const { spawnSync } = require('child_process');
|
|
5
|
+
|
|
6
|
+
const { runKernel } = require('./kernel');
|
|
7
|
+
|
|
8
|
+
/**
 * Run `git` synchronously and capture its output.
 * stdin is ignored; stdout/stderr are captured as UTF-8 and right-trimmed.
 * @param {string[]} args - arguments passed to git
 * @param {object} [opts] - extra spawnSync options (override defaults)
 * @returns {{ok: boolean, status: number|null, stdout: string, stderr: string}}
 */
function runGit(args, opts = {}) {
  const spawnOpts = {
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'pipe'],
    ...opts
  };
  const res = spawnSync('git', args, spawnOpts);
  const stdout = (res.stdout || '').trimEnd();
  const stderr = (res.stderr || '').trimEnd();
  return { ok: res.status === 0, status: res.status, stdout, stderr };
}
|
|
21
|
+
|
|
22
|
+
/**
 * Normalize a path taken from a patch into a safe repo-relative POSIX path.
 *
 * Backslashes become '/', then the path is normalized and rejected when it
 * is absolute (POSIX or Windows drive-letter style), empty ('.'), or
 * escapes the repo root via '..' segments.
 * @param {string} p - raw path from a diff header
 * @returns {string|null} normalized relative path, or null when unsafe
 */
function normalizeRepoPath(p) {
  const posix = String(p || '').replace(/\\/g, '/');
  // Reject absolute paths outright (POSIX "/..." and Windows "C:/...").
  if (posix.startsWith('/') || /^[A-Za-z]:\//.test(posix)) return null;
  const norm = path.posix.normalize(posix);
  // Reject empty paths and anything still escaping the root after
  // normalization. The bare '..' case must be checked explicitly: it
  // neither starts with '../' nor contains '/../', so the previous two
  // checks alone let it through.
  if (norm === '.' || norm === '..' || norm.startsWith('../') || norm.includes('/../')) return null;
  return norm;
}

/**
 * Extract the set of files a unified diff touches.
 *
 * Only `diff --git a/<old> b/<new>` header lines are consulted. The b-side
 * path wins unless it is /dev/null (deletion), in which case the a-side is
 * used. Each valid file appears once; paths that fail safety normalization
 * are reported with `invalid: true` rather than silently dropped so the
 * caller can reject the patch.
 * @param {string} patchText - full unified-diff text
 * @returns {{path: string, invalid: boolean}[]}
 */
function touchedFilesFromUnifiedDiff(patchText) {
  const headerRe = /^diff --git a\/(.+?) b\/(.+)$/;
  const files = [];
  const seen = new Set();
  for (const line of String(patchText || '').split(/\r?\n/)) {
    if (!line.startsWith('diff --git ')) continue;
    // Typical: diff --git a/foo/bar b/foo/bar
    const m = headerRe.exec(line);
    if (!m) continue;
    const [, aPath, bPath] = m;
    const candidate = bPath === '/dev/null' ? aPath : bPath;
    const normalized = normalizeRepoPath(candidate);
    if (normalized === null) {
      files.push({ path: candidate, invalid: true });
    } else if (!seen.has(normalized)) {
      seen.add(normalized);
      files.push({ path: normalized, invalid: false });
    }
  }
  return files;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Assert that every touched file falls under one of the allowed prefixes.
 *
 * Prefixes are compared as POSIX directory prefixes (a trailing '/' is
 * appended when missing); a file exactly equal to a prefix (minus the
 * slash) is also accepted. All violations are aggregated into one error.
 * @param {{touchedFiles: {path: string, invalid: boolean}[], allowedPathPrefixes: string[]}} args
 * @throws {Error} when allowedPathPrefixes is empty or any file is outside them
 */
function ensureOwned({ touchedFiles, allowedPathPrefixes }) {
  const prefixes = (Array.isArray(allowedPathPrefixes) ? allowedPathPrefixes : [])
    .map(p => String(p || '').replace(/\\/g, '/'))
    .map(p => (p.endsWith('/') ? p : `${p}/`))
    .filter(Boolean);

  if (prefixes.length === 0) throw new Error('allowedPathPrefixes is empty');

  const violations = touchedFiles.reduce((acc, f) => {
    if (f.invalid) {
      acc.push(`invalid path in patch: ${f.path}`);
      return acc;
    }
    const owned = prefixes.some(
      prefix => f.path === prefix.slice(0, -1) || f.path.startsWith(prefix)
    );
    if (!owned) acc.push(`unauthorized path: ${f.path}`);
    return acc;
  }, []);

  if (violations.length > 0) {
    throw new Error(`patch ownership check failed:\n- ${violations.join('\n- ')}`);
  }
}
|
|
77
|
+
|
|
78
|
+
/**
 * Apply a unified-diff patch file inside a worktree, enforcing path ownership.
 *
 * Fast path: delegate to the native ecc kernel ('patch.apply') and trust its
 * touchedFiles result when present. Otherwise fall back to pure JS: parse
 * the diff headers, verify every touched path sits under
 * allowedPathPrefixes, dry-run with `git apply --check`, then apply.
 * @param {{worktreePath: string, patchPath: string, allowedPathPrefixes: string[]}} args
 * @returns {{touchedFiles: string[]}} repo-relative paths the patch touched
 * @throws {Error} on malformed patches, ownership violations, or git failures
 */
function applyPatch({ worktreePath, patchPath, allowedPathPrefixes }) {
  // Prefer the native kernel when it is installed and answers.
  const kernelOut = runKernel('patch.apply', {
    worktreePath,
    patchPath,
    allowedPathPrefixes: Array.isArray(allowedPathPrefixes) ? allowedPathPrefixes : []
  });
  if (kernelOut && Array.isArray(kernelOut.touchedFiles)) {
    return { touchedFiles: kernelOut.touchedFiles };
  }

  // JS fallback. An empty/whitespace-only patch is a no-op, not an error.
  const patchText = fs.readFileSync(patchPath, 'utf8');
  const trimmed = patchText.trim();
  if (!trimmed) {
    return { touchedFiles: [] };
  }

  const touched = touchedFilesFromUnifiedDiff(patchText);
  if (!touched.length) {
    throw new Error('patch has content but no "diff --git" headers (not a unified diff?)');
  }

  // Reject the whole patch before touching the tree if any file is outside
  // the allowed prefixes (or failed path normalization).
  ensureOwned({ touchedFiles: touched, allowedPathPrefixes });

  // Dry-run first so a partially-applicable patch never mutates the tree.
  let res = runGit(['-C', worktreePath, 'apply', '--check', patchPath]);
  if (!res.ok) throw new Error(res.stderr || 'git apply --check failed');

  res = runGit(['-C', worktreePath, 'apply', patchPath]);
  if (!res.ok) throw new Error(res.stderr || 'git apply failed');

  return { touchedFiles: touched.map(t => t.path) };
}

module.exports = {
  touchedFilesFromUnifiedDiff,
  applyPatch
};
|
|
@@ -0,0 +1,121 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const path = require('path');
|
|
3
|
+
|
|
4
|
+
const { readJson, writeJson } = require('./json');
|
|
5
|
+
const { runPaths, loadRun, saveRun } = require('./run');
|
|
6
|
+
const git = require('./git');
|
|
7
|
+
const diff = require('./diff');
|
|
8
|
+
const { validateApplyResult, throwIfErrors } = require('./validate');
|
|
9
|
+
|
|
10
|
+
/** @returns {string} current time as an ISO-8601 UTC timestamp */
function nowIso() {
  return new Date().toISOString();
}

/**
 * Order tasks so every task appears after all of its dependencies
 * (depth-first topological sort; input objects are returned, not copied).
 * @param {{id: string, dependsOn?: string[]}[]} tasks
 * @returns {object[]} the same task objects in dependency order
 * @throws {Error} on a dependency cycle, or on a dependsOn id that matches
 *   no task (previously this crashed with an opaque TypeError when
 *   byId.get() returned undefined)
 */
function topoSortTasks(tasks) {
  const byId = new Map(tasks.map(t => [t.id, t]));
  const visited = new Set();
  const visiting = new Set(); // nodes on the current DFS path, for cycle detection
  const out = [];

  function visit(id) {
    if (visited.has(id)) return;
    if (visiting.has(id)) throw new Error(`cycle detected at task "${id}"`);
    const t = byId.get(id);
    // Fail loudly on a dangling reference instead of letting the traversal
    // blow up reading `.dependsOn` of undefined.
    if (!t) throw new Error(`unknown task dependency "${id}"`);
    visiting.add(id);
    const deps = Array.isArray(t.dependsOn) ? t.dependsOn : [];
    for (const dep of deps) visit(dep);
    visiting.delete(id);
    visited.add(id);
    out.push(t);
  }

  for (const t of tasks) visit(t.id);
  return out;
}
|
|
35
|
+
|
|
36
|
+
/**
 * Execute a planned run: materialize a worktree, have the provider generate
 * one patch per task (in dependency order), apply each patch, and persist
 * an apply-result report.
 *
 * On any failure the partial applyResult is written to apply.json before
 * rethrowing, so the report reflects how far execution got.
 * @param {{projectRoot: string, runId: string, provider: {generatePatch: Function}, worktreeRoot?: string}} args
 * @returns {Promise<{worktreePath, applyResult, run, plan, paths, repoRoot}>}
 * @throws {Error} on unknown run, missing plan, non-git project, provider
 *   misbehavior, or patch application failure
 */
async function execRun({ projectRoot, runId, provider, worktreeRoot }) {
  const run = loadRun(projectRoot, runId);
  if (!run) throw new Error(`unknown runId: ${runId}`);

  const paths = runPaths(projectRoot, runId);
  if (!fs.existsSync(paths.planJson)) throw new Error(`missing plan.json (run ecc plan first): ${paths.planJson}`);
  const plan = readJson(paths.planJson);

  const repoRoot = git.getRepoRoot(projectRoot);
  if (!repoRoot) throw new Error('ecc exec requires a git repository');

  // Reuse base sha / branch recorded on the run when present; otherwise
  // pin to current HEAD and a run-scoped branch name.
  const baseSha = run.base && run.base.sha ? run.base.sha : git.getHeadSha(repoRoot);
  const branch = run.worktree && run.worktree.branch ? run.worktree.branch : `ecc/${runId}`;

  // Reuse a previously created worktree only if its directory still exists.
  const desiredWorktreePath =
    run.worktree && run.worktree.path && fs.existsSync(run.worktree.path)
      ? run.worktree.path
      : git.defaultWorktreePath({ repoRoot, runId, worktreeRoot });

  const worktreePath = git.ensureWorktree({
    repoRoot,
    worktreePath: desiredWorktreePath,
    branch,
    baseSha
  });

  run.status = 'executing';
  // NOTE(review): assumes run.worktree is always an object here — confirm
  // loadRun/plan creation guarantees it, else these writes throw.
  run.worktree.path = worktreePath;
  run.worktree.branch = branch;
  saveRun(projectRoot, runId, run);

  // Dependency order: a task's patch is applied after all of its deps.
  const ordered = topoSortTasks(plan.tasks);

  const applyResult = {
    version: 1,
    appliedAt: nowIso(),
    baseSha,
    tasks: []
  };

  for (const task of ordered) {
    const patchPath = path.join(paths.patchesDir, `${task.id}.diff`);
    const patchOut = await provider.generatePatch({
      task,
      repoRoot: worktreePath,
      packs: run.packs,
      patchPath
    });

    // Providers may return the patch text inline and/or write it to disk.
    const patch = patchOut && typeof patchOut.patch === 'string' ? patchOut.patch : null;
    const patchOutPath = patchOut && typeof patchOut.patchPath === 'string' ? patchOut.patchPath : null;

    // Refuse a provider that wrote somewhere other than the path we gave it.
    if (patchOutPath && path.resolve(patchOutPath) !== path.resolve(patchPath)) {
      applyResult.tasks.push({ id: task.id, patchPath, ok: false, error: 'provider returned unexpected patchPath' });
      writeJson(paths.applyJson, applyResult);
      throw new Error(`provider returned unexpected patchPath for task: ${task.id}`);
    }

    if (patch !== null) {
      // Normalize to a trailing newline; git apply is picky about it.
      fs.writeFileSync(patchPath, patch.endsWith('\n') ? patch : patch + '\n', 'utf8');
    } else if (!fs.existsSync(patchPath)) {
      applyResult.tasks.push({ id: task.id, patchPath, ok: false, error: 'provider did not produce patch' });
      writeJson(paths.applyJson, applyResult);
      throw new Error(`provider did not produce patch for task: ${task.id}`);
    }

    try {
      // Enforces task.allowedPathPrefixes ownership before touching files.
      diff.applyPatch({ worktreePath, patchPath, allowedPathPrefixes: task.allowedPathPrefixes });
      applyResult.tasks.push({ id: task.id, patchPath, ok: true });
    } catch (err) {
      const msg = err && err.message ? err.message : String(err);
      applyResult.tasks.push({ id: task.id, patchPath, ok: false, error: msg });
      // Persist the partial report before propagating the failure.
      writeJson(paths.applyJson, applyResult);
      throw err;
    }
  }

  throwIfErrors(validateApplyResult(applyResult), 'apply result');
  writeJson(paths.applyJson, applyResult);

  return { worktreePath, applyResult, run, plan, paths, repoRoot };
}

module.exports = {
  execRun
};
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 1,
|
|
3
|
+
"intent": "demo",
|
|
4
|
+
"tasks": [
|
|
5
|
+
{
|
|
6
|
+
"id": "impl-core",
|
|
7
|
+
"title": "Implement core demo change",
|
|
8
|
+
"kind": "patch",
|
|
9
|
+
"dependsOn": [],
|
|
10
|
+
"allowedPathPrefixes": ["src/"],
|
|
11
|
+
"prompt": "Create a demo marker file at src/ecc-demo.txt with a single line: \"ecc-demo: ok\"."
|
|
12
|
+
},
|
|
13
|
+
{
|
|
14
|
+
"id": "tests",
|
|
15
|
+
"title": "Add demo smoke test artifact",
|
|
16
|
+
"kind": "patch",
|
|
17
|
+
"dependsOn": ["impl-core"],
|
|
18
|
+
"allowedPathPrefixes": ["tests/"],
|
|
19
|
+
"prompt": "Create a demo marker file at tests/ecc-demo.txt with a single line: \"ecc-demo: ok\"."
|
|
20
|
+
}
|
|
21
|
+
]
|
|
22
|
+
}
|
|
23
|
+
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
{
|
|
2
|
+
"version": 1,
|
|
3
|
+
"intent": "unauthorized-demo",
|
|
4
|
+
"tasks": [
|
|
5
|
+
{
|
|
6
|
+
"id": "impl-core",
|
|
7
|
+
"title": "Unauthorized patch demo",
|
|
8
|
+
"kind": "patch",
|
|
9
|
+
"dependsOn": [],
|
|
10
|
+
"allowedPathPrefixes": ["src/"],
|
|
11
|
+
"prompt": "Attempt to write outside allowedPathPrefixes (should be blocked by executor)."
|
|
12
|
+
}
|
|
13
|
+
]
|
|
14
|
+
}
|
|
15
|
+
|
|
@@ -0,0 +1,139 @@
|
|
|
1
|
+
const fs = require('fs');
|
|
2
|
+
const os = require('os');
|
|
3
|
+
const path = require('path');
|
|
4
|
+
const { spawnSync } = require('child_process');
|
|
5
|
+
|
|
6
|
+
const { runKernel } = require('./kernel');
|
|
7
|
+
|
|
8
|
+
/**
 * Spawn `git` synchronously with captured, right-trimmed UTF-8 output.
 * stdin is closed ('ignore'); callers inspect `ok`/`stdout`/`stderr`.
 * @param {string[]} args - git arguments
 * @param {object} [opts] - spawnSync option overrides
 * @returns {{ok: boolean, status: number|null, stdout: string, stderr: string}}
 */
function runGit(args, opts = {}) {
  const defaults = {
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'pipe']
  };
  const res = spawnSync('git', args, { ...defaults, ...opts });
  return {
    ok: res.status === 0,
    status: res.status,
    stdout: (res.stdout || '').trimEnd(),
    stderr: (res.stderr || '').trimEnd()
  };
}
|
|
21
|
+
|
|
22
|
+
/**
 * Resolve the repository toplevel directory for `cwd`.
 * @returns {string|null} absolute repo root, or null when not inside a repo
 */
function getRepoRoot(cwd) {
  const res = runGit(['-C', cwd, 'rev-parse', '--show-toplevel']);
  return res.ok ? res.stdout.trim() : null;
}

/**
 * Sha of the current HEAD commit.
 * @throws {Error} when rev-parse fails (e.g. empty repo)
 */
function getHeadSha(repoRoot) {
  const res = runGit(['-C', repoRoot, 'rev-parse', 'HEAD']);
  if (!res.ok) throw new Error(res.stderr || 'git rev-parse HEAD failed');
  return res.stdout.trim();
}

/**
 * Short name of the currently checked-out branch (may be "HEAD" when detached).
 * @throws {Error} when rev-parse fails
 */
function getCurrentBranch(repoRoot) {
  const res = runGit(['-C', repoRoot, 'rev-parse', '--abbrev-ref', 'HEAD']);
  if (!res.ok) throw new Error(res.stderr || 'git rev-parse --abbrev-ref HEAD failed');
  return res.stdout.trim();
}

/**
 * True when the repo has no staged or unstaged changes.
 * Untracked files are ignored; they won't be part of the worktree branch anyway.
 * @throws {Error} when git status fails
 */
function isClean(repoRoot) {
  const res = runGit(['-C', repoRoot, 'status', '--porcelain', '--untracked-files=no']);
  if (!res.ok) throw new Error(res.stderr || 'git status --porcelain failed');
  return res.stdout.trim() === '';
}

/** True when a local branch with this exact name exists. */
function branchExists(repoRoot, branch) {
  const res = runGit(['-C', repoRoot, 'show-ref', '--verify', '--quiet', `refs/heads/${branch}`]);
  return res.status === 0;
}

/**
 * Create `branch` pointing at `baseSha` unless it already exists (idempotent).
 * @throws {Error} when branch creation fails
 */
function ensureBranchAt(repoRoot, branch, baseSha) {
  if (branchExists(repoRoot, branch)) return;
  const res = runGit(['-C', repoRoot, 'branch', branch, baseSha]);
  if (!res.ok) throw new Error(res.stderr || `git branch ${branch} ${baseSha} failed`);
}
|
|
57
|
+
|
|
58
|
+
/**
 * Compute the default location for a run's worktree:
 * <worktreeRoot | $TMPDIR/ecc-worktrees>/<repo basename>/<runId>.
 * @param {{repoRoot: string, runId: string, worktreeRoot?: string}} args
 * @returns {string}
 */
function defaultWorktreePath({ repoRoot, runId, worktreeRoot }) {
  const base = worktreeRoot || path.join(os.tmpdir(), 'ecc-worktrees');
  return path.join(base, path.basename(repoRoot), runId);
}
|
|
63
|
+
|
|
64
|
+
/**
 * Throw when `worktreePath` is at or inside `repoRoot`.
 *
 * Creating a worktree inside the repository it mirrors would make git
 * recurse into its own worktree, so the path must be external. A path
 * equal to the repo root (relative path '') is rejected for the same
 * reason — previously the empty-string rel was falsy and slipped past
 * the guard.
 * @param {{repoRoot: string, worktreePath: string}} args
 * @throws {Error} when worktreePath equals repoRoot or is a descendant of it
 */
function assertExternalWorktreePath({ repoRoot, worktreePath }) {
  const rel = path.relative(repoRoot, worktreePath);
  // rel === '' means identical paths; otherwise "inside" means the
  // relative path neither climbs out ('..') nor jumps to another root.
  const isInside = rel === '' || (!rel.startsWith('..') && !path.isAbsolute(rel));
  if (isInside) {
    throw new Error(
      `Refusing to create worktree inside repo root (would recurse): repoRoot=${repoRoot} worktreePath=${worktreePath}`
    );
  }
}
|
|
73
|
+
|
|
74
|
+
/**
 * True when `dir` exists and git reports it as inside a work tree.
 * A missing directory short-circuits to false without invoking git.
 * @param {string} dir
 * @returns {boolean}
 */
function isGitWorktree(dir) {
  if (!fs.existsSync(dir)) return false;
  const probe = runGit(['-C', dir, 'rev-parse', '--is-inside-work-tree']);
  return probe.ok && probe.stdout.trim() === 'true';
}
|
|
79
|
+
|
|
80
|
+
/**
 * Ensure a worktree for `branch` (created at `baseSha` when the branch does
 * not yet exist) is present at `worktreePath`; return the path actually used.
 *
 * Fast path: delegate to the native ecc kernel ('worktree.ensure'). JS
 * fallback: validate the location is outside the repo, create the branch
 * idempotently, then reuse or `git worktree add` the directory.
 * @param {{repoRoot: string, worktreePath: string, branch: string, baseSha: string}} args
 * @returns {string} the worktree path
 * @throws {Error} when the path is inside the repo, exists but is not a
 *   worktree, or `git worktree add` fails
 */
function ensureWorktree({ repoRoot, worktreePath, branch, baseSha }) {
  // Prefer the native kernel when it answers with a usable path.
  const kernelOut = runKernel('worktree.ensure', { repoRoot, worktreePath, branch, baseSha });
  if (kernelOut && kernelOut.worktreePath) return kernelOut.worktreePath;

  // JS fallback. Never place the worktree inside the repo itself.
  assertExternalWorktreePath({ repoRoot, worktreePath });
  ensureBranchAt(repoRoot, branch, baseSha);

  if (fs.existsSync(worktreePath)) {
    if (!isGitWorktree(worktreePath)) {
      throw new Error(`Worktree path exists but is not a git worktree: ${worktreePath}`);
    }
    // Existing worktree is reused as-is. NOTE(review): we do not verify it
    // is checked out on `branch` — confirm callers can rely on that.
    return worktreePath;
  }

  fs.mkdirSync(path.dirname(worktreePath), { recursive: true });

  const res = runGit(['-C', repoRoot, 'worktree', 'add', worktreePath, branch]);
  if (!res.ok) throw new Error(res.stderr || `git worktree add failed: ${worktreePath}`);
  return worktreePath;
}
|
|
100
|
+
|
|
101
|
+
function removeWorktree({ repoRoot, worktreePath, force = true }) {
|
|
102
|
+
const kernelOut = runKernel('worktree.remove', { repoRoot, worktreePath, force: !!force });
|
|
103
|
+
if (kernelOut && kernelOut.ok) return;
|
|
104
|
+
|
|
105
|
+
const args = ['-C', repoRoot, 'worktree', 'remove'];
|
|
106
|
+
if (force) args.push('--force');
|
|
107
|
+
args.push(worktreePath);
|
|
108
|
+
const res = runGit(args);
|
|
109
|
+
if (!res.ok) throw new Error(res.stderr || `git worktree remove failed: ${worktreePath}`);
|
|
110
|
+
}
|
|
111
|
+
|
|
112
|
+
function commitAll({ repoRoot, message }) {
|
|
113
|
+
const kernelOut = runKernel('git.commit_all', { repoRoot, message });
|
|
114
|
+
if (kernelOut && kernelOut.sha) return kernelOut.sha;
|
|
115
|
+
|
|
116
|
+
let res = runGit(['-C', repoRoot, 'add', '-A']);
|
|
117
|
+
if (!res.ok) throw new Error(res.stderr || 'git add failed');
|
|
118
|
+
|
|
119
|
+
res = runGit(['-C', repoRoot, 'commit', '-m', message]);
|
|
120
|
+
if (!res.ok) throw new Error(res.stderr || 'git commit failed');
|
|
121
|
+
|
|
122
|
+
const sha = getHeadSha(repoRoot);
|
|
123
|
+
return sha;
|
|
124
|
+
}
|
|
125
|
+
|
|
126
|
+
module.exports = {
|
|
127
|
+
runGit,
|
|
128
|
+
getRepoRoot,
|
|
129
|
+
getHeadSha,
|
|
130
|
+
getCurrentBranch,
|
|
131
|
+
isClean,
|
|
132
|
+
defaultWorktreePath,
|
|
133
|
+
assertExternalWorktreePath,
|
|
134
|
+
isGitWorktree,
|
|
135
|
+
ensureBranchAt,
|
|
136
|
+
ensureWorktree,
|
|
137
|
+
removeWorktree,
|
|
138
|
+
commitAll
|
|
139
|
+
};
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
const path = require('path');
|
|
2
|
+
|
|
3
|
+
const { getDateString } = require('../lib/utils');
|
|
4
|
+
const { runsDir } = require('./project');
|
|
5
|
+
|
|
6
|
+
/**
 * Convert arbitrary text into a lowercase, dash-separated slug.
 * Runs of non-alphanumerics collapse into single dashes; leading/trailing
 * dashes are stripped. Returns `fallback` when nothing survives.
 * @param {*} s - value to slugify (falsy values become the fallback)
 * @param {string} [fallback='run']
 * @returns {string}
 */
function slugify(s, fallback = 'run') {
  const slug = String(s || '')
    .toLowerCase()
    .trim()
    .replace(/[^a-z0-9]+/g, '-')
    .replace(/^-+|-+$/g, '');
  return slug === '' ? fallback : slug;
}
|
|
14
|
+
|
|
15
|
+
/**
 * Build the default run id: "<date string>-<slug of intent>", where the
 * slug falls back to 'task' for empty intents.
 * NOTE(review): the date portion comes from lib/utils getDateString —
 * confirm its exact format there.
 * @param {string} intent
 * @returns {string}
 */
function defaultRunId(intent) {
  const slug = slugify(intent, 'task');
  return `${getDateString()}-${slug}`;
}
|
|
18
|
+
|
|
19
|
+
function ensureUniqueRunId(projectRoot, runIdBase) {
|
|
20
|
+
const base = slugify(runIdBase);
|
|
21
|
+
const root = runsDir(projectRoot);
|
|
22
|
+
let candidate = base;
|
|
23
|
+
let n = 2;
|
|
24
|
+
while (true) {
|
|
25
|
+
const p = path.join(root, candidate);
|
|
26
|
+
if (!require('fs').existsSync(p)) return candidate;
|
|
27
|
+
candidate = `${base}-${n}`;
|
|
28
|
+
n++;
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
|
|
32
|
+
module.exports = {
|
|
33
|
+
slugify,
|
|
34
|
+
defaultRunId,
|
|
35
|
+
ensureUniqueRunId
|
|
36
|
+
};
|
|
37
|
+
|