everything-claude-code 1.4.3
This diff shows the contents of publicly available package versions as published to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in those public registries.
- package/LICENSE +21 -0
- package/README.md +739 -0
- package/README.zh-CN.md +523 -0
- package/crates/ecc-kernel/Cargo.lock +160 -0
- package/crates/ecc-kernel/Cargo.toml +15 -0
- package/crates/ecc-kernel/src/main.rs +710 -0
- package/docs/ecc.md +117 -0
- package/package.json +45 -0
- package/packs/blueprint.json +8 -0
- package/packs/forge.json +16 -0
- package/packs/instinct.json +16 -0
- package/packs/orchestra.json +15 -0
- package/packs/proof.json +8 -0
- package/packs/sentinel.json +8 -0
- package/prompts/ecc/patch.md +25 -0
- package/prompts/ecc/plan.md +28 -0
- package/schemas/ecc.apply.schema.json +35 -0
- package/schemas/ecc.config.schema.json +37 -0
- package/schemas/ecc.lock.schema.json +34 -0
- package/schemas/ecc.patch.schema.json +25 -0
- package/schemas/ecc.plan.schema.json +32 -0
- package/schemas/ecc.run.schema.json +67 -0
- package/schemas/ecc.verify.schema.json +27 -0
- package/schemas/hooks.schema.json +81 -0
- package/schemas/package-manager.schema.json +17 -0
- package/schemas/plugin.schema.json +13 -0
- package/scripts/ecc/catalog.js +82 -0
- package/scripts/ecc/config.js +43 -0
- package/scripts/ecc/diff.js +113 -0
- package/scripts/ecc/exec.js +121 -0
- package/scripts/ecc/fixtures/basic/patches/impl-core.diff +8 -0
- package/scripts/ecc/fixtures/basic/patches/tests.diff +8 -0
- package/scripts/ecc/fixtures/basic/plan.json +23 -0
- package/scripts/ecc/fixtures/unauthorized/patches/impl-core.diff +8 -0
- package/scripts/ecc/fixtures/unauthorized/plan.json +15 -0
- package/scripts/ecc/git.js +139 -0
- package/scripts/ecc/id.js +37 -0
- package/scripts/ecc/install-kernel.js +344 -0
- package/scripts/ecc/json-extract.js +301 -0
- package/scripts/ecc/json.js +26 -0
- package/scripts/ecc/kernel.js +144 -0
- package/scripts/ecc/lock.js +36 -0
- package/scripts/ecc/paths.js +28 -0
- package/scripts/ecc/plan.js +57 -0
- package/scripts/ecc/project.js +37 -0
- package/scripts/ecc/providers/codex.js +168 -0
- package/scripts/ecc/providers/index.js +23 -0
- package/scripts/ecc/providers/mock.js +49 -0
- package/scripts/ecc/report.js +127 -0
- package/scripts/ecc/run.js +105 -0
- package/scripts/ecc/validate.js +325 -0
- package/scripts/ecc/verify.js +125 -0
- package/scripts/ecc.js +532 -0
- package/scripts/lib/package-manager.js +390 -0
- package/scripts/lib/session-aliases.js +432 -0
- package/scripts/lib/session-manager.js +396 -0
- package/scripts/lib/utils.js +426 -0
@@ -0,0 +1,144 @@
+const fs = require('fs');
+const path = require('path');
+const { spawnSync } = require('child_process');
+
+function isFile(p) {
+  try {
+    return fs.statSync(p).isFile();
+  } catch (_err) {
+    return false;
+  }
+}
+
+function getKernelMode() {
+  // ECC_KERNEL:
+  // - "auto" (default): use ecc-kernel if available, else fallback to JS
+  // - "rust": require ecc-kernel, error if missing
+  // - "node": force JS implementation
+  const raw = process.env.ECC_KERNEL ? String(process.env.ECC_KERNEL).trim().toLowerCase() : 'auto';
+  if (!raw || raw === 'auto') return 'auto';
+  if (raw === 'rust' || raw === 'kernel') return 'rust';
+  if (raw === 'node' || raw === 'js' || raw === 'off' || raw === 'disable') return 'node';
+  return 'auto';
+}
+
+function binName() {
+  return process.platform === 'win32' ? 'ecc-kernel.exe' : 'ecc-kernel';
+}
+
+function platformArchKey() {
+  const platform = process.platform;
+  const arch = process.arch;
+  const os =
+    platform === 'darwin' ? 'darwin' :
+    platform === 'linux' ? 'linux' :
+    platform === 'win32' ? 'windows' :
+    null;
+  const cpu =
+    arch === 'x64' ? 'x64' :
+    arch === 'arm64' ? 'arm64' :
+    null;
+  if (!os || !cpu) return null;
+  return `${os}-${cpu}`;
+}
+
+function tryKernelFromPath() {
+  const res = spawnSync('ecc-kernel', ['--version'], {
+    encoding: 'utf8',
+    stdio: ['ignore', 'pipe', 'pipe']
+  });
+  if (res.error) return null;
+  if (res.status === 0) return 'ecc-kernel';
+  return null;
+}
+
+function findKernelBinary() {
+  if (process.env.ECC_KERNEL_PATH) {
+    const p = path.resolve(String(process.env.ECC_KERNEL_PATH));
+    if (isFile(p)) return p;
+  }
+
+  // Preferred location for prebuilt binaries installed via postinstall.
+  const key = platformArchKey();
+  if (key) {
+    const packaged = path.join(__dirname, 'bin', key, binName());
+    if (isFile(packaged)) return packaged;
+  }
+
+  const fromPath = tryKernelFromPath();
+  if (fromPath) return fromPath;
+
+  const root = path.resolve(__dirname, '..', '..');
+  const candidates = [
+    path.join(root, 'crates', 'ecc-kernel', 'target', 'release', binName()),
+    path.join(root, 'crates', 'ecc-kernel', 'target', 'debug', binName())
+  ];
+  for (const p of candidates) {
+    if (isFile(p)) return p;
+  }
+  return null;
+}
+
+let _cached = null;
+
+function getKernel() {
+  if (_cached) return _cached;
+
+  const mode = getKernelMode();
+  if (mode === 'node') {
+    _cached = { mode, enabled: false, bin: null };
+    return _cached;
+  }
+
+  const bin = findKernelBinary();
+  if (mode === 'rust' && !bin) {
+    throw new Error(
+      'ECC kernel required but not found. Build it with:\n' +
+      ' cargo build --release --manifest-path crates/ecc-kernel/Cargo.toml\n' +
+      'Then re-run, or set ECC_KERNEL=node to force JS fallback.'
+    );
+  }

+  _cached = { mode, enabled: !!bin, bin };
+  return _cached;
+}
+
+function runKernel(command, inputObj) {
+  const kernel = getKernel();
+  if (!kernel.enabled) return null;
+
+  const res = spawnSync(kernel.bin, [command], {
+    encoding: 'utf8',
+    input: JSON.stringify(inputObj),
+    stdio: ['pipe', 'pipe', 'pipe']
+  });
+
+  if (res.error) {
+    throw new Error(`ecc-kernel spawn failed: ${res.error.message}`);
+  }
+  const stdout = (res.stdout || '').trim();
+  const stderr = (res.stderr || '').trim();
+  if (res.status !== 0) {
+    const msg = [
+      `ecc-kernel ${command} failed (exit ${res.status})`,
+      stderr ? `stderr:\n${stderr}` : null,
+      stdout ? `stdout:\n${stdout}` : null
+    ]
+      .filter(Boolean)
+      .join('\n\n');
+    throw new Error(msg);
+  }
+
+  if (!stdout) return {};
+  try {
+    return JSON.parse(stdout);
+  } catch (err) {
+    const detail = err && err.message ? err.message : String(err);
+    throw new Error(`ecc-kernel returned non-JSON output (${detail}). Raw:\n${stdout.slice(0, 2000)}`);
+  }
+}
+
+module.exports = {
+  getKernel,
+  runKernel
+};

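Editorial note (not part of the package): kernel.js is a thin detector/launcher around the optional Rust binary. A caller-side sketch of how its exports are meant to be combined; the kernel command name is a placeholder, since the actual commands are defined by the binary in crates/ecc-kernel/src/main.rs, and the JS fallback is supplied by the caller.

```js
// Hypothetical caller (illustrative only). runKernel() returns null whenever the
// Rust kernel is disabled or unavailable, so callers keep a JS implementation ready.
const { getKernel, runKernel } = require('./kernel');

function runWithFallback(command, input, jsFallback) {
  // ECC_KERNEL=node  -> runKernel() returns null immediately
  // ECC_KERNEL=rust  -> getKernel() throws if the binary cannot be found
  // ECC_KERNEL=auto  -> use the binary when present, otherwise fall back silently
  const out = runKernel(command, input);
  return out !== null ? out : jsFallback(input);
}

console.log(getKernel()); // { mode, enabled, bin } after the first (cached) lookup
```
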
@@ -0,0 +1,36 @@
+const fs = require('fs');
+
+const { writeJson } = require('./json');
+const { registryLockPath } = require('./project');
+const { computeEmbeddedCatalogDigest } = require('./catalog');
+const { validateLock, throwIfErrors } = require('./validate');
+
+function nowIso() {
+  return new Date().toISOString();
+}
+
+function buildRegistryLock({ packs }) {
+  const lock = {
+    version: 1,
+    lockedAt: nowIso(),
+    engine: { name: 'ecc' },
+    catalog: { type: 'embedded', digest: computeEmbeddedCatalogDigest() },
+    packs
+  };
+  throwIfErrors(validateLock(lock), 'registry lock');
+  return lock;
+}
+
+function writeRegistryLock(projectRoot, { packs, overwrite = true }) {
+  const p = registryLockPath(projectRoot);
+  if (!overwrite && fs.existsSync(p)) return p;
+  const lock = buildRegistryLock({ packs });
+  writeJson(p, lock);
+  return p;
+}
+
+module.exports = {
+  buildRegistryLock,
+  writeRegistryLock
+};
+

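A usage sketch for the lock module above; the shape of each pack entry is governed by ecc.lock.schema.json via validateLock() and is not spelled out here.

```js
// Illustrative only: write .ecc/locks/registry.lock.json for the chosen packs.
const { writeRegistryLock } = require('./lock');

const lockPath = writeRegistryLock(process.cwd(), {
  packs: [ /* pack entries matching ecc.lock.schema.json */ ],
  overwrite: false // keep an existing lock file instead of regenerating it
});
console.log(`registry lock written (or kept) at ${lockPath}`);
```
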
@@ -0,0 +1,28 @@
+const path = require('path');
+
+// Engine root is the repo root in this repository layout:
+// <root>/scripts/ecc.js
+// <root>/scripts/ecc/*
+// <root>/packs/*
+// <root>/prompts/ecc/*
+const ENGINE_ROOT = path.resolve(__dirname, '..', '..');
+
+function packsDir() {
+  return path.join(ENGINE_ROOT, 'packs');
+}
+
+function promptsDir() {
+  return path.join(ENGINE_ROOT, 'prompts', 'ecc');
+}
+
+function schemasDir() {
+  return path.join(ENGINE_ROOT, 'schemas');
+}
+
+module.exports = {
+  ENGINE_ROOT,
+  packsDir,
+  promptsDir,
+  schemasDir
+};
+

@@ -0,0 +1,57 @@
+const fs = require('fs');
+
+const { writeJson, writeText } = require('./json');
+const { validatePlan, throwIfErrors } = require('./validate');
+const { runPaths, saveRun } = require('./run');
+
+function renderPlanMd(plan) {
+  const lines = [];
+  lines.push(`# ECC Plan`);
+  lines.push('');
+  lines.push(`Intent: ${plan.intent}`);
+  lines.push('');
+  lines.push('## Tasks');
+  lines.push('');
+  for (const t of plan.tasks) {
+    lines.push(`### ${t.id}: ${t.title}`);
+    lines.push('');
+    lines.push(`- kind: ${t.kind}`);
+    lines.push(`- dependsOn: ${t.dependsOn.length ? t.dependsOn.join(', ') : '(none)'}`);
+    lines.push(`- allowedPathPrefixes: ${t.allowedPathPrefixes.join(', ')}`);
+    lines.push('');
+    lines.push('Prompt:');
+    lines.push('');
+    lines.push('```');
+    lines.push(t.prompt.trim());
+    lines.push('```');
+    lines.push('');
+  }
+  return lines.join('\n');
+}
+
+async function generatePlan({ projectRoot, run, provider }) {
+  const paths = runPaths(projectRoot, run.runId);
+
+  const plan = await provider.generatePlan({
+    intent: run.intent,
+    repoRoot: projectRoot,
+    packs: run.packs
+  });
+
+  throwIfErrors(validatePlan(plan), 'plan');
+
+  writeJson(paths.planJson, plan);
+  writeText(paths.planMd, renderPlanMd(plan));
+
+  // Ensure run.json stays valid and points to artifacts already.
+  // We also set worktree branch early for discoverability.
+  if (!fs.existsSync(paths.runJson)) saveRun(projectRoot, run.runId, run);
+
+  return { plan, paths };
+}
+
+module.exports = {
+  generatePlan,
+  renderPlanMd
+};
+

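How the planning step could be driven from a caller, assuming a run object with runId/intent/packs/backend as the other modules in this package use it; the real wiring lives in run.js and ecc.js, which are not shown in this excerpt.

```js
// Sketch only: wire a provider into generatePlan() for one run.
const { generatePlan } = require('./plan');
const { getProvider } = require('./providers');

async function planStep(projectRoot, run) {
  const provider = getProvider({ backend: run.backend }); // or ECC_PROVIDER=mock
  const { plan, paths } = await generatePlan({ projectRoot, run, provider });
  // plan.json and plan.md now exist under the run's artifact directory.
  return { plan, paths };
}
```
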
@@ -0,0 +1,37 @@
+const path = require('path');
+
+const ECC_DIRNAME = '.ecc';
+
+function eccDir(projectRoot) {
+  return path.join(projectRoot, ECC_DIRNAME);
+}
+
+function configPath(projectRoot) {
+  return path.join(eccDir(projectRoot), 'ecc.json');
+}
+
+function locksDir(projectRoot) {
+  return path.join(eccDir(projectRoot), 'locks');
+}
+
+function runsDir(projectRoot) {
+  return path.join(eccDir(projectRoot), 'runs');
+}
+
+function gitignorePath(projectRoot) {
+  return path.join(eccDir(projectRoot), '.gitignore');
+}
+
+function registryLockPath(projectRoot) {
+  return path.join(locksDir(projectRoot), 'registry.lock.json');
+}
+
+module.exports = {
+  eccDir,
+  configPath,
+  locksDir,
+  runsDir,
+  gitignorePath,
+  registryLockPath
+};
+

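Taken together, the helpers above resolve to the following on-disk locations under a project root (return values shown as comments; the contents of runsDir() are laid out by run.js, which is not shown here).

```js
const project = require('./project');
const root = '/path/to/project'; // illustrative

project.eccDir(root);            // /path/to/project/.ecc
project.configPath(root);        // /path/to/project/.ecc/ecc.json
project.gitignorePath(root);     // /path/to/project/.ecc/.gitignore
project.locksDir(root);          // /path/to/project/.ecc/locks
project.registryLockPath(root);  // /path/to/project/.ecc/locks/registry.lock.json
project.runsDir(root);           // /path/to/project/.ecc/runs
```
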
@@ -0,0 +1,168 @@
+const fs = require('fs');
+const os = require('os');
+const path = require('path');
+const { spawnSync } = require('child_process');
+
+const { promptsDir, schemasDir } = require('../paths');
+const { extractJsonStringFieldToFileSync } = require('../json-extract');
+
+function readText(p) {
+  return fs.readFileSync(p, 'utf8');
+}
+
+function nowIso() {
+  return new Date().toISOString();
+}
+
+function runCodexLastMessage({ repoRoot, prompt, schemaPath }) {
+  const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ecc-codex-'));
+  const outPath = path.join(tmpDir, 'last-message.json');
+
+  const args = [
+    'exec',
+    '--sandbox',
+    'read-only',
+    '--skip-git-repo-check',
+    '--cd',
+    repoRoot,
+    '--output-schema',
+    schemaPath,
+    '--output-last-message',
+    outPath,
+    '-'
+  ];
+
+  const res = spawnSync('codex', args, {
+    cwd: repoRoot,
+    input: prompt,
+    encoding: 'utf8',
+    stdio: ['pipe', 'pipe', 'pipe']
+  });
+
+  const stdout = (res.stdout || '').trimEnd();
+  const stderr = (res.stderr || '').trimEnd();
+
+  if (res.status !== 0) {
+    const msg = [
+      `codex exec failed (exit ${res.status})`,
+      stdout ? `stdout:\n${stdout}` : null,
+      stderr ? `stderr:\n${stderr}` : null
+    ]
+      .filter(Boolean)
+      .join('\n\n');
+    throw new Error(msg);
+  }
+
+  if (!fs.existsSync(outPath)) {
+    throw new Error('codex exec did not write --output-last-message file');
+  }
+
+  return { tmpDir, outPath };
+}
+
+function planTemplate() {
+  return readText(path.join(promptsDir(), 'plan.md'));
+}
+
+function patchTemplate() {
+  return readText(path.join(promptsDir(), 'patch.md'));
+}
+
+function planSchemaPath() {
+  return path.join(schemasDir(), 'ecc.plan.schema.json');
+}
+
+function patchSchemaPath() {
+  return path.join(schemasDir(), 'ecc.patch.schema.json');
+}
+
+function buildPlanPrompt({ intent, repoRoot, packs }) {
+  return [
+    planTemplate(),
+    '',
+    '## Caller Input',
+    `generatedAt: ${nowIso()}`,
+    `projectRoot: ${repoRoot}`,
+    `packs: ${Array.isArray(packs) ? packs.join(', ') : ''}`,
+    `intent: ${String(intent || '').trim()}`,
+    '',
+    'Return JSON only.'
+  ].join('\n');
+}
+
+function buildPatchPrompt({ task, repoRoot, packs }) {
+  const taskSummary = {
+    id: task.id,
+    title: task.title,
+    prompt: task.prompt
+  };
+  return [
+    patchTemplate(),
+    '',
+    '## Caller Input',
+    `generatedAt: ${nowIso()}`,
+    `projectRoot: ${repoRoot}`,
+    `packs: ${Array.isArray(packs) ? packs.join(', ') : ''}`,
+    `task: ${JSON.stringify(taskSummary, null, 2)}`,
+    `allowedPathPrefixes: ${Array.isArray(task.allowedPathPrefixes) ? task.allowedPathPrefixes.join(', ') : ''}`,
+    '',
+    'Patch rules:',
+    '- If patch is non-empty, it should be a raw unified diff starting with "diff --git".',
+    '- Do not wrap diffs in code fences.',
+    '',
+    'Return JSON only.'
+  ].join('\n');
+}
+
+async function generatePlan({ intent, repoRoot, packs }) {
+  const prompt = buildPlanPrompt({ intent, repoRoot, packs });
+  const { tmpDir, outPath } = runCodexLastMessage({ repoRoot, prompt, schemaPath: planSchemaPath() });
+  let raw = '';
+  try {
+    raw = fs.readFileSync(outPath, 'utf8').trim();
+    return JSON.parse(raw);
+  } catch (err) {
+    const detail = err && err.message ? err.message : String(err);
+    throw new Error(`codex output is not valid JSON (${detail}). Raw:\n${raw.slice(0, 2000)}`);
+  } finally {
+    try {
+      fs.rmSync(tmpDir, { recursive: true, force: true });
+    } catch (_err) {
+      // ignore cleanup failures
+    }
+  }
+}
+
+async function generatePatch({ task, repoRoot, packs, patchPath }) {
+  const prompt = buildPatchPrompt({ task, repoRoot, packs });
+  const { tmpDir, outPath } = runCodexLastMessage({ repoRoot, prompt, schemaPath: patchSchemaPath() });
+  try {
+    if (!patchPath) {
+      const raw = fs.readFileSync(outPath, 'utf8').trim();
+      return JSON.parse(raw);
+    }
+
+    extractJsonStringFieldToFileSync({
+      jsonPath: outPath,
+      fieldName: 'patch',
+      outPath: patchPath
+    });
+
+    return {
+      patchPath,
+      meta: { provider: 'codex', generatedAt: nowIso() }
+    };
+  } finally {
+    try {
+      fs.rmSync(tmpDir, { recursive: true, force: true });
+    } catch (_err) {
+      // ignore cleanup failures
+    }
+  }
+}
+
+module.exports = {
+  name: 'codex',
+  generatePlan,
+  generatePatch
+};

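A caller-side sketch for the codex provider above; the output path and pack name are illustrative. When patchPath is supplied, generatePatch() extracts the "patch" string field from codex's schema-constrained JSON reply into that file as a raw unified diff.

```js
// Illustrative only; not part of the package.
const codex = require('./codex');

async function patchOneTask(task, repoRoot, patchPath) {
  const { patchPath: written, meta } = await codex.generatePatch({
    task,              // one entry from plan.tasks
    repoRoot,          // the run's worktree root
    packs: ['forge'],  // e.g. one of the packs/*.json pack names
    patchPath          // where the raw "diff --git ..." text should land
  });
  console.log(`patch from ${meta.provider} at ${written}`);
}
```
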
@@ -0,0 +1,23 @@
+const codex = require('./codex');
+const mock = require('./mock');
+
+function getProvider({ backend }) {
+  const env = String(process.env.ECC_PROVIDER || '').trim().toLowerCase();
+  if (env) {
+    if (env === 'mock') return mock;
+    if (env === 'codex') return codex;
+    throw new Error(`Unknown ECC_PROVIDER: ${env}`);
+  }
+
+  if (backend === 'codex') return codex;
+  if (backend === 'claude') {
+    throw new Error('ECC P0: backend "claude" is not implemented yet (use backend "codex")');
+  }
+
+  throw new Error(`Unknown backend: ${backend}`);
+}
+
+module.exports = {
+  getProvider
+};
+

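Provider selection in one line of behavior: ECC_PROVIDER, when set, wins over the run's configured backend. A minimal sketch:

```js
const { getProvider } = require('./index'); // i.e. require('./providers')

process.env.ECC_PROVIDER = 'mock';                    // force the fixture-backed provider
console.log(getProvider({ backend: 'codex' }).name);  // "mock"

delete process.env.ECC_PROVIDER;
console.log(getProvider({ backend: 'codex' }).name);  // "codex"
```
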
@@ -0,0 +1,49 @@
+const fs = require('fs');
+const path = require('path');
+
+function fixtureRoot() {
+  const name = String(process.env.ECC_FIXTURE || 'basic').trim() || 'basic';
+  return path.resolve(__dirname, '..', 'fixtures', name);
+}
+
+function readJson(p) {
+  return JSON.parse(fs.readFileSync(p, 'utf8'));
+}
+
+function readText(p) {
+  return fs.readFileSync(p, 'utf8');
+}
+
+async function generatePlan({ intent }) {
+  const root = fixtureRoot();
+  const planPath = path.join(root, 'plan.json');
+  if (!fs.existsSync(planPath)) {
+    throw new Error(`mock provider fixture missing plan.json: ${planPath}`);
+  }
+  const plan = readJson(planPath);
+  // Keep fixtures deterministic but align plan.intent with the caller intent.
+  plan.intent = String(intent || plan.intent || '').trim() || 'intent';
+  return plan;
+}
+
+async function generatePatch({ task }) {
+  const root = fixtureRoot();
+  const patchPath = path.join(root, 'patches', `${task.id}.diff`);
+  if (!fs.existsSync(patchPath)) {
+    throw new Error(`mock provider fixture missing patch: ${patchPath}`);
+  }
+  return {
+    patch: readText(patchPath),
+    meta: {
+      note: `fixture=${path.basename(root)}`,
+      reason: '',
+      provider: 'mock'
+    }
+  };
+}
+
+module.exports = {
+  name: 'mock',
+  generatePlan,
+  generatePatch
+};

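The mock provider is driven entirely by the fixtures shipped under scripts/ecc/fixtures/ ("basic" and "unauthorized" in this version), selected via ECC_FIXTURE. A sketch of what a test harness might do with it; the wrapper function and intent text are illustrative.

```js
process.env.ECC_FIXTURE = 'basic';
const mock = require('./mock');

async function demo() {
  const plan = await mock.generatePlan({ intent: 'add a core module' }); // caller intent overrides fixture intent
  const { patch, meta } = await mock.generatePatch({ task: plan.tasks[0] });
  console.log(meta.note); // "fixture=basic"
  return patch;           // contents of fixtures/basic/patches/<task.id>.diff
}
```
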
@@ -0,0 +1,127 @@
+const fs = require('fs');
+
+const { readJson, writeText } = require('./json');
+const { runPaths } = require('./run');
+
+function fmtList(items) {
+  if (!items || !items.length) return '(none)';
+  return items.map(s => `- ${s}`).join('\n');
+}
+
+function loadIfExists(p) {
+  try {
+    if (!fs.existsSync(p)) return null;
+    return readJson(p);
+  } catch (_err) {
+    return null;
+  }
+}
+
+function buildReport({ run, plan, applyResult, verifySummary }) {
+  const lines = [];
+
+  lines.push(`# ECC Run Report`);
+  lines.push('');
+  lines.push(`- runId: \`${run.runId}\``);
+  lines.push(`- status: \`${run.status}\``);
+  lines.push(`- intent: ${run.intent}`);
+  lines.push(`- backend: \`${run.backend}\``);
+  lines.push(`- packs: ${run.packs.join(', ')}`);
+  lines.push(`- startedAt: ${run.startedAt}`);
+  if (run.endedAt) lines.push(`- endedAt: ${run.endedAt}`);
+  lines.push('');
+
+  lines.push('## Base');
+  lines.push('');
+  lines.push(`- repoRoot: \`${run.base.repoRoot}\``);
+  lines.push(`- branch: \`${run.base.branch}\``);
+  lines.push(`- sha: \`${run.base.sha}\``);
+  lines.push('');
+
+  lines.push('## Worktree');
+  lines.push('');
+  lines.push(`- path: \`${run.worktree.path || '(not created)'}\``);
+  lines.push(`- branch: \`${run.worktree.branch}\``);
+  lines.push('');
+
+  lines.push('## Plan');
+  lines.push('');
+  if (!plan) {
+    lines.push('(missing plan.json)');
+  } else {
+    lines.push(`Intent: ${plan.intent}`);
+    lines.push('');
+    lines.push('Tasks:');
+    lines.push('');
+    for (const t of plan.tasks) {
+      lines.push(`- \`${t.id}\`: ${t.title}`);
+      lines.push(`  - allowedPathPrefixes:\n${fmtList(t.allowedPathPrefixes).split('\n').map(l => '    ' + l).join('\n')}`);
+    }
+  }
+  lines.push('');
+
+  lines.push('## Apply');
+  lines.push('');
+  if (!applyResult) {
+    lines.push('(missing apply/applied.json)');
+  } else {
+    lines.push(`- appliedAt: ${applyResult.appliedAt}`);
+    lines.push(`- baseSha: \`${applyResult.baseSha}\``);
+    lines.push('');
+    lines.push('Tasks:');
+    lines.push('');
+    for (const t of applyResult.tasks) {
+      lines.push(`- \`${t.id}\`: ${t.ok ? 'OK' : 'FAILED'}`);
+      lines.push(`  - patch: \`${t.patchPath}\``);
+      if (t.error) lines.push(`  - error: ${t.error}`);
+    }
+    if (applyResult.commit) {
+      lines.push('');
+      lines.push(`Commit: \`${applyResult.commit.sha}\``);
+    }
+  }
+  lines.push('');
+
+  lines.push('## Verify');
+  lines.push('');
+  if (!verifySummary) {
+    lines.push('(missing verify/summary.json)');
+  } else {
+    lines.push(`- ok: ${verifySummary.ok ? 'true' : 'false'}`);
+    lines.push(`- ranAt: ${verifySummary.ranAt}`);
+    lines.push('');
+    lines.push('Commands:');
+    lines.push('');
+    for (const c of verifySummary.commands) {
+      lines.push(`- \`${c.name}\`: ${c.ok ? 'OK' : 'FAILED'} (exit ${c.exitCode})`);
+      lines.push(`  - command: \`${c.command}\``);
+      lines.push(`  - output: \`${c.outputPath}\``);
+    }
+  }
+
+  lines.push('');
+  lines.push('## Next Steps');
+  lines.push('');
+  lines.push('- Inspect the worktree path above.');
+  lines.push('- If verification passed, you can commit/push the worktree branch or open a PR.');
+
+  return lines.join('\n');
+}
+
+function writeReport({ projectRoot, runId }) {
+  const paths = runPaths(projectRoot, runId);
+  const run = loadIfExists(paths.runJson);
+  if (!run) throw new Error(`missing run.json for runId=${runId}`);
+
+  const plan = loadIfExists(paths.planJson);
+  const applyResult = loadIfExists(paths.applyJson);
+  const verifySummary = loadIfExists(paths.verifySummaryJson);
+
+  writeText(paths.reportMd, buildReport({ run, plan, applyResult, verifySummary }) + '\n');
+  return paths.reportMd;
+}
+
+module.exports = {
+  writeReport,
+  buildReport
+};

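Finally, a sketch of regenerating report.md for an existing run: writeReport() tolerates missing plan/apply/verify artifacts and renders "(missing ...)" placeholders instead of failing. The run id below is illustrative.

```js
const { writeReport } = require('./report');

const reportPath = writeReport({
  projectRoot: process.cwd(),
  runId: 'run-20240101-abc123' // illustrative; real ids are produced elsewhere in the package
});
console.log(`report written to ${reportPath}`);
```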