@evomap/evolver 1.29.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (52) hide show
  1. package/LICENSE +22 -0
  2. package/README.md +290 -0
  3. package/README.zh-CN.md +236 -0
  4. package/SKILL.md +132 -0
  5. package/assets/gep/capsules.json +79 -0
  6. package/assets/gep/events.jsonl +7 -0
  7. package/assets/gep/genes.json +108 -0
  8. package/index.js +479 -0
  9. package/package.json +38 -0
  10. package/src/canary.js +13 -0
  11. package/src/evolve.js +1704 -0
  12. package/src/gep/a2a.js +173 -0
  13. package/src/gep/a2aProtocol.js +736 -0
  14. package/src/gep/analyzer.js +35 -0
  15. package/src/gep/assetCallLog.js +130 -0
  16. package/src/gep/assetStore.js +297 -0
  17. package/src/gep/assets.js +36 -0
  18. package/src/gep/bridge.js +71 -0
  19. package/src/gep/candidates.js +142 -0
  20. package/src/gep/contentHash.js +65 -0
  21. package/src/gep/deviceId.js +209 -0
  22. package/src/gep/envFingerprint.js +68 -0
  23. package/src/gep/hubReview.js +206 -0
  24. package/src/gep/hubSearch.js +237 -0
  25. package/src/gep/issueReporter.js +262 -0
  26. package/src/gep/llmReview.js +92 -0
  27. package/src/gep/memoryGraph.js +771 -0
  28. package/src/gep/memoryGraphAdapter.js +203 -0
  29. package/src/gep/mutation.js +186 -0
  30. package/src/gep/narrativeMemory.js +108 -0
  31. package/src/gep/paths.js +113 -0
  32. package/src/gep/personality.js +355 -0
  33. package/src/gep/prompt.js +566 -0
  34. package/src/gep/questionGenerator.js +212 -0
  35. package/src/gep/reflection.js +127 -0
  36. package/src/gep/sanitize.js +67 -0
  37. package/src/gep/selector.js +250 -0
  38. package/src/gep/signals.js +417 -0
  39. package/src/gep/skillDistiller.js +499 -0
  40. package/src/gep/solidify.js +1681 -0
  41. package/src/gep/strategy.js +126 -0
  42. package/src/gep/taskReceiver.js +528 -0
  43. package/src/gep/validationReport.js +55 -0
  44. package/src/ops/cleanup.js +80 -0
  45. package/src/ops/commentary.js +60 -0
  46. package/src/ops/health_check.js +106 -0
  47. package/src/ops/index.js +11 -0
  48. package/src/ops/innovation.js +67 -0
  49. package/src/ops/lifecycle.js +168 -0
  50. package/src/ops/self_repair.js +72 -0
  51. package/src/ops/skills_monitor.js +143 -0
  52. package/src/ops/trigger.js +33 -0
@@ -0,0 +1,1681 @@
1
+ const fs = require('fs');
2
+ const path = require('path');
3
+ const { execSync } = require('child_process');
4
+ const { loadGenes, upsertGene, appendEventJsonl, appendCapsule, upsertCapsule, getLastEventId, appendFailedCapsule } = require('./assetStore');
5
+ const { computeSignalKey, memoryGraphPath } = require('./memoryGraph');
6
+ const { computeCapsuleSuccessStreak, isBlastRadiusSafe } = require('./a2a');
7
+ const { getRepoRoot, getMemoryDir, getEvolutionDir, getWorkspaceRoot } = require('./paths');
8
+ const { extractSignals } = require('./signals');
9
+ const { selectGene } = require('./selector');
10
+ const { isValidMutation, normalizeMutation, isHighRiskMutationAllowed, isHighRiskPersonality } = require('./mutation');
11
+ const {
12
+ isValidPersonalityState,
13
+ normalizePersonalityState,
14
+ personalityKey,
15
+ updatePersonalityStats,
16
+ } = require('./personality');
17
+ const { computeAssetId, SCHEMA_VERSION } = require('./contentHash');
18
+ const { captureEnvFingerprint } = require('./envFingerprint');
19
+ const { buildValidationReport } = require('./validationReport');
20
+ const { logAssetCall } = require('./assetCallLog');
21
+ const { recordNarrative } = require('./narrativeMemory');
22
+ const { isLlmReviewEnabled, runLlmReview } = require('./llmReview');
23
+
24
// Current timestamp rendered in ISO 8601 UTC form.
function nowIso() {
  const now = new Date();
  return now.toISOString();
}
27
+
28
// Coerce the input to a finite number and clamp it into [0, 1].
// Anything non-finite (NaN, Infinity, unparseable strings) maps to 0.
function clamp01(x) {
  const value = Number(x);
  if (!Number.isFinite(value)) return 0;
  if (value <= 0) return 0;
  if (value >= 1) return 1;
  return value;
}
33
+
34
// JSON.parse that never throws: any malformed input yields `fallback`.
function safeJsonParse(text, fallback) {
  let parsed;
  try {
    parsed = JSON.parse(text);
  } catch {
    return fallback;
  }
  return parsed;
}
41
+
42
// Read and parse a JSON file, returning `fallback` when the file is
// missing, blank/whitespace-only, unreadable, or contains invalid JSON.
function readJsonIfExists(filePath, fallback) {
  try {
    if (!fs.existsSync(filePath)) return fallback;
    const contents = fs.readFileSync(filePath, 'utf8');
    const trimmed = contents.trim();
    if (trimmed === '') return fallback;
    return JSON.parse(contents);
  } catch {
    return fallback;
  }
}
52
+
53
// 32-bit FNV-1a hash of the stringified input, rendered as 8 lowercase
// hex digits. Deterministic across processes; suitable for stable keys,
// NOT for anything security-sensitive. Iterates UTF-16 code units to
// match the original charCodeAt-based behavior.
function stableHash(input) {
  const text = String(input || '');
  let acc = 2166136261;
  let i = 0;
  while (i < text.length) {
    acc = Math.imul(acc ^ text.charCodeAt(i), 16777619);
    i += 1;
  }
  return (acc >>> 0).toString(16).padStart(8, '0');
}
62
+
63
// Run a shell command synchronously and return its stdout as utf8 text.
// cwd defaults to the repo root, timeout to 120s. Throws on non-zero
// exit or timeout (stdin is ignored; stderr is captured by the caller).
function runCmd(cmd, opts = {}) {
  const cwd = opts.cwd || getRepoRoot();
  const requested = Number(opts.timeoutMs);
  const timeoutMs = Number.isFinite(requested) ? requested : 120000;
  const execOpts = {
    cwd,
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'pipe'],
    timeout: timeoutMs,
    windowsHide: true,
  };
  return execSync(cmd, execOpts);
}
68
+
69
// Non-throwing wrapper around runCmd. Always returns { ok, out, err }:
// on failure, `out` carries any captured stdout and `err` prefers
// stderr, falling back to the thrown error's message.
function tryRunCmd(cmd, opts = {}) {
  try {
    const out = runCmd(cmd, opts);
    return { ok: true, out, err: '' };
  } catch (e) {
    const stdout = e && e.stdout ? String(e.stdout) : '';
    const stderr = e && e.stderr ? String(e.stderr) : '';
    const message = e && e.message ? String(e.message) : 'command_failed';
    return { ok: false, out: stdout, err: stderr || message };
  }
}
79
+
80
// Union of files that differ from HEAD in the worktree: unstaged diffs,
// staged diffs, and untracked files (respecting .gitignore). Returned as
// deduplicated repo-relative paths; failed git commands contribute nothing.
function gitListChangedFiles({ repoRoot }) {
  const commands = [
    'git diff --name-only',
    'git diff --cached --name-only',
    'git ls-files --others --exclude-standard',
  ];
  const files = new Set();
  for (const cmd of commands) {
    const res = tryRunCmd(cmd, { cwd: repoRoot, timeoutMs: 60000 });
    if (!res.ok) continue;
    String(res.out)
      .split('\n')
      .map((line) => line.trim())
      .filter(Boolean)
      .forEach((line) => files.add(line));
  }
  return Array.from(files);
}
90
+
91
// Count lines in a file by scanning raw bytes for '\n' (byte 10).
// Returns 0 for missing, empty, or unreadable files. Note: a file with
// a trailing newline counts the byte as starting one more line, matching
// the original's "newlines + 1" accounting.
function countFileLines(absPath) {
  try {
    if (!fs.existsSync(absPath)) return 0;
    const bytes = fs.readFileSync(absPath);
    if (!bytes || bytes.length === 0) return 0;
    let lines = 1;
    for (const b of bytes) {
      if (b === 10) lines += 1;
    }
    return lines;
  } catch {
    return 0;
  }
}
103
+
104
// Canonicalize a repo-relative path: backslashes become forward slashes,
// a leading "./" (or ".//"…) is stripped, and surrounding whitespace trimmed.
function normalizeRelPath(relPath) {
  const raw = String(relPath || '');
  const slashed = raw.replace(/\\/g, '/');
  return slashed.replace(/^\.\/+/, '').trim();
}
107
+
108
// Load the "counted file" policy used for blast-radius accounting from
// <workspace parent>/openclaw.json at evolver.constraints.countedFilePolicy.
// Each list falls back independently to the built-in defaults when missing
// or malformed; any unexpected error also yields the defaults.
function readOpenclawConstraintPolicy() {
  const defaults = {
    excludePrefixes: ['logs/', 'memory/', 'assets/gep/', 'out/', 'temp/', 'node_modules/'],
    excludeExact: ['event.json', 'temp_gep_output.json', 'temp_evolution_output.json', 'evolution_error.log'],
    excludeRegex: ['capsule', 'events?\\.jsonl$'],
    includePrefixes: ['src/', 'scripts/', 'config/'],
    includeExact: ['index.js', 'package.json'],
    includeExtensions: ['.js', '.cjs', '.mjs', '.ts', '.tsx', '.json', '.yaml', '.yml', '.toml', '.ini', '.sh'],
  };
  try {
    const root = path.resolve(getWorkspaceRoot(), '..');
    const cfgPath = path.join(root, 'openclaw.json');
    if (!fs.existsSync(cfgPath)) return defaults;
    const obj = readJsonIfExists(cfgPath, {});
    let pol = {};
    if (obj && obj.evolver && obj.evolver.constraints) {
      const candidate = obj.evolver.constraints.countedFilePolicy;
      if (candidate && typeof candidate === 'object') pol = candidate;
    }
    // Accept only arrays from config; map entries to strings for safety.
    const pickList = (value, fallback) => (Array.isArray(value) ? value.map(String) : fallback);
    return {
      excludePrefixes: pickList(pol.excludePrefixes, defaults.excludePrefixes),
      excludeExact: pickList(pol.excludeExact, defaults.excludeExact),
      excludeRegex: pickList(pol.excludeRegex, defaults.excludeRegex),
      includePrefixes: pickList(pol.includePrefixes, defaults.includePrefixes),
      includeExact: pickList(pol.includeExact, defaults.includeExact),
      includeExtensions: pickList(pol.includeExtensions, defaults.includeExtensions),
    };
  } catch (_) {
    return defaults;
  }
}
142
+
143
// True when `rel` equals one of the prefixes or lives underneath one.
// Prefixes are normalized and stripped of trailing slashes first;
// empty/blank entries never match.
function matchAnyPrefix(rel, prefixes) {
  if (!Array.isArray(prefixes)) return false;
  return prefixes.some((prefix) => {
    const clean = normalizeRelPath(prefix).replace(/\/+$/, '');
    if (!clean) return false;
    return rel === clean || rel.startsWith(clean + '/');
  });
}
152
+
153
// True when `rel` exactly equals any entry (entries are normalized first).
function matchAnyExact(rel, exacts) {
  const list = Array.isArray(exacts) ? exacts : [];
  return list.some((entry) => normalizeRelPath(entry) === rel);
}
157
+
158
// True when any pattern (compiled case-insensitively) matches `rel`.
// Strings that fail to compile as regexes are silently skipped.
function matchAnyRegex(rel, regexList) {
  const patterns = Array.isArray(regexList) ? regexList : [];
  return patterns.some((raw) => {
    try {
      const re = new RegExp(String(raw), 'i');
      return re.test(rel);
    } catch (_) {
      return false;
    }
  });
}
166
+
167
// Decide whether a changed path counts toward blast-radius limits.
// Exclusions (exact, prefix, regex) always win; otherwise the path counts
// when it matches an include-exact entry, an include prefix, or carries
// one of the counted file extensions (case-insensitive suffix match).
function isConstraintCountedPath(relPath, policy) {
  const rel = normalizeRelPath(relPath);
  if (!rel) return false;
  const excluded =
    matchAnyExact(rel, policy.excludeExact) ||
    matchAnyPrefix(rel, policy.excludePrefixes) ||
    matchAnyRegex(rel, policy.excludeRegex);
  if (excluded) return false;
  if (matchAnyExact(rel, policy.includeExact)) return true;
  if (matchAnyPrefix(rel, policy.includePrefixes)) return true;
  const lower = rel.toLowerCase();
  const extensions = Array.isArray(policy.includeExtensions) ? policy.includeExtensions : [];
  return extensions.some((ext) => {
    const suffix = String(ext || '').toLowerCase();
    if (!suffix) return false;
    return lower.endsWith(suffix);
  });
}
183
+
184
// Parse `git diff --numstat` output into { file, added, deleted } rows.
// Binary files report "-" for the counts; those become 0.
// Rename rows come in two shapes:
//   old/name.js => new/name.js     (whole-path rename)
//   src/{old => new}/file.js       (in-path segment rename)
// Both resolve to the post-rename path.
// Bug fix: the old code used split('=>').pop() for BOTH shapes, which
// dropped the shared prefix of braced renames ("src/{a => b}/f.js"
// became "b/f.js" instead of "src/b/f.js"). The braced form is now
// substituted in place, keeping the surrounding segments intact.
function parseNumstatRows(text) {
  const rows = [];
  const lines = String(text || '').split('\n').map(l => l.trim()).filter(Boolean);
  for (const line of lines) {
    const parts = line.split('\t');
    if (parts.length < 3) continue;
    const a = Number(parts[0]);
    const d = Number(parts[1]);
    // The path may itself contain tabs in pathological cases; rejoin.
    let rel = normalizeRelPath(parts.slice(2).join('\t'));
    if (rel.includes('=>')) {
      if (/\{[^{}]*=>[^{}]*\}/.test(rel)) {
        // Braced segment rename: keep prefix/suffix, substitute the
        // right-hand side, then collapse "//" left by empty replacements
        // (e.g. "src/{old => }/f.js" -> "src/f.js").
        rel = rel.replace(/\{[^{}]*=>([^{}]*)\}/g, (_, right) => right.trim());
        rel = normalizeRelPath(rel.replace(/\/{2,}/g, '/'));
      } else {
        // Whole-path rename: the right-hand side is the full new path.
        rel = normalizeRelPath(String(rel.split('=>').pop() || '').trim());
      }
    }
    rows.push({
      file: rel,
      added: Number.isFinite(a) ? a : 0,
      deleted: Number.isFinite(d) ? d : 0,
    });
  }
  return rows;
}
205
+
206
// Measure the worktree's current "blast radius": how many policy-counted
// files changed and how much line churn they represent.
// `baselineUntracked` lists untracked files that existed BEFORE this run,
// so pre-existing clutter is not billed to the current evolution.
// Returns { files, lines, changed_files, ignored_files, all_changed_files }.
function computeBlastRadius({ repoRoot, baselineUntracked }) {
  const policy = readOpenclawConstraintPolicy();
  let changedFiles = gitListChangedFiles({ repoRoot }).map(normalizeRelPath).filter(Boolean);
  // Subtract files that were already untracked at baseline.
  if (Array.isArray(baselineUntracked) && baselineUntracked.length > 0) {
    const baselineSet = new Set(baselineUntracked.map(normalizeRelPath));
    changedFiles = changedFiles.filter(f => !baselineSet.has(f));
  }
  // Partition into files that count toward constraints vs. ignored noise.
  const countedFiles = changedFiles.filter(f => isConstraintCountedPath(f, policy));
  const ignoredFiles = changedFiles.filter(f => !isConstraintCountedPath(f, policy));
  const filesCount = countedFiles.length;

  // Line churn for tracked files: unstaged + staged numstat, counted paths only.
  const u = tryRunCmd('git diff --numstat', { cwd: repoRoot, timeoutMs: 60000 });
  const c = tryRunCmd('git diff --cached --numstat', { cwd: repoRoot, timeoutMs: 60000 });
  const unstagedRows = u.ok ? parseNumstatRows(u.out) : [];
  const stagedRows = c.ok ? parseNumstatRows(c.out) : [];
  let stagedUnstagedChurn = 0;
  for (const row of [...unstagedRows, ...stagedRows]) {
    if (!isConstraintCountedPath(row.file, policy)) continue;
    stagedUnstagedChurn += row.added + row.deleted;
  }

  // Untracked files have no diff; bill their entire line count as churn,
  // again skipping baseline-untracked and non-counted paths.
  const untracked = tryRunCmd('git ls-files --others --exclude-standard', { cwd: repoRoot, timeoutMs: 60000 });
  let untrackedLines = 0;
  if (untracked.ok) {
    const rels = String(untracked.out).split('\n').map(normalizeRelPath).filter(Boolean);
    const baselineSet = new Set((Array.isArray(baselineUntracked) ? baselineUntracked : []).map(normalizeRelPath));
    for (const rel of rels) {
      if (baselineSet.has(rel)) continue;
      if (!isConstraintCountedPath(rel, policy)) continue;
      const abs = path.join(repoRoot, rel);
      untrackedLines += countFileLines(abs);
    }
  }
  const churn = stagedUnstagedChurn + untrackedLines;
  return {
    files: filesCount,
    lines: churn,
    changed_files: countedFiles,
    ignored_files: ignoredFiles,
    all_changed_files: changedFiles,
  };
}
248
+
249
// True when `relPath` is exactly one of the forbidden paths or nested
// inside one. Both sides are normalized to forward slashes without a
// leading "./"; blank forbidden entries never match.
function isForbiddenPath(relPath, forbiddenPaths) {
  const rel = String(relPath || '').replace(/\\/g, '/').replace(/^\.\/+/, '');
  if (!Array.isArray(forbiddenPaths)) return false;
  return forbiddenPaths.some((entry) => {
    const forbidden = String(entry || '')
      .replace(/\\/g, '/')
      .replace(/^\.\/+/, '')
      .replace(/\/+$/, '');
    if (!forbidden) return false;
    return rel === forbidden || rel.startsWith(forbidden + '/');
  });
}
260
+
261
// Evaluate a gene's proposed changes against all safety constraints.
// Checks, in order: blast-radius severity (including system hard caps),
// estimate-vs-actual drift, forbidden paths, critical-path protection,
// new-skill completeness, and ethics patterns in the gene's own text.
// Returns { ok, violations, warnings, blastSeverity }; ok is true only
// when no violations were recorded. Non-Gene inputs pass trivially.
function checkConstraints({ gene, blast, blastRadiusEstimate, repoRoot }) {
  const violations = [];
  const warnings = [];
  let blastSeverity = null;

  // Only objects explicitly typed as 'Gene' are constrained.
  if (!gene || gene.type !== 'Gene') return { ok: true, violations, warnings, blastSeverity };
  const constraints = gene.constraints || {};
  const DEFAULT_MAX_FILES = 20;
  // Non-positive or non-numeric max_files falls back to the default.
  const maxFiles = Number(constraints.max_files) > 0 ? Number(constraints.max_files) : DEFAULT_MAX_FILES;

  // --- Blast radius severity classification ---
  blastSeverity = classifyBlastSeverity({ blast, maxFiles });

  // Hard cap breach is always a violation, regardless of gene config.
  if (blastSeverity.severity === 'hard_cap_breach') {
    violations.push(blastSeverity.message);
    console.error(`[Solidify] ${blastSeverity.message}`);
  } else if (blastSeverity.severity === 'critical_overrun') {
    violations.push(blastSeverity.message);
    // Log directory breakdown for diagnostics.
    const breakdown = analyzeBlastRadiusBreakdown(blast.all_changed_files || blast.changed_files || []);
    console.error(`[Solidify] ${blastSeverity.message}`);
    console.error(`[Solidify] Top contributing directories: ${breakdown.map(function (d) { return d.dir + ' (' + d.files + ')'; }).join(', ')}`);
  } else if (blastSeverity.severity === 'exceeded') {
    violations.push(`max_files exceeded: ${blast.files} > ${maxFiles}`);
  } else if (blastSeverity.severity === 'approaching_limit') {
    warnings.push(blastSeverity.message);
  }

  // --- Estimate vs actual drift detection ---
  // NOTE(review): compareBlastEstimate sets message=null when ratio < 0.1
  // even though drifted is true, so a null can be pushed into warnings
  // here — confirm whether under-estimates should carry a message too.
  const estimateComparison = compareBlastEstimate(blastRadiusEstimate, blast);
  if (estimateComparison && estimateComparison.drifted) {
    warnings.push(estimateComparison.message);
    console.log(`[Solidify] WARNING: ${estimateComparison.message}`);
  }

  // --- Forbidden paths ---
  const forbidden = Array.isArray(constraints.forbidden_paths) ? constraints.forbidden_paths : [];
  for (const f of blast.all_changed_files || blast.changed_files || []) {
    if (isForbiddenPath(f, forbidden)) violations.push(`forbidden_path touched: ${f}`);
  }

  // --- Critical protection: block modifications to critical paths ---
  // By default, evolution CANNOT modify evolver, wrapper, or other core skills.
  // This prevents the "evolver modifies itself and introduces bugs" problem.
  // To opt in to self-modification (NOT recommended for production):
  // set EVOLVE_ALLOW_SELF_MODIFY=true in environment.
  var allowSelfModify = String(process.env.EVOLVE_ALLOW_SELF_MODIFY || '').toLowerCase() === 'true';
  for (const f of blast.all_changed_files || blast.changed_files || []) {
    if (isCriticalProtectedPath(f)) {
      var norm = normalizeRelPath(f);
      // Even with the opt-in, only 'repair'-category genes may touch evolver.
      if (allowSelfModify && norm.startsWith('skills/evolver/') && gene && gene.category === 'repair') {
        // Self-modify opt-in: allow repair-only changes to evolver when explicitly enabled
        warnings.push('self_modify_evolver_repair: ' + norm + ' (EVOLVE_ALLOW_SELF_MODIFY=true)');
      } else {
        violations.push('critical_path_modified: ' + norm);
      }
    }
  }

  // --- New skill directory completeness check ---
  // Detect when an innovation cycle creates a skill directory with too few files.
  // This catches the "empty directory" problem where AI creates skills/<name>/ but
  // fails to write any code into it. A real skill needs at least index.js + SKILL.md.
  if (repoRoot) {
    var newSkillDirs = new Set();
    var changedList = blast.all_changed_files || blast.changed_files || [];
    for (var sci = 0; sci < changedList.length; sci++) {
      var scNorm = normalizeRelPath(changedList[sci]);
      var scMatch = scNorm.match(/^skills\/([^\/]+)\//);
      if (scMatch && !isCriticalProtectedPath(scNorm)) {
        newSkillDirs.add(scMatch[1]);
      }
    }
    newSkillDirs.forEach(function (skillName) {
      var skillDir = path.join(repoRoot, 'skills', skillName);
      try {
        // Dot-files (e.g. .gitignore) don't count toward completeness.
        var entries = fs.readdirSync(skillDir).filter(function (e) { return !e.startsWith('.'); });
        if (entries.length < 2) {
          warnings.push('incomplete_skill: skills/' + skillName + '/ has only ' + entries.length + ' file(s). New skills should have at least index.js + SKILL.md.');
        }
      } catch (e) { /* dir might not exist yet */ }
    });
  }

  // --- Ethics Committee: constitutional principle enforcement ---
  // Scan the gene's own descriptive text (strategy/description/summary)
  // for patterns that indicate malicious or deceptive intent.
  var ethicsText = '';
  if (gene.strategy) {
    ethicsText += (Array.isArray(gene.strategy) ? gene.strategy.join(' ') : String(gene.strategy)) + ' ';
  }
  if (gene.description) ethicsText += String(gene.description) + ' ';
  if (gene.summary) ethicsText += String(gene.summary) + ' ';

  if (ethicsText.length > 0) {
    var ethicsBlockPatterns = [
      { re: /(?:bypass|disable|circumvent|remove)\s+(?:safety|guardrail|security|ethic|constraint|protection)/i, rule: 'safety', msg: 'ethics: strategy attempts to bypass safety mechanisms' },
      { re: /(?:keylogger|screen\s*capture|webcam\s*hijack|mic(?:rophone)?\s*record)/i, rule: 'human_welfare', msg: 'ethics: covert monitoring tool in strategy' },
      { re: /(?:social\s+engineering|phishing)\s+(?:attack|template|script)/i, rule: 'human_welfare', msg: 'ethics: social engineering content in strategy' },
      { re: /(?:exploit|hack)\s+(?:user|human|people|victim)/i, rule: 'human_welfare', msg: 'ethics: human exploitation in strategy' },
      { re: /(?:hide|conceal|obfuscat)\w*\s+(?:action|behavior|intent|log)/i, rule: 'transparency', msg: 'ethics: strategy conceals actions from audit trail' },
    ];
    for (var ei = 0; ei < ethicsBlockPatterns.length; ei++) {
      if (ethicsBlockPatterns[ei].re.test(ethicsText)) {
        violations.push(ethicsBlockPatterns[ei].msg);
        console.error('[Solidify] Ethics violation: ' + ethicsBlockPatterns[ei].msg);
      }
    }
  }

  return { ok: violations.length === 0, violations, warnings, blastSeverity };
}
372
+
373
// Load persisted solidify state from the evolution directory.
// Returns { last_run: null } when the state file is missing or unreadable.
// Fix: removed the unused `memoryDir` local the original computed.
function readStateForSolidify() {
  const statePath = path.join(getEvolutionDir(), 'evolution_solidify_state.json');
  return readJsonIfExists(statePath, { last_run: null });
}
378
+
379
// Atomically persist solidify state: write to a sibling temp file, then
// rename over the real path so readers never observe a half-written file.
// Fix: the original ensured getMemoryDir() existed but wrote into
// getEvolutionDir(); we now also create the directory that actually
// receives the file, otherwise writeFileSync fails on a fresh checkout.
function writeStateForSolidify(state) {
  const statePath = path.join(getEvolutionDir(), 'evolution_solidify_state.json');
  try {
    fs.mkdirSync(getMemoryDir(), { recursive: true });
    fs.mkdirSync(path.dirname(statePath), { recursive: true });
  } catch {}
  const tmp = `${statePath}.tmp`;
  fs.writeFileSync(tmp, JSON.stringify(state, null, 2) + '\n', 'utf8');
  fs.renameSync(tmp, statePath);
}
389
+
390
// Derive an event id from the event's ISO timestamp (epoch millis);
// unparseable timestamps fall back to the current time.
function buildEventId(tsIso) {
  const parsed = Date.parse(tsIso);
  const millis = Number.isFinite(parsed) ? parsed : Date.now();
  return `evt_${millis}`;
}
394
+
395
// Derive a capsule id from the capsule's ISO timestamp (epoch millis);
// unparseable timestamps fall back to the current time.
function buildCapsuleId(tsIso) {
  const parsed = Date.parse(tsIso);
  const millis = Number.isFinite(parsed) ? parsed : Date.now();
  return `capsule_${millis}`;
}
399
+
400
// --- System-wide blast radius hard caps ---
// These are absolute maximums that NO gene can override.
// Even if a gene sets max_files: 1000, the hard cap prevails.
// NOTE(review): `||` means an env value of "0" (or any string Number()
// turns into NaN) silently falls back to the default — confirm that is
// the intended override semantics.
const BLAST_RADIUS_HARD_CAP_FILES = Number(process.env.EVOLVER_HARD_CAP_FILES) || 60;
const BLAST_RADIUS_HARD_CAP_LINES = Number(process.env.EVOLVER_HARD_CAP_LINES) || 20000;

// Severity thresholds (as ratios of gene max_files).
const BLAST_WARN_RATIO = 0.8; // >80% of limit: warning
const BLAST_CRITICAL_RATIO = 2.0; // >200% of limit: critical overrun
409
+
410
// Classify how severe the current blast radius is relative to the gene's
// max_files constraint. Returns { severity, message } where severity is
// one of: 'within_limit' | 'approaching_limit' | 'exceeded' |
// 'critical_overrun' | 'hard_cap_breach'.
function classifyBlastSeverity({ blast, maxFiles }) {
  const files = Number(blast.files) || 0;
  const lines = Number(blast.lines) || 0;

  // System-level guard: hard caps outrank any per-gene configuration.
  const breachesHardCap =
    files > BLAST_RADIUS_HARD_CAP_FILES || lines > BLAST_RADIUS_HARD_CAP_LINES;
  if (breachesHardCap) {
    return {
      severity: 'hard_cap_breach',
      message: `HARD CAP BREACH: ${files} files / ${lines} lines exceeds system limit (${BLAST_RADIUS_HARD_CAP_FILES} files / ${BLAST_RADIUS_HARD_CAP_LINES} lines)`,
    };
  }

  // Without a usable max_files constraint, everything else is in-limit.
  if (!Number.isFinite(maxFiles) || maxFiles <= 0) {
    return { severity: 'within_limit', message: 'no max_files constraint defined' };
  }

  const criticalThreshold = maxFiles * BLAST_CRITICAL_RATIO;
  if (files > criticalThreshold) {
    return {
      severity: 'critical_overrun',
      message: `CRITICAL OVERRUN: ${files} files > ${criticalThreshold} (${BLAST_CRITICAL_RATIO}x limit of ${maxFiles}). Agent likely performed bulk/unintended operation.`,
    };
  }

  if (files > maxFiles) {
    return {
      severity: 'exceeded',
      message: `max_files exceeded: ${files} > ${maxFiles}`,
    };
  }

  if (files > maxFiles * BLAST_WARN_RATIO) {
    const percent = Math.round((files / maxFiles) * 100);
    return {
      severity: 'approaching_limit',
      message: `approaching limit: ${files} / ${maxFiles} files (${percent}%)`,
    };
  }

  return { severity: 'within_limit', message: `${files} / ${maxFiles} files` };
}
452
+
453
// Group changed files by their first two path segments (e.g.
// "skills/feishu-post") and return the top-N groups by file count,
// descending, as [{ dir, files }]. topN defaults to 5.
function analyzeBlastRadiusBreakdown(changedFiles, topN) {
  const limit = Number.isFinite(topN) && topN > 0 ? topN : 5;
  const counts = new Map();
  const list = Array.isArray(changedFiles) ? changedFiles : [];
  for (const file of list) {
    const rel = normalizeRelPath(file);
    if (!rel) continue;
    const segments = rel.split('/');
    const key = segments.length >= 2 ? `${segments[0]}/${segments[1]}` : segments[0];
    counts.set(key, (counts.get(key) || 0) + 1);
  }
  return [...counts.entries()]
    .sort((a, b) => b[1] - a[1])
    .slice(0, limit)
    .map(([dir, files]) => ({ dir, files }));
}
471
+
472
// Compare the agent's pre-edit blast-radius estimate against the measured
// actual. Returns null when no usable estimate exists, otherwise
// { estimateFiles, actualFiles, ratio, drifted, message }.
// Drift means the actual file count is >3x or <0.1x the estimate.
// Bug fix: the original left `message` null for the under-shoot case
// (ratio < 0.1) even though `drifted` was true, so callers pushed a null
// into their warnings list; both drift directions now carry a message.
function compareBlastEstimate(estimate, actual) {
  if (!estimate || typeof estimate !== 'object') return null;
  const estFiles = Number(estimate.files);
  const actFiles = Number(actual.files);
  if (!Number.isFinite(estFiles) || estFiles <= 0) return null;
  const ratio = actFiles / estFiles;
  let message = null;
  if (ratio > 3) {
    message = `Estimate drift: actual ${actFiles} files is ${ratio.toFixed(1)}x the estimated ${estFiles}. Agent did not plan accurately.`;
  } else if (ratio < 0.1) {
    message = `Estimate drift: actual ${actFiles} files is far below the estimated ${estFiles}. Agent did not plan accurately.`;
  }
  return {
    estimateFiles: estFiles,
    actualFiles: actFiles,
    ratio: Math.round(ratio * 100) / 100,
    drifted: ratio > 3 || ratio < 0.1,
    message,
  };
}
490
+
491
// --- Critical skills / paths that evolver must NEVER delete or overwrite ---
// These are core dependencies; destroying them will crash the entire system.
// Entries are matched as directory prefixes by isCriticalProtectedPath
// (the trailing slash is stripped before comparison).
const CRITICAL_PROTECTED_PREFIXES = [
  'skills/feishu-evolver-wrapper/',
  'skills/feishu-common/',
  'skills/feishu-post/',
  'skills/feishu-card/',
  'skills/feishu-doc/',
  'skills/skill-tools/',
  'skills/clawhub/',
  'skills/clawhub-batch-undelete/',
  'skills/git-sync/',
  'skills/evolver/',
];

// Files at workspace root that must never be deleted by evolver.
// Compared by exact (normalized) relative path in isCriticalProtectedPath.
const CRITICAL_PROTECTED_FILES = [
  'MEMORY.md',
  'SOUL.md',
  'IDENTITY.md',
  'AGENTS.md',
  'USER.md',
  'HEARTBEAT.md',
  'RECENT_EVENTS.md',
  'TOOLS.md',
  'TROUBLESHOOTING.md',
  'openclaw.json',
  '.env',
  'package.json',
];
521
+
522
// True when `relPath` lies inside a protected skill directory or is one
// of the protected root files — paths evolution may never delete or
// overwrite.
function isCriticalProtectedPath(relPath) {
  const rel = normalizeRelPath(relPath);
  if (!rel) return false;
  // Protected skill directories: match the dir itself or anything inside it.
  const inProtectedDir = CRITICAL_PROTECTED_PREFIXES.some((prefix) => {
    const clean = prefix.replace(/\/+$/, '');
    return rel === clean || rel.startsWith(clean + '/');
  });
  if (inProtectedDir) return true;
  // Protected root files: exact match only.
  return CRITICAL_PROTECTED_FILES.includes(rel);
}
536
+
537
// Detect destructive edits to critical files among `changedFiles`:
// a protected file that has been deleted or truncated to zero bytes
// yields a CRITICAL_FILE_DELETED / CRITICAL_FILE_EMPTIED violation.
// Files already untracked at baseline are skipped (they were never
// "owned" by the repo, so their absence is not destructive).
// Returns the list of violation strings (empty when clean).
function detectDestructiveChanges({ repoRoot, changedFiles, baselineUntracked }) {
  const violations = [];
  const baselineSet = new Set((Array.isArray(baselineUntracked) ? baselineUntracked : []).map(normalizeRelPath));

  for (const rel of changedFiles) {
    const norm = normalizeRelPath(rel);
    if (!norm) continue;
    // Only protected paths are checked for destruction.
    if (!isCriticalProtectedPath(norm)) continue;

    // Path-traversal guard: the resolved path must stay inside repoRoot.
    const abs = path.join(repoRoot, norm);
    const normAbs = path.resolve(abs);
    const normRepo = path.resolve(repoRoot);
    if (!normAbs.startsWith(normRepo + path.sep) && normAbs !== normRepo) continue;

    // If a critical file existed before but is now missing/empty, that is destructive.
    if (!baselineSet.has(norm)) {
      // It was tracked before, check if it still exists
      if (!fs.existsSync(normAbs)) {
        violations.push(`CRITICAL_FILE_DELETED: ${norm}`);
      } else {
        try {
          const stat = fs.statSync(normAbs);
          // Zero-byte file where content existed before counts as emptied.
          if (stat.isFile() && stat.size === 0) {
            violations.push(`CRITICAL_FILE_EMPTIED: ${norm}`);
          }
        } catch (e) {} // best-effort: stat failure is not itself a violation
      }
    }
  }
  return violations;
}
568
+
569
// --- Validation command safety ---
// Only plain node/npm/npx invocations may run as gene validation commands.
const VALIDATION_ALLOWED_PREFIXES = ['node ', 'npm ', 'npx '];

// Accept a validation command only when it starts with an allowed prefix,
// contains no command substitution, has no shell operators outside quoted
// arguments, and does not use node's inline-eval flags.
function isValidationCommandAllowed(cmd) {
  const candidate = String(cmd || '').trim();
  if (!candidate) return false;
  const hasAllowedPrefix = VALIDATION_ALLOWED_PREFIXES.some((prefix) => candidate.startsWith(prefix));
  if (!hasAllowedPrefix) return false;
  // Command substitution (backticks / $(...)) is never allowed, even quoted.
  if (/`|\$\(/.test(candidate)) return false;
  // Strip quoted arguments, then look for shell operators in what remains.
  const unquoted = candidate.replace(/"[^"]*"/g, '').replace(/'[^']*'/g, '');
  if (/[;&|><]/.test(unquoted)) return false;
  // Inline JavaScript evaluation via node flags is blocked.
  if (/^node\s+(-e|--eval|--print|-p)\b/.test(candidate)) return false;
  return true;
}
582
+
583
// Run a gene's validation commands sequentially, stopping at the first
// failure or safety-blocked command. Empty entries are skipped.
// Returns { ok, results, startedAt, finishedAt } with per-command
// { cmd, ok, out, err } entries.
function runValidations(gene, opts = {}) {
  const repoRoot = opts.repoRoot || getRepoRoot();
  const requested = Number(opts.timeoutMs);
  const timeoutMs = Number.isFinite(requested) ? requested : 180000;
  const commands = Array.isArray(gene && gene.validation) ? gene.validation : [];
  const results = [];
  const startedAt = Date.now();
  for (const raw of commands) {
    const cmd = String(raw || '').trim();
    if (!cmd) continue;
    if (!isValidationCommandAllowed(cmd)) {
      results.push({ cmd, ok: false, out: '', err: 'BLOCKED: validation command rejected by safety check (allowed prefixes: node/npm/npx; shell operators prohibited)' });
      return { ok: false, results, startedAt, finishedAt: Date.now() };
    }
    const run = tryRunCmd(cmd, { cwd: repoRoot, timeoutMs });
    results.push({ cmd, ok: run.ok, out: String(run.out || ''), err: String(run.err || '') });
    if (!run.ok) return { ok: false, results, startedAt, finishedAt: Date.now() };
  }
  return { ok: true, results, startedAt, finishedAt: Date.now() };
}
602
+
603
// --- Canary via Fork: verify index.js loads in an isolated child process ---
// Last safety net before solidify commits an evolution: spawn src/canary.js
// in a child process so a patch that broke index.js is caught BEFORE the
// daemon restarts on the broken code. Skipped (and treated as ok) when no
// canary script exists; output is truncated to keep records small.
function runCanaryCheck(opts) {
  const repoRoot = opts && opts.repoRoot ? opts.repoRoot : getRepoRoot();
  const requested = opts ? Number(opts.timeoutMs) : NaN;
  const timeoutMs = Number.isFinite(requested) ? requested : 30000;
  const canaryScript = path.join(repoRoot, 'src', 'canary.js');
  if (!fs.existsSync(canaryScript)) {
    return { ok: true, skipped: true, reason: 'canary.js not found' };
  }
  const run = tryRunCmd(`node "${canaryScript}"`, { cwd: repoRoot, timeoutMs });
  return {
    ok: run.ok,
    skipped: false,
    out: String(run.out || '').slice(0, 500),
    err: String(run.err || '').slice(0, 500),
  };
}
622
+
623
// Cap on the stored diff snapshot so capsules stay small.
const DIFF_SNAPSHOT_MAX_CHARS = 8000;

// Capture the combined (unstaged + staged) git diff for the repo,
// truncated to the cap. Returns '' when there is nothing to show or
// git fails.
function captureDiffSnapshot(repoRoot) {
  const sections = [];
  const unstaged = tryRunCmd('git diff', { cwd: repoRoot, timeoutMs: 30000 });
  if (unstaged.ok && unstaged.out) sections.push(String(unstaged.out));
  const staged = tryRunCmd('git diff --cached', { cwd: repoRoot, timeoutMs: 30000 });
  if (staged.ok && staged.out) sections.push(String(staged.out));
  let combined = sections.join('\n');
  if (combined.length > DIFF_SNAPSHOT_MAX_CHARS) {
    combined = combined.slice(0, DIFF_SNAPSHOT_MAX_CHARS) + '\n... [TRUNCATED]';
  }
  return combined || '';
}
637
+
638
// Flatten constraint violations, protocol violations, failed validation
// commands, and a failed (non-skipped) canary into one semicolon-separated
// reason string, capped at 2000 chars. Returns 'unknown' when nothing
// contributed a reason.
function buildFailureReason(constraintCheck, validation, protocolViolations, canary) {
  const reasons = [];
  const constraintViolations =
    constraintCheck && Array.isArray(constraintCheck.violations) ? constraintCheck.violations : [];
  for (const v of constraintViolations) {
    reasons.push('constraint: ' + v);
  }
  const protocol = Array.isArray(protocolViolations) ? protocolViolations : [];
  for (const p of protocol) {
    reasons.push('protocol: ' + p);
  }
  const results = validation && Array.isArray(validation.results) ? validation.results : [];
  for (const r of results) {
    if (r && !r.ok) {
      reasons.push('validation_failed: ' + String(r.cmd || '').slice(0, 120) + ' => ' + String(r.err || '').slice(0, 200));
    }
  }
  if (canary && !canary.ok && !canary.skipped) {
    reasons.push('canary_failed: ' + String(canary.err || '').slice(0, 200));
  }
  return reasons.join('; ').slice(0, 2000) || 'unknown';
}
663
+
664
// Roll back tracked-file changes according to EVOLVER_ROLLBACK_MODE:
//   none  - leave the worktree untouched
//   stash - stash changes (recoverable); falls back to hard reset on failure
//   hard  - (default) restore + reset, discarding all tracked changes
function rollbackTracked(repoRoot) {
  const mode = String(process.env.EVOLVER_ROLLBACK_MODE || 'hard').toLowerCase();

  if (mode === 'none') {
    console.log('[Rollback] EVOLVER_ROLLBACK_MODE=none, skipping rollback');
    return;
  }

  const hardReset = () => {
    tryRunCmd('git restore --staged --worktree .', { cwd: repoRoot, timeoutMs: 60000 });
    tryRunCmd('git reset --hard', { cwd: repoRoot, timeoutMs: 60000 });
  };

  if (mode === 'stash') {
    const stashRef = 'evolver-rollback-' + Date.now();
    const result = tryRunCmd('git stash push -m "' + stashRef + '" --include-untracked', { cwd: repoRoot, timeoutMs: 60000 });
    if (result.ok) {
      console.log('[Rollback] Changes stashed with ref: ' + stashRef + '. Recover with "git stash list" and "git stash pop".');
    } else {
      console.log('[Rollback] Stash failed or no changes, using hard reset');
      hardReset();
    }
    return;
  }

  console.log('[Rollback] EVOLVER_ROLLBACK_MODE=hard, resetting tracked files in: ' + repoRoot);
  hardReset();
}
689
+
690
// List untracked, non-ignored paths (relative to the repo root).
// Returns an empty array when the git command fails.
function gitListUntrackedFiles(repoRoot) {
  const r = tryRunCmd('git ls-files --others --exclude-standard', { cwd: repoRoot, timeoutMs: 60000 });
  if (!r.ok) return [];
  const files = [];
  for (const line of String(r.out).split('\n')) {
    const trimmed = line.trim();
    if (trimmed) files.push(trimmed);
  }
  return files;
}
695
+
696
// Delete untracked files that appeared since the baseline snapshot, then prune
// any directories left empty by those deletions.
// Params: repoRoot (absolute repo path), baselineUntracked (repo-relative
// untracked paths captured before the run).
// Returns { deleted, skipped, removedDirs } -- all repo-relative path arrays.
function rollbackNewUntrackedFiles({ repoRoot, baselineUntracked }) {
  // Only files NOT present in the baseline set are deletion candidates.
  const baseline = new Set((Array.isArray(baselineUntracked) ? baselineUntracked : []).map(String));
  const current = gitListUntrackedFiles(repoRoot);
  const toDelete = current.filter(f => !baseline.has(String(f)));
  const skipped = [];
  const deleted = [];
  for (const rel of toDelete) {
    // Normalize to forward slashes and strip any leading "./" segments.
    const safeRel = String(rel || '').replace(/\\/g, '/').replace(/^\.\/+/, '');
    if (!safeRel) continue;
    // CRITICAL: Never delete files inside protected skill directories during rollback.
    if (isCriticalProtectedPath(safeRel)) {
      skipped.push(safeRel);
      continue;
    }
    const abs = path.join(repoRoot, safeRel);
    const normRepo = path.resolve(repoRoot);
    const normAbs = path.resolve(abs);
    // Containment check: refuse to touch anything resolving outside the repo.
    if (!normAbs.startsWith(normRepo + path.sep) && normAbs !== normRepo) continue;
    try {
      // Only plain files are unlinked here; empty directories are handled below.
      if (fs.existsSync(normAbs) && fs.statSync(normAbs).isFile()) {
        fs.unlinkSync(normAbs);
        deleted.push(safeRel);
      }
    } catch (e) {} // best-effort: a file we cannot stat/unlink is left in place
  }
  if (skipped.length > 0) {
    console.log(`[Rollback] Skipped ${skipped.length} critical protected file(s): ${skipped.slice(0, 5).join(', ')}`);
  }
  // Clean up empty directories left after file deletion.
  // This prevents "ghost skill directories" where mkdir succeeded but
  // file creation failed/was rolled back. Without this, empty dirs like
  // skills/anima/, skills/oblivion/ etc. accumulate after failed innovations.
  // SAFETY: never remove top-level structural directories (skills/, src/, etc.)
  // or critical protected directories. Only remove leaf subdirectories.
  var dirsToCheck = new Set();
  for (var di = 0; di < deleted.length; di++) {
    // Walk each deleted file's ancestor chain, collecting candidate dirs.
    var dir = path.dirname(deleted[di]);
    while (dir && dir !== '.' && dir !== '/') {
      var normalized = dir.replace(/\\/g, '/');
      // A path with no '/' is a top-level dir -- never a removal candidate.
      if (!normalized.includes('/')) break;
      dirsToCheck.add(dir);
      dir = path.dirname(dir);
    }
  }
  // Sort deepest first to ensure children are removed before parents
  var sortedDirs = Array.from(dirsToCheck).sort(function (a, b) { return b.length - a.length; });
  var removedDirs = [];
  for (var si = 0; si < sortedDirs.length; si++) {
    // Trailing '/' so the protected-path check matches directory prefixes.
    if (isCriticalProtectedPath(sortedDirs[si] + '/')) continue;
    var dirAbs = path.join(repoRoot, sortedDirs[si]);
    try {
      var entries = fs.readdirSync(dirAbs);
      if (entries.length === 0) {
        fs.rmdirSync(dirAbs);
        removedDirs.push(sortedDirs[si]);
      }
    } catch (e) { /* ignore -- dir may already be gone */ }
  }
  if (removedDirs.length > 0) {
    console.log('[Rollback] Removed ' + removedDirs.length + ' empty director' + (removedDirs.length === 1 ? 'y' : 'ies') + ': ' + removedDirs.slice(0, 5).join(', '));
  }

  return { deleted, skipped, removedDirs: removedDirs };
}
760
+
761
// Map extracted signals to an evolution category.
// 'log_error' implies 'repair'; everything else defaults to 'optimize'.
function inferCategoryFromSignals(signals) {
  const list = Array.isArray(signals) ? signals.map(String) : [];
  return list.includes('log_error') ? 'repair' : 'optimize';
}
767
+
768
// Build a short narrative (max 1000 chars) explaining why an evolution run
// was recorded as a success: gene match, rationale, scope, score, strategy.
function buildSuccessReason({ gene, signals, blast, mutation, score }) {
  const parts = [];

  if (gene && gene.id) {
    const category = gene.category || 'unknown';
    const sigList = (signals || []).slice(0, 4).join(', ');
    parts.push('Gene ' + gene.id + ' (' + category + ') matched signals [' + sigList + '].');
  }

  if (mutation && mutation.rationale) {
    parts.push('Rationale: ' + String(mutation.rationale).slice(0, 200) + '.');
  }

  if (blast) {
    parts.push('Scope: ' + blast.files + ' file(s), ' + blast.lines + ' line(s) changed.');
  }

  if (typeof score === 'number') {
    parts.push('Outcome score: ' + score.toFixed(2) + '.');
  }

  if (gene && Array.isArray(gene.strategy) && gene.strategy.length > 0) {
    parts.push('Strategy applied: ' + gene.strategy.slice(0, 3).join('; ').slice(0, 300) + '.');
  }

  return parts.join(' ').slice(0, 1000) || 'Evolution succeeded.';
}
794
+
795
var CAPSULE_CONTENT_MAX_CHARS = 8000;

// Compose the capsule's long-form content from the run's key facts.
// Sections (intent, gene, signals, strategy, scope, rationale, score) are
// joined by blank lines; output is capped at CAPSULE_CONTENT_MAX_CHARS.
function buildCapsuleContent({ intent, gene, signals, blast, mutation, score }) {
  var sections = [];
  var add = function (text) { sections.push(text); };

  if (intent) {
    add('Intent: ' + String(intent).slice(0, 500));
  }
  if (gene && gene.id) {
    add('Gene: ' + gene.id + ' (' + (gene.category || 'unknown') + ')');
  }
  if (signals && signals.length > 0) {
    add('Signals: ' + signals.slice(0, 8).join(', '));
  }

  var strategy = gene && Array.isArray(gene.strategy) ? gene.strategy : [];
  if (strategy.length > 0) {
    var numbered = strategy.map(function (s, i) { return (i + 1) + '. ' + s; });
    add('Strategy:\n' + numbered.join('\n'));
  }

  if (blast) {
    add('Scope: ' + blast.files + ' file(s), ' + blast.lines + ' line(s)');
    var fileList = blast.changed_files || blast.all_changed_files || [];
    if (fileList.length > 0) {
      add('Changed files:\n' + fileList.slice(0, 20).join('\n'));
    }
  }

  if (mutation && mutation.rationale) {
    add('Rationale: ' + String(mutation.rationale).slice(0, 500));
  }
  if (typeof score === 'number') {
    add('Outcome score: ' + score.toFixed(2));
  }

  var result = sections.join('\n\n');
  if (result.length > CAPSULE_CONTENT_MAX_CHARS) {
    result = result.slice(0, CAPSULE_CONTENT_MAX_CHARS) + '\n... [TRUNCATED]';
  }
  return result || 'Evolution completed successfully.';
}
838
+
839
+ // ---------------------------------------------------------------------------
840
+ // Epigenetic Marks -- environmental imprints on Gene expression
841
+ // ---------------------------------------------------------------------------
842
+ // Epigenetic marks record environmental conditions under which a Gene performs
843
+ // well or poorly. Unlike mutations (which change the Gene itself), epigenetic
844
+ // marks modify expression strength without altering the underlying strategy.
845
+ // Marks propagate when Genes are reused (horizontal gene transfer) and decay
846
+ // over time (like biological DNA methylation patterns fading across generations).
847
+
848
// Construct one epigenetic mark record.
// `boost` is coerced to a number (non-numeric -> 0) and clamped to [-0.5, 0.5];
// context/reason strings are length-capped.
function buildEpigeneticMark(context, boost, reason) {
  var clamped = Number(boost) || 0;
  if (clamped > 0.5) clamped = 0.5;
  if (clamped < -0.5) clamped = -0.5;
  return {
    context: String(context || '').slice(0, 100),
    boost: clamped,
    reason: String(reason || '').slice(0, 200),
    created_at: new Date().toISOString(),
  };
}
856
+
857
// Record the run's outcome as an epigenetic mark on the Gene, keyed by the
// environment context (platform/arch/node_version). Success reinforces an
// existing mark (+0.05, max 0.5) or adds a +0.1 mark; failure suppresses
// (-0.1, min -0.5) or adds a -0.1 mark. Marks older than 90 days are dropped
// and at most the 10 most recent are kept. Mutates and returns `gene`.
function applyEpigeneticMarks(gene, envFingerprint, outcomeStatus) {
  if (!gene || gene.type !== 'Gene') return gene;

  if (!Array.isArray(gene.epigenetic_marks)) {
    gene.epigenetic_marks = [];
  }

  // Environment context string, e.g. "linux/x64/v20.1.0"; 'unknown' fallback.
  var fp = envFingerprint || {};
  var envContext = [
    fp.platform ? String(fp.platform) : '',
    fp.arch ? String(fp.arch) : '',
    fp.node_version ? String(fp.node_version) : '',
  ].filter(Boolean).join('/') || 'unknown';

  var existingIdx = gene.epigenetic_marks.findIndex(function (m) {
    return m && m.context === envContext;
  });
  var existing = existingIdx >= 0 ? gene.epigenetic_marks[existingIdx] : null;

  if (outcomeStatus === 'success') {
    if (existing) {
      // Reinforce: increase boost (max 0.5) and refresh the timestamp.
      existing.boost = Math.min(0.5, (Number(existing.boost) || 0) + 0.05);
      existing.reason = 'reinforced_by_success';
      existing.created_at = new Date().toISOString();
    } else {
      gene.epigenetic_marks.push(
        buildEpigeneticMark(envContext, 0.1, 'success_in_environment')
      );
    }
  } else if (outcomeStatus === 'failed') {
    if (existing) {
      // Suppress: decrease boost (min -0.5) and refresh the timestamp.
      existing.boost = Math.max(-0.5, (Number(existing.boost) || 0) - 0.1);
      existing.reason = 'suppressed_by_failure';
      existing.created_at = new Date().toISOString();
    } else {
      gene.epigenetic_marks.push(
        buildEpigeneticMark(envContext, -0.1, 'failure_in_environment')
      );
    }
  }

  // Decay: drop marks older than 90 days, keep the 10 most recent.
  var cutoff = Date.now() - 90 * 24 * 60 * 60 * 1000;
  gene.epigenetic_marks = gene.epigenetic_marks
    .filter(function (m) { return m && new Date(m.created_at).getTime() > cutoff; })
    .slice(-10);

  return gene;
}
911
+
912
// Look up the epigenetic boost for this Gene in the current environment.
// Returns the matching mark's numeric boost, or 0 when absent/invalid.
function getEpigeneticBoost(gene, envFingerprint) {
  if (!gene || !Array.isArray(gene.epigenetic_marks)) return 0;

  var fp = envFingerprint || {};
  var envContext = [
    fp.platform ? String(fp.platform) : '',
    fp.arch ? String(fp.arch) : '',
    fp.node_version ? String(fp.node_version) : '',
  ].filter(Boolean).join('/') || 'unknown';

  var mark = gene.epigenetic_marks.find(function (m) {
    return m && m.context === envContext;
  });
  if (!mark) return 0;
  return Number(mark.boost) || 0;
}
922
+
923
// Synthesize a fallback Gene when no existing Gene matches the signals.
// The id is deterministic for a given signal set (hash of the signal key);
// category comes from the intent when valid, else is inferred from signals.
function buildAutoGene({ signals, intent }) {
  const sigs = Array.isArray(signals)
    ? Array.from(new Set(signals.map(String))).filter(Boolean)
    : [];
  const signalKey = computeSignalKey(sigs);

  const validIntents = ['repair', 'optimize', 'innovate'];
  const category = intent && validIntents.includes(String(intent))
    ? String(intent)
    : inferCategoryFromSignals(sigs);

  const gene = {
    type: 'Gene',
    schema_version: SCHEMA_VERSION,
    id: 'gene_auto_' + stableHash(signalKey),
    category,
    signals_match: sigs.length ? sigs.slice(0, 8) : ['(none)'],
    preconditions: ['signals_key == ' + signalKey],
    strategy: [
      'Extract structured signals from logs and user instructions',
      'Select an existing Gene by signals match (no improvisation)',
      'Estimate blast radius (files, lines) before editing and record it',
      'Apply smallest reversible patch',
      'Validate using declared validation steps; rollback on failure',
      'Solidify knowledge: append EvolutionEvent, update Gene/Capsule store',
    ],
    constraints: {
      max_files: 12,
      forbidden_paths: [
        '.git', 'node_modules',
        'skills/feishu-evolver-wrapper', 'skills/feishu-common',
        'skills/feishu-post', 'skills/feishu-card', 'skills/feishu-doc',
        'skills/skill-tools', 'skills/clawhub', 'skills/clawhub-batch-undelete',
        'skills/git-sync',
      ],
    },
    validation: ['node scripts/validate-modules.js ./src/gep/solidify'],
    epigenetic_marks: [], // Epigenetic marks: environment-specific expression modifiers
  };
  gene.asset_id = computeAssetId(gene);
  return gene;
}
962
+
963
// Guarantee a Gene for this run: prefer the already-selected gene, then try
// re-selection from the loaded pool, and finally auto-create one (persisted
// via upsertGene unless dryRun). Returns { gene, created, reason }.
function ensureGene({ genes, selectedGene, signals, intent, dryRun }) {
  if (selectedGene && selectedGene.type === 'Gene') {
    return { gene: selectedGene, created: false, reason: 'selected_gene_id_present' };
  }

  const pool = Array.isArray(genes) ? genes : [];
  const sigs = Array.isArray(signals) ? signals : [];
  const res = selectGene(pool, sigs, {
    bannedGeneIds: new Set(),
    preferredGeneId: null,
    driftEnabled: false,
  });
  if (res && res.selected) {
    return { gene: res.selected, created: false, reason: 'reselected_from_existing' };
  }

  const auto = buildAutoGene({ signals, intent });
  if (!dryRun) upsertGene(auto);
  return { gene: auto, created: true, reason: 'no_match_create_new' };
}
973
+
974
// Gather the memory/user context files that feed signal extraction.
// MEMORY.md at the repo root wins over the one in the memory dir; the memory
// snippet is capped at 50k chars. Missing files yield empty strings.
function readRecentSessionInputs() {
  const repoRoot = getRepoRoot();
  const memoryDir = getMemoryDir();

  // Read a file if present, optionally capping its length.
  const readIfExists = function (file, maxChars) {
    if (!fs.existsSync(file)) return '';
    const text = fs.readFileSync(file, 'utf8');
    return typeof maxChars === 'number' ? text.slice(0, maxChars) : text;
  };

  const rootMemory = path.join(repoRoot, 'MEMORY.md');
  const dirMemory = path.join(memoryDir, 'MEMORY.md');
  const memoryFile = fs.existsSync(rootMemory) ? rootMemory : dirMemory;
  const todayLog = path.join(memoryDir, new Date().toISOString().split('T')[0] + '.md');

  return {
    recentSessionTranscript: '',
    todayLog: readIfExists(todayLog),
    memorySnippet: readIfExists(memoryFile, 50000),
    userSnippet: readIfExists(path.join(repoRoot, 'USER.md')),
  };
}
988
+
989
// True when `dir` is inside a git repository (work tree or .git dir);
// false on any failure, including a nonexistent directory.
function isGitRepo(dir) {
  const opts = {
    cwd: dir,
    encoding: 'utf8',
    stdio: ['ignore', 'pipe', 'pipe'],
    timeout: 5000,
  };
  try {
    execSync('git rev-parse --git-dir', opts);
  } catch (_) {
    return false;
  }
  return true;
}
1000
+
1001
+ function solidify({ intent, summary, dryRun = false, rollbackOnFailure = true } = {}) {
1002
+ const repoRoot = getRepoRoot();
1003
+
1004
+ if (!isGitRepo(repoRoot)) {
1005
+ console.error('[Solidify] FATAL: Not a git repository (' + repoRoot + ').');
1006
+ console.error('[Solidify] Solidify requires git for rollback, diff capture, and blast radius.');
1007
+ console.error('[Solidify] Run "git init && git add -A && git commit -m init" first.');
1008
+ return {
1009
+ ok: false,
1010
+ status: 'failed',
1011
+ failure_reason: 'not_a_git_repository',
1012
+ event: null,
1013
+ };
1014
+ }
1015
+ const state = readStateForSolidify();
1016
+ const lastRun = state && state.last_run ? state.last_run : null;
1017
+ const genes = loadGenes();
1018
+ const geneId = lastRun && lastRun.selected_gene_id ? String(lastRun.selected_gene_id) : null;
1019
+ const selectedGene = geneId ? genes.find(g => g && g.type === 'Gene' && g.id === geneId) : null;
1020
+ const parentEventId =
1021
+ lastRun && typeof lastRun.parent_event_id === 'string' ? lastRun.parent_event_id : getLastEventId();
1022
+ const signals =
1023
+ lastRun && Array.isArray(lastRun.signals) && lastRun.signals.length
1024
+ ? Array.from(new Set(lastRun.signals.map(String)))
1025
+ : extractSignals(readRecentSessionInputs());
1026
+ const signalKey = computeSignalKey(signals);
1027
+
1028
+ const mutationRaw = lastRun && lastRun.mutation && typeof lastRun.mutation === 'object' ? lastRun.mutation : null;
1029
+ const personalityRaw =
1030
+ lastRun && lastRun.personality_state && typeof lastRun.personality_state === 'object' ? lastRun.personality_state : null;
1031
+ const mutation = mutationRaw && isValidMutation(mutationRaw) ? normalizeMutation(mutationRaw) : null;
1032
+ const personalityState =
1033
+ personalityRaw && isValidPersonalityState(personalityRaw) ? normalizePersonalityState(personalityRaw) : null;
1034
+ const personalityKeyUsed = personalityState ? personalityKey(personalityState) : null;
1035
+ const protocolViolations = [];
1036
+ if (!mutation) protocolViolations.push('missing_or_invalid_mutation');
1037
+ if (!personalityState) protocolViolations.push('missing_or_invalid_personality_state');
1038
+ if (mutation && mutation.risk_level === 'high' && !isHighRiskMutationAllowed(personalityState || null)) {
1039
+ protocolViolations.push('high_risk_mutation_not_allowed_by_personality');
1040
+ }
1041
+ if (mutation && mutation.risk_level === 'high' && !(lastRun && lastRun.personality_known)) {
1042
+ protocolViolations.push('high_risk_mutation_forbidden_under_unknown_personality');
1043
+ }
1044
+ if (mutation && mutation.category === 'innovate' && personalityState && isHighRiskPersonality(personalityState)) {
1045
+ protocolViolations.push('forbidden_innovate_with_high_risk_personality');
1046
+ }
1047
+
1048
+ const ensured = ensureGene({ genes, selectedGene, signals, intent, dryRun: !!dryRun });
1049
+ const geneUsed = ensured.gene;
1050
+ const blast = computeBlastRadius({
1051
+ repoRoot,
1052
+ baselineUntracked: lastRun && Array.isArray(lastRun.baseline_untracked) ? lastRun.baseline_untracked : [],
1053
+ });
1054
+ const blastRadiusEstimate = lastRun && lastRun.blast_radius_estimate ? lastRun.blast_radius_estimate : null;
1055
+ const constraintCheck = checkConstraints({ gene: geneUsed, blast, blastRadiusEstimate, repoRoot });
1056
+
1057
+ // Log blast radius diagnostics when severity is elevated.
1058
+ if (constraintCheck.blastSeverity &&
1059
+ constraintCheck.blastSeverity.severity !== 'within_limit' &&
1060
+ constraintCheck.blastSeverity.severity !== 'approaching_limit') {
1061
+ const breakdown = analyzeBlastRadiusBreakdown(blast.all_changed_files || blast.changed_files || []);
1062
+ console.error(`[Solidify] Blast radius breakdown: ${JSON.stringify(breakdown)}`);
1063
+ const estComp = compareBlastEstimate(blastRadiusEstimate, blast);
1064
+ if (estComp) {
1065
+ console.error(`[Solidify] Estimate comparison: estimated ${estComp.estimateFiles} files, actual ${estComp.actualFiles} files (${estComp.ratio}x)`);
1066
+ }
1067
+ }
1068
+
1069
+ // Log warnings even on success (approaching limit, estimate drift).
1070
+ if (constraintCheck.warnings && constraintCheck.warnings.length > 0) {
1071
+ for (const w of constraintCheck.warnings) {
1072
+ console.log(`[Solidify] WARNING: ${w}`);
1073
+ }
1074
+ }
1075
+
1076
+ // Critical safety: detect destructive changes to core dependencies.
1077
+ const destructiveViolations = detectDestructiveChanges({
1078
+ repoRoot,
1079
+ changedFiles: blast.all_changed_files || blast.changed_files || [],
1080
+ baselineUntracked: lastRun && Array.isArray(lastRun.baseline_untracked) ? lastRun.baseline_untracked : [],
1081
+ });
1082
+ if (destructiveViolations.length > 0) {
1083
+ for (const v of destructiveViolations) {
1084
+ constraintCheck.violations.push(v);
1085
+ }
1086
+ constraintCheck.ok = false;
1087
+ console.error(`[Solidify] CRITICAL: Destructive changes detected: ${destructiveViolations.join('; ')}`);
1088
+ }
1089
+
1090
+ // Capture environment fingerprint before validation.
1091
+ const envFp = captureEnvFingerprint();
1092
+
1093
+ let validation = { ok: true, results: [], startedAt: null, finishedAt: null };
1094
+ if (geneUsed) {
1095
+ validation = runValidations(geneUsed, { repoRoot, timeoutMs: 180000 });
1096
+ }
1097
+
1098
+ // Canary safety: verify index.js loads in an isolated child process.
1099
+ // This catches broken entry points that gene validations might miss.
1100
+ const canary = runCanaryCheck({ repoRoot, timeoutMs: 30000 });
1101
+ if (!canary.ok && !canary.skipped) {
1102
+ constraintCheck.violations.push(
1103
+ `canary_failed: index.js cannot load in child process: ${canary.err}`
1104
+ );
1105
+ constraintCheck.ok = false;
1106
+ console.error(`[Solidify] CANARY FAILED: ${canary.err}`);
1107
+ }
1108
+
1109
+ // Optional LLM review: when EVOLVER_LLM_REVIEW=true, submit diff for review.
1110
+ let llmReviewResult = null;
1111
+ if (constraintCheck.ok && validation.ok && protocolViolations.length === 0 && isLlmReviewEnabled()) {
1112
+ try {
1113
+ const reviewDiff = captureDiffSnapshot(repoRoot);
1114
+ llmReviewResult = runLlmReview({
1115
+ diff: reviewDiff,
1116
+ gene: geneUsed,
1117
+ signals,
1118
+ mutation,
1119
+ });
1120
+ if (llmReviewResult && llmReviewResult.approved === false) {
1121
+ constraintCheck.violations.push('llm_review_rejected: ' + (llmReviewResult.summary || 'no reason'));
1122
+ constraintCheck.ok = false;
1123
+ console.log('[LLMReview] Change REJECTED: ' + (llmReviewResult.summary || ''));
1124
+ } else if (llmReviewResult) {
1125
+ console.log('[LLMReview] Change approved (confidence: ' + (llmReviewResult.confidence || '?') + ')');
1126
+ }
1127
+ } catch (e) {
1128
+ console.log('[LLMReview] Failed (non-fatal): ' + (e && e.message ? e.message : e));
1129
+ }
1130
+ }
1131
+
1132
+ // Build standardized ValidationReport (machine-readable, interoperable).
1133
+ const validationReport = buildValidationReport({
1134
+ geneId: geneUsed && geneUsed.id ? geneUsed.id : null,
1135
+ commands: validation.results.map(function (r) { return r.cmd; }),
1136
+ results: validation.results,
1137
+ envFp: envFp,
1138
+ startedAt: validation.startedAt,
1139
+ finishedAt: validation.finishedAt,
1140
+ });
1141
+
1142
+ const success = constraintCheck.ok && validation.ok && protocolViolations.length === 0;
1143
+ const ts = nowIso();
1144
+ const outcomeStatus = success ? 'success' : 'failed';
1145
+ const score = clamp01(success ? 0.85 : 0.2);
1146
+
1147
+ const selectedCapsuleId =
1148
+ lastRun && typeof lastRun.selected_capsule_id === 'string' && lastRun.selected_capsule_id.trim()
1149
+ ? String(lastRun.selected_capsule_id).trim() : null;
1150
+ const capsuleId = success ? selectedCapsuleId || buildCapsuleId(ts) : null;
1151
+ const derivedIntent = intent || (mutation && mutation.category) || (geneUsed && geneUsed.category) || 'repair';
1152
+ const intentMismatch =
1153
+ intent && mutation && typeof mutation.category === 'string' && String(intent) !== String(mutation.category);
1154
+ if (intentMismatch) protocolViolations.push(`intent_mismatch_with_mutation:${String(intent)}!=${String(mutation.category)}`);
1155
+
1156
+ const sourceType = lastRun && lastRun.source_type ? String(lastRun.source_type) : 'generated';
1157
+ const reusedAssetId = lastRun && lastRun.reused_asset_id ? String(lastRun.reused_asset_id) : null;
1158
+ const reusedChainId = lastRun && lastRun.reused_chain_id ? String(lastRun.reused_chain_id) : null;
1159
+
1160
+ // LessonL: carry applied lesson IDs for Hub effectiveness adjustment
1161
+ const appliedLessons = lastRun && Array.isArray(lastRun.applied_lessons) ? lastRun.applied_lessons : [];
1162
+
1163
+ const event = {
1164
+ type: 'EvolutionEvent',
1165
+ schema_version: SCHEMA_VERSION,
1166
+ id: buildEventId(ts),
1167
+ parent: parentEventId || null,
1168
+ intent: derivedIntent,
1169
+ signals,
1170
+ genes_used: geneUsed && geneUsed.id ? [geneUsed.id] : [],
1171
+ mutation_id: mutation && mutation.id ? mutation.id : null,
1172
+ personality_state: personalityState || null,
1173
+ blast_radius: { files: blast.files, lines: blast.lines },
1174
+ outcome: { status: outcomeStatus, score },
1175
+ capsule_id: capsuleId,
1176
+ source_type: sourceType,
1177
+ reused_asset_id: reusedAssetId,
1178
+ ...(appliedLessons.length > 0 ? { applied_lessons: appliedLessons } : {}),
1179
+ env_fingerprint: envFp,
1180
+ validation_report_id: validationReport.id,
1181
+ meta: {
1182
+ at: ts,
1183
+ signal_key: signalKey,
1184
+ selector: lastRun && lastRun.selector ? lastRun.selector : null,
1185
+ blast_radius_estimate: lastRun && lastRun.blast_radius_estimate ? lastRun.blast_radius_estimate : null,
1186
+ mutation: mutation || null,
1187
+ personality: {
1188
+ key: personalityKeyUsed,
1189
+ known: !!(lastRun && lastRun.personality_known),
1190
+ mutations: lastRun && Array.isArray(lastRun.personality_mutations) ? lastRun.personality_mutations : [],
1191
+ },
1192
+ gene: {
1193
+ id: geneUsed && geneUsed.id ? geneUsed.id : null,
1194
+ created: !!ensured.created,
1195
+ reason: ensured.reason,
1196
+ },
1197
+ constraints_ok: constraintCheck.ok,
1198
+ constraint_violations: constraintCheck.violations,
1199
+ constraint_warnings: constraintCheck.warnings || [],
1200
+ blast_severity: constraintCheck.blastSeverity ? constraintCheck.blastSeverity.severity : null,
1201
+ blast_breakdown: (!constraintCheck.ok && blast)
1202
+ ? analyzeBlastRadiusBreakdown(blast.all_changed_files || blast.changed_files || [])
1203
+ : null,
1204
+ blast_estimate_comparison: compareBlastEstimate(blastRadiusEstimate, blast),
1205
+ validation_ok: validation.ok,
1206
+ validation: validation.results.map(r => ({ cmd: r.cmd, ok: r.ok })),
1207
+ validation_report: validationReport,
1208
+ canary_ok: canary.ok,
1209
+ canary_skipped: !!canary.skipped,
1210
+ protocol_ok: protocolViolations.length === 0,
1211
+ protocol_violations: protocolViolations,
1212
+ memory_graph: memoryGraphPath(),
1213
+ },
1214
+ };
1215
+ event.asset_id = computeAssetId(event);
1216
+
1217
+ let capsule = null;
1218
+ if (success) {
1219
+ const s = String(summary || '').trim();
1220
+ const autoSummary = geneUsed
1221
+ ? `固化:${geneUsed.id} 命中信号 ${signals.join(', ') || '(none)'},变更 ${blast.files} 文件 / ${blast.lines} 行。`
1222
+ : `固化:命中信号 ${signals.join(', ') || '(none)'},变更 ${blast.files} 文件 / ${blast.lines} 行。`;
1223
+ let prevCapsule = null;
1224
+ try {
1225
+ if (selectedCapsuleId) {
1226
+ const list = require('./assetStore').loadCapsules();
1227
+ prevCapsule = Array.isArray(list) ? list.find(c => c && c.type === 'Capsule' && String(c.id) === selectedCapsuleId) : null;
1228
+ }
1229
+ } catch (e) {}
1230
+ const successReason = buildSuccessReason({ gene: geneUsed, signals, blast, mutation, score });
1231
+ const capsuleDiff = captureDiffSnapshot(repoRoot);
1232
+ const capsuleContent = buildCapsuleContent({ intent, gene: geneUsed, signals, blast, mutation, score });
1233
+ const capsuleStrategy = geneUsed && Array.isArray(geneUsed.strategy) && geneUsed.strategy.length > 0
1234
+ ? geneUsed.strategy : undefined;
1235
+ capsule = {
1236
+ type: 'Capsule',
1237
+ schema_version: SCHEMA_VERSION,
1238
+ id: capsuleId,
1239
+ trigger: prevCapsule && Array.isArray(prevCapsule.trigger) && prevCapsule.trigger.length ? prevCapsule.trigger : signals,
1240
+ gene: geneUsed && geneUsed.id ? geneUsed.id : prevCapsule && prevCapsule.gene ? prevCapsule.gene : null,
1241
+ summary: s || (prevCapsule && prevCapsule.summary ? String(prevCapsule.summary) : autoSummary),
1242
+ confidence: clamp01(score),
1243
+ blast_radius: { files: blast.files, lines: blast.lines },
1244
+ outcome: { status: 'success', score },
1245
+ success_streak: 1,
1246
+ success_reason: successReason,
1247
+ env_fingerprint: envFp,
1248
+ source_type: sourceType,
1249
+ reused_asset_id: reusedAssetId,
1250
+ a2a: { eligible_to_broadcast: false },
1251
+ content: capsuleContent,
1252
+ diff: capsuleDiff || undefined,
1253
+ strategy: capsuleStrategy,
1254
+ };
1255
+ capsule.asset_id = computeAssetId(capsule);
1256
+ }
1257
+
1258
+ // Capture failed mutation as a FailedCapsule before rollback destroys the diff.
1259
+ if (!dryRun && !success) {
1260
+ try {
1261
+ var diffSnapshot = captureDiffSnapshot(repoRoot);
1262
+ if (diffSnapshot) {
1263
+ var failedCapsule = {
1264
+ type: 'Capsule',
1265
+ schema_version: SCHEMA_VERSION,
1266
+ id: 'failed_' + buildCapsuleId(ts),
1267
+ outcome: { status: 'failed', score: score },
1268
+ gene: geneUsed && geneUsed.id ? geneUsed.id : null,
1269
+ trigger: Array.isArray(signals) ? signals.slice(0, 8) : [],
1270
+ summary: geneUsed
1271
+ ? 'Failed: ' + geneUsed.id + ' on signals [' + (signals.slice(0, 3).join(', ') || 'none') + ']'
1272
+ : 'Failed evolution on signals [' + (signals.slice(0, 3).join(', ') || 'none') + ']',
1273
+ diff_snapshot: diffSnapshot,
1274
+ failure_reason: buildFailureReason(constraintCheck, validation, protocolViolations, canary),
1275
+ constraint_violations: constraintCheck.violations || [],
1276
+ env_fingerprint: envFp,
1277
+ blast_radius: { files: blast.files, lines: blast.lines },
1278
+ created_at: ts,
1279
+ };
1280
+ failedCapsule.asset_id = computeAssetId(failedCapsule);
1281
+ appendFailedCapsule(failedCapsule);
1282
+ console.log('[Solidify] Preserved failed mutation as FailedCapsule: ' + failedCapsule.id);
1283
+ }
1284
+ } catch (e) {
1285
+ console.log('[Solidify] FailedCapsule capture error (non-fatal): ' + (e && e.message ? e.message : e));
1286
+ }
1287
+ }
1288
+
1289
+ if (!dryRun && !success && rollbackOnFailure) {
1290
+ rollbackTracked(repoRoot);
1291
+ // Only clean up new untracked files when a valid baseline exists.
1292
+ // Without a baseline, we cannot distinguish pre-existing untracked files
1293
+ // from AI-generated ones, so deleting would be destructive.
1294
+ if (lastRun && Array.isArray(lastRun.baseline_untracked)) {
1295
+ rollbackNewUntrackedFiles({ repoRoot, baselineUntracked: lastRun.baseline_untracked });
1296
+ }
1297
+ }
1298
+
1299
+ // Apply epigenetic marks to the gene based on outcome and environment
1300
+ if (!dryRun && geneUsed && geneUsed.type === 'Gene') {
1301
+ try {
1302
+ applyEpigeneticMarks(geneUsed, envFp, outcomeStatus);
1303
+ upsertGene(geneUsed);
1304
+ } catch (e) {
1305
+ // Non-blocking: epigenetic mark failure must not break solidify
1306
+ }
1307
+ }
1308
+
1309
+ if (!dryRun) {
1310
+ appendEventJsonl(validationReport);
1311
+ if (capsule) upsertCapsule(capsule);
1312
+ appendEventJsonl(event);
1313
+ if (capsule) {
1314
+ const streak = computeCapsuleSuccessStreak({ capsuleId: capsule.id });
1315
+ capsule.success_streak = streak || 1;
1316
+ capsule.a2a = {
1317
+ eligible_to_broadcast:
1318
+ isBlastRadiusSafe(capsule.blast_radius) &&
1319
+ (capsule.outcome.score || 0) >= 0.7 &&
1320
+ (capsule.success_streak || 0) >= 2,
1321
+ };
1322
+ capsule.asset_id = computeAssetId(capsule);
1323
+ upsertCapsule(capsule);
1324
+ }
1325
+ try {
1326
+ if (personalityState) {
1327
+ updatePersonalityStats({ personalityState, outcome: outcomeStatus, score, notes: `event:${event.id}` });
1328
+ }
1329
+ } catch (e) {}
1330
+ }
1331
+
1332
+ const runId = lastRun && lastRun.run_id ? String(lastRun.run_id) : stableHash(`${parentEventId || 'root'}|${geneId || 'none'}|${signalKey}`);
1333
+ state.last_solidify = {
1334
+ run_id: runId, at: ts, event_id: event.id, capsule_id: capsuleId, outcome: event.outcome,
1335
+ };
1336
+ if (!dryRun) writeStateForSolidify(state);
1337
+
1338
+ if (!dryRun) {
1339
+ try {
1340
+ recordNarrative({
1341
+ gene: geneUsed,
1342
+ signals,
1343
+ mutation,
1344
+ outcome: event.outcome,
1345
+ blast,
1346
+ capsule,
1347
+ });
1348
+ } catch (e) {
1349
+ console.log('[Narrative] Record failed (non-fatal): ' + (e && e.message ? e.message : e));
1350
+ }
1351
+ }
1352
+
1353
+ // Search-First Evolution: auto-publish eligible capsules to the Hub (as Gene+Capsule bundle).
1354
+ let publishResult = null;
1355
+ if (!dryRun && capsule && capsule.a2a && capsule.a2a.eligible_to_broadcast) {
1356
+ const autoPublish = String(process.env.EVOLVER_AUTO_PUBLISH || 'true').toLowerCase() !== 'false';
1357
+ const visibility = String(process.env.EVOLVER_DEFAULT_VISIBILITY || 'public').toLowerCase();
1358
+ const minPublishScore = Number(process.env.EVOLVER_MIN_PUBLISH_SCORE) || 0.78;
1359
+
1360
+ // Skip publishing if: disabled, private, direct-reused asset, or below minimum score.
1361
+ // 'reference' mode produces a new capsule inspired by hub -- eligible for publish.
1362
+ if (autoPublish && visibility === 'public' && sourceType !== 'reused' && (capsule.outcome.score || 0) >= minPublishScore) {
1363
+ try {
1364
+ const { buildPublishBundle, httpTransportSend } = require('./a2aProtocol');
1365
+ const { sanitizePayload } = require('./sanitize');
1366
+ const hubUrl = (process.env.A2A_HUB_URL || '').replace(/\/+$/, '');
1367
+
1368
+ if (hubUrl) {
1369
+ // Hub requires bundle format: Gene + Capsule published together.
1370
+ // Build a Gene object from geneUsed if available; otherwise synthesize a minimal Gene.
1371
+ var publishGene = null;
1372
+ if (geneUsed && geneUsed.type === 'Gene' && geneUsed.id) {
1373
+ publishGene = sanitizePayload(geneUsed);
1374
+ } else {
1375
+ publishGene = {
1376
+ type: 'Gene',
1377
+ id: capsule.gene || ('gene_auto_' + (capsule.id || Date.now())),
1378
+ category: event && event.intent ? event.intent : 'repair',
1379
+ signals_match: Array.isArray(capsule.trigger) ? capsule.trigger : [],
1380
+ summary: capsule.summary || '',
1381
+ };
1382
+ }
1383
+ var parentRef = reusedAssetId && sourceType === 'reference' && String(reusedAssetId).startsWith('sha256:')
1384
+ ? reusedAssetId : null;
1385
+ if (parentRef) {
1386
+ publishGene.parent = parentRef;
1387
+ }
1388
+ publishGene.asset_id = computeAssetId(publishGene);
1389
+
1390
+ var sanitizedCapsule = sanitizePayload(capsule);
1391
+ if (parentRef) {
1392
+ sanitizedCapsule.parent = parentRef;
1393
+ }
1394
+ sanitizedCapsule.asset_id = computeAssetId(sanitizedCapsule);
1395
+
1396
+ var sanitizedEvent = (event && event.type === 'EvolutionEvent') ? sanitizePayload(event) : null;
1397
+ if (sanitizedEvent) sanitizedEvent.asset_id = computeAssetId(sanitizedEvent);
1398
+
1399
+ var publishChainId = reusedChainId || null;
1400
+
1401
+ var evolverModelName = (process.env.EVOLVER_MODEL_NAME || '').trim().slice(0, 100);
1402
+
1403
+ var msg = buildPublishBundle({
1404
+ gene: publishGene,
1405
+ capsule: sanitizedCapsule,
1406
+ event: sanitizedEvent,
1407
+ chainId: publishChainId,
1408
+ modelName: evolverModelName || undefined,
1409
+ });
1410
+ var result = httpTransportSend(msg, { hubUrl });
1411
+ // httpTransportSend returns a Promise
1412
+ if (result && typeof result.then === 'function') {
1413
+ result
1414
+ .then(function (res) {
1415
+ if (res && res.ok) {
1416
+ console.log('[AutoPublish] Published bundle (Gene+Capsule) ' + (capsule.asset_id || capsule.id) + ' to Hub.');
1417
+ } else {
1418
+ console.log('[AutoPublish] Hub rejected: ' + JSON.stringify(res));
1419
+ }
1420
+ })
1421
+ .catch(function (err) {
1422
+ console.log('[AutoPublish] Failed (non-fatal): ' + err.message);
1423
+ });
1424
+ }
1425
+ publishResult = { attempted: true, asset_id: capsule.asset_id || capsule.id, bundle: true };
1426
+ logAssetCall({
1427
+ run_id: lastRun && lastRun.run_id ? lastRun.run_id : null,
1428
+ action: 'asset_publish',
1429
+ asset_id: capsule.asset_id || capsule.id,
1430
+ asset_type: 'Capsule',
1431
+ source_node_id: null,
1432
+ chain_id: publishChainId || null,
1433
+ signals: Array.isArray(capsule.trigger) ? capsule.trigger : [],
1434
+ extra: {
1435
+ source_type: sourceType,
1436
+ reused_asset_id: reusedAssetId,
1437
+ gene_id: publishGene && publishGene.id ? publishGene.id : null,
1438
+ parent: parentRef || null,
1439
+ },
1440
+ });
1441
+ } else {
1442
+ publishResult = { attempted: false, reason: 'no_hub_url' };
1443
+ }
1444
+ } catch (e) {
1445
+ console.log('[AutoPublish] Error (non-fatal): ' + e.message);
1446
+ publishResult = { attempted: false, reason: e.message };
1447
+ }
1448
+ } else {
1449
+ const reason = !autoPublish ? 'auto_publish_disabled'
1450
+ : visibility !== 'public' ? 'visibility_private'
1451
+ : sourceType === 'reused' ? 'skip_direct_reused_asset'
1452
+ : 'below_min_score';
1453
+ publishResult = { attempted: false, reason };
1454
+ logAssetCall({
1455
+ run_id: lastRun && lastRun.run_id ? lastRun.run_id : null,
1456
+ action: 'asset_publish_skip',
1457
+ asset_id: capsule.asset_id || capsule.id,
1458
+ asset_type: 'Capsule',
1459
+ reason,
1460
+ signals: Array.isArray(capsule.trigger) ? capsule.trigger : [],
1461
+ });
1462
+ }
1463
+ }
1464
+
1465
+ // --- Anti-pattern auto-publish ---
1466
+ // Publish high-information-value failures to the Hub as anti-pattern assets.
1467
+ // Only enabled via EVOLVER_PUBLISH_ANTI_PATTERNS=true (opt-in).
1468
+ // Only constraint violations or canary failures qualify (not routine validation failures).
1469
+ var antiPatternPublishResult = null;
1470
+ if (!dryRun && !success) {
1471
+ var publishAntiPatterns = String(process.env.EVOLVER_PUBLISH_ANTI_PATTERNS || '').toLowerCase() === 'true';
1472
+ var hubUrl = (process.env.A2A_HUB_URL || '').replace(/\/+$/, '');
1473
+ var hasHighInfoFailure = (constraintCheck.violations && constraintCheck.violations.length > 0)
1474
+ || (canary && !canary.ok && !canary.skipped);
1475
+ if (publishAntiPatterns && hubUrl && hasHighInfoFailure) {
1476
+ try {
1477
+ var { buildPublishBundle: buildApBundle, httpTransportSend: httpApSend } = require('./a2aProtocol');
1478
+ var { sanitizePayload: sanitizeAp } = require('./sanitize');
1479
+ var apGene = geneUsed && geneUsed.type === 'Gene' && geneUsed.id
1480
+ ? sanitizeAp(geneUsed)
1481
+ : { type: 'Gene', id: 'gene_unknown_' + Date.now(), category: derivedIntent, signals_match: signals.slice(0, 8), summary: 'Failed evolution gene' };
1482
+ apGene.anti_pattern = true;
1483
+ apGene.failure_reason = buildFailureReason(constraintCheck, validation, protocolViolations, canary);
1484
+ apGene.asset_id = computeAssetId(apGene);
1485
+ var apCapsule = {
1486
+ type: 'Capsule',
1487
+ schema_version: SCHEMA_VERSION,
1488
+ id: 'failed_' + buildCapsuleId(ts),
1489
+ trigger: signals.slice(0, 8),
1490
+ gene: apGene.id,
1491
+ summary: 'Anti-pattern: ' + String(apGene.failure_reason).slice(0, 200),
1492
+ confidence: 0,
1493
+ blast_radius: { files: blast.files, lines: blast.lines },
1494
+ outcome: { status: 'failed', score: score },
1495
+ failure_reason: apGene.failure_reason,
1496
+ a2a: { eligible_to_broadcast: false },
1497
+ };
1498
+ apCapsule.asset_id = computeAssetId(apCapsule);
1499
+ var apModelName = (process.env.EVOLVER_MODEL_NAME || '').trim().slice(0, 100);
1500
+ var apMsg = buildApBundle({ gene: apGene, capsule: sanitizeAp(apCapsule), event: null, modelName: apModelName || undefined });
1501
+ var apResult = httpApSend(apMsg, { hubUrl });
1502
+ if (apResult && typeof apResult.then === 'function') {
1503
+ apResult
1504
+ .then(function (res) {
1505
+ if (res && res.ok) console.log('[AntiPatternPublish] Published failed bundle to Hub: ' + apCapsule.id);
1506
+ else console.log('[AntiPatternPublish] Hub rejected: ' + JSON.stringify(res));
1507
+ })
1508
+ .catch(function (err) {
1509
+ console.log('[AntiPatternPublish] Failed (non-fatal): ' + err.message);
1510
+ });
1511
+ }
1512
+ antiPatternPublishResult = { attempted: true, asset_id: apCapsule.asset_id };
1513
+ } catch (e) {
1514
+ console.log('[AntiPatternPublish] Error (non-fatal): ' + e.message);
1515
+ antiPatternPublishResult = { attempted: false, reason: e.message };
1516
+ }
1517
+ }
1518
+ }
1519
+
1520
+ // --- LessonL: Auto-publish negative lesson to Hub (always-on, lightweight) ---
1521
+ // Unlike anti-pattern publishing (opt-in, full capsule bundle), this publishes
1522
+ // just the failure reason as a structured lesson via the EvolutionEvent.
1523
+ // The Hub's solicitLesson() hook on handlePublish will extract the lesson.
1524
+ // This is achieved by ensuring failure_reason is included in the event metadata,
1525
+ // which we already do above. The Hub-side solicitLesson() handles the rest.
1526
+ // For failures without a published event (no auto-publish), we still log locally.
1527
+ if (!dryRun && !success && event && event.outcome) {
1528
+ var failureContent = buildFailureReason(constraintCheck, validation, protocolViolations, canary);
1529
+ event.failure_reason = failureContent;
1530
+ event.summary = geneUsed
1531
+ ? 'Failed: ' + geneUsed.id + ' on signals [' + (signals.slice(0, 3).join(', ') || 'none') + '] - ' + failureContent.slice(0, 200)
1532
+ : 'Failed evolution on signals [' + (signals.slice(0, 3).join(', ') || 'none') + '] - ' + failureContent.slice(0, 200);
1533
+ }
1534
+
1535
+ // --- Auto-complete Hub task ---
1536
+ // If this evolution cycle was driven by a Hub task, mark it as completed
1537
+ // with the produced capsule's asset_id. Runs after publish so the Hub
1538
+ // can link the task result to the published asset.
1539
+ let taskCompleteResult = null;
1540
+ if (!dryRun && success && lastRun && lastRun.active_task_id) {
1541
+ const resultAssetId = capsule && capsule.asset_id ? capsule.asset_id : (capsule && capsule.id ? capsule.id : null);
1542
+ if (resultAssetId) {
1543
+ const workerAssignmentId = lastRun.worker_assignment_id || null;
1544
+ const workerPending = lastRun.worker_pending || false;
1545
+ if (workerPending && !workerAssignmentId) {
1546
+ // Deferred claim mode: claim + complete atomically now that we have a result
1547
+ try {
1548
+ const { claimAndCompleteWorkerTask } = require('./taskReceiver');
1549
+ const taskId = String(lastRun.active_task_id);
1550
+ console.log(`[WorkerPool] Atomic claim+complete for task "${lastRun.active_task_title || taskId}" with asset ${resultAssetId}`);
1551
+ const result = claimAndCompleteWorkerTask(taskId, resultAssetId);
1552
+ if (result && typeof result.then === 'function') {
1553
+ result
1554
+ .then(function (r) {
1555
+ if (r.ok) {
1556
+ console.log('[WorkerPool] Claim+complete succeeded, assignment=' + r.assignment_id);
1557
+ } else {
1558
+ console.log('[WorkerPool] Claim+complete failed: ' + (r.error || 'unknown') + (r.assignment_id ? ' assignment=' + r.assignment_id : ''));
1559
+ }
1560
+ })
1561
+ .catch(function (err) {
1562
+ console.log('[WorkerPool] Claim+complete error (non-fatal): ' + (err && err.message ? err.message : err));
1563
+ });
1564
+ }
1565
+ taskCompleteResult = { attempted: true, task_id: lastRun.active_task_id, asset_id: resultAssetId, worker: true, deferred: true };
1566
+ } catch (e) {
1567
+ console.log('[WorkerPool] Atomic claim+complete error (non-fatal): ' + e.message);
1568
+ taskCompleteResult = { attempted: false, reason: e.message, worker: true, deferred: true };
1569
+ }
1570
+ } else if (workerAssignmentId) {
1571
+ // Legacy path: already-claimed assignment, just complete it
1572
+ try {
1573
+ const { completeWorkerTask } = require('./taskReceiver');
1574
+ console.log(`[WorkerComplete] Completing worker assignment "${workerAssignmentId}" with asset ${resultAssetId}`);
1575
+ const completed = completeWorkerTask(workerAssignmentId, resultAssetId);
1576
+ if (completed && typeof completed.then === 'function') {
1577
+ completed
1578
+ .then(function (ok) {
1579
+ if (ok) {
1580
+ console.log('[WorkerComplete] Worker task completed successfully on Hub.');
1581
+ } else {
1582
+ console.log('[WorkerComplete] Hub rejected worker completion (non-fatal).');
1583
+ }
1584
+ })
1585
+ .catch(function (err) {
1586
+ console.log('[WorkerComplete] Failed (non-fatal): ' + (err && err.message ? err.message : err));
1587
+ });
1588
+ }
1589
+ taskCompleteResult = { attempted: true, task_id: lastRun.active_task_id, assignment_id: workerAssignmentId, asset_id: resultAssetId, worker: true };
1590
+ } catch (e) {
1591
+ console.log('[WorkerComplete] Error (non-fatal): ' + e.message);
1592
+ taskCompleteResult = { attempted: false, reason: e.message, worker: true };
1593
+ }
1594
+ } else {
1595
+ // Bounty task path: complete via /a2a/task/complete
1596
+ try {
1597
+ const { completeTask } = require('./taskReceiver');
1598
+ const taskId = String(lastRun.active_task_id);
1599
+ console.log(`[TaskComplete] Completing task "${lastRun.active_task_title || taskId}" with asset ${resultAssetId}`);
1600
+ const completed = completeTask(taskId, resultAssetId);
1601
+ if (completed && typeof completed.then === 'function') {
1602
+ completed
1603
+ .then(function (ok) {
1604
+ if (ok) {
1605
+ console.log('[TaskComplete] Task completed successfully on Hub.');
1606
+ } else {
1607
+ console.log('[TaskComplete] Hub rejected task completion (non-fatal).');
1608
+ }
1609
+ })
1610
+ .catch(function (err) {
1611
+ console.log('[TaskComplete] Failed (non-fatal): ' + (err && err.message ? err.message : err));
1612
+ });
1613
+ }
1614
+ taskCompleteResult = { attempted: true, task_id: taskId, asset_id: resultAssetId };
1615
+ } catch (e) {
1616
+ console.log('[TaskComplete] Error (non-fatal): ' + e.message);
1617
+ taskCompleteResult = { attempted: false, reason: e.message };
1618
+ }
1619
+ }
1620
+ }
1621
+ }
1622
+
1623
+
1624
+ // --- Auto Hub Review: rate fetched assets based on solidify outcome ---
1625
+ // When this cycle reused a Hub asset, submit a usage-verified review.
1626
+ // The promise is returned so callers can await it before process.exit().
1627
+ var hubReviewResult = null;
1628
+ var hubReviewPromise = null;
1629
+ if (!dryRun && reusedAssetId && (sourceType === 'reused' || sourceType === 'reference')) {
1630
+ try {
1631
+ var { submitHubReview } = require('./hubReview');
1632
+ hubReviewPromise = submitHubReview({
1633
+ reusedAssetId: reusedAssetId,
1634
+ sourceType: sourceType,
1635
+ outcome: event.outcome,
1636
+ gene: geneUsed,
1637
+ signals: signals,
1638
+ blast: blast,
1639
+ constraintCheck: constraintCheck,
1640
+ runId: lastRun && lastRun.run_id ? lastRun.run_id : null,
1641
+ });
1642
+ if (hubReviewPromise && typeof hubReviewPromise.then === 'function') {
1643
+ hubReviewPromise = hubReviewPromise
1644
+ .then(function (r) {
1645
+ hubReviewResult = r;
1646
+ if (r && r.submitted) {
1647
+ console.log('[HubReview] Review submitted successfully (rating=' + r.rating + ').');
1648
+ }
1649
+ return r;
1650
+ })
1651
+ .catch(function (err) {
1652
+ console.log('[HubReview] Error (non-fatal): ' + (err && err.message ? err.message : err));
1653
+ return null;
1654
+ });
1655
+ }
1656
+ } catch (e) {
1657
+ console.log('[HubReview] Error (non-fatal): ' + e.message);
1658
+ }
1659
+ }
1660
+ return { ok: success, event, capsule, gene: geneUsed, constraintCheck, validation, validationReport, blast, publishResult, antiPatternPublishResult, taskCompleteResult, hubReviewResult, hubReviewPromise };
1661
+ }
1662
+
1663
+ module.exports = {
1664
+ solidify,
1665
+ isGitRepo,
1666
+ readStateForSolidify,
1667
+ writeStateForSolidify,
1668
+ isValidationCommandAllowed,
1669
+ isCriticalProtectedPath,
1670
+ detectDestructiveChanges,
1671
+ classifyBlastSeverity,
1672
+ analyzeBlastRadiusBreakdown,
1673
+ compareBlastEstimate,
1674
+ runCanaryCheck,
1675
+ applyEpigeneticMarks,
1676
+ getEpigeneticBoost,
1677
+ buildEpigeneticMark,
1678
+ buildSuccessReason,
1679
+ BLAST_RADIUS_HARD_CAP_FILES,
1680
+ BLAST_RADIUS_HARD_CAP_LINES,
1681
+ };